path: root/vendor/github.com/json-iterator/go
author    Yong Tang <yong.tang.github@outlook.com>  2018-01-03 19:11:28 +0800
committer Miek Gieben <miek@miek.nl>                2018-01-03 11:11:28 +0000
commit    7fe5b0bb1f34735859d611e170fa8709adac73ea (patch)
tree      9757c7e2a724a0e1ed885ef7bca470c56075e4a6 /vendor/github.com/json-iterator/go
parent    bce7f5fbecbc68950dfb2b96b5f85d03a8bf07eb (diff)
Update k8s client-go to v6.0.0 (#1340)
* Update k8s client-go to v6.0.0

  This fix updates k8s client-go to v6.0.0, as CoreDNS is supported on Kubernetes 1.9 and v6.0.0 is the recommended client version for it. Two fairly large changes are required:

  1. `k8s.io/client-go/pkg/api/v1` has moved into the new `k8s.io/api` repo (the core v1 types now live under `k8s.io/api` instead of `client-go`).
  2. `kubernetes.Clientset` adds one extra layer, so calls such as `kubernetes.Clientset.Services()` become `kubernetes.Clientset.CoreV1().Services()`.

  Also, we have to pin specific commits of `k8s.io/apimachinery` and the newly introduced `k8s.io/api`, because go dep still cannot work out the right versions to fetch on its own.

  Signed-off-by: Yong Tang <yong.tang.github@outlook.com>

* Update vendor with `dep ensure --update` and `dep prune`

  Signed-off-by: Yong Tang <yong.tang.github@outlook.com>
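To make the second change concrete, here is a minimal, illustrative sketch of the call-site migration. It is not code from this commit: the kubeconfig path and namespace are placeholders, and it assumes the core-group types are now supplied by `k8s.io/api`/`k8s.io/apimachinery` rather than `client-go/pkg/api`.

```go
package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Build a *rest.Config from a kubeconfig file; the path is a placeholder.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// Before this update the call went straight through the clientset,
	// roughly: client.Services("kube-system").List(metav1.ListOptions{})
	// (form quoted from the commit message). With client-go v6.0.0 the
	// versioned group client is an explicit extra layer:
	svcs, err := client.CoreV1().Services("kube-system").List(metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	fmt.Println("services in kube-system:", len(svcs.Items))
}
```

The old form in the comment is quoted from the commit message; the rest is written against the client-go v6-era API surface, where list calls did not yet take a context argument.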
Diffstat (limited to 'vendor/github.com/json-iterator/go')
-rw-r--r--  vendor/github.com/json-iterator/go/.codecov.yml  3
-rw-r--r--  vendor/github.com/json-iterator/go/.gitignore  4
-rw-r--r--  vendor/github.com/json-iterator/go/.travis.yml  14
-rw-r--r--  vendor/github.com/json-iterator/go/LICENSE  21
-rw-r--r--  vendor/github.com/json-iterator/go/README.md  86
-rw-r--r--  vendor/github.com/json-iterator/go/compatible_test.go  40
-rw-r--r--  vendor/github.com/json-iterator/go/example_test.go  95
-rw-r--r--  vendor/github.com/json-iterator/go/feature_adapter.go  133
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any.go  245
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_array.go  278
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_bool.go  137
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_float.go  83
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_int32.go  74
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_int64.go  74
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_invalid.go  82
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_nil.go  69
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_number.go  104
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_object.go  374
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_string.go  166
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_uint32.go  74
-rw-r--r--  vendor/github.com/json-iterator/go/feature_any_uint64.go  74
-rw-r--r--  vendor/github.com/json-iterator/go/feature_config.go  347
-rw-r--r--  vendor/github.com/json-iterator/go/feature_iter.go  322
-rw-r--r--  vendor/github.com/json-iterator/go/feature_iter_array.go  58
-rw-r--r--  vendor/github.com/json-iterator/go/feature_iter_float.go  341
-rw-r--r--  vendor/github.com/json-iterator/go/feature_iter_int.go  268
-rw-r--r--  vendor/github.com/json-iterator/go/feature_iter_object.go  267
-rw-r--r--  vendor/github.com/json-iterator/go/feature_iter_skip.go  129
-rw-r--r--  vendor/github.com/json-iterator/go/feature_iter_skip_sloppy.go  144
-rw-r--r--  vendor/github.com/json-iterator/go/feature_iter_skip_strict.go  89
-rw-r--r--  vendor/github.com/json-iterator/go/feature_iter_string.go  215
-rw-r--r--  vendor/github.com/json-iterator/go/feature_json_number.go  31
-rw-r--r--  vendor/github.com/json-iterator/go/feature_pool.go  59
-rw-r--r--  vendor/github.com/json-iterator/go/feature_reflect.go  721
-rw-r--r--  vendor/github.com/json-iterator/go/feature_reflect_array.go  99
-rw-r--r--  vendor/github.com/json-iterator/go/feature_reflect_extension.go  414
-rw-r--r--  vendor/github.com/json-iterator/go/feature_reflect_map.go  244
-rw-r--r--  vendor/github.com/json-iterator/go/feature_reflect_native.go  764
-rw-r--r--  vendor/github.com/json-iterator/go/feature_reflect_object.go  196
-rw-r--r--  vendor/github.com/json-iterator/go/feature_reflect_slice.go  147
-rw-r--r--  vendor/github.com/json-iterator/go/feature_reflect_struct_decoder.go  934
-rw-r--r--  vendor/github.com/json-iterator/go/feature_stream.go  308
-rw-r--r--  vendor/github.com/json-iterator/go/feature_stream_float.go  96
-rw-r--r--  vendor/github.com/json-iterator/go/feature_stream_int.go  320
-rw-r--r--  vendor/github.com/json-iterator/go/feature_stream_string.go  396
-rw-r--r--  vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md  7
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter.go  18
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_1dot8_only_test.go  45
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_adapter_test.go  84
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_alias_test.go  62
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_any_array_test.go  122
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_any_bool_test.go  64
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_any_float_test.go  102
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_any_int_test.go  197
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_any_map_test.go  14
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_any_null_test.go  15
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_any_object_test.go  107
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_any_string_test.go  56
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_array_test.go  213
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_bool_test.go  113
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_customize_test.go  341
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_demo_test.go  87
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_encode_interface_test.go  42
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_enum_marshaler_test.go  50
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_fixed_array_test.go  37
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_float_test.go  210
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_int_test.go  567
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_interface_test.go  576
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_invalid_test.go  138
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_io_test.go  65
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_iterator_test.go  66
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_large_file_test.go  157
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_map_test.go  160
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_must_be_valid_test.go  71
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_nested_test.go  88
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_null_test.go  168
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_object_test.go  342
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_optional_test.go  46
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_raw_message_test.go  114
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_reader_test.go  57
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_reflect_native_test.go  154
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_skip_test.go  184
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_sloppy_test.go  162
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_stream_test.go  69
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_string_test.go  261
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_struct_decoder_test.go  267
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_struct_encoder_test.go  52
-rw-r--r--  vendor/github.com/json-iterator/go/jsoniter_wrap_test.go  118
-rwxr-xr-x  vendor/github.com/json-iterator/go/test.sh  12
-rw-r--r--  vendor/github.com/json-iterator/go/unmarshal_input_test.go  72
90 files changed, 15091 insertions, 0 deletions
diff --git a/vendor/github.com/json-iterator/go/.codecov.yml b/vendor/github.com/json-iterator/go/.codecov.yml
new file mode 100644
index 000000000..955dc0be5
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/.codecov.yml
@@ -0,0 +1,3 @@
+ignore:
+ - "output_tests/.*"
+
diff --git a/vendor/github.com/json-iterator/go/.gitignore b/vendor/github.com/json-iterator/go/.gitignore
new file mode 100644
index 000000000..ce242daf7
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/.gitignore
@@ -0,0 +1,4 @@
+.idea
+/coverage.txt
+/profile.out
+/bug_test.go
diff --git a/vendor/github.com/json-iterator/go/.travis.yml b/vendor/github.com/json-iterator/go/.travis.yml
new file mode 100644
index 000000000..449e67cd0
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/.travis.yml
@@ -0,0 +1,14 @@
+language: go
+
+go:
+ - 1.8.x
+ - 1.x
+
+before_install:
+ - go get -t -v ./...
+
+script:
+ - ./test.sh
+
+after_success:
+ - bash <(curl -s https://codecov.io/bash)
diff --git a/vendor/github.com/json-iterator/go/LICENSE b/vendor/github.com/json-iterator/go/LICENSE
new file mode 100644
index 000000000..2cf4f5ab2
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2016 json-iterator
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/json-iterator/go/README.md b/vendor/github.com/json-iterator/go/README.md
new file mode 100644
index 000000000..3a0d68098
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/README.md
@@ -0,0 +1,86 @@
+[![Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go/-/badge.svg)](https://sourcegraph.com/github.com/json-iterator/go?badge)
+[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/json-iterator/go)
+[![Build Status](https://travis-ci.org/json-iterator/go.svg?branch=master)](https://travis-ci.org/json-iterator/go)
+[![codecov](https://codecov.io/gh/json-iterator/go/branch/master/graph/badge.svg)](https://codecov.io/gh/json-iterator/go)
+[![rcard](https://goreportcard.com/badge/github.com/json-iterator/go)](https://goreportcard.com/report/github.com/json-iterator/go)
+[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE)
+[![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)
+
+A high-performance, 100% compatible drop-in replacement for "encoding/json"
+
+```
+Go developers, please join us: DiDi Chuxing Platform Technology Department, taowen@didichuxing.com
+```
+
+# Benchmark
+
+![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)
+
+Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
+
+Raw Result (easyjson requires static code generation)
+
+| | ns/op | allocation bytes | allocation times |
+| --- | --- | --- | --- |
+| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
+| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
+| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
+| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
+| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
+| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
+
+# Usage
+
+100% compatibility with standard lib
+
+Replace
+
+```go
+import "encoding/json"
+json.Marshal(&data)
+```
+
+with
+
+```go
+import "github.com/json-iterator/go"
+
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Marshal(&data)
+```
+
+Replace
+
+```go
+import "encoding/json"
+json.Unmarshal(input, &data)
+```
+
+with
+
+```go
+import "github.com/json-iterator/go"
+
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Unmarshal(input, &data)
+```
+
+[More documentation](http://jsoniter.com/migrate-from-go-std.html)
+
+# How to get
+
+```
+go get github.com/json-iterator/go
+```
+
+# Contributions Welcome!
+
+Contributors
+
+* [thockin](https://github.com/thockin)
+* [mattn](https://github.com/mattn)
+* [cch123](https://github.com/cch123)
+* [Oleg Shaldybin](https://github.com/olegshaldybin)
+* [Jason Toffaletti](https://github.com/toffaletti)
+
+Report issue or pull request, or email taowen@gmail.com, or [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)
diff --git a/vendor/github.com/json-iterator/go/compatible_test.go b/vendor/github.com/json-iterator/go/compatible_test.go
new file mode 100644
index 000000000..4b725c536
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/compatible_test.go
@@ -0,0 +1,40 @@
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+// Standard Encoder has trailing newline.
+func TestEncoderHasTrailingNewline(t *testing.T) {
+ should := require.New(t)
+ var buf, stdbuf bytes.Buffer
+ enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
+ enc.Encode(1)
+ stdenc := json.NewEncoder(&stdbuf)
+ stdenc.Encode(1)
+ should.Equal(stdbuf.Bytes(), buf.Bytes())
+}
+
+// Non-nil but empty map should be ignored.
+func TestOmitempty(t *testing.T) {
+ o := struct {
+ A string `json:"a,omitempty"`
+ B string `json:"b,omitempty"`
+ Annotations map[string]string `json:"annotations,omitempty"`
+ }{
+ A: "a",
+ B: "b",
+ Annotations: map[string]string{},
+ }
+ should := require.New(t)
+ var buf, stdbuf bytes.Buffer
+ enc := ConfigCompatibleWithStandardLibrary.NewEncoder(&buf)
+ enc.Encode(o)
+ stdenc := json.NewEncoder(&stdbuf)
+ stdenc.Encode(o)
+ should.Equal(string(stdbuf.Bytes()), string(buf.Bytes()))
+}
diff --git a/vendor/github.com/json-iterator/go/example_test.go b/vendor/github.com/json-iterator/go/example_test.go
new file mode 100644
index 000000000..1c8f341c1
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/example_test.go
@@ -0,0 +1,95 @@
+package jsoniter
+
+import (
+ "fmt"
+ "os"
+)
+
+func ExampleMarshal() {
+ type ColorGroup struct {
+ ID int
+ Name string
+ Colors []string
+ }
+ group := ColorGroup{
+ ID: 1,
+ Name: "Reds",
+ Colors: []string{"Crimson", "Red", "Ruby", "Maroon"},
+ }
+ b, err := Marshal(group)
+ if err != nil {
+ fmt.Println("error:", err)
+ }
+ os.Stdout.Write(b)
+ // Output:
+ // {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}
+}
+
+func ExampleUnmarshal() {
+ var jsonBlob = []byte(`[
+ {"Name": "Platypus", "Order": "Monotremata"},
+ {"Name": "Quoll", "Order": "Dasyuromorphia"}
+ ]`)
+ type Animal struct {
+ Name string
+ Order string
+ }
+ var animals []Animal
+ err := Unmarshal(jsonBlob, &animals)
+ if err != nil {
+ fmt.Println("error:", err)
+ }
+ fmt.Printf("%+v", animals)
+ // Output:
+ // [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}]
+}
+
+func ExampleConfigFastest_Marshal() {
+ type ColorGroup struct {
+ ID int
+ Name string
+ Colors []string
+ }
+ group := ColorGroup{
+ ID: 1,
+ Name: "Reds",
+ Colors: []string{"Crimson", "Red", "Ruby", "Maroon"},
+ }
+ stream := ConfigFastest.BorrowStream(nil)
+ defer ConfigFastest.ReturnStream(stream)
+ stream.WriteVal(group)
+ if stream.Error != nil {
+ fmt.Println("error:", stream.Error)
+ }
+ os.Stdout.Write(stream.Buffer())
+ // Output:
+ // {"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}
+}
+
+func ExampleConfigFastest_Unmarshal() {
+ var jsonBlob = []byte(`[
+ {"Name": "Platypus", "Order": "Monotremata"},
+ {"Name": "Quoll", "Order": "Dasyuromorphia"}
+ ]`)
+ type Animal struct {
+ Name string
+ Order string
+ }
+ var animals []Animal
+ iter := ConfigFastest.BorrowIterator(jsonBlob)
+ defer ConfigFastest.ReturnIterator(iter)
+ iter.ReadVal(&animals)
+ if iter.Error != nil {
+ fmt.Println("error:", iter.Error)
+ }
+ fmt.Printf("%+v", animals)
+ // Output:
+ // [{Name:Platypus Order:Monotremata} {Name:Quoll Order:Dasyuromorphia}]
+}
+
+func ExampleGet() {
+ val := []byte(`{"ID":1,"Name":"Reds","Colors":["Crimson","Red","Ruby","Maroon"]}`)
+ fmt.Printf(Get(val, "Colors", 0).ToString())
+ // Output:
+ // Crimson
+}
diff --git a/vendor/github.com/json-iterator/go/feature_adapter.go b/vendor/github.com/json-iterator/go/feature_adapter.go
new file mode 100644
index 000000000..0214b711a
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_adapter.go
@@ -0,0 +1,133 @@
+package jsoniter
+
+import (
+ "bytes"
+ "io"
+)
+
+// RawMessage to make replace json with jsoniter
+type RawMessage []byte
+
+// Unmarshal adapts to json/encoding Unmarshal API
+//
+// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
+// Refer to https://godoc.org/encoding/json#Unmarshal for more information
+func Unmarshal(data []byte, v interface{}) error {
+ return ConfigDefault.Unmarshal(data, v)
+}
+
+func lastNotSpacePos(data []byte) int {
+ for i := len(data) - 1; i >= 0; i-- {
+ if data[i] != ' ' && data[i] != '\t' && data[i] != '\r' && data[i] != '\n' {
+ return i + 1
+ }
+ }
+ return 0
+}
+
+// UnmarshalFromString convenient method to read from string instead of []byte
+func UnmarshalFromString(str string, v interface{}) error {
+ return ConfigDefault.UnmarshalFromString(str, v)
+}
+
+// Get quick method to get value from deeply nested JSON structure
+func Get(data []byte, path ...interface{}) Any {
+ return ConfigDefault.Get(data, path...)
+}
+
+// Marshal adapts to json/encoding Marshal API
+//
+// Marshal returns the JSON encoding of v, adapts to json/encoding Marshal API
+// Refer to https://godoc.org/encoding/json#Marshal for more information
+func Marshal(v interface{}) ([]byte, error) {
+ return ConfigDefault.Marshal(v)
+}
+
+// MarshalIndent same as json.MarshalIndent. Prefix is not supported.
+func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+ return ConfigDefault.MarshalIndent(v, prefix, indent)
+}
+
+// MarshalToString convenient method to write as string instead of []byte
+func MarshalToString(v interface{}) (string, error) {
+ return ConfigDefault.MarshalToString(v)
+}
+
+// NewDecoder adapts to json/stream NewDecoder API.
+//
+// NewDecoder returns a new decoder that reads from r.
+//
+// Instead of an encoding/json Decoder, a Decoder is returned
+// Refer to https://godoc.org/encoding/json#NewDecoder for more information
+func NewDecoder(reader io.Reader) *Decoder {
+ return ConfigDefault.NewDecoder(reader)
+}
+
+// Decoder reads and decodes JSON values from an input stream.
+// Decoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress)
+type Decoder struct {
+ iter *Iterator
+}
+
+// Decode decode JSON into interface{}
+func (adapter *Decoder) Decode(obj interface{}) error {
+ adapter.iter.ReadVal(obj)
+ err := adapter.iter.Error
+ if err == io.EOF {
+ return nil
+ }
+ return adapter.iter.Error
+}
+
+// More is there more?
+func (adapter *Decoder) More() bool {
+ return adapter.iter.head != adapter.iter.tail
+}
+
+// Buffered remaining buffer
+func (adapter *Decoder) Buffered() io.Reader {
+ remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail]
+ return bytes.NewReader(remaining)
+}
+
+// UseNumber causes number JSON elements to be decoded into json.Number (an alias of string) instead of float64
+func (adapter *Decoder) UseNumber() {
+ origCfg := adapter.iter.cfg.configBeforeFrozen
+ origCfg.UseNumber = true
+ adapter.iter.cfg = origCfg.Froze().(*frozenConfig)
+}
+
+// NewEncoder same as json.NewEncoder
+func NewEncoder(writer io.Writer) *Encoder {
+ return ConfigDefault.NewEncoder(writer)
+}
+
+// Encoder same as json.Encoder
+type Encoder struct {
+ stream *Stream
+}
+
+// Encode encode interface{} as JSON to io.Writer
+func (adapter *Encoder) Encode(val interface{}) error {
+ adapter.stream.WriteVal(val)
+ adapter.stream.WriteRaw("\n")
+ adapter.stream.Flush()
+ return adapter.stream.Error
+}
+
+// SetIndent set the indention. Prefix is not supported
+func (adapter *Encoder) SetIndent(prefix, indent string) {
+ adapter.stream.cfg.indentionStep = len(indent)
+}
+
+// SetEscapeHTML escape html by default, set to false to disable
+func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
+ config := adapter.stream.cfg.configBeforeFrozen
+ config.EscapeHTML = escapeHTML
+ adapter.stream.cfg = config.Froze().(*frozenConfig)
+}
+
+// Valid reports whether data is a valid JSON encoding.
+func Valid(data []byte) bool {
+ return ConfigDefault.Valid(data)
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any.go b/vendor/github.com/json-iterator/go/feature_any.go
new file mode 100644
index 000000000..87716d1fc
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any.go
@@ -0,0 +1,245 @@
+package jsoniter
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "reflect"
+)
+
+// Any generic object representation.
+// The lazy json implementation holds []byte and parse lazily.
+type Any interface {
+ LastError() error
+ ValueType() ValueType
+ MustBeValid() Any
+ ToBool() bool
+ ToInt() int
+ ToInt32() int32
+ ToInt64() int64
+ ToUint() uint
+ ToUint32() uint32
+ ToUint64() uint64
+ ToFloat32() float32
+ ToFloat64() float64
+ ToString() string
+ ToVal(val interface{})
+ Get(path ...interface{}) Any
+ // TODO: add Set
+ Size() int
+ Keys() []string
+ GetInterface() interface{}
+ WriteTo(stream *Stream)
+}
+
+type baseAny struct{}
+
+func (any *baseAny) Get(path ...interface{}) Any {
+ return &invalidAny{baseAny{}, fmt.Errorf("Get %v from simple value", path)}
+}
+
+func (any *baseAny) Size() int {
+ return 0
+}
+
+func (any *baseAny) Keys() []string {
+ return []string{}
+}
+
+func (any *baseAny) ToVal(obj interface{}) {
+ panic("not implemented")
+}
+
+// WrapInt32 turn int32 into Any interface
+func WrapInt32(val int32) Any {
+ return &int32Any{baseAny{}, val}
+}
+
+// WrapInt64 turn int64 into Any interface
+func WrapInt64(val int64) Any {
+ return &int64Any{baseAny{}, val}
+}
+
+// WrapUint32 turn uint32 into Any interface
+func WrapUint32(val uint32) Any {
+ return &uint32Any{baseAny{}, val}
+}
+
+// WrapUint64 turn uint64 into Any interface
+func WrapUint64(val uint64) Any {
+ return &uint64Any{baseAny{}, val}
+}
+
+// WrapFloat64 turn float64 into Any interface
+func WrapFloat64(val float64) Any {
+ return &floatAny{baseAny{}, val}
+}
+
+// WrapString turn string into Any interface
+func WrapString(val string) Any {
+ return &stringAny{baseAny{}, val}
+}
+
+// Wrap turn a go object into Any interface
+func Wrap(val interface{}) Any {
+ if val == nil {
+ return &nilAny{}
+ }
+ asAny, isAny := val.(Any)
+ if isAny {
+ return asAny
+ }
+ typ := reflect.TypeOf(val)
+ switch typ.Kind() {
+ case reflect.Slice:
+ return wrapArray(val)
+ case reflect.Struct:
+ return wrapStruct(val)
+ case reflect.Map:
+ return wrapMap(val)
+ case reflect.String:
+ return WrapString(val.(string))
+ case reflect.Int:
+ return WrapInt64(int64(val.(int)))
+ case reflect.Int8:
+ return WrapInt32(int32(val.(int8)))
+ case reflect.Int16:
+ return WrapInt32(int32(val.(int16)))
+ case reflect.Int32:
+ return WrapInt32(val.(int32))
+ case reflect.Int64:
+ return WrapInt64(val.(int64))
+ case reflect.Uint:
+ return WrapUint64(uint64(val.(uint)))
+ case reflect.Uint8:
+ return WrapUint32(uint32(val.(uint8)))
+ case reflect.Uint16:
+ return WrapUint32(uint32(val.(uint16)))
+ case reflect.Uint32:
+ return WrapUint32(uint32(val.(uint32)))
+ case reflect.Uint64:
+ return WrapUint64(val.(uint64))
+ case reflect.Float32:
+ return WrapFloat64(float64(val.(float32)))
+ case reflect.Float64:
+ return WrapFloat64(val.(float64))
+ case reflect.Bool:
+ if val.(bool) == true {
+ return &trueAny{}
+ }
+ return &falseAny{}
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)}
+}
+
+// ReadAny read next JSON element as an Any object. It is a better json.RawMessage.
+func (iter *Iterator) ReadAny() Any {
+ return iter.readAny()
+}
+
+func (iter *Iterator) readAny() Any {
+ c := iter.nextToken()
+ switch c {
+ case '"':
+ iter.unreadByte()
+ return &stringAny{baseAny{}, iter.ReadString()}
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ return &nilAny{}
+ case 't':
+ iter.skipThreeBytes('r', 'u', 'e') // true
+ return &trueAny{}
+ case 'f':
+ iter.skipFourBytes('a', 'l', 's', 'e') // false
+ return &falseAny{}
+ case '{':
+ return iter.readObjectAny()
+ case '[':
+ return iter.readArrayAny()
+ case '-':
+ return iter.readNumberAny(false)
+ case 0:
+ return &invalidAny{baseAny{}, errors.New("input is empty")}
+ default:
+ return iter.readNumberAny(true)
+ }
+}
+
+func (iter *Iterator) readNumberAny(positive bool) Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipNumber()
+ lazyBuf := iter.stopCapture()
+ return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func (iter *Iterator) readObjectAny() Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipObject()
+ lazyBuf := iter.stopCapture()
+ return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func (iter *Iterator) readArrayAny() Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipArray()
+ lazyBuf := iter.stopCapture()
+ return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func locateObjectField(iter *Iterator, target string) []byte {
+ var found []byte
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ if field == target {
+ found = iter.SkipAndReturnBytes()
+ return false
+ }
+ iter.Skip()
+ return true
+ })
+ return found
+}
+
+func locateArrayElement(iter *Iterator, target int) []byte {
+ var found []byte
+ n := 0
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ if n == target {
+ found = iter.SkipAndReturnBytes()
+ return false
+ }
+ iter.Skip()
+ n++
+ return true
+ })
+ return found
+}
+
+func locatePath(iter *Iterator, path []interface{}) Any {
+ for i, pathKeyObj := range path {
+ switch pathKey := pathKeyObj.(type) {
+ case string:
+ valueBytes := locateObjectField(iter, pathKey)
+ if valueBytes == nil {
+ return newInvalidAny(path[i:])
+ }
+ iter.ResetBytes(valueBytes)
+ case int:
+ valueBytes := locateArrayElement(iter, pathKey)
+ if valueBytes == nil {
+ return newInvalidAny(path[i:])
+ }
+ iter.ResetBytes(valueBytes)
+ case int32:
+ if '*' == pathKey {
+ return iter.readAny().Get(path[i:]...)
+ }
+ return newInvalidAny(path[i:])
+ default:
+ return newInvalidAny(path[i:])
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ return &invalidAny{baseAny{}, iter.Error}
+ }
+ return iter.readAny()
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_array.go b/vendor/github.com/json-iterator/go/feature_any_array.go
new file mode 100644
index 000000000..0449e9aa4
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_array.go
@@ -0,0 +1,278 @@
+package jsoniter
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+type arrayLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *arrayLazyAny) ValueType() ValueType {
+ return ArrayValue
+}
+
+func (any *arrayLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *arrayLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *arrayLazyAny) ToBool() bool {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.ReadArray()
+}
+
+func (any *arrayLazyAny) ToInt() int {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToInt32() int32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToInt64() int64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint() uint {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint32() uint32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint64() uint64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToFloat32() float32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToFloat64() float64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *arrayLazyAny) ToVal(val interface{}) {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadVal(val)
+}
+
+func (any *arrayLazyAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int:
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ valueBytes := locateArrayElement(iter, firstPath)
+ if valueBytes == nil {
+ return newInvalidAny(path)
+ }
+ iter.ResetBytes(valueBytes)
+ return locatePath(iter, path[1:])
+ case int32:
+ if '*' == firstPath {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ arr := make([]Any, 0)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ found := iter.readAny().Get(path[1:]...)
+ if found.ValueType() != InvalidValue {
+ arr = append(arr, found)
+ }
+ return true
+ })
+ return wrapArray(arr)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *arrayLazyAny) Size() int {
+ size := 0
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ size++
+ iter.Skip()
+ return true
+ })
+ return size
+}
+
+func (any *arrayLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *arrayLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
+
+type arrayAny struct {
+ baseAny
+ val reflect.Value
+}
+
+func wrapArray(val interface{}) *arrayAny {
+ return &arrayAny{baseAny{}, reflect.ValueOf(val)}
+}
+
+func (any *arrayAny) ValueType() ValueType {
+ return ArrayValue
+}
+
+func (any *arrayAny) MustBeValid() Any {
+ return any
+}
+
+func (any *arrayAny) LastError() error {
+ return nil
+}
+
+func (any *arrayAny) ToBool() bool {
+ return any.val.Len() != 0
+}
+
+func (any *arrayAny) ToInt() int {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToInt32() int32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToInt64() int64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint() uint {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint32() uint32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint64() uint64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToFloat32() float32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToFloat64() float64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToString() string {
+ str, _ := MarshalToString(any.val.Interface())
+ return str
+}
+
+func (any *arrayAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int:
+ if firstPath < 0 || firstPath >= any.val.Len() {
+ return newInvalidAny(path)
+ }
+ return Wrap(any.val.Index(firstPath).Interface())
+ case int32:
+ if '*' == firstPath {
+ mappedAll := make([]Any, 0)
+ for i := 0; i < any.val.Len(); i++ {
+ mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll = append(mappedAll, mapped)
+ }
+ }
+ return wrapArray(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *arrayAny) Size() int {
+ return any.val.Len()
+}
+
+func (any *arrayAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *arrayAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_bool.go b/vendor/github.com/json-iterator/go/feature_any_bool.go
new file mode 100644
index 000000000..9452324af
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_bool.go
@@ -0,0 +1,137 @@
+package jsoniter
+
+type trueAny struct {
+ baseAny
+}
+
+func (any *trueAny) LastError() error {
+ return nil
+}
+
+func (any *trueAny) ToBool() bool {
+ return true
+}
+
+func (any *trueAny) ToInt() int {
+ return 1
+}
+
+func (any *trueAny) ToInt32() int32 {
+ return 1
+}
+
+func (any *trueAny) ToInt64() int64 {
+ return 1
+}
+
+func (any *trueAny) ToUint() uint {
+ return 1
+}
+
+func (any *trueAny) ToUint32() uint32 {
+ return 1
+}
+
+func (any *trueAny) ToUint64() uint64 {
+ return 1
+}
+
+func (any *trueAny) ToFloat32() float32 {
+ return 1
+}
+
+func (any *trueAny) ToFloat64() float64 {
+ return 1
+}
+
+func (any *trueAny) ToString() string {
+ return "true"
+}
+
+func (any *trueAny) WriteTo(stream *Stream) {
+ stream.WriteTrue()
+}
+
+func (any *trueAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *trueAny) GetInterface() interface{} {
+ return true
+}
+
+func (any *trueAny) ValueType() ValueType {
+ return BoolValue
+}
+
+func (any *trueAny) MustBeValid() Any {
+ return any
+}
+
+type falseAny struct {
+ baseAny
+}
+
+func (any *falseAny) LastError() error {
+ return nil
+}
+
+func (any *falseAny) ToBool() bool {
+ return false
+}
+
+func (any *falseAny) ToInt() int {
+ return 0
+}
+
+func (any *falseAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *falseAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *falseAny) ToUint() uint {
+ return 0
+}
+
+func (any *falseAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *falseAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *falseAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *falseAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *falseAny) ToString() string {
+ return "false"
+}
+
+func (any *falseAny) WriteTo(stream *Stream) {
+ stream.WriteFalse()
+}
+
+func (any *falseAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *falseAny) GetInterface() interface{} {
+ return false
+}
+
+func (any *falseAny) ValueType() ValueType {
+ return BoolValue
+}
+
+func (any *falseAny) MustBeValid() Any {
+ return any
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_float.go b/vendor/github.com/json-iterator/go/feature_any_float.go
new file mode 100644
index 000000000..35fdb0949
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_float.go
@@ -0,0 +1,83 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type floatAny struct {
+ baseAny
+ val float64
+}
+
+func (any *floatAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *floatAny) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *floatAny) MustBeValid() Any {
+ return any
+}
+
+func (any *floatAny) LastError() error {
+ return nil
+}
+
+func (any *floatAny) ToBool() bool {
+ return any.ToFloat64() != 0
+}
+
+func (any *floatAny) ToInt() int {
+ return int(any.val)
+}
+
+func (any *floatAny) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *floatAny) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *floatAny) ToUint() uint {
+ if any.val > 0 {
+ return uint(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToUint32() uint32 {
+ if any.val > 0 {
+ return uint32(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToUint64() uint64 {
+ if any.val > 0 {
+ return uint64(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *floatAny) ToFloat64() float64 {
+ return any.val
+}
+
+func (any *floatAny) ToString() string {
+ return strconv.FormatFloat(any.val, 'E', -1, 64)
+}
+
+func (any *floatAny) WriteTo(stream *Stream) {
+ stream.WriteFloat64(any.val)
+}
+
+func (any *floatAny) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_int32.go b/vendor/github.com/json-iterator/go/feature_any_int32.go
new file mode 100644
index 000000000..1b56f3991
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_int32.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type int32Any struct {
+ baseAny
+ val int32
+}
+
+func (any *int32Any) LastError() error {
+ return nil
+}
+
+func (any *int32Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *int32Any) MustBeValid() Any {
+ return any
+}
+
+func (any *int32Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *int32Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *int32Any) ToInt32() int32 {
+ return any.val
+}
+
+func (any *int32Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *int32Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *int32Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *int32Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *int32Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *int32Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *int32Any) ToString() string {
+ return strconv.FormatInt(int64(any.val), 10)
+}
+
+func (any *int32Any) WriteTo(stream *Stream) {
+ stream.WriteInt32(any.val)
+}
+
+func (any *int32Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *int32Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_int64.go b/vendor/github.com/json-iterator/go/feature_any_int64.go
new file mode 100644
index 000000000..c440d72b6
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_int64.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type int64Any struct {
+ baseAny
+ val int64
+}
+
+func (any *int64Any) LastError() error {
+ return nil
+}
+
+func (any *int64Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *int64Any) MustBeValid() Any {
+ return any
+}
+
+func (any *int64Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *int64Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *int64Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *int64Any) ToInt64() int64 {
+ return any.val
+}
+
+func (any *int64Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *int64Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *int64Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *int64Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *int64Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *int64Any) ToString() string {
+ return strconv.FormatInt(any.val, 10)
+}
+
+func (any *int64Any) WriteTo(stream *Stream) {
+ stream.WriteInt64(any.val)
+}
+
+func (any *int64Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *int64Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_invalid.go b/vendor/github.com/json-iterator/go/feature_any_invalid.go
new file mode 100644
index 000000000..1d859eac3
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_invalid.go
@@ -0,0 +1,82 @@
+package jsoniter
+
+import "fmt"
+
+type invalidAny struct {
+ baseAny
+ err error
+}
+
+func newInvalidAny(path []interface{}) *invalidAny {
+ return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)}
+}
+
+func (any *invalidAny) LastError() error {
+ return any.err
+}
+
+func (any *invalidAny) ValueType() ValueType {
+ return InvalidValue
+}
+
+func (any *invalidAny) MustBeValid() Any {
+ panic(any.err)
+}
+
+func (any *invalidAny) ToBool() bool {
+ return false
+}
+
+func (any *invalidAny) ToInt() int {
+ return 0
+}
+
+func (any *invalidAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *invalidAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *invalidAny) ToUint() uint {
+ return 0
+}
+
+func (any *invalidAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *invalidAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *invalidAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *invalidAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *invalidAny) ToString() string {
+ return ""
+}
+
+func (any *invalidAny) WriteTo(stream *Stream) {
+}
+
+func (any *invalidAny) Get(path ...interface{}) Any {
+ if any.err == nil {
+ return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)}
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)}
+}
+
+func (any *invalidAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *invalidAny) GetInterface() interface{} {
+ return nil
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_nil.go b/vendor/github.com/json-iterator/go/feature_any_nil.go
new file mode 100644
index 000000000..d04cb54c1
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_nil.go
@@ -0,0 +1,69 @@
+package jsoniter
+
+type nilAny struct {
+ baseAny
+}
+
+func (any *nilAny) LastError() error {
+ return nil
+}
+
+func (any *nilAny) ValueType() ValueType {
+ return NilValue
+}
+
+func (any *nilAny) MustBeValid() Any {
+ return any
+}
+
+func (any *nilAny) ToBool() bool {
+ return false
+}
+
+func (any *nilAny) ToInt() int {
+ return 0
+}
+
+func (any *nilAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *nilAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *nilAny) ToUint() uint {
+ return 0
+}
+
+func (any *nilAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *nilAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *nilAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *nilAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *nilAny) ToString() string {
+ return ""
+}
+
+func (any *nilAny) WriteTo(stream *Stream) {
+ stream.WriteNil()
+}
+
+func (any *nilAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *nilAny) GetInterface() interface{} {
+ return nil
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_number.go b/vendor/github.com/json-iterator/go/feature_any_number.go
new file mode 100644
index 000000000..4e1c27641
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_number.go
@@ -0,0 +1,104 @@
+package jsoniter
+
+import "unsafe"
+
+type numberLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *numberLazyAny) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *numberLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *numberLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *numberLazyAny) ToBool() bool {
+ return any.ToFloat64() != 0
+}
+
+func (any *numberLazyAny) ToInt() int {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt()
+ any.err = iter.Error
+ return val
+}
+
+func (any *numberLazyAny) ToInt32() int32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt32()
+ any.err = iter.Error
+ return val
+}
+
+func (any *numberLazyAny) ToInt64() int64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt64()
+ any.err = iter.Error
+ return val
+}
+
+func (any *numberLazyAny) ToUint() uint {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint()
+ any.err = iter.Error
+ return val
+}
+
+func (any *numberLazyAny) ToUint32() uint32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint32()
+ any.err = iter.Error
+ return val
+}
+
+func (any *numberLazyAny) ToUint64() uint64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint64()
+ any.err = iter.Error
+ return val
+}
+
+func (any *numberLazyAny) ToFloat32() float32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadFloat32()
+ any.err = iter.Error
+ return val
+}
+
+func (any *numberLazyAny) ToFloat64() float64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadFloat64()
+ any.err = iter.Error
+ return val
+}
+
+func (any *numberLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *numberLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *numberLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_object.go b/vendor/github.com/json-iterator/go/feature_any_object.go
new file mode 100644
index 000000000..c44ef5c98
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_object.go
@@ -0,0 +1,374 @@
+package jsoniter
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+type objectLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *objectLazyAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *objectLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *objectLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *objectLazyAny) ToBool() bool {
+ return true
+}
+
+func (any *objectLazyAny) ToInt() int {
+ return 0
+}
+
+func (any *objectLazyAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint() uint {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *objectLazyAny) ToVal(obj interface{}) {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadVal(obj)
+}
+
+func (any *objectLazyAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case string:
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ valueBytes := locateObjectField(iter, firstPath)
+ if valueBytes == nil {
+ return newInvalidAny(path)
+ }
+ iter.ResetBytes(valueBytes)
+ return locatePath(iter, path[1:])
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadMapCB(func(iter *Iterator, field string) bool {
+ mapped := locatePath(iter, path[1:])
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[field] = mapped
+ }
+ return true
+ })
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *objectLazyAny) Keys() []string {
+ keys := []string{}
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadMapCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ keys = append(keys, field)
+ return true
+ })
+ return keys
+}
+
+func (any *objectLazyAny) Size() int {
+ size := 0
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ size++
+ return true
+ })
+ return size
+}
+
+func (any *objectLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *objectLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
+
+type objectAny struct {
+ baseAny
+ err error
+ val reflect.Value
+}
+
+func wrapStruct(val interface{}) *objectAny {
+ return &objectAny{baseAny{}, nil, reflect.ValueOf(val)}
+}
+
+func (any *objectAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *objectAny) MustBeValid() Any {
+ return any
+}
+
+func (any *objectAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *objectAny) LastError() error {
+ return any.err
+}
+
+func (any *objectAny) ToBool() bool {
+ return any.val.NumField() != 0
+}
+
+func (any *objectAny) ToInt() int {
+ return 0
+}
+
+func (any *objectAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *objectAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *objectAny) ToUint() uint {
+ return 0
+}
+
+func (any *objectAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *objectAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *objectAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *objectAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *objectAny) ToString() string {
+ str, err := MarshalToString(any.val.Interface())
+ any.err = err
+ return str
+}
+
+func (any *objectAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case string:
+ field := any.val.FieldByName(firstPath)
+ if !field.IsValid() {
+ return newInvalidAny(path)
+ }
+ return Wrap(field.Interface())
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ for i := 0; i < any.val.NumField(); i++ {
+ field := any.val.Field(i)
+ if field.CanInterface() {
+ mapped := Wrap(field.Interface()).Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[any.val.Type().Field(i).Name] = mapped
+ }
+ }
+ }
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *objectAny) Keys() []string {
+ keys := make([]string, 0, any.val.NumField())
+ for i := 0; i < any.val.NumField(); i++ {
+ keys = append(keys, any.val.Type().Field(i).Name)
+ }
+ return keys
+}
+
+func (any *objectAny) Size() int {
+ return any.val.NumField()
+}
+
+func (any *objectAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *objectAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
+
+type mapAny struct {
+ baseAny
+ err error
+ val reflect.Value
+}
+
+func wrapMap(val interface{}) *mapAny {
+ return &mapAny{baseAny{}, nil, reflect.ValueOf(val)}
+}
+
+func (any *mapAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *mapAny) MustBeValid() Any {
+ return any
+}
+
+func (any *mapAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *mapAny) LastError() error {
+ return any.err
+}
+
+func (any *mapAny) ToBool() bool {
+ return true
+}
+
+func (any *mapAny) ToInt() int {
+ return 0
+}
+
+func (any *mapAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *mapAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *mapAny) ToUint() uint {
+ return 0
+}
+
+func (any *mapAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *mapAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *mapAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *mapAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *mapAny) ToString() string {
+ str, err := MarshalToString(any.val.Interface())
+ any.err = err
+ return str
+}
+
+func (any *mapAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ for _, key := range any.val.MapKeys() {
+ keyAsStr := key.String()
+ element := Wrap(any.val.MapIndex(key).Interface())
+ mapped := element.Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[keyAsStr] = mapped
+ }
+ }
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ value := any.val.MapIndex(reflect.ValueOf(firstPath))
+ if !value.IsValid() {
+ return newInvalidAny(path)
+ }
+ return Wrap(value.Interface())
+ }
+}
+
+func (any *mapAny) Keys() []string {
+ keys := make([]string, 0, any.val.Len())
+ for _, key := range any.val.MapKeys() {
+ keys = append(keys, key.String())
+ }
+ return keys
+}
+
+func (any *mapAny) Size() int {
+ return any.val.Len()
+}
+
+func (any *mapAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *mapAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_string.go b/vendor/github.com/json-iterator/go/feature_any_string.go
new file mode 100644
index 000000000..abf060bd5
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_string.go
@@ -0,0 +1,166 @@
+package jsoniter
+
+import (
+ "fmt"
+ "strconv"
+)
+
+type stringAny struct {
+ baseAny
+ val string
+}
+
+func (any *stringAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("Get %v from simple value", path)}
+}
+
+func (any *stringAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *stringAny) ValueType() ValueType {
+ return StringValue
+}
+
+func (any *stringAny) MustBeValid() Any {
+ return any
+}
+
+func (any *stringAny) LastError() error {
+ return nil
+}
+
+func (any *stringAny) ToBool() bool {
+ str := any.ToString()
+ if str == "0" {
+ return false
+ }
+ for _, c := range str {
+ switch c {
+ case ' ', '\n', '\r', '\t':
+ default:
+ return true
+ }
+ }
+ return false
+}
+
+func (any *stringAny) ToInt() int {
+ return int(any.ToInt64())
+
+}
+
+func (any *stringAny) ToInt32() int32 {
+ return int32(any.ToInt64())
+}
+
+func (any *stringAny) ToInt64() int64 {
+ if any.val == "" {
+ return 0
+ }
+
+ flag := 1
+ startPos := 0
+ endPos := 0
+ if any.val[0] == '+' || any.val[0] == '-' {
+ startPos = 1
+ }
+
+ if any.val[0] == '-' {
+ flag = -1
+ }
+
+ for i := startPos; i < len(any.val); i++ {
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ break
+ }
+ }
+ parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64)
+ return int64(flag) * parsed
+}
+
+func (any *stringAny) ToUint() uint {
+ return uint(any.ToUint64())
+}
+
+func (any *stringAny) ToUint32() uint32 {
+ return uint32(any.ToUint64())
+}
+
+func (any *stringAny) ToUint64() uint64 {
+ if any.val == "" {
+ return 0
+ }
+
+ startPos := 0
+ endPos := 0
+
+ if any.val[0] == '-' {
+ return 0
+ }
+ if any.val[0] == '+' {
+ startPos = 1
+ }
+
+ for i := startPos; i < len(any.val); i++ {
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ break
+ }
+ }
+ parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64)
+ return parsed
+}
+
+func (any *stringAny) ToFloat32() float32 {
+ return float32(any.ToFloat64())
+}
+
+func (any *stringAny) ToFloat64() float64 {
+ if len(any.val) == 0 {
+ return 0
+ }
+
+ // first char invalid
+ if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') {
+ return 0
+ }
+
+ // extract valid num expression from string
+ // eg 123true => 123, -12.12xxa => -12.12
+ endPos := 1
+ for i := 1; i < len(any.val); i++ {
+ if any.val[i] == '.' || any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' {
+ endPos = i + 1
+ continue
+ }
+
+ // end position is the first char which is not digit
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ endPos = i
+ break
+ }
+ }
+ parsed, _ := strconv.ParseFloat(any.val[:endPos], 64)
+ return parsed
+}
+
+func (any *stringAny) ToString() string {
+ return any.val
+}
+
+func (any *stringAny) WriteTo(stream *Stream) {
+ stream.WriteString(any.val)
+}
+
+func (any *stringAny) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_uint32.go b/vendor/github.com/json-iterator/go/feature_any_uint32.go
new file mode 100644
index 000000000..656bbd33d
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_uint32.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type uint32Any struct {
+ baseAny
+ val uint32
+}
+
+func (any *uint32Any) LastError() error {
+ return nil
+}
+
+func (any *uint32Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *uint32Any) MustBeValid() Any {
+ return any
+}
+
+func (any *uint32Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *uint32Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *uint32Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *uint32Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *uint32Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *uint32Any) ToUint32() uint32 {
+ return any.val
+}
+
+func (any *uint32Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *uint32Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *uint32Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *uint32Any) ToString() string {
+ return strconv.FormatInt(int64(any.val), 10)
+}
+
+func (any *uint32Any) WriteTo(stream *Stream) {
+ stream.WriteUint32(any.val)
+}
+
+func (any *uint32Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *uint32Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/feature_any_uint64.go b/vendor/github.com/json-iterator/go/feature_any_uint64.go
new file mode 100644
index 000000000..7df2fce33
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_any_uint64.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type uint64Any struct {
+ baseAny
+ val uint64
+}
+
+func (any *uint64Any) LastError() error {
+ return nil
+}
+
+func (any *uint64Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *uint64Any) MustBeValid() Any {
+ return any
+}
+
+func (any *uint64Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *uint64Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *uint64Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *uint64Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *uint64Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *uint64Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *uint64Any) ToUint64() uint64 {
+ return any.val
+}
+
+func (any *uint64Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *uint64Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *uint64Any) ToString() string {
+ return strconv.FormatUint(any.val, 10)
+}
+
+func (any *uint64Any) WriteTo(stream *Stream) {
+ stream.WriteUint64(any.val)
+}
+
+func (any *uint64Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *uint64Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/feature_config.go b/vendor/github.com/json-iterator/go/feature_config.go
new file mode 100644
index 000000000..140679536
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_config.go
@@ -0,0 +1,347 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "errors"
+ "io"
+ "reflect"
+ "sync/atomic"
+ "unsafe"
+)
+
+// Config customizes how the API behaves.
+// An API is created from a Config by calling Froze.
+type Config struct {
+ IndentionStep int
+ MarshalFloatWith6Digits bool
+ EscapeHTML bool
+ SortMapKeys bool
+ UseNumber bool
+ TagKey string
+ ValidateJsonRawMessage bool
+ ObjectFieldMustBeSimpleString bool
+}
+
+type frozenConfig struct {
+ configBeforeFrozen Config
+ sortMapKeys bool
+ indentionStep int
+ objectFieldMustBeSimpleString bool
+ decoderCache unsafe.Pointer
+ encoderCache unsafe.Pointer
+ extensions []Extension
+ streamPool chan *Stream
+ iteratorPool chan *Iterator
+}
+
+// API is the public interface of this package.
+// It is primarily used through Marshal and Unmarshal.
+type API interface {
+ IteratorPool
+ StreamPool
+ MarshalToString(v interface{}) (string, error)
+ Marshal(v interface{}) ([]byte, error)
+ MarshalIndent(v interface{}, prefix, indent string) ([]byte, error)
+ UnmarshalFromString(str string, v interface{}) error
+ Unmarshal(data []byte, v interface{}) error
+ Get(data []byte, path ...interface{}) Any
+ NewEncoder(writer io.Writer) *Encoder
+ NewDecoder(reader io.Reader) *Decoder
+ Valid(data []byte) bool
+}
+
+// ConfigDefault is the default API
+var ConfigDefault = Config{
+ EscapeHTML: true,
+}.Froze()
+
+// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior
+var ConfigCompatibleWithStandardLibrary = Config{
+ EscapeHTML: true,
+ SortMapKeys: true,
+ ValidateJsonRawMessage: true,
+}.Froze()
+
+// ConfigFastest marshals floats with only 6 digits of precision
+var ConfigFastest = Config{
+ EscapeHTML: false,
+	MarshalFloatWith6Digits:       true, // will lose precision
+ ObjectFieldMustBeSimpleString: true, // do not unescape object field
+}.Froze()
+
+// Froze forges an API from the config
+func (cfg Config) Froze() API {
+ // TODO: cache frozen config
+ frozenConfig := &frozenConfig{
+ sortMapKeys: cfg.SortMapKeys,
+ indentionStep: cfg.IndentionStep,
+ objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
+ streamPool: make(chan *Stream, 16),
+ iteratorPool: make(chan *Iterator, 16),
+ }
+ atomic.StorePointer(&frozenConfig.decoderCache, unsafe.Pointer(&map[string]ValDecoder{}))
+ atomic.StorePointer(&frozenConfig.encoderCache, unsafe.Pointer(&map[string]ValEncoder{}))
+ if cfg.MarshalFloatWith6Digits {
+ frozenConfig.marshalFloatWith6Digits()
+ }
+ if cfg.EscapeHTML {
+ frozenConfig.escapeHTML()
+ }
+ if cfg.UseNumber {
+ frozenConfig.useNumber()
+ }
+ if cfg.ValidateJsonRawMessage {
+ frozenConfig.validateJsonRawMessage()
+ }
+ frozenConfig.configBeforeFrozen = cfg
+ return frozenConfig
+}
+
+func (cfg *frozenConfig) validateJsonRawMessage() {
+ encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
+ rawMessage := *(*json.RawMessage)(ptr)
+ iter := cfg.BorrowIterator([]byte(rawMessage))
+ iter.Read()
+ if iter.Error != nil {
+ stream.WriteRaw("null")
+ } else {
+ cfg.ReturnIterator(iter)
+ stream.WriteRaw(string(rawMessage))
+ }
+ }, func(ptr unsafe.Pointer) bool {
+ return false
+ }}
+ cfg.addEncoderToCache(reflect.TypeOf((*json.RawMessage)(nil)).Elem(), encoder)
+ cfg.addEncoderToCache(reflect.TypeOf((*RawMessage)(nil)).Elem(), encoder)
+}
+
+func (cfg *frozenConfig) useNumber() {
+ cfg.addDecoderToCache(reflect.TypeOf((*interface{})(nil)).Elem(), &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.WhatIsNext() == NumberValue {
+ *((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
+ } else {
+ *((*interface{})(ptr)) = iter.Read()
+ }
+ }})
+}
+func (cfg *frozenConfig) getTagKey() string {
+ tagKey := cfg.configBeforeFrozen.TagKey
+ if tagKey == "" {
+ return "json"
+ }
+ return tagKey
+}
+
+func (cfg *frozenConfig) registerExtension(extension Extension) {
+ cfg.extensions = append(cfg.extensions, extension)
+}
+
+type lossyFloat32Encoder struct {
+}
+
+func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat32Lossy(*((*float32)(ptr)))
+}
+
+func (encoder *lossyFloat32Encoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float32)(ptr)) == 0
+}
+
+type lossyFloat64Encoder struct {
+}
+
+func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat64Lossy(*((*float64)(ptr)))
+}
+
+func (encoder *lossyFloat64Encoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float64)(ptr)) == 0
+}
+
+// marshalFloatWith6Digits keeps 10**(-6) precision
+// for float variables for better performance.
+func (cfg *frozenConfig) marshalFloatWith6Digits() {
+ // for better performance
+ cfg.addEncoderToCache(reflect.TypeOf((*float32)(nil)).Elem(), &lossyFloat32Encoder{})
+ cfg.addEncoderToCache(reflect.TypeOf((*float64)(nil)).Elem(), &lossyFloat64Encoder{})
+}
+
+type htmlEscapedStringEncoder struct {
+}
+
+func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ str := *((*string)(ptr))
+ stream.WriteStringWithHTMLEscaped(str)
+}
+
+func (encoder *htmlEscapedStringEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*string)(ptr)) == ""
+}
+
+func (cfg *frozenConfig) escapeHTML() {
+ cfg.addEncoderToCache(reflect.TypeOf((*string)(nil)).Elem(), &htmlEscapedStringEncoder{})
+}
+
+func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) {
+ done := false
+ for !done {
+ ptr := atomic.LoadPointer(&cfg.decoderCache)
+ cache := *(*map[reflect.Type]ValDecoder)(ptr)
+ copied := map[reflect.Type]ValDecoder{}
+ for k, v := range cache {
+ copied[k] = v
+ }
+ copied[cacheKey] = decoder
+ done = atomic.CompareAndSwapPointer(&cfg.decoderCache, ptr, unsafe.Pointer(&copied))
+ }
+}
+
+func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) {
+ done := false
+ for !done {
+ ptr := atomic.LoadPointer(&cfg.encoderCache)
+ cache := *(*map[reflect.Type]ValEncoder)(ptr)
+ copied := map[reflect.Type]ValEncoder{}
+ for k, v := range cache {
+ copied[k] = v
+ }
+ copied[cacheKey] = encoder
+ done = atomic.CompareAndSwapPointer(&cfg.encoderCache, ptr, unsafe.Pointer(&copied))
+ }
+}
+
+func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder {
+ ptr := atomic.LoadPointer(&cfg.decoderCache)
+ cache := *(*map[reflect.Type]ValDecoder)(ptr)
+ return cache[cacheKey]
+}
+
+func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder {
+ ptr := atomic.LoadPointer(&cfg.encoderCache)
+ cache := *(*map[reflect.Type]ValEncoder)(ptr)
+ return cache[cacheKey]
+}
+
+func (cfg *frozenConfig) cleanDecoders() {
+ typeDecoders = map[string]ValDecoder{}
+ fieldDecoders = map[string]ValDecoder{}
+ *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
+}
+
+func (cfg *frozenConfig) cleanEncoders() {
+ typeEncoders = map[string]ValEncoder{}
+ fieldEncoders = map[string]ValEncoder{}
+ *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
+}
+
+func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) {
+ stream := cfg.BorrowStream(nil)
+ defer cfg.ReturnStream(stream)
+ stream.WriteVal(v)
+ if stream.Error != nil {
+ return "", stream.Error
+ }
+ return string(stream.Buffer()), nil
+}
+
+func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) {
+ stream := cfg.BorrowStream(nil)
+ defer cfg.ReturnStream(stream)
+ stream.WriteVal(v)
+ if stream.Error != nil {
+ return nil, stream.Error
+ }
+ result := stream.Buffer()
+ copied := make([]byte, len(result))
+ copy(copied, result)
+ return copied, nil
+}
+
+func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+ if prefix != "" {
+ panic("prefix is not supported")
+ }
+ for _, r := range indent {
+ if r != ' ' {
+ panic("indent can only be space")
+ }
+ }
+ newCfg := cfg.configBeforeFrozen
+ newCfg.IndentionStep = len(indent)
+ return newCfg.Froze().Marshal(v)
+}
+
+func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
+ data := []byte(str)
+ data = data[:lastNotSpacePos(data)]
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ iter.ReadVal(v)
+ if iter.head == iter.tail {
+ iter.loadMore()
+ }
+ if iter.Error == io.EOF {
+ return nil
+ }
+ if iter.Error == nil {
+ iter.ReportError("UnmarshalFromString", "there are bytes left after unmarshal")
+ }
+ return iter.Error
+}
+
+func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any {
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ return locatePath(iter, path)
+}
+
+func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error {
+ data = data[:lastNotSpacePos(data)]
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ typ := reflect.TypeOf(v)
+ if typ.Kind() != reflect.Ptr {
+ // return non-pointer error
+ return errors.New("the second param must be ptr type")
+ }
+ iter.ReadVal(v)
+ if iter.head == iter.tail {
+ iter.loadMore()
+ }
+ if iter.Error == io.EOF {
+ return nil
+ }
+ if iter.Error == nil {
+ iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
+ }
+ return iter.Error
+}
+
+func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder {
+ stream := NewStream(cfg, writer, 512)
+ return &Encoder{stream}
+}
+
+func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
+ iter := Parse(cfg, reader, 512)
+ return &Decoder{iter}
+}
+
+func (cfg *frozenConfig) Valid(data []byte) bool {
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ iter.Skip()
+ return iter.Error == nil
+}
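Note: Froze builds an immutable API value from a Config; callers are expected to create it once and reuse it, as the ConfigDefault / ConfigCompatibleWithStandardLibrary / ConfigFastest variables above do. A minimal usage sketch against the Config fields and API methods defined in this file; the struct values and payload are illustrative:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Froze is called once; the resulting API is reused for every call.
	api := jsoniter.Config{
		EscapeHTML:  true,
		SortMapKeys: true,
	}.Froze()

	out, err := api.MarshalToString(map[string]int{"b": 2, "a": 1})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // keys come out sorted because SortMapKeys is set
}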
diff --git a/vendor/github.com/json-iterator/go/feature_iter.go b/vendor/github.com/json-iterator/go/feature_iter.go
new file mode 100644
index 000000000..95ae54fbf
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_iter.go
@@ -0,0 +1,322 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+)
+
+// ValueType is the type of a JSON element
+type ValueType int
+
+const (
+ // InvalidValue invalid JSON element
+ InvalidValue ValueType = iota
+ // StringValue JSON element "string"
+ StringValue
+ // NumberValue JSON element 100 or 0.10
+ NumberValue
+ // NilValue JSON element null
+ NilValue
+ // BoolValue JSON element true or false
+ BoolValue
+ // ArrayValue JSON element []
+ ArrayValue
+ // ObjectValue JSON element {}
+ ObjectValue
+)
+
+var hexDigits []byte
+var valueTypes []ValueType
+
+func init() {
+ hexDigits = make([]byte, 256)
+ for i := 0; i < len(hexDigits); i++ {
+ hexDigits[i] = 255
+ }
+ for i := '0'; i <= '9'; i++ {
+ hexDigits[i] = byte(i - '0')
+ }
+ for i := 'a'; i <= 'f'; i++ {
+ hexDigits[i] = byte((i - 'a') + 10)
+ }
+ for i := 'A'; i <= 'F'; i++ {
+ hexDigits[i] = byte((i - 'A') + 10)
+ }
+ valueTypes = make([]ValueType, 256)
+ for i := 0; i < len(valueTypes); i++ {
+ valueTypes[i] = InvalidValue
+ }
+ valueTypes['"'] = StringValue
+ valueTypes['-'] = NumberValue
+ valueTypes['0'] = NumberValue
+ valueTypes['1'] = NumberValue
+ valueTypes['2'] = NumberValue
+ valueTypes['3'] = NumberValue
+ valueTypes['4'] = NumberValue
+ valueTypes['5'] = NumberValue
+ valueTypes['6'] = NumberValue
+ valueTypes['7'] = NumberValue
+ valueTypes['8'] = NumberValue
+ valueTypes['9'] = NumberValue
+ valueTypes['t'] = BoolValue
+ valueTypes['f'] = BoolValue
+ valueTypes['n'] = NilValue
+ valueTypes['['] = ArrayValue
+ valueTypes['{'] = ObjectValue
+}
+
+// Iterator is an io.Reader-like object with JSON-specific read functions.
+// Errors are not returned as return values; they are stored in the Error field of the iterator instance.
+type Iterator struct {
+ cfg *frozenConfig
+ reader io.Reader
+ buf []byte
+ head int
+ tail int
+ captureStartedAt int
+ captured []byte
+ Error error
+ Attachment interface{} // open for customized decoder
+}
+
+// NewIterator creates an empty Iterator instance
+func NewIterator(cfg API) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: nil,
+ buf: nil,
+ head: 0,
+ tail: 0,
+ }
+}
+
+// Parse creates an Iterator instance from io.Reader
+func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: reader,
+ buf: make([]byte, bufSize),
+ head: 0,
+ tail: 0,
+ }
+}
+
+// ParseBytes creates an Iterator instance from byte array
+func ParseBytes(cfg API, input []byte) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: nil,
+ buf: input,
+ head: 0,
+ tail: len(input),
+ }
+}
+
+// ParseString creates an Iterator instance from string
+func ParseString(cfg API, input string) *Iterator {
+ return ParseBytes(cfg, []byte(input))
+}
+
+// Pool returns a pool that can provide more iterators with the same configuration
+func (iter *Iterator) Pool() IteratorPool {
+ return iter.cfg
+}
+
+// Reset reuses the iterator instance by specifying another reader
+func (iter *Iterator) Reset(reader io.Reader) *Iterator {
+ iter.reader = reader
+ iter.head = 0
+ iter.tail = 0
+ return iter
+}
+
+// ResetBytes reuses the iterator instance by specifying another byte array as input
+func (iter *Iterator) ResetBytes(input []byte) *Iterator {
+ iter.reader = nil
+ iter.buf = input
+ iter.head = 0
+ iter.tail = len(input)
+ return iter
+}
+
+// WhatIsNext gets the ValueType of the next JSON element without consuming it
+func (iter *Iterator) WhatIsNext() ValueType {
+ valueType := valueTypes[iter.nextToken()]
+ iter.unreadByte()
+ return valueType
+}
+
+func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\t', '\r':
+ continue
+ }
+ iter.head = i
+ return false
+ }
+ return true
+}
+
+func (iter *Iterator) isObjectEnd() bool {
+ c := iter.nextToken()
+ if c == ',' {
+ return false
+ }
+ if c == '}' {
+ return true
+ }
+ iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
+ return true
+}
+
+func (iter *Iterator) nextToken() byte {
+ // a variation of skip whitespaces, returning the next non-whitespace token
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\t', '\r':
+ continue
+ }
+ iter.head = i + 1
+ return c
+ }
+ if !iter.loadMore() {
+ return 0
+ }
+ }
+}
+
+// ReportError records an error in the iterator instance together with the current position.
+func (iter *Iterator) ReportError(operation string, msg string) {
+ if iter.Error != nil {
+ if iter.Error != io.EOF {
+ return
+ }
+ }
+ peekStart := iter.head - 10
+ if peekStart < 0 {
+ peekStart = 0
+ }
+ peekEnd := iter.head + 10
+ if peekEnd > iter.tail {
+ peekEnd = iter.tail
+ }
+ parsing := string(iter.buf[peekStart:peekEnd])
+ contextStart := iter.head - 50
+ if contextStart < 0 {
+ contextStart = 0
+ }
+ contextEnd := iter.head + 50
+ if contextEnd > iter.tail {
+ contextEnd = iter.tail
+ }
+ context := string(iter.buf[contextStart:contextEnd])
+ iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...",
+ operation, msg, iter.head-peekStart, parsing, context)
+}
+
+// CurrentBuffer gets the current buffer as a string for debugging purposes
+func (iter *Iterator) CurrentBuffer() string {
+ peekStart := iter.head - 10
+ if peekStart < 0 {
+ peekStart = 0
+ }
+ return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head,
+ string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
+}
+
+func (iter *Iterator) readByte() (ret byte) {
+ if iter.head == iter.tail {
+ if iter.loadMore() {
+ ret = iter.buf[iter.head]
+ iter.head++
+ return ret
+ }
+ return 0
+ }
+ ret = iter.buf[iter.head]
+ iter.head++
+ return ret
+}
+
+func (iter *Iterator) loadMore() bool {
+ if iter.reader == nil {
+ if iter.Error == nil {
+ iter.head = iter.tail
+ iter.Error = io.EOF
+ }
+ return false
+ }
+ if iter.captured != nil {
+ iter.captured = append(iter.captured,
+ iter.buf[iter.captureStartedAt:iter.tail]...)
+ iter.captureStartedAt = 0
+ }
+ for {
+ n, err := iter.reader.Read(iter.buf)
+ if n == 0 {
+ if err != nil {
+ if iter.Error == nil {
+ iter.Error = err
+ }
+ return false
+ }
+ } else {
+ iter.head = 0
+ iter.tail = n
+ return true
+ }
+ }
+}
+
+func (iter *Iterator) unreadByte() {
+ if iter.Error != nil {
+ return
+ }
+ iter.head--
+ return
+}
+
+// Read reads the next JSON element as a generic interface{}.
+func (iter *Iterator) Read() interface{} {
+ valueType := iter.WhatIsNext()
+ switch valueType {
+ case StringValue:
+ return iter.ReadString()
+ case NumberValue:
+ if iter.cfg.configBeforeFrozen.UseNumber {
+ return json.Number(iter.readNumberAsString())
+ }
+ return iter.ReadFloat64()
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ return nil
+ case BoolValue:
+ return iter.ReadBool()
+ case ArrayValue:
+ arr := []interface{}{}
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ var elem interface{}
+ iter.ReadVal(&elem)
+ arr = append(arr, elem)
+ return true
+ })
+ return arr
+ case ObjectValue:
+ obj := map[string]interface{}{}
+ iter.ReadMapCB(func(Iter *Iterator, field string) bool {
+ var elem interface{}
+ iter.ReadVal(&elem)
+ obj[field] = elem
+ return true
+ })
+ return obj
+ default:
+ iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType))
+ return nil
+ }
+}
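Note: Read dispatches on WhatIsNext and decodes objects into map[string]interface{}, arrays into []interface{}, numbers into float64 (or json.Number when UseNumber is set), and null into nil. A short sketch of that behavior using ParseString and ConfigDefault from the files above; the input document is made up for illustration:

package main

import (
	"fmt"
	"io"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"a": [1, 2], "b": null}`)
	// Read dispatches on WhatIsNext: the object becomes a map, the array a
	// []interface{}, the numbers float64 and the null a nil interface{}.
	v := iter.Read()
	if iter.Error != nil && iter.Error != io.EOF {
		panic(iter.Error)
	}
	fmt.Printf("%#v\n", v)
}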
diff --git a/vendor/github.com/json-iterator/go/feature_iter_array.go b/vendor/github.com/json-iterator/go/feature_iter_array.go
new file mode 100644
index 000000000..6188cb457
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_iter_array.go
@@ -0,0 +1,58 @@
+package jsoniter
+
+// ReadArray reads an array element and reports whether the array has more elements to read.
+func (iter *Iterator) ReadArray() (ret bool) {
+ c := iter.nextToken()
+ switch c {
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l')
+ return false // null
+ case '[':
+ c = iter.nextToken()
+ if c != ']' {
+ iter.unreadByte()
+ return true
+ }
+ return false
+ case ']':
+ return false
+ case ',':
+ return true
+ default:
+ iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c}))
+ return
+ }
+}
+
+// ReadArrayCB reads an array with a callback
+func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
+ c := iter.nextToken()
+ if c == '[' {
+ c = iter.nextToken()
+ if c != ']' {
+ iter.unreadByte()
+ if !callback(iter) {
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ if !callback(iter) {
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != ']' {
+ iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
+ return false
+ }
+ return true
+ }
+ return true
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c}))
+ return false
+}
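Note: ReadArrayCB invokes the callback once per element and expects each invocation to consume exactly one value, as the comma loop above shows. A hypothetical usage sketch; the input literal is illustrative:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[1, 2, 3]`)
	sum := 0
	// The callback runs once per element and must consume exactly one value.
	iter.ReadArrayCB(func(it *jsoniter.Iterator) bool {
		sum += it.ReadInt()
		return true
	})
	fmt.Println(sum) // 6
}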
diff --git a/vendor/github.com/json-iterator/go/feature_iter_float.go b/vendor/github.com/json-iterator/go/feature_iter_float.go
new file mode 100644
index 000000000..86f459912
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_iter_float.go
@@ -0,0 +1,341 @@
+package jsoniter
+
+import (
+ "io"
+ "math/big"
+ "strconv"
+ "strings"
+ "unsafe"
+)
+
+var floatDigits []int8
+
+const invalidCharForNumber = int8(-1)
+const endOfNumber = int8(-2)
+const dotInNumber = int8(-3)
+
+func init() {
+ floatDigits = make([]int8, 256)
+ for i := 0; i < len(floatDigits); i++ {
+ floatDigits[i] = invalidCharForNumber
+ }
+ for i := int8('0'); i <= int8('9'); i++ {
+ floatDigits[i] = i - int8('0')
+ }
+ floatDigits[','] = endOfNumber
+ floatDigits[']'] = endOfNumber
+ floatDigits['}'] = endOfNumber
+ floatDigits[' '] = endOfNumber
+ floatDigits['\t'] = endOfNumber
+ floatDigits['\n'] = endOfNumber
+ floatDigits['.'] = dotInNumber
+}
+
+// ReadBigFloat reads a big.Float
+func (iter *Iterator) ReadBigFloat() (ret *big.Float) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return nil
+ }
+ prec := 64
+ if len(str) > prec {
+ prec = len(str)
+ }
+ val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero)
+ if err != nil {
+ iter.Error = err
+ return nil
+ }
+ return val
+}
+
+// ReadBigInt reads a big.Int
+func (iter *Iterator) ReadBigInt() (ret *big.Int) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return nil
+ }
+ ret = big.NewInt(0)
+ var success bool
+ ret, success = ret.SetString(str, 10)
+ if !success {
+ iter.ReportError("ReadBigInt", "invalid big int")
+ return nil
+ }
+ return ret
+}
+
+// ReadFloat32 reads a float32
+func (iter *Iterator) ReadFloat32() (ret float32) {
+ c := iter.nextToken()
+ if c == '-' {
+ return -iter.readPositiveFloat32()
+ }
+ iter.unreadByte()
+ return iter.readPositiveFloat32()
+}
+
+func (iter *Iterator) readPositiveFloat32() (ret float32) {
+ value := uint64(0)
+ c := byte(' ')
+ i := iter.head
+ // first char
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ c = iter.buf[i]
+ i++
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat32SlowPath()
+ case endOfNumber:
+ iter.ReportError("readFloat32", "empty number")
+ return
+ case dotInNumber:
+ iter.ReportError("readFloat32", "leading dot is invalid")
+ return
+ case 0:
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ c = iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.ReportError("readFloat32", "leading zero is invalid")
+ return
+ }
+ }
+ value = uint64(ind)
+ // chars before dot
+non_decimal_loop:
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat32SlowPath()
+ case endOfNumber:
+ iter.head = i
+ return float32(value)
+ case dotInNumber:
+ break non_decimal_loop
+ }
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat32SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
+ }
+ // chars after dot
+ if c == '.' {
+ i++
+ decimalPlaces := 0
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case endOfNumber:
+ if decimalPlaces > 0 && decimalPlaces < len(pow10) {
+ iter.head = i
+ return float32(float64(value) / float64(pow10[decimalPlaces]))
+ }
+ // too many decimal places
+ return iter.readFloat32SlowPath()
+ case invalidCharForNumber:
+ fallthrough
+ case dotInNumber:
+ return iter.readFloat32SlowPath()
+ }
+ decimalPlaces++
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat32SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ }
+ }
+ return iter.readFloat32SlowPath()
+}
+
+func (iter *Iterator) readNumberAsString() (ret string) {
+ strBuf := [16]byte{}
+ str := strBuf[0:0]
+load_loop:
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ str = append(str, c)
+ continue
+ default:
+ iter.head = i
+ break load_loop
+ }
+ }
+ if !iter.loadMore() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ if len(str) == 0 {
+ iter.ReportError("readNumberAsString", "invalid number")
+ }
+ return *(*string)(unsafe.Pointer(&str))
+}
+
+func (iter *Iterator) readFloat32SlowPath() (ret float32) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ errMsg := validateFloat(str)
+ if errMsg != "" {
+ iter.ReportError("readFloat32SlowPath", errMsg)
+ return
+ }
+ val, err := strconv.ParseFloat(str, 32)
+ if err != nil {
+ iter.Error = err
+ return
+ }
+ return float32(val)
+}
+
+// ReadFloat64 reads a float64
+func (iter *Iterator) ReadFloat64() (ret float64) {
+ c := iter.nextToken()
+ if c == '-' {
+ return -iter.readPositiveFloat64()
+ }
+ iter.unreadByte()
+ return iter.readPositiveFloat64()
+}
+
+func (iter *Iterator) readPositiveFloat64() (ret float64) {
+ value := uint64(0)
+ c := byte(' ')
+ i := iter.head
+ // first char
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ c = iter.buf[i]
+ i++
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat64SlowPath()
+ case endOfNumber:
+ iter.ReportError("readFloat64", "empty number")
+ return
+ case dotInNumber:
+ iter.ReportError("readFloat64", "leading dot is invalid")
+ return
+ case 0:
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ c = iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.ReportError("readFloat64", "leading zero is invalid")
+ return
+ }
+ }
+ value = uint64(ind)
+ // chars before dot
+non_decimal_loop:
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat64SlowPath()
+ case endOfNumber:
+ iter.head = i
+ return float64(value)
+ case dotInNumber:
+ break non_decimal_loop
+ }
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat64SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
+ }
+ // chars after dot
+ if c == '.' {
+ i++
+ decimalPlaces := 0
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case endOfNumber:
+ if decimalPlaces > 0 && decimalPlaces < len(pow10) {
+ iter.head = i
+ return float64(value) / float64(pow10[decimalPlaces])
+ }
+ // too many decimal places
+ return iter.readFloat64SlowPath()
+ case invalidCharForNumber:
+ fallthrough
+ case dotInNumber:
+ return iter.readFloat64SlowPath()
+ }
+ decimalPlaces++
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat64SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ }
+ }
+ return iter.readFloat64SlowPath()
+}
+
+func (iter *Iterator) readFloat64SlowPath() (ret float64) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ errMsg := validateFloat(str)
+ if errMsg != "" {
+ iter.ReportError("readFloat64SlowPath", errMsg)
+ return
+ }
+ val, err := strconv.ParseFloat(str, 64)
+ if err != nil {
+ iter.Error = err
+ return
+ }
+ return val
+}
+
+func validateFloat(str string) string {
+	// strconv.ParseFloat does not validate `1.` or `1.e1`
+ if len(str) == 0 {
+ return "empty number"
+ }
+ if str[0] == '-' {
+ return "-- is not valid"
+ }
+ dotPos := strings.IndexByte(str, '.')
+ if dotPos != -1 {
+ if dotPos == len(str)-1 {
+ return "dot can not be last character"
+ }
+ switch str[dotPos+1] {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ default:
+ return "missing digit after dot"
+ }
+ }
+ return ""
+}
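Note: readPositiveFloat64 takes the integer fast path (accumulated digits divided by a power of ten) for short plain decimals and falls back to readFloat64SlowPath / strconv.ParseFloat for exponents or long inputs. A small sketch exercising both paths; the literals are arbitrary:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[3.14, 1e300]`)
	// 3.14 is handled by the fast path above; 1e300 contains an exponent,
	// so it falls back to readFloat64SlowPath and strconv.ParseFloat.
	for iter.ReadArray() {
		fmt.Println(iter.ReadFloat64())
	}
}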
diff --git a/vendor/github.com/json-iterator/go/feature_iter_int.go b/vendor/github.com/json-iterator/go/feature_iter_int.go
new file mode 100644
index 000000000..6137348cd
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_iter_int.go
@@ -0,0 +1,268 @@
+package jsoniter
+
+import (
+ "math"
+ "strconv"
+)
+
+var intDigits []int8
+
+const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
+const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
+
+func init() {
+ intDigits = make([]int8, 256)
+ for i := 0; i < len(intDigits); i++ {
+ intDigits[i] = invalidCharForNumber
+ }
+ for i := int8('0'); i <= int8('9'); i++ {
+ intDigits[i] = i - int8('0')
+ }
+}
+
+// ReadUint reads a uint
+func (iter *Iterator) ReadUint() uint {
+ return uint(iter.ReadUint64())
+}
+
+// ReadInt reads an int
+func (iter *Iterator) ReadInt() int {
+ return int(iter.ReadInt64())
+}
+
+// ReadInt8 reads an int8
+func (iter *Iterator) ReadInt8() (ret int8) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt8+1 {
+ iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int8(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt8 {
+ iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int8(val)
+}
+
+// ReadUint8 reads a uint8
+func (iter *Iterator) ReadUint8() (ret uint8) {
+ val := iter.readUint32(iter.nextToken())
+ if val > math.MaxUint8 {
+ iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return uint8(val)
+}
+
+// ReadInt16 reads an int16
+func (iter *Iterator) ReadInt16() (ret int16) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt16+1 {
+ iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int16(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt16 {
+ iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int16(val)
+}
+
+// ReadUint16 reads a uint16
+func (iter *Iterator) ReadUint16() (ret uint16) {
+ val := iter.readUint32(iter.nextToken())
+ if val > math.MaxUint16 {
+ iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return uint16(val)
+}
+
+// ReadInt32 reads an int32
+func (iter *Iterator) ReadInt32() (ret int32) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt32+1 {
+ iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int32(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt32 {
+ iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int32(val)
+}
+
+// ReadUint32 reads a uint32
+func (iter *Iterator) ReadUint32() (ret uint32) {
+ return iter.readUint32(iter.nextToken())
+}
+
+func (iter *Iterator) readUint32(c byte) (ret uint32) {
+ defer func() {
+ if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
+ iter.ReportError("readUint32", "can not decode float as int")
+ }
+ }()
+ ind := intDigits[c]
+ if ind == 0 {
+ return 0 // single zero
+ }
+ if ind == invalidCharForNumber {
+		iter.ReportError("readUint32", "unexpected character: "+string([]byte{c}))
+ return
+ }
+ value := uint32(ind)
+ if iter.tail-iter.head > 10 {
+ i := iter.head
+ ind2 := intDigits[iter.buf[i]]
+ if ind2 == invalidCharForNumber {
+ iter.head = i
+ return value
+ }
+ i++
+ ind3 := intDigits[iter.buf[i]]
+ if ind3 == invalidCharForNumber {
+ iter.head = i
+ return value*10 + uint32(ind2)
+ }
+ //iter.head = i + 1
+ //value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
+ i++
+ ind4 := intDigits[iter.buf[i]]
+ if ind4 == invalidCharForNumber {
+ iter.head = i
+ return value*100 + uint32(ind2)*10 + uint32(ind3)
+ }
+ i++
+ ind5 := intDigits[iter.buf[i]]
+ if ind5 == invalidCharForNumber {
+ iter.head = i
+ return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
+ }
+ i++
+ ind6 := intDigits[iter.buf[i]]
+ if ind6 == invalidCharForNumber {
+ iter.head = i
+ return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
+ }
+ i++
+ ind7 := intDigits[iter.buf[i]]
+ if ind7 == invalidCharForNumber {
+ iter.head = i
+ return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
+ }
+ i++
+ ind8 := intDigits[iter.buf[i]]
+ if ind8 == invalidCharForNumber {
+ iter.head = i
+ return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
+ }
+ i++
+ ind9 := intDigits[iter.buf[i]]
+ value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
+ iter.head = i
+ if ind9 == invalidCharForNumber {
+ return value
+ }
+ }
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ ind = intDigits[iter.buf[i]]
+ if ind == invalidCharForNumber {
+ iter.head = i
+ return value
+ }
+ if value > uint32SafeToMultiply10 {
+ value2 := (value << 3) + (value << 1) + uint32(ind)
+ if value2 < value {
+ iter.ReportError("readUint32", "overflow")
+ return
+ }
+ value = value2
+ continue
+ }
+ value = (value << 3) + (value << 1) + uint32(ind)
+ }
+ if !iter.loadMore() {
+ return value
+ }
+ }
+}
+
+// ReadInt64 reads an int64
+func (iter *Iterator) ReadInt64() (ret int64) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint64(iter.readByte())
+ if val > math.MaxInt64+1 {
+ iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
+ return
+ }
+ return -int64(val)
+ }
+ val := iter.readUint64(c)
+ if val > math.MaxInt64 {
+ iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
+ return
+ }
+ return int64(val)
+}
+
+// ReadUint64 reads a uint64
+func (iter *Iterator) ReadUint64() uint64 {
+ return iter.readUint64(iter.nextToken())
+}
+
+func (iter *Iterator) readUint64(c byte) (ret uint64) {
+ defer func() {
+ if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
+ iter.ReportError("readUint64", "can not decode float as int")
+ }
+ }()
+ ind := intDigits[c]
+ if ind == 0 {
+ return 0 // single zero
+ }
+ if ind == invalidCharForNumber {
+		iter.ReportError("readUint64", "unexpected character: "+string([]byte{c}))
+ return
+ }
+ value := uint64(ind)
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ ind = intDigits[iter.buf[i]]
+ if ind == invalidCharForNumber {
+ iter.head = i
+ return value
+ }
+ if value > uint64SafeToMultiple10 {
+ value2 := (value << 3) + (value << 1) + uint64(ind)
+ if value2 < value {
+ iter.ReportError("readUint64", "overflow")
+ return
+ }
+ value = value2
+ continue
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ }
+ if !iter.loadMore() {
+ return value
+ }
+ }
+}
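Note: the sized integer readers above report overflow through iter.Error instead of panicking or truncating silently. A minimal sketch, assuming an input literal of 300, which exceeds math.MaxInt8:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `300`)
	// 300 does not fit in an int8, so ReadInt8 returns 0 and records the
	// overflow in iter.Error instead of truncating silently.
	v := iter.ReadInt8()
	fmt.Println(v, iter.Error)
}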
diff --git a/vendor/github.com/json-iterator/go/feature_iter_object.go b/vendor/github.com/json-iterator/go/feature_iter_object.go
new file mode 100644
index 000000000..dfd91fa60
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_iter_object.go
@@ -0,0 +1,267 @@
+package jsoniter
+
+import (
+ "fmt"
+ "unicode"
+ "unsafe"
+)
+
+// ReadObject reads one field from the object.
+// If the object has ended, it returns an empty string.
+// Otherwise, it returns the field name.
+func (iter *Iterator) ReadObject() (ret string) {
+ c := iter.nextToken()
+ switch c {
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l')
+ return "" // null
+ case '{':
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ if iter.cfg.objectFieldMustBeSimpleString {
+ return string(iter.readObjectFieldAsBytes())
+ } else {
+ field := iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ return field
+ }
+ }
+ if c == '}' {
+ return "" // end of object
+ }
+ iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
+ return
+ case ',':
+ if iter.cfg.objectFieldMustBeSimpleString {
+ return string(iter.readObjectFieldAsBytes())
+ } else {
+ field := iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ return field
+ }
+ case '}':
+ return "" // end of object
+ default:
+ iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c})))
+ return
+ }
+}
+
+func (iter *Iterator) readFieldHash() int32 {
+ hash := int64(0x811c9dc5)
+ c := iter.nextToken()
+ if c == '"' {
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ // require ascii string and no escape
+ b := iter.buf[i]
+ if !iter.cfg.objectFieldMustBeSimpleString && b == '\\' {
+ iter.head = i
+ for _, b := range iter.readStringSlowPath() {
+ if 'A' <= b && b <= 'Z' {
+ b += 'a' - 'A'
+ }
+ hash ^= int64(b)
+ hash *= 0x1000193
+ }
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
+ return 0
+ }
+ return int32(hash)
+ }
+ if b == '"' {
+ iter.head = i + 1
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
+ return 0
+ }
+ return int32(hash)
+ }
+ if 'A' <= b && b <= 'Z' {
+ b += 'a' - 'A'
+ }
+ hash ^= int64(b)
+ hash *= 0x1000193
+ }
+ if !iter.loadMore() {
+ iter.ReportError("readFieldHash", `incomplete field name`)
+ return 0
+ }
+ }
+ }
+ iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c}))
+ return 0
+}
+
+func calcHash(str string) int32 {
+ hash := int64(0x811c9dc5)
+ for _, b := range str {
+ hash ^= int64(unicode.ToLower(b))
+ hash *= 0x1000193
+ }
+ return int32(hash)
+}
+
+// ReadObjectCB reads an object with a callback; the key is ASCII only and the field name is not copied
+func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
+ c := iter.nextToken()
+ var fieldBytes []byte
+ var field string
+ if c == '{' {
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ if iter.cfg.objectFieldMustBeSimpleString {
+ fieldBytes = iter.readObjectFieldAsBytes()
+ field = *(*string)(unsafe.Pointer(&fieldBytes))
+ } else {
+ field = iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ if iter.cfg.objectFieldMustBeSimpleString {
+ fieldBytes = iter.readObjectFieldAsBytes()
+ field = *(*string)(unsafe.Pointer(&fieldBytes))
+ } else {
+ field = iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != '}' {
+ iter.ReportError("ReadObjectCB", `object not ended with }`)
+ return false
+ }
+ return true
+ }
+ if c == '}' {
+ return true
+ }
+		iter.ReportError("ReadObjectCB", `expect " after {, but found `+string([]byte{c}))
+ return false
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c}))
+ return false
+}
+
+// ReadMapCB reads a map with a callback; the key can be any string
+func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
+ c := iter.nextToken()
+ if c == '{' {
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ field := iter.ReadString()
+ if iter.nextToken() != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return false
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ field = iter.ReadString()
+ if iter.nextToken() != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return false
+ }
+ if !callback(iter, field) {
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != '}' {
+ iter.ReportError("ReadMapCB", `object not ended with }`)
+ return false
+ }
+ return true
+ }
+ if c == '}' {
+ return true
+ }
+		iter.ReportError("ReadMapCB", `expect " after {, but found `+string([]byte{c}))
+ return false
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
+ return false
+}
+
+func (iter *Iterator) readObjectStart() bool {
+ c := iter.nextToken()
+ if c == '{' {
+ c = iter.nextToken()
+ if c == '}' {
+ return false
+ }
+ iter.unreadByte()
+ return true
+ } else if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return false
+ }
+ iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c}))
+ return false
+}
+
+func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
+ str := iter.ReadStringAsSlice()
+ if iter.skipWhitespacesWithoutLoadMore() {
+ if ret == nil {
+ ret = make([]byte, len(str))
+ copy(ret, str)
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+ if iter.buf[iter.head] != ':' {
+ iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]}))
+ return
+ }
+ iter.head++
+ if iter.skipWhitespacesWithoutLoadMore() {
+ if ret == nil {
+ ret = make([]byte, len(str))
+ copy(ret, str)
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+ if ret == nil {
+ return str
+ }
+ return ret
+}
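Note: ReadMapCB hands each key to the callback and leaves decoding the value to the callback body. A hypothetical usage sketch with made-up field names:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"a": 1, "b": 2}`)
	fields := map[string]int{}
	// ReadMapCB passes each key to the callback; the callback decodes the value.
	iter.ReadMapCB(func(it *jsoniter.Iterator, field string) bool {
		fields[field] = it.ReadInt()
		return true
	})
	fmt.Println(fields) // map[a:1 b:2]
}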
diff --git a/vendor/github.com/json-iterator/go/feature_iter_skip.go b/vendor/github.com/json-iterator/go/feature_iter_skip.go
new file mode 100644
index 000000000..f58beb913
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_iter_skip.go
@@ -0,0 +1,129 @@
+package jsoniter
+
+import "fmt"
+
+// ReadNil reads a JSON null and
+// reports whether a null was read.
+func (iter *Iterator) ReadNil() (ret bool) {
+ c := iter.nextToken()
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ return true
+ }
+ iter.unreadByte()
+ return false
+}
+
+// ReadBool reads a JSON boolean value
+func (iter *Iterator) ReadBool() (ret bool) {
+ c := iter.nextToken()
+ if c == 't' {
+ iter.skipThreeBytes('r', 'u', 'e')
+ return true
+ }
+ if c == 'f' {
+ iter.skipFourBytes('a', 'l', 's', 'e')
+ return false
+ }
+ iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c}))
+ return
+}
+
+// SkipAndReturnBytes skips the next JSON element and returns its content as []byte.
+// The returned []byte can be kept; it is a copy of the data.
+func (iter *Iterator) SkipAndReturnBytes() []byte {
+ iter.startCapture(iter.head)
+ iter.Skip()
+ return iter.stopCapture()
+}
+
+type captureBuffer struct {
+ startedAt int
+ captured []byte
+}
+
+func (iter *Iterator) startCapture(captureStartedAt int) {
+ if iter.captured != nil {
+ panic("already in capture mode")
+ }
+ iter.captureStartedAt = captureStartedAt
+ iter.captured = make([]byte, 0, 32)
+}
+
+func (iter *Iterator) stopCapture() []byte {
+ if iter.captured == nil {
+ panic("not in capture mode")
+ }
+ captured := iter.captured
+ remaining := iter.buf[iter.captureStartedAt:iter.head]
+ iter.captureStartedAt = -1
+ iter.captured = nil
+ if len(captured) == 0 {
+ copied := make([]byte, len(remaining))
+ copy(copied, remaining)
+ return copied
+ }
+ captured = append(captured, remaining...)
+ return captured
+}
+
+// Skip skips a JSON element and positions the iterator at the next element
+func (iter *Iterator) Skip() {
+ c := iter.nextToken()
+ switch c {
+ case '"':
+ iter.skipString()
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ case 't':
+ iter.skipThreeBytes('r', 'u', 'e') // true
+ case 'f':
+ iter.skipFourBytes('a', 'l', 's', 'e') // false
+ case '0':
+ iter.unreadByte()
+ iter.ReadFloat32()
+ case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.skipNumber()
+ case '[':
+ iter.skipArray()
+ case '{':
+ iter.skipObject()
+ default:
+ iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c))
+ return
+ }
+}
+
+func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) {
+ if iter.readByte() != b1 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b2 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b3 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b4 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+}
+
+func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) {
+ if iter.readByte() != b1 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+ if iter.readByte() != b2 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+ if iter.readByte() != b3 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+}
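Note: SkipAndReturnBytes captures the raw bytes of the element it skips, so the returned slice stays valid after further reads. A sketch combining it with ReadObject from feature_iter_object.go above; the document shape is illustrative:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"keep": {"x": 1}, "next": 2}`)
	iter.ReadObject() // consumes the field name "keep" and the colon
	// SkipAndReturnBytes copies the raw bytes of the skipped value.
	raw := iter.SkipAndReturnBytes()
	fmt.Println(string(raw)) // the raw value bytes of "keep"
}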
diff --git a/vendor/github.com/json-iterator/go/feature_iter_skip_sloppy.go b/vendor/github.com/json-iterator/go/feature_iter_skip_sloppy.go
new file mode 100644
index 000000000..8fcdc3b69
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_iter_skip_sloppy.go
@@ -0,0 +1,144 @@
+//+build jsoniter_sloppy
+
+package jsoniter
+
+// sloppy but faster implementation; does not validate the input JSON
+
+func (iter *Iterator) skipNumber() {
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\r', '\t', ',', '}', ']':
+ iter.head = i
+ return
+ }
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipArray() {
+ level := 1
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ switch iter.buf[i] {
+ case '"': // If inside string, skip it
+ iter.head = i + 1
+ iter.skipString()
+ i = iter.head - 1 // it will be i++ soon
+ case '[': // If open symbol, increase level
+ level++
+			case ']': // If close symbol, decrease level
+ level--
+
+ // If we have returned to the original level, we're done
+ if level == 0 {
+ iter.head = i + 1
+ return
+ }
+ }
+ }
+ if !iter.loadMore() {
+			iter.ReportError("skipArray", "incomplete array")
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipObject() {
+ level := 1
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ switch iter.buf[i] {
+ case '"': // If inside string, skip it
+ iter.head = i + 1
+ iter.skipString()
+ i = iter.head - 1 // it will be i++ soon
+ case '{': // If open symbol, increase level
+ level++
+			case '}': // If close symbol, decrease level
+ level--
+
+ // If we have returned to the original level, we're done
+ if level == 0 {
+ iter.head = i + 1
+ return
+ }
+ }
+ }
+ if !iter.loadMore() {
+ iter.ReportError("skipObject", "incomplete object")
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipString() {
+ for {
+ end, escaped := iter.findStringEnd()
+ if end == -1 {
+ if !iter.loadMore() {
+ iter.ReportError("skipString", "incomplete string")
+ return
+ }
+ if escaped {
+ iter.head = 1 // skip the first char as last char read is \
+ }
+ } else {
+ iter.head = end
+ return
+ }
+ }
+}
+
+// adapted from: https://github.com/buger/jsonparser/blob/master/parser.go
+// Tries to find the end of a string.
+// Supports strings that contain escaped quote symbols.
+func (iter *Iterator) findStringEnd() (int, bool) {
+ escaped := false
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ if !escaped {
+ return i + 1, false
+ }
+ j := i - 1
+ for {
+ if j < iter.head || iter.buf[j] != '\\' {
+ // even number of backslashes
+ // either end of buffer, or " found
+ return i + 1, true
+ }
+ j--
+ if j < iter.head || iter.buf[j] != '\\' {
+ // odd number of backslashes
+ // it is \" or \\\"
+ break
+ }
+ j--
+ }
+ } else if c == '\\' {
+ escaped = true
+ }
+ }
+ j := iter.tail - 1
+ for {
+ if j < iter.head || iter.buf[j] != '\\' {
+ // even number of backslashes
+ // either end of buffer, or " found
+ return -1, false // do not end with \
+ }
+ j--
+ if j < iter.head || iter.buf[j] != '\\' {
+ // odd number of backslashes
+ // it is \" or \\\"
+ break
+ }
+ j--
+
+ }
+ return -1, true // end with \
+}
diff --git a/vendor/github.com/json-iterator/go/feature_iter_skip_strict.go b/vendor/github.com/json-iterator/go/feature_iter_skip_strict.go
new file mode 100644
index 000000000..f67bc2e83
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_iter_skip_strict.go
@@ -0,0 +1,89 @@
+//+build !jsoniter_sloppy
+
+package jsoniter
+
+import "fmt"
+
+func (iter *Iterator) skipNumber() {
+ if !iter.trySkipNumber() {
+ iter.unreadByte()
+ iter.ReadFloat32()
+ }
+}
+
+func (iter *Iterator) trySkipNumber() bool {
+ dotFound := false
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ case '.':
+ if dotFound {
+ iter.ReportError("validateNumber", `more than one dot found in number`)
+ return true // already failed
+ }
+ if i+1 == iter.tail {
+ return false
+ }
+ c = iter.buf[i+1]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ default:
+ iter.ReportError("validateNumber", `missing digit after dot`)
+ return true // already failed
+ }
+ dotFound = true
+ default:
+ switch c {
+ case ',', ']', '}', ' ', '\t', '\n', '\r':
+ if iter.head == i {
+ return false // if - without following digits
+ }
+ iter.head = i
+ return true // must be valid
+ }
+ return false // may be invalid
+ }
+ }
+ return false
+}
+
+func (iter *Iterator) skipString() {
+ if !iter.trySkipString() {
+ iter.unreadByte()
+ iter.ReadString()
+ }
+}
+
+func (iter *Iterator) trySkipString() bool {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ iter.head = i + 1
+ return true // valid
+ } else if c == '\\' {
+ return false
+ } else if c < ' ' {
+ iter.ReportError("trySkipString",
+ fmt.Sprintf(`invalid control character found: %d`, c))
+ return true // already failed
+ }
+ }
+ return false
+}
+
+func (iter *Iterator) skipObject() {
+ iter.unreadByte()
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ return true
+ })
+}
+
+func (iter *Iterator) skipArray() {
+ iter.unreadByte()
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ iter.Skip()
+ return true
+ })
+}
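Note: the default build compiles this strict skip implementation, which validates while skipping; building with the jsoniter_sloppy tag swaps in the faster, non-validating variant from feature_iter_skip_sloppy.go above. A small sketch of Skip handling a nested value that contains escaped quotes; the input is illustrative:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[{"a": "she said \"hi\""}, 42]`)
	iter.ReadArray()
	iter.Skip() // skips the whole object, including the escaped quotes
	iter.ReadArray()
	fmt.Println(iter.ReadInt()) // 42
}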
diff --git a/vendor/github.com/json-iterator/go/feature_iter_string.go b/vendor/github.com/json-iterator/go/feature_iter_string.go
new file mode 100644
index 000000000..adc487ea8
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_iter_string.go
@@ -0,0 +1,215 @@
+package jsoniter
+
+import (
+ "fmt"
+ "unicode/utf16"
+)
+
+// ReadString reads a string from the iterator
+func (iter *Iterator) ReadString() (ret string) {
+ c := iter.nextToken()
+ if c == '"' {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ ret = string(iter.buf[iter.head:i])
+ iter.head = i + 1
+ return ret
+ } else if c == '\\' {
+ break
+ } else if c < ' ' {
+ iter.ReportError("ReadString",
+ fmt.Sprintf(`invalid control character found: %d`, c))
+ return
+ }
+ }
+ return iter.readStringSlowPath()
+ } else if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return ""
+ }
+ iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c}))
+ return
+}
+
+func (iter *Iterator) readStringSlowPath() (ret string) {
+ var str []byte
+ var c byte
+ for iter.Error == nil {
+ c = iter.readByte()
+ if c == '"' {
+ return string(str)
+ }
+ if c == '\\' {
+ c = iter.readByte()
+ str = iter.readEscapedChar(c, str)
+ } else {
+ str = append(str, c)
+ }
+ }
+ iter.ReportError("readStringSlowPath", "unexpected end of input")
+ return
+}
+
+func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte {
+ switch c {
+ case 'u':
+ r := iter.readU4()
+ if utf16.IsSurrogate(r) {
+ c = iter.readByte()
+ if iter.Error != nil {
+ return nil
+ }
+ if c != '\\' {
+ iter.unreadByte()
+ str = appendRune(str, r)
+ return str
+ }
+ c = iter.readByte()
+ if iter.Error != nil {
+ return nil
+ }
+ if c != 'u' {
+ str = appendRune(str, r)
+ return iter.readEscapedChar(c, str)
+ }
+ r2 := iter.readU4()
+ if iter.Error != nil {
+ return nil
+ }
+ combined := utf16.DecodeRune(r, r2)
+ if combined == '\uFFFD' {
+ str = appendRune(str, r)
+ str = appendRune(str, r2)
+ } else {
+ str = appendRune(str, combined)
+ }
+ } else {
+ str = appendRune(str, r)
+ }
+ case '"':
+ str = append(str, '"')
+ case '\\':
+ str = append(str, '\\')
+ case '/':
+ str = append(str, '/')
+ case 'b':
+ str = append(str, '\b')
+ case 'f':
+ str = append(str, '\f')
+ case 'n':
+ str = append(str, '\n')
+ case 'r':
+ str = append(str, '\r')
+ case 't':
+ str = append(str, '\t')
+ default:
+ iter.ReportError("readEscapedChar",
+ `invalid escape char after \`)
+ return nil
+ }
+ return str
+}
+
+// ReadStringAsSlice reads a string from the iterator without copying it into string form.
+// The returned []byte cannot be kept, as it will change after the next iterator call.
+func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
+ c := iter.nextToken()
+ if c == '"' {
+ for i := iter.head; i < iter.tail; i++ {
+ // require ascii string and no escape
+ // for: field name, base64, number
+ if iter.buf[i] == '"' {
+ // fast path: reuse the underlying buffer
+ ret = iter.buf[iter.head:i]
+ iter.head = i + 1
+ return ret
+ }
+ }
+ readLen := iter.tail - iter.head
+ copied := make([]byte, readLen, readLen*2)
+ copy(copied, iter.buf[iter.head:iter.tail])
+ iter.head = iter.tail
+ for iter.Error == nil {
+ c := iter.readByte()
+ if c == '"' {
+ return copied
+ }
+ copied = append(copied, c)
+ }
+ return copied
+ }
+ iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c}))
+ return
+}
+
+func (iter *Iterator) readU4() (ret rune) {
+ for i := 0; i < 4; i++ {
+ c := iter.readByte()
+ if iter.Error != nil {
+ return
+ }
+ if c >= '0' && c <= '9' {
+ ret = ret*16 + rune(c-'0')
+ } else if c >= 'a' && c <= 'f' {
+ ret = ret*16 + rune(c-'a'+10)
+ } else if c >= 'A' && c <= 'F' {
+ ret = ret*16 + rune(c-'A'+10)
+ } else {
+ iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c}))
+ return
+ }
+ }
+ return ret
+}
+
+const (
+ t1 = 0x00 // 0000 0000
+ tx = 0x80 // 1000 0000
+ t2 = 0xC0 // 1100 0000
+ t3 = 0xE0 // 1110 0000
+ t4 = 0xF0 // 1111 0000
+ t5 = 0xF8 // 1111 1000
+
+ maskx = 0x3F // 0011 1111
+ mask2 = 0x1F // 0001 1111
+ mask3 = 0x0F // 0000 1111
+ mask4 = 0x07 // 0000 0111
+
+ rune1Max = 1<<7 - 1
+ rune2Max = 1<<11 - 1
+ rune3Max = 1<<16 - 1
+
+ surrogateMin = 0xD800
+ surrogateMax = 0xDFFF
+
+ maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
+ runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character"
+)
+
+func appendRune(p []byte, r rune) []byte {
+ // Negative values are erroneous. Making it unsigned addresses the problem.
+ switch i := uint32(r); {
+ case i <= rune1Max:
+ p = append(p, byte(r))
+ return p
+ case i <= rune2Max:
+ p = append(p, t2|byte(r>>6))
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ case i > maxRune, surrogateMin <= i && i <= surrogateMax:
+ r = runeError
+ fallthrough
+ case i <= rune3Max:
+ p = append(p, t3|byte(r>>12))
+ p = append(p, tx|byte(r>>6)&maskx)
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ default:
+ p = append(p, t4|byte(r>>18))
+ p = append(p, tx|byte(r>>12)&maskx)
+ p = append(p, tx|byte(r>>6)&maskx)
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ }
+}
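Note: readU4 and appendRune above decode \u escapes, combining surrogate pairs into a single rune before UTF-8 encoding it. A minimal sketch; the escape sequences are arbitrary examples:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// \u00e9 is a plain escape; \ud83d\ude00 is a surrogate pair that
	// readEscapedChar combines into a single rune before UTF-8 encoding.
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `"caf\u00e9 \ud83d\ude00"`)
	fmt.Println(iter.ReadString())
}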
diff --git a/vendor/github.com/json-iterator/go/feature_json_number.go b/vendor/github.com/json-iterator/go/feature_json_number.go
new file mode 100644
index 000000000..e187b200a
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_json_number.go
@@ -0,0 +1,31 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "strconv"
+)
+
+type Number string
+
+// String returns the literal text of the number.
+func (n Number) String() string { return string(n) }
+
+// Float64 returns the number as a float64.
+func (n Number) Float64() (float64, error) {
+ return strconv.ParseFloat(string(n), 64)
+}
+
+// Int64 returns the number as an int64.
+func (n Number) Int64() (int64, error) {
+ return strconv.ParseInt(string(n), 10, 64)
+}
+
+// CastJsonNumber returns the literal text of val and true when val is a
+// json.Number or a jsoniter Number; otherwise it returns "" and false.
+func CastJsonNumber(val interface{}) (string, bool) {
+ switch typedVal := val.(type) {
+ case json.Number:
+ return string(typedVal), true
+ case Number:
+ return string(typedVal), true
+ }
+ return "", false
+}
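+
+// Example (illustrative sketch, not part of the upstream source): Number
+// behaves like json.Number, and CastJsonNumber normalizes either flavour to
+// its literal text:
+//
+//	n := Number("42")
+//	i, _ := n.Int64()            // 42
+//	f, _ := n.Float64()          // 42.0
+//	s, ok := CastJsonNumber(n)   // "42", true
+//	_, ok = CastJsonNumber("42") // plain string: "", false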
diff --git a/vendor/github.com/json-iterator/go/feature_pool.go b/vendor/github.com/json-iterator/go/feature_pool.go
new file mode 100644
index 000000000..52d38e685
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_pool.go
@@ -0,0 +1,59 @@
+package jsoniter
+
+import (
+ "io"
+)
+
+// IteratorPool is a thread-safe pool of iterators that share the same configuration.
+type IteratorPool interface {
+ BorrowIterator(data []byte) *Iterator
+ ReturnIterator(iter *Iterator)
+}
+
+// StreamPool is a thread-safe pool of streams that share the same configuration.
+type StreamPool interface {
+ BorrowStream(writer io.Writer) *Stream
+ ReturnStream(stream *Stream)
+}
+
+func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
+ select {
+ case stream := <-cfg.streamPool:
+ stream.Reset(writer)
+ return stream
+ default:
+ return NewStream(cfg, writer, 512)
+ }
+}
+
+func (cfg *frozenConfig) ReturnStream(stream *Stream) {
+ stream.Error = nil
+ stream.Attachment = nil
+ select {
+ case cfg.streamPool <- stream:
+ return
+ default:
+ return
+ }
+}
+
+func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
+ select {
+ case iter := <-cfg.iteratorPool:
+ iter.ResetBytes(data)
+ return iter
+ default:
+ return ParseBytes(cfg, data)
+ }
+}
+
+func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
+ iter.Error = nil
+ iter.Attachment = nil
+ select {
+ case cfg.iteratorPool <- iter:
+ return
+ default:
+ return
+ }
+}
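+
+// Example (illustrative sketch, not part of the upstream source): a frozen
+// config satisfies both pool interfaces above, so pooled encoding might look
+// like this, assuming a frozen config such as the package's ConfigDefault:
+//
+//	stream := ConfigDefault.BorrowStream(os.Stdout)
+//	stream.WriteVal(map[string]int{"answer": 42})
+//	stream.Flush()
+//	ConfigDefault.ReturnStream(stream)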
diff --git a/vendor/github.com/json-iterator/go/feature_reflect.go b/vendor/github.com/json-iterator/go/feature_reflect.go
new file mode 100644
index 000000000..1bd8987f2
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_reflect.go
@@ -0,0 +1,721 @@
+package jsoniter
+
+import (
+ "encoding"
+ "encoding/json"
+ "fmt"
+ "reflect"
+ "time"
+ "unsafe"
+)
+
+// ValDecoder is an internal type registered to cache as needed.
+// Don't confuse jsoniter.ValDecoder with json.Decoder.
+// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link).
+//
+// Reflection on the type is used to create decoders, which are then cached.
+// Reflection on values is avoided where possible, since reflect.Value itself allocates, with the following exceptions:
+// 1. creating an instance of a new value, for example *int needs an int to be allocated
+// 2. appending to a slice, if the existing cap is not enough, allocation will be done using reflect.New
+// 3. assigning to a map, where both key and value will be reflect.Value
+// For a simple struct binding, decoding is reflect.Value free and allocation free.
+type ValDecoder interface {
+ Decode(ptr unsafe.Pointer, iter *Iterator)
+}
+
+// ValEncoder is an internal type registered to cache as needed.
+// Don't confuse jsoniter.ValEncoder with json.Encoder.
+// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link).
+type ValEncoder interface {
+ IsEmpty(ptr unsafe.Pointer) bool
+ Encode(ptr unsafe.Pointer, stream *Stream)
+ EncodeInterface(val interface{}, stream *Stream)
+}
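+
+// Example (illustrative sketch, not part of the upstream source): a minimal
+// codec for a hypothetical Celsius (float64) type. Decode and Encode work
+// through the raw pointer; EncodeInterface can delegate to WriteToStream below.
+//
+//	type celsiusCodec struct{}
+//
+//	func (c *celsiusCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+//		*((*float64)(ptr)) = iter.ReadFloat64()
+//	}
+//
+//	func (c *celsiusCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+//		stream.WriteFloat64(*((*float64)(ptr)))
+//	}
+//
+//	func (c *celsiusCodec) EncodeInterface(val interface{}, stream *Stream) {
+//		WriteToStream(val, stream, c)
+//	}
+//
+//	func (c *celsiusCodec) IsEmpty(ptr unsafe.Pointer) bool {
+//		return *((*float64)(ptr)) == 0
+//	}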
+
+type checkIsEmpty interface {
+ IsEmpty(ptr unsafe.Pointer) bool
+}
+
+// WriteToStream is the default implementation of the ValEncoder EncodeInterface method.
+func WriteToStream(val interface{}, stream *Stream, encoder ValEncoder) {
+ e := (*emptyInterface)(unsafe.Pointer(&val))
+ if e.word == nil {
+ stream.WriteNil()
+ return
+ }
+ if reflect.TypeOf(val).Kind() == reflect.Ptr {
+ encoder.Encode(unsafe.Pointer(&e.word), stream)
+ } else {
+ encoder.Encode(e.word, stream)
+ }
+}
+
+var jsonNumberType reflect.Type
+var jsoniterNumberType reflect.Type
+var jsonRawMessageType reflect.Type
+var jsoniterRawMessageType reflect.Type
+var anyType reflect.Type
+var marshalerType reflect.Type
+var unmarshalerType reflect.Type
+var textMarshalerType reflect.Type
+var textUnmarshalerType reflect.Type
+
+func init() {
+ jsonNumberType = reflect.TypeOf((*json.Number)(nil)).Elem()
+ jsoniterNumberType = reflect.TypeOf((*Number)(nil)).Elem()
+ jsonRawMessageType = reflect.TypeOf((*json.RawMessage)(nil)).Elem()
+ jsoniterRawMessageType = reflect.TypeOf((*RawMessage)(nil)).Elem()
+ anyType = reflect.TypeOf((*Any)(nil)).Elem()
+ marshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
+ unmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
+ textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
+ textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
+}
+
+type OptionalDecoder struct {
+ ValueType reflect.Type
+ ValueDecoder ValDecoder
+}
+
+func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ *((*unsafe.Pointer)(ptr)) = nil
+ } else {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+			// pointer is nil; we have to allocate memory to hold the value
+ value := reflect.New(decoder.ValueType)
+ newPtr := extractInterface(value.Interface()).word
+ decoder.ValueDecoder.Decode(newPtr, iter)
+ *((*uintptr)(ptr)) = uintptr(newPtr)
+ } else {
+ //reuse existing instance
+ decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
+ }
+ }
+}
+
+type deferenceDecoder struct {
+ // only to deference a pointer
+ valueType reflect.Type
+ valueDecoder ValDecoder
+}
+
+func (decoder *deferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+		// pointer is nil; we have to allocate memory to hold the value
+ value := reflect.New(decoder.valueType)
+ newPtr := extractInterface(value.Interface()).word
+ decoder.valueDecoder.Decode(newPtr, iter)
+ *((*uintptr)(ptr)) = uintptr(newPtr)
+ } else {
+ //reuse existing instance
+ decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
+ }
+}
+
+type OptionalEncoder struct {
+ ValueEncoder ValEncoder
+}
+
+func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ stream.WriteNil()
+ } else {
+ encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
+ }
+}
+
+func (encoder *OptionalEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*unsafe.Pointer)(ptr)) == nil
+}
+
+type optionalMapEncoder struct {
+ valueEncoder ValEncoder
+}
+
+func (encoder *optionalMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ stream.WriteNil()
+ } else {
+ encoder.valueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
+ }
+}
+
+func (encoder *optionalMapEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *optionalMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ p := *((*unsafe.Pointer)(ptr))
+ return p == nil || encoder.valueEncoder.IsEmpty(p)
+}
+
+type placeholderEncoder struct {
+ cfg *frozenConfig
+ cacheKey reflect.Type
+}
+
+func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.getRealEncoder().Encode(ptr, stream)
+}
+
+func (encoder *placeholderEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ encoder.getRealEncoder().EncodeInterface(val, stream)
+}
+
+func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.getRealEncoder().IsEmpty(ptr)
+}
+
+func (encoder *placeholderEncoder) getRealEncoder() ValEncoder {
+ for i := 0; i < 500; i++ {
+ realDecoder := encoder.cfg.getEncoderFromCache(encoder.cacheKey)
+ _, isPlaceholder := realDecoder.(*placeholderEncoder)
+ if isPlaceholder {
+ time.Sleep(10 * time.Millisecond)
+ } else {
+ return realDecoder
+ }
+ }
+ panic(fmt.Sprintf("real encoder not found for cache key: %v", encoder.cacheKey))
+}
+
+type placeholderDecoder struct {
+ cfg *frozenConfig
+ cacheKey reflect.Type
+}
+
+func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ for i := 0; i < 500; i++ {
+ realDecoder := decoder.cfg.getDecoderFromCache(decoder.cacheKey)
+ _, isPlaceholder := realDecoder.(*placeholderDecoder)
+ if isPlaceholder {
+ time.Sleep(10 * time.Millisecond)
+ } else {
+ realDecoder.Decode(ptr, iter)
+ return
+ }
+ }
+ panic(fmt.Sprintf("real decoder not found for cache key: %v", decoder.cacheKey))
+}
+
+// emptyInterface is the header for an interface{} value.
+type emptyInterface struct {
+ typ unsafe.Pointer
+ word unsafe.Pointer
+}
+
+// nonEmptyInterface is the header for an interface with methods (not interface{})
+type nonEmptyInterface struct {
+ // see ../runtime/iface.go:/Itab
+ itab *struct {
+ ityp unsafe.Pointer // static interface type
+ typ unsafe.Pointer // dynamic concrete type
+ link unsafe.Pointer
+ bad int32
+ unused int32
+ fun [100000]unsafe.Pointer // method table
+ }
+ word unsafe.Pointer
+}
+
+// ReadVal copies the underlying JSON into a Go value, like json.Unmarshal.
+func (iter *Iterator) ReadVal(obj interface{}) {
+ typ := reflect.TypeOf(obj)
+ cacheKey := typ.Elem()
+ decoder, err := decoderOfType(iter.cfg, cacheKey)
+ if err != nil {
+ iter.Error = err
+ return
+ }
+ e := (*emptyInterface)(unsafe.Pointer(&obj))
+ decoder.Decode(e.word, iter)
+}
+
+// WriteVal copies the Go value into the underlying JSON, like json.Marshal.
+func (stream *Stream) WriteVal(val interface{}) {
+ if nil == val {
+ stream.WriteNil()
+ return
+ }
+ typ := reflect.TypeOf(val)
+ cacheKey := typ
+ encoder, err := encoderOfType(stream.cfg, cacheKey)
+ if err != nil {
+ stream.Error = err
+ return
+ }
+ encoder.EncodeInterface(val, stream)
+}
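+
+// Example (illustrative sketch, not part of the upstream source): ReadVal and
+// WriteVal are the reflection entry points behind unmarshalling/marshalling.
+// Using ReadVal directly through a borrowed iterator might look like this,
+// assuming a frozen config such as the package's ConfigDefault:
+//
+//	iter := ConfigDefault.BorrowIterator([]byte(`{"name":"coredns"}`))
+//	defer ConfigDefault.ReturnIterator(iter)
+//	var out struct {
+//		Name string `json:"name"`
+//	}
+//	iter.ReadVal(&out) // out.Name == "coredns"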
+
+type prefix string
+
+func (p prefix) addToDecoder(decoder ValDecoder, err error) (ValDecoder, error) {
+ if err != nil {
+ return nil, fmt.Errorf("%s: %s", p, err.Error())
+ }
+ return decoder, err
+}
+
+func (p prefix) addToEncoder(encoder ValEncoder, err error) (ValEncoder, error) {
+ if err != nil {
+ return nil, fmt.Errorf("%s: %s", p, err.Error())
+ }
+ return encoder, err
+}
+
+func decoderOfType(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
+ cacheKey := typ
+ decoder := cfg.getDecoderFromCache(cacheKey)
+ if decoder != nil {
+ return decoder, nil
+ }
+ decoder = getTypeDecoderFromExtension(typ)
+ if decoder != nil {
+ cfg.addDecoderToCache(cacheKey, decoder)
+ return decoder, nil
+ }
+ decoder = &placeholderDecoder{cfg: cfg, cacheKey: cacheKey}
+ cfg.addDecoderToCache(cacheKey, decoder)
+ decoder, err := createDecoderOfType(cfg, typ)
+ for _, extension := range extensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ cfg.addDecoderToCache(cacheKey, decoder)
+ return decoder, err
+}
+
+func createDecoderOfType(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
+ typeName := typ.String()
+ if typ == jsonRawMessageType {
+ return &jsonRawMessageCodec{}, nil
+ }
+ if typ == jsoniterRawMessageType {
+ return &jsoniterRawMessageCodec{}, nil
+ }
+ if typ.AssignableTo(jsonNumberType) {
+ return &jsonNumberCodec{}, nil
+ }
+ if typ.AssignableTo(jsoniterNumberType) {
+ return &jsoniterNumberCodec{}, nil
+ }
+ if typ.Implements(unmarshalerType) {
+ templateInterface := reflect.New(typ).Elem().Interface()
+ var decoder ValDecoder = &unmarshalerDecoder{extractInterface(templateInterface)}
+ if typ.Kind() == reflect.Ptr {
+ decoder = &OptionalDecoder{typ.Elem(), decoder}
+ }
+ return decoder, nil
+ }
+ if reflect.PtrTo(typ).Implements(unmarshalerType) {
+ templateInterface := reflect.New(typ).Interface()
+ var decoder ValDecoder = &unmarshalerDecoder{extractInterface(templateInterface)}
+ return decoder, nil
+ }
+ if typ.Implements(textUnmarshalerType) {
+ templateInterface := reflect.New(typ).Elem().Interface()
+ var decoder ValDecoder = &textUnmarshalerDecoder{extractInterface(templateInterface)}
+ if typ.Kind() == reflect.Ptr {
+ decoder = &OptionalDecoder{typ.Elem(), decoder}
+ }
+ return decoder, nil
+ }
+ if reflect.PtrTo(typ).Implements(textUnmarshalerType) {
+ templateInterface := reflect.New(typ).Interface()
+ var decoder ValDecoder = &textUnmarshalerDecoder{extractInterface(templateInterface)}
+ return decoder, nil
+ }
+ if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Uint8 {
+ sliceDecoder, err := prefix("[slice]").addToDecoder(decoderOfSlice(cfg, typ))
+ if err != nil {
+ return nil, err
+ }
+ return &base64Codec{sliceDecoder: sliceDecoder}, nil
+ }
+ if typ.Implements(anyType) {
+ return &anyCodec{}, nil
+ }
+ switch typ.Kind() {
+ case reflect.String:
+ if typeName != "string" {
+ return decoderOfType(cfg, reflect.TypeOf((*string)(nil)).Elem())
+ }
+ return &stringCodec{}, nil
+ case reflect.Int:
+ if typeName != "int" {
+ return decoderOfType(cfg, reflect.TypeOf((*int)(nil)).Elem())
+ }
+ return &intCodec{}, nil
+ case reflect.Int8:
+ if typeName != "int8" {
+ return decoderOfType(cfg, reflect.TypeOf((*int8)(nil)).Elem())
+ }
+ return &int8Codec{}, nil
+ case reflect.Int16:
+ if typeName != "int16" {
+ return decoderOfType(cfg, reflect.TypeOf((*int16)(nil)).Elem())
+ }
+ return &int16Codec{}, nil
+ case reflect.Int32:
+ if typeName != "int32" {
+ return decoderOfType(cfg, reflect.TypeOf((*int32)(nil)).Elem())
+ }
+ return &int32Codec{}, nil
+ case reflect.Int64:
+ if typeName != "int64" {
+ return decoderOfType(cfg, reflect.TypeOf((*int64)(nil)).Elem())
+ }
+ return &int64Codec{}, nil
+ case reflect.Uint:
+ if typeName != "uint" {
+ return decoderOfType(cfg, reflect.TypeOf((*uint)(nil)).Elem())
+ }
+ return &uintCodec{}, nil
+ case reflect.Uint8:
+ if typeName != "uint8" {
+ return decoderOfType(cfg, reflect.TypeOf((*uint8)(nil)).Elem())
+ }
+ return &uint8Codec{}, nil
+ case reflect.Uint16:
+ if typeName != "uint16" {
+ return decoderOfType(cfg, reflect.TypeOf((*uint16)(nil)).Elem())
+ }
+ return &uint16Codec{}, nil
+ case reflect.Uint32:
+ if typeName != "uint32" {
+ return decoderOfType(cfg, reflect.TypeOf((*uint32)(nil)).Elem())
+ }
+ return &uint32Codec{}, nil
+ case reflect.Uintptr:
+ if typeName != "uintptr" {
+ return decoderOfType(cfg, reflect.TypeOf((*uintptr)(nil)).Elem())
+ }
+ return &uintptrCodec{}, nil
+ case reflect.Uint64:
+ if typeName != "uint64" {
+ return decoderOfType(cfg, reflect.TypeOf((*uint64)(nil)).Elem())
+ }
+ return &uint64Codec{}, nil
+ case reflect.Float32:
+ if typeName != "float32" {
+ return decoderOfType(cfg, reflect.TypeOf((*float32)(nil)).Elem())
+ }
+ return &float32Codec{}, nil
+ case reflect.Float64:
+ if typeName != "float64" {
+ return decoderOfType(cfg, reflect.TypeOf((*float64)(nil)).Elem())
+ }
+ return &float64Codec{}, nil
+ case reflect.Bool:
+ if typeName != "bool" {
+ return decoderOfType(cfg, reflect.TypeOf((*bool)(nil)).Elem())
+ }
+ return &boolCodec{}, nil
+ case reflect.Interface:
+ if typ.NumMethod() == 0 {
+ return &emptyInterfaceCodec{}, nil
+ }
+ return &nonEmptyInterfaceCodec{}, nil
+ case reflect.Struct:
+ return prefix(fmt.Sprintf("[%s]", typeName)).addToDecoder(decoderOfStruct(cfg, typ))
+ case reflect.Array:
+ return prefix("[array]").addToDecoder(decoderOfArray(cfg, typ))
+ case reflect.Slice:
+ return prefix("[slice]").addToDecoder(decoderOfSlice(cfg, typ))
+ case reflect.Map:
+ return prefix("[map]").addToDecoder(decoderOfMap(cfg, typ))
+ case reflect.Ptr:
+ return prefix("[optional]").addToDecoder(decoderOfOptional(cfg, typ))
+ default:
+ return nil, fmt.Errorf("unsupported type: %v", typ)
+ }
+}
+
+func encoderOfType(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
+ cacheKey := typ
+ encoder := cfg.getEncoderFromCache(cacheKey)
+ if encoder != nil {
+ return encoder, nil
+ }
+ encoder = getTypeEncoderFromExtension(typ)
+ if encoder != nil {
+ cfg.addEncoderToCache(cacheKey, encoder)
+ return encoder, nil
+ }
+ encoder = &placeholderEncoder{cfg: cfg, cacheKey: cacheKey}
+ cfg.addEncoderToCache(cacheKey, encoder)
+ encoder, err := createEncoderOfType(cfg, typ)
+ for _, extension := range extensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ cfg.addEncoderToCache(cacheKey, encoder)
+ return encoder, err
+}
+
+func createEncoderOfType(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
+ if typ == jsonRawMessageType {
+ return &jsonRawMessageCodec{}, nil
+ }
+ if typ == jsoniterRawMessageType {
+ return &jsoniterRawMessageCodec{}, nil
+ }
+ if typ.AssignableTo(jsonNumberType) {
+ return &jsonNumberCodec{}, nil
+ }
+ if typ.AssignableTo(jsoniterNumberType) {
+ return &jsoniterNumberCodec{}, nil
+ }
+ if typ.Implements(marshalerType) {
+ checkIsEmpty, err := createCheckIsEmpty(typ)
+ if err != nil {
+ return nil, err
+ }
+ templateInterface := reflect.New(typ).Elem().Interface()
+ var encoder ValEncoder = &marshalerEncoder{
+ templateInterface: extractInterface(templateInterface),
+ checkIsEmpty: checkIsEmpty,
+ }
+ if typ.Kind() == reflect.Ptr {
+ encoder = &OptionalEncoder{encoder}
+ }
+ return encoder, nil
+ }
+ if reflect.PtrTo(typ).Implements(marshalerType) {
+ checkIsEmpty, err := createCheckIsEmpty(reflect.PtrTo(typ))
+ if err != nil {
+ return nil, err
+ }
+ templateInterface := reflect.New(typ).Interface()
+ var encoder ValEncoder = &marshalerEncoder{
+ templateInterface: extractInterface(templateInterface),
+ checkIsEmpty: checkIsEmpty,
+ }
+ return encoder, nil
+ }
+ if typ.Implements(textMarshalerType) {
+ checkIsEmpty, err := createCheckIsEmpty(typ)
+ if err != nil {
+ return nil, err
+ }
+ templateInterface := reflect.New(typ).Elem().Interface()
+ var encoder ValEncoder = &textMarshalerEncoder{
+ templateInterface: extractInterface(templateInterface),
+ checkIsEmpty: checkIsEmpty,
+ }
+ if typ.Kind() == reflect.Ptr {
+ encoder = &OptionalEncoder{encoder}
+ }
+ return encoder, nil
+ }
+ if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Uint8 {
+ return &base64Codec{}, nil
+ }
+ if typ.Implements(anyType) {
+ return &anyCodec{}, nil
+ }
+ return createEncoderOfSimpleType(cfg, typ)
+}
+
+func createCheckIsEmpty(typ reflect.Type) (checkIsEmpty, error) {
+ kind := typ.Kind()
+ switch kind {
+ case reflect.String:
+ return &stringCodec{}, nil
+ case reflect.Int:
+ return &intCodec{}, nil
+ case reflect.Int8:
+ return &int8Codec{}, nil
+ case reflect.Int16:
+ return &int16Codec{}, nil
+ case reflect.Int32:
+ return &int32Codec{}, nil
+ case reflect.Int64:
+ return &int64Codec{}, nil
+ case reflect.Uint:
+ return &uintCodec{}, nil
+ case reflect.Uint8:
+ return &uint8Codec{}, nil
+ case reflect.Uint16:
+ return &uint16Codec{}, nil
+ case reflect.Uint32:
+ return &uint32Codec{}, nil
+ case reflect.Uintptr:
+ return &uintptrCodec{}, nil
+ case reflect.Uint64:
+ return &uint64Codec{}, nil
+ case reflect.Float32:
+ return &float32Codec{}, nil
+ case reflect.Float64:
+ return &float64Codec{}, nil
+ case reflect.Bool:
+ return &boolCodec{}, nil
+ case reflect.Interface:
+ if typ.NumMethod() == 0 {
+ return &emptyInterfaceCodec{}, nil
+ }
+ return &nonEmptyInterfaceCodec{}, nil
+ case reflect.Struct:
+ return &structEncoder{}, nil
+ case reflect.Array:
+ return &arrayEncoder{}, nil
+ case reflect.Slice:
+ return &sliceEncoder{}, nil
+ case reflect.Map:
+ return &mapEncoder{}, nil
+ case reflect.Ptr:
+ return &OptionalEncoder{}, nil
+ default:
+ return nil, fmt.Errorf("unsupported type: %v", typ)
+ }
+}
+
+func createEncoderOfSimpleType(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
+ typeName := typ.String()
+ kind := typ.Kind()
+ switch kind {
+ case reflect.String:
+ if typeName != "string" {
+ return encoderOfType(cfg, reflect.TypeOf((*string)(nil)).Elem())
+ }
+ return &stringCodec{}, nil
+ case reflect.Int:
+ if typeName != "int" {
+ return encoderOfType(cfg, reflect.TypeOf((*int)(nil)).Elem())
+ }
+ return &intCodec{}, nil
+ case reflect.Int8:
+ if typeName != "int8" {
+ return encoderOfType(cfg, reflect.TypeOf((*int8)(nil)).Elem())
+ }
+ return &int8Codec{}, nil
+ case reflect.Int16:
+ if typeName != "int16" {
+ return encoderOfType(cfg, reflect.TypeOf((*int16)(nil)).Elem())
+ }
+ return &int16Codec{}, nil
+ case reflect.Int32:
+ if typeName != "int32" {
+ return encoderOfType(cfg, reflect.TypeOf((*int32)(nil)).Elem())
+ }
+ return &int32Codec{}, nil
+ case reflect.Int64:
+ if typeName != "int64" {
+ return encoderOfType(cfg, reflect.TypeOf((*int64)(nil)).Elem())
+ }
+ return &int64Codec{}, nil
+ case reflect.Uint:
+ if typeName != "uint" {
+ return encoderOfType(cfg, reflect.TypeOf((*uint)(nil)).Elem())
+ }
+ return &uintCodec{}, nil
+ case reflect.Uint8:
+ if typeName != "uint8" {
+ return encoderOfType(cfg, reflect.TypeOf((*uint8)(nil)).Elem())
+ }
+ return &uint8Codec{}, nil
+ case reflect.Uint16:
+ if typeName != "uint16" {
+ return encoderOfType(cfg, reflect.TypeOf((*uint16)(nil)).Elem())
+ }
+ return &uint16Codec{}, nil
+ case reflect.Uint32:
+ if typeName != "uint32" {
+ return encoderOfType(cfg, reflect.TypeOf((*uint32)(nil)).Elem())
+ }
+ return &uint32Codec{}, nil
+ case reflect.Uintptr:
+ if typeName != "uintptr" {
+ return encoderOfType(cfg, reflect.TypeOf((*uintptr)(nil)).Elem())
+ }
+ return &uintptrCodec{}, nil
+ case reflect.Uint64:
+ if typeName != "uint64" {
+ return encoderOfType(cfg, reflect.TypeOf((*uint64)(nil)).Elem())
+ }
+ return &uint64Codec{}, nil
+ case reflect.Float32:
+ if typeName != "float32" {
+ return encoderOfType(cfg, reflect.TypeOf((*float32)(nil)).Elem())
+ }
+ return &float32Codec{}, nil
+ case reflect.Float64:
+ if typeName != "float64" {
+ return encoderOfType(cfg, reflect.TypeOf((*float64)(nil)).Elem())
+ }
+ return &float64Codec{}, nil
+ case reflect.Bool:
+ if typeName != "bool" {
+ return encoderOfType(cfg, reflect.TypeOf((*bool)(nil)).Elem())
+ }
+ return &boolCodec{}, nil
+ case reflect.Interface:
+ if typ.NumMethod() == 0 {
+ return &emptyInterfaceCodec{}, nil
+ }
+ return &nonEmptyInterfaceCodec{}, nil
+ case reflect.Struct:
+ return prefix(fmt.Sprintf("[%s]", typeName)).addToEncoder(encoderOfStruct(cfg, typ))
+ case reflect.Array:
+ return prefix("[array]").addToEncoder(encoderOfArray(cfg, typ))
+ case reflect.Slice:
+ return prefix("[slice]").addToEncoder(encoderOfSlice(cfg, typ))
+ case reflect.Map:
+ return prefix("[map]").addToEncoder(encoderOfMap(cfg, typ))
+ case reflect.Ptr:
+ return prefix("[optional]").addToEncoder(encoderOfOptional(cfg, typ))
+ default:
+ return nil, fmt.Errorf("unsupported type: %v", typ)
+ }
+}
+
+func decoderOfOptional(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
+ elemType := typ.Elem()
+ decoder, err := decoderOfType(cfg, elemType)
+ if err != nil {
+ return nil, err
+ }
+ return &OptionalDecoder{elemType, decoder}, nil
+}
+
+func encoderOfOptional(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
+ elemType := typ.Elem()
+ elemEncoder, err := encoderOfType(cfg, elemType)
+ if err != nil {
+ return nil, err
+ }
+ encoder := &OptionalEncoder{elemEncoder}
+ if elemType.Kind() == reflect.Map {
+ encoder = &OptionalEncoder{encoder}
+ }
+ return encoder, nil
+}
+
+func decoderOfMap(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
+ decoder, err := decoderOfType(cfg, typ.Elem())
+ if err != nil {
+ return nil, err
+ }
+ mapInterface := reflect.New(typ).Interface()
+ return &mapDecoder{typ, typ.Key(), typ.Elem(), decoder, extractInterface(mapInterface)}, nil
+}
+
+func extractInterface(val interface{}) emptyInterface {
+ return *((*emptyInterface)(unsafe.Pointer(&val)))
+}
+
+func encoderOfMap(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
+ elemType := typ.Elem()
+ encoder, err := encoderOfType(cfg, elemType)
+ if err != nil {
+ return nil, err
+ }
+ mapInterface := reflect.New(typ).Elem().Interface()
+ if cfg.sortMapKeys {
+ return &sortKeysMapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
+ }
+ return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))}, nil
+}
diff --git a/vendor/github.com/json-iterator/go/feature_reflect_array.go b/vendor/github.com/json-iterator/go/feature_reflect_array.go
new file mode 100644
index 000000000..d661fb6fe
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_reflect_array.go
@@ -0,0 +1,99 @@
+package jsoniter
+
+import (
+ "fmt"
+ "io"
+ "reflect"
+ "unsafe"
+)
+
+func decoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
+ decoder, err := decoderOfType(cfg, typ.Elem())
+ if err != nil {
+ return nil, err
+ }
+ return &arrayDecoder{typ, typ.Elem(), decoder}, nil
+}
+
+func encoderOfArray(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
+ encoder, err := encoderOfType(cfg, typ.Elem())
+ if err != nil {
+ return nil, err
+ }
+ if typ.Elem().Kind() == reflect.Map {
+ encoder = &OptionalEncoder{encoder}
+ }
+ return &arrayEncoder{typ, typ.Elem(), encoder}, nil
+}
+
+type arrayEncoder struct {
+ arrayType reflect.Type
+ elemType reflect.Type
+ elemEncoder ValEncoder
+}
+
+func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteArrayStart()
+ elemPtr := unsafe.Pointer(ptr)
+ encoder.elemEncoder.Encode(elemPtr, stream)
+ for i := 1; i < encoder.arrayType.Len(); i++ {
+ stream.WriteMore()
+ elemPtr = unsafe.Pointer(uintptr(elemPtr) + encoder.elemType.Size())
+ encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream)
+ }
+ stream.WriteArrayEnd()
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error())
+ }
+}
+
+func (encoder *arrayEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ // special optimization for interface{}
+ e := (*emptyInterface)(unsafe.Pointer(&val))
+ if e.word == nil {
+ stream.WriteArrayStart()
+ stream.WriteNil()
+ stream.WriteArrayEnd()
+ return
+ }
+ elemType := encoder.arrayType.Elem()
+ if encoder.arrayType.Len() == 1 && (elemType.Kind() == reflect.Ptr || elemType.Kind() == reflect.Map) {
+ ptr := uintptr(e.word)
+ e.word = unsafe.Pointer(&ptr)
+ }
+ if reflect.TypeOf(val).Kind() == reflect.Ptr {
+ encoder.Encode(unsafe.Pointer(&e.word), stream)
+ } else {
+ encoder.Encode(e.word, stream)
+ }
+}
+
+func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type arrayDecoder struct {
+ arrayType reflect.Type
+ elemType reflect.Type
+ elemDecoder ValDecoder
+}
+
+func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.doDecode(ptr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error())
+ }
+}
+
+func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
+ offset := uintptr(0)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ if offset < decoder.arrayType.Size() {
+ decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(ptr)+offset), iter)
+ offset += decoder.elemType.Size()
+ } else {
+ iter.Skip()
+ }
+ return true
+ })
+}
diff --git a/vendor/github.com/json-iterator/go/feature_reflect_extension.go b/vendor/github.com/json-iterator/go/feature_reflect_extension.go
new file mode 100644
index 000000000..177df2c81
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_reflect_extension.go
@@ -0,0 +1,414 @@
+package jsoniter
+
+import (
+ "fmt"
+ "reflect"
+ "sort"
+ "strings"
+ "unicode"
+ "unsafe"
+)
+
+var typeDecoders = map[string]ValDecoder{}
+var fieldDecoders = map[string]ValDecoder{}
+var typeEncoders = map[string]ValEncoder{}
+var fieldEncoders = map[string]ValEncoder{}
+var extensions = []Extension{}
+
+// StructDescriptor describes how the struct should be encoded/decoded.
+type StructDescriptor struct {
+ onePtrEmbedded bool
+ onePtrOptimization bool
+ Type reflect.Type
+ Fields []*Binding
+}
+
+// GetField gets one field from the descriptor by its name.
+// A map cannot be used here because field order must be preserved.
+func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding {
+ for _, binding := range structDescriptor.Fields {
+ if binding.Field.Name == fieldName {
+ return binding
+ }
+ }
+ return nil
+}
+
+// Binding describes how a struct field should be encoded/decoded.
+type Binding struct {
+ levels []int
+ Field *reflect.StructField
+ FromNames []string
+ ToNames []string
+ Encoder ValEncoder
+ Decoder ValDecoder
+}
+
+// Extension is the SPI for all customization. Customize encoding/decoding by specifying an alternate encoder/decoder.
+// Fields can also be renamed via UpdateStructDescriptor.
+type Extension interface {
+ UpdateStructDescriptor(structDescriptor *StructDescriptor)
+ CreateDecoder(typ reflect.Type) ValDecoder
+ CreateEncoder(typ reflect.Type) ValEncoder
+ DecorateDecoder(typ reflect.Type, decoder ValDecoder) ValDecoder
+ DecorateEncoder(typ reflect.Type, encoder ValEncoder) ValEncoder
+}
+
+// DummyExtension can be embedded to get a no-op implementation of every Extension method.
+type DummyExtension struct {
+}
+
+// UpdateStructDescriptor No-op
+func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
+}
+
+// CreateDecoder No-op
+func (extension *DummyExtension) CreateDecoder(typ reflect.Type) ValDecoder {
+ return nil
+}
+
+// CreateEncoder No-op
+func (extension *DummyExtension) CreateEncoder(typ reflect.Type) ValEncoder {
+ return nil
+}
+
+// DecorateDecoder No-op
+func (extension *DummyExtension) DecorateDecoder(typ reflect.Type, decoder ValDecoder) ValDecoder {
+ return decoder
+}
+
+// DecorateEncoder No-op
+func (extension *DummyExtension) DecorateEncoder(typ reflect.Type, encoder ValEncoder) ValEncoder {
+ return encoder
+}
+
+type funcDecoder struct {
+ fun DecoderFunc
+}
+
+func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.fun(ptr, iter)
+}
+
+type funcEncoder struct {
+ fun EncoderFunc
+ isEmptyFunc func(ptr unsafe.Pointer) bool
+}
+
+func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.fun(ptr, stream)
+}
+
+func (encoder *funcEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ if encoder.isEmptyFunc == nil {
+ return false
+ }
+ return encoder.isEmptyFunc(ptr)
+}
+
+// DecoderFunc is the function form of a type decoder.
+type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator)
+
+// EncoderFunc is the function form of a type encoder.
+type EncoderFunc func(ptr unsafe.Pointer, stream *Stream)
+
+// RegisterTypeDecoderFunc registers a decoder function for a type.
+func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) {
+ typeDecoders[typ] = &funcDecoder{fun}
+}
+
+// RegisterTypeDecoder registers a ValDecoder for a type.
+func RegisterTypeDecoder(typ string, decoder ValDecoder) {
+ typeDecoders[typ] = decoder
+}
+
+// RegisterFieldDecoderFunc registers a decoder function for a struct field.
+func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) {
+ RegisterFieldDecoder(typ, field, &funcDecoder{fun})
+}
+
+// RegisterFieldDecoder registers a ValDecoder for a struct field.
+func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) {
+ fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder
+}
+
+// RegisterTypeEncoderFunc registers encode/isEmpty functions for a type.
+func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
+ typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc}
+}
+
+// RegisterTypeEncoder registers a ValEncoder for a type.
+func RegisterTypeEncoder(typ string, encoder ValEncoder) {
+ typeEncoders[typ] = encoder
+}
+
+// RegisterFieldEncoderFunc registers encode/isEmpty functions for a struct field.
+func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
+ RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc})
+}
+
+// RegisterFieldEncoder registers a ValEncoder for a struct field.
+func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) {
+ fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder
+}
+
+// RegisterExtension registers an extension.
+func RegisterExtension(extension Extension) {
+ extensions = append(extensions, extension)
+}
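+
+// Example (illustrative sketch, not part of the upstream source): registering
+// a decoder function for a hypothetical named type "mypkg.Celsius" backed by
+// float64; the registration key must match reflect.Type.String() of the
+// target type, as used by the extension lookup below.
+//
+//	RegisterTypeDecoderFunc("mypkg.Celsius", func(ptr unsafe.Pointer, iter *Iterator) {
+//		*((*float64)(ptr)) = iter.ReadFloat64()
+//	})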
+
+func getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
+ decoder := _getTypeDecoderFromExtension(typ)
+ if decoder != nil {
+ for _, extension := range extensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ }
+ return decoder
+}
+func _getTypeDecoderFromExtension(typ reflect.Type) ValDecoder {
+ for _, extension := range extensions {
+ decoder := extension.CreateDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ }
+ typeName := typ.String()
+ decoder := typeDecoders[typeName]
+ if decoder != nil {
+ return decoder
+ }
+ if typ.Kind() == reflect.Ptr {
+ decoder := typeDecoders[typ.Elem().String()]
+ if decoder != nil {
+ return &OptionalDecoder{typ.Elem(), decoder}
+ }
+ }
+ return nil
+}
+
+func getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
+ encoder := _getTypeEncoderFromExtension(typ)
+ if encoder != nil {
+ for _, extension := range extensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ }
+ return encoder
+}
+
+func _getTypeEncoderFromExtension(typ reflect.Type) ValEncoder {
+ for _, extension := range extensions {
+ encoder := extension.CreateEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ }
+ typeName := typ.String()
+ encoder := typeEncoders[typeName]
+ if encoder != nil {
+ return encoder
+ }
+ if typ.Kind() == reflect.Ptr {
+ encoder := typeEncoders[typ.Elem().String()]
+ if encoder != nil {
+ return &OptionalEncoder{encoder}
+ }
+ }
+ return nil
+}
+
+func describeStruct(cfg *frozenConfig, typ reflect.Type) (*StructDescriptor, error) {
+ embeddedBindings := []*Binding{}
+ bindings := []*Binding{}
+ for i := 0; i < typ.NumField(); i++ {
+ field := typ.Field(i)
+ tag := field.Tag.Get(cfg.getTagKey())
+ tagParts := strings.Split(tag, ",")
+ if tag == "-" {
+ continue
+ }
+ if field.Anonymous && (tag == "" || tagParts[0] == "") {
+ if field.Type.Kind() == reflect.Struct {
+ structDescriptor, err := describeStruct(cfg, field.Type)
+ if err != nil {
+ return nil, err
+ }
+ for _, binding := range structDescriptor.Fields {
+ binding.levels = append([]int{i}, binding.levels...)
+ omitempty := binding.Encoder.(*structFieldEncoder).omitempty
+ binding.Encoder = &structFieldEncoder{&field, binding.Encoder, omitempty}
+ binding.Decoder = &structFieldDecoder{&field, binding.Decoder}
+ embeddedBindings = append(embeddedBindings, binding)
+ }
+ continue
+ } else if field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct {
+ structDescriptor, err := describeStruct(cfg, field.Type.Elem())
+ if err != nil {
+ return nil, err
+ }
+ for _, binding := range structDescriptor.Fields {
+ binding.levels = append([]int{i}, binding.levels...)
+ omitempty := binding.Encoder.(*structFieldEncoder).omitempty
+ binding.Encoder = &OptionalEncoder{binding.Encoder}
+ binding.Encoder = &structFieldEncoder{&field, binding.Encoder, omitempty}
+ binding.Decoder = &deferenceDecoder{field.Type.Elem(), binding.Decoder}
+ binding.Decoder = &structFieldDecoder{&field, binding.Decoder}
+ embeddedBindings = append(embeddedBindings, binding)
+ }
+ continue
+ }
+ }
+	// used only to dereference a pointer
+ fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name)
+ decoder := fieldDecoders[fieldCacheKey]
+ if decoder == nil {
+ var err error
+ decoder, err = decoderOfType(cfg, field.Type)
+ if len(fieldNames) > 0 && err != nil {
+ return nil, err
+ }
+ }
+ encoder := fieldEncoders[fieldCacheKey]
+ if encoder == nil {
+ var err error
+ encoder, err = encoderOfType(cfg, field.Type)
+ if len(fieldNames) > 0 && err != nil {
+ return nil, err
+ }
+			// a map is stored as a pointer in the struct,
+			// and a nil or empty map is treated as an empty field
+ if encoder != nil && field.Type.Kind() == reflect.Map {
+ encoder = &optionalMapEncoder{encoder}
+ }
+ }
+ binding := &Binding{
+ Field: &field,
+ FromNames: fieldNames,
+ ToNames: fieldNames,
+ Decoder: decoder,
+ Encoder: encoder,
+ }
+ binding.levels = []int{i}
+ bindings = append(bindings, binding)
+ }
+ return createStructDescriptor(cfg, typ, bindings, embeddedBindings), nil
+}
+func createStructDescriptor(cfg *frozenConfig, typ reflect.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
+ onePtrEmbedded := false
+ onePtrOptimization := false
+ if typ.NumField() == 1 {
+ firstField := typ.Field(0)
+ switch firstField.Type.Kind() {
+ case reflect.Ptr:
+ if firstField.Anonymous && firstField.Type.Elem().Kind() == reflect.Struct {
+ onePtrEmbedded = true
+ }
+ fallthrough
+ case reflect.Map:
+ onePtrOptimization = true
+ case reflect.Struct:
+ onePtrOptimization = isStructOnePtr(firstField.Type)
+ }
+ }
+ structDescriptor := &StructDescriptor{
+ onePtrEmbedded: onePtrEmbedded,
+ onePtrOptimization: onePtrOptimization,
+ Type: typ,
+ Fields: bindings,
+ }
+ for _, extension := range extensions {
+ extension.UpdateStructDescriptor(structDescriptor)
+ }
+ processTags(structDescriptor, cfg)
+ // merge normal & embedded bindings & sort with original order
+ allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...))
+ sort.Sort(allBindings)
+ structDescriptor.Fields = allBindings
+ return structDescriptor
+}
+
+func isStructOnePtr(typ reflect.Type) bool {
+ if typ.NumField() == 1 {
+ firstField := typ.Field(0)
+ switch firstField.Type.Kind() {
+ case reflect.Ptr:
+ return true
+ case reflect.Map:
+ return true
+ case reflect.Struct:
+ return isStructOnePtr(firstField.Type)
+ }
+ }
+ return false
+}
+
+type sortableBindings []*Binding
+
+func (bindings sortableBindings) Len() int {
+ return len(bindings)
+}
+
+func (bindings sortableBindings) Less(i, j int) bool {
+ left := bindings[i].levels
+ right := bindings[j].levels
+ k := 0
+ for {
+ if left[k] < right[k] {
+ return true
+ } else if left[k] > right[k] {
+ return false
+ }
+ k++
+ }
+}
+
+func (bindings sortableBindings) Swap(i, j int) {
+ bindings[i], bindings[j] = bindings[j], bindings[i]
+}
+
+func processTags(structDescriptor *StructDescriptor, cfg *frozenConfig) {
+ for _, binding := range structDescriptor.Fields {
+ shouldOmitEmpty := false
+ tagParts := strings.Split(binding.Field.Tag.Get(cfg.getTagKey()), ",")
+ for _, tagPart := range tagParts[1:] {
+ if tagPart == "omitempty" {
+ shouldOmitEmpty = true
+ } else if tagPart == "string" {
+ if binding.Field.Type.Kind() == reflect.String {
+ binding.Decoder = &stringModeStringDecoder{binding.Decoder, cfg}
+ binding.Encoder = &stringModeStringEncoder{binding.Encoder, cfg}
+ } else {
+ binding.Decoder = &stringModeNumberDecoder{binding.Decoder}
+ binding.Encoder = &stringModeNumberEncoder{binding.Encoder}
+ }
+ }
+ }
+ binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder}
+ binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty}
+ }
+}
+
+func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
+ // ignore?
+ if wholeTag == "-" {
+ return []string{}
+ }
+ // rename?
+ var fieldNames []string
+ if tagProvidedFieldName == "" {
+ fieldNames = []string{originalFieldName}
+ } else {
+ fieldNames = []string{tagProvidedFieldName}
+ }
+ // private?
+ isNotExported := unicode.IsLower(rune(originalFieldName[0]))
+ if isNotExported {
+ fieldNames = []string{}
+ }
+ return fieldNames
+}
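+
+// Worked examples (illustrative, not part of the upstream source) for the
+// rules above:
+//
+//	calcFieldNames("Color", "colour", "colour,omitempty") // -> ["colour"] (renamed by tag)
+//	calcFieldNames("Color", "", "")                       // -> ["Color"]  (no tag, keep Go name)
+//	calcFieldNames("color", "", "")                       // -> []         (unexported, skipped)
+//	calcFieldNames("Color", "", "-")                      // -> []         (explicitly ignored)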
diff --git a/vendor/github.com/json-iterator/go/feature_reflect_map.go b/vendor/github.com/json-iterator/go/feature_reflect_map.go
new file mode 100644
index 000000000..005671e01
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_reflect_map.go
@@ -0,0 +1,244 @@
+package jsoniter
+
+import (
+ "encoding"
+ "encoding/json"
+ "reflect"
+ "sort"
+ "strconv"
+ "unsafe"
+)
+
+type mapDecoder struct {
+ mapType reflect.Type
+ keyType reflect.Type
+ elemType reflect.Type
+ elemDecoder ValDecoder
+ mapInterface emptyInterface
+}
+
+func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ // dark magic to cast unsafe.Pointer back to interface{} using reflect.Type
+ mapInterface := decoder.mapInterface
+ mapInterface.word = ptr
+ realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
+ realVal := reflect.ValueOf(*realInterface).Elem()
+ if iter.ReadNil() {
+ realVal.Set(reflect.Zero(decoder.mapType))
+ return
+ }
+ if realVal.IsNil() {
+ realVal.Set(reflect.MakeMap(realVal.Type()))
+ }
+ iter.ReadMapCB(func(iter *Iterator, keyStr string) bool {
+ elem := reflect.New(decoder.elemType)
+ decoder.elemDecoder.Decode(unsafe.Pointer(elem.Pointer()), iter)
+		// to put the value into the map, we have to use reflection
+ keyType := decoder.keyType
+ // TODO: remove this from loop
+ switch {
+ case keyType.Kind() == reflect.String:
+ realVal.SetMapIndex(reflect.ValueOf(keyStr).Convert(keyType), elem.Elem())
+ return true
+ case keyType.Implements(textUnmarshalerType):
+ textUnmarshaler := reflect.New(keyType.Elem()).Interface().(encoding.TextUnmarshaler)
+ err := textUnmarshaler.UnmarshalText([]byte(keyStr))
+ if err != nil {
+ iter.ReportError("read map key as TextUnmarshaler", err.Error())
+ return false
+ }
+ realVal.SetMapIndex(reflect.ValueOf(textUnmarshaler), elem.Elem())
+ return true
+ case reflect.PtrTo(keyType).Implements(textUnmarshalerType):
+ textUnmarshaler := reflect.New(keyType).Interface().(encoding.TextUnmarshaler)
+ err := textUnmarshaler.UnmarshalText([]byte(keyStr))
+ if err != nil {
+ iter.ReportError("read map key as TextUnmarshaler", err.Error())
+ return false
+ }
+ realVal.SetMapIndex(reflect.ValueOf(textUnmarshaler).Elem(), elem.Elem())
+ return true
+ default:
+ switch keyType.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ n, err := strconv.ParseInt(keyStr, 10, 64)
+ if err != nil || reflect.Zero(keyType).OverflowInt(n) {
+ iter.ReportError("read map key as int64", "read int64 failed")
+ return false
+ }
+ realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem())
+ return true
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ n, err := strconv.ParseUint(keyStr, 10, 64)
+ if err != nil || reflect.Zero(keyType).OverflowUint(n) {
+ iter.ReportError("read map key as uint64", "read uint64 failed")
+ return false
+ }
+ realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem())
+ return true
+ }
+ }
+ iter.ReportError("read map key", "unexpected map key type "+keyType.String())
+ return true
+ })
+}
+
+type mapEncoder struct {
+ mapType reflect.Type
+ elemType reflect.Type
+ elemEncoder ValEncoder
+ mapInterface emptyInterface
+}
+
+func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ mapInterface := encoder.mapInterface
+ mapInterface.word = ptr
+ realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
+ realVal := reflect.ValueOf(*realInterface)
+ stream.WriteObjectStart()
+ for i, key := range realVal.MapKeys() {
+ if i != 0 {
+ stream.WriteMore()
+ }
+ encodeMapKey(key, stream)
+ if stream.indention > 0 {
+ stream.writeTwoBytes(byte(':'), byte(' '))
+ } else {
+ stream.writeByte(':')
+ }
+ val := realVal.MapIndex(key).Interface()
+ encoder.elemEncoder.EncodeInterface(val, stream)
+ }
+ stream.WriteObjectEnd()
+}
+
+func encodeMapKey(key reflect.Value, stream *Stream) {
+ if key.Kind() == reflect.String {
+ stream.WriteString(key.String())
+ return
+ }
+ if tm, ok := key.Interface().(encoding.TextMarshaler); ok {
+ buf, err := tm.MarshalText()
+ if err != nil {
+ stream.Error = err
+ return
+ }
+ stream.writeByte('"')
+ stream.Write(buf)
+ stream.writeByte('"')
+ return
+ }
+ switch key.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ stream.writeByte('"')
+ stream.WriteInt64(key.Int())
+ stream.writeByte('"')
+ return
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ stream.writeByte('"')
+ stream.WriteUint64(key.Uint())
+ stream.writeByte('"')
+ return
+ }
+ stream.Error = &json.UnsupportedTypeError{Type: key.Type()}
+}
+
+func (encoder *mapEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ mapInterface := encoder.mapInterface
+ mapInterface.word = ptr
+ realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
+ realVal := reflect.ValueOf(*realInterface)
+ return realVal.Len() == 0
+}
+
+type sortKeysMapEncoder struct {
+ mapType reflect.Type
+ elemType reflect.Type
+ elemEncoder ValEncoder
+ mapInterface emptyInterface
+}
+
+func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ mapInterface := encoder.mapInterface
+ mapInterface.word = ptr
+ realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
+ realVal := reflect.ValueOf(*realInterface)
+
+ // Extract and sort the keys.
+ keys := realVal.MapKeys()
+ sv := stringValues(make([]reflectWithString, len(keys)))
+ for i, v := range keys {
+ sv[i].v = v
+ if err := sv[i].resolve(); err != nil {
+ stream.Error = err
+ return
+ }
+ }
+ sort.Sort(sv)
+
+ stream.WriteObjectStart()
+ for i, key := range sv {
+ if i != 0 {
+ stream.WriteMore()
+ }
+		stream.WriteVal(key.s) // may need HTML escaping, so we cannot use WriteString directly
+ if stream.indention > 0 {
+ stream.writeTwoBytes(byte(':'), byte(' '))
+ } else {
+ stream.writeByte(':')
+ }
+ val := realVal.MapIndex(key.v).Interface()
+ encoder.elemEncoder.EncodeInterface(val, stream)
+ }
+ stream.WriteObjectEnd()
+}
+
+// stringValues is a slice of reflectWithString values holding each map key and
+// its resolved string form. It implements sort.Interface to sort keys by string.
+type stringValues []reflectWithString
+
+type reflectWithString struct {
+ v reflect.Value
+ s string
+}
+
+func (w *reflectWithString) resolve() error {
+ if w.v.Kind() == reflect.String {
+ w.s = w.v.String()
+ return nil
+ }
+ if tm, ok := w.v.Interface().(encoding.TextMarshaler); ok {
+ buf, err := tm.MarshalText()
+ w.s = string(buf)
+ return err
+ }
+ switch w.v.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ w.s = strconv.FormatInt(w.v.Int(), 10)
+ return nil
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ w.s = strconv.FormatUint(w.v.Uint(), 10)
+ return nil
+ }
+ return &json.UnsupportedTypeError{Type: w.v.Type()}
+}
+
+func (sv stringValues) Len() int { return len(sv) }
+func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
+func (sv stringValues) Less(i, j int) bool { return sv[i].s < sv[j].s }
+
+func (encoder *sortKeysMapEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ mapInterface := encoder.mapInterface
+ mapInterface.word = ptr
+ realInterface := (*interface{})(unsafe.Pointer(&mapInterface))
+ realVal := reflect.ValueOf(*realInterface)
+ return realVal.Len() == 0
+}
diff --git a/vendor/github.com/json-iterator/go/feature_reflect_native.go b/vendor/github.com/json-iterator/go/feature_reflect_native.go
new file mode 100644
index 000000000..95bd1e87c
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_reflect_native.go
@@ -0,0 +1,764 @@
+package jsoniter
+
+import (
+ "encoding"
+ "encoding/base64"
+ "encoding/json"
+ "reflect"
+ "unsafe"
+)
+
+type stringCodec struct {
+}
+
+func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *((*string)(ptr)) = iter.ReadString()
+}
+
+func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ str := *((*string)(ptr))
+ stream.WriteString(str)
+}
+
+func (codec *stringCodec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*string)(ptr)) == ""
+}
+
+type intCodec struct {
+}
+
+func (codec *intCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int)(ptr)) = iter.ReadInt()
+ }
+}
+
+func (codec *intCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt(*((*int)(ptr)))
+}
+
+func (codec *intCodec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *intCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int)(ptr)) == 0
+}
+
+type uintptrCodec struct {
+}
+
+func (codec *uintptrCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uintptr)(ptr)) = uintptr(iter.ReadUint64())
+ }
+}
+
+func (codec *uintptrCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint64(uint64(*((*uintptr)(ptr))))
+}
+
+func (codec *uintptrCodec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *uintptrCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uintptr)(ptr)) == 0
+}
+
+type int8Codec struct {
+}
+
+func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int8)(ptr)) = iter.ReadInt8()
+ }
+}
+
+func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt8(*((*int8)(ptr)))
+}
+
+func (codec *int8Codec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int8)(ptr)) == 0
+}
+
+type int16Codec struct {
+}
+
+func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int16)(ptr)) = iter.ReadInt16()
+ }
+}
+
+func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt16(*((*int16)(ptr)))
+}
+
+func (codec *int16Codec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int16)(ptr)) == 0
+}
+
+type int32Codec struct {
+}
+
+func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int32)(ptr)) = iter.ReadInt32()
+ }
+}
+
+func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt32(*((*int32)(ptr)))
+}
+
+func (codec *int32Codec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int32)(ptr)) == 0
+}
+
+type int64Codec struct {
+}
+
+func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int64)(ptr)) = iter.ReadInt64()
+ }
+}
+
+func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt64(*((*int64)(ptr)))
+}
+
+func (codec *int64Codec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int64)(ptr)) == 0
+}
+
+type uintCodec struct {
+}
+
+func (codec *uintCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint)(ptr)) = iter.ReadUint()
+ return
+ }
+}
+
+func (codec *uintCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint(*((*uint)(ptr)))
+}
+
+func (codec *uintCodec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *uintCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint)(ptr)) == 0
+}
+
+type uint8Codec struct {
+}
+
+func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint8)(ptr)) = iter.ReadUint8()
+ }
+}
+
+func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint8(*((*uint8)(ptr)))
+}
+
+func (codec *uint8Codec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *uint8Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint8)(ptr)) == 0
+}
+
+type uint16Codec struct {
+}
+
+func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint16)(ptr)) = iter.ReadUint16()
+ }
+}
+
+func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint16(*((*uint16)(ptr)))
+}
+
+func (codec *uint16Codec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint16)(ptr)) == 0
+}
+
+type uint32Codec struct {
+}
+
+func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint32)(ptr)) = iter.ReadUint32()
+ }
+}
+
+func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint32(*((*uint32)(ptr)))
+}
+
+func (codec *uint32Codec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint32)(ptr)) == 0
+}
+
+type uint64Codec struct {
+}
+
+func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint64)(ptr)) = iter.ReadUint64()
+ }
+}
+
+func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint64(*((*uint64)(ptr)))
+}
+
+func (codec *uint64Codec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint64)(ptr)) == 0
+}
+
+type float32Codec struct {
+}
+
+func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*float32)(ptr)) = iter.ReadFloat32()
+ }
+}
+
+func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat32(*((*float32)(ptr)))
+}
+
+func (codec *float32Codec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float32)(ptr)) == 0
+}
+
+type float64Codec struct {
+}
+
+func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*float64)(ptr)) = iter.ReadFloat64()
+ }
+}
+
+func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat64(*((*float64)(ptr)))
+}
+
+func (codec *float64Codec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float64)(ptr)) == 0
+}
+
+type boolCodec struct {
+}
+
+func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*bool)(ptr)) = iter.ReadBool()
+ }
+}
+
+func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteBool(*((*bool)(ptr)))
+}
+
+func (codec *boolCodec) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, codec)
+}
+
+func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return !(*((*bool)(ptr)))
+}
+
+type emptyInterfaceCodec struct {
+}
+
+func (codec *emptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ existing := *((*interface{})(ptr))
+
+ // Checking for both typed and untyped nil pointers.
+ if existing != nil &&
+ reflect.TypeOf(existing).Kind() == reflect.Ptr &&
+ !reflect.ValueOf(existing).IsNil() {
+
+ var ptrToExisting interface{}
+ for {
+ elem := reflect.ValueOf(existing).Elem()
+ if elem.Kind() != reflect.Ptr || elem.IsNil() {
+ break
+ }
+ ptrToExisting = existing
+ existing = elem.Interface()
+ }
+
+ if iter.ReadNil() {
+ if ptrToExisting != nil {
+ nilPtr := reflect.Zero(reflect.TypeOf(ptrToExisting).Elem())
+ reflect.ValueOf(ptrToExisting).Elem().Set(nilPtr)
+ } else {
+ *((*interface{})(ptr)) = nil
+ }
+ } else {
+ iter.ReadVal(existing)
+ }
+
+ return
+ }
+
+ if iter.ReadNil() {
+ *((*interface{})(ptr)) = nil
+ } else {
+ *((*interface{})(ptr)) = iter.Read()
+ }
+}
+
+func (codec *emptyInterfaceCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteVal(*((*interface{})(ptr)))
+}
+
+func (codec *emptyInterfaceCodec) EncodeInterface(val interface{}, stream *Stream) {
+ stream.WriteVal(val)
+}
+
+func (codec *emptyInterfaceCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ emptyInterface := (*emptyInterface)(ptr)
+ return emptyInterface.typ == nil
+}
+
+type nonEmptyInterfaceCodec struct {
+}
+
+func (codec *nonEmptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ nonEmptyInterface := (*nonEmptyInterface)(ptr)
+ if nonEmptyInterface.itab == nil {
+ iter.ReportError("read non-empty interface", "do not know which concrete type to decode to")
+ return
+ }
+ var i interface{}
+ e := (*emptyInterface)(unsafe.Pointer(&i))
+ e.typ = nonEmptyInterface.itab.typ
+ e.word = nonEmptyInterface.word
+ iter.ReadVal(&i)
+ if e.word == nil {
+ nonEmptyInterface.itab = nil
+ }
+ nonEmptyInterface.word = e.word
+}
+
+func (codec *nonEmptyInterfaceCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ nonEmptyInterface := (*nonEmptyInterface)(ptr)
+ var i interface{}
+ if nonEmptyInterface.itab != nil {
+ e := (*emptyInterface)(unsafe.Pointer(&i))
+ e.typ = nonEmptyInterface.itab.typ
+ e.word = nonEmptyInterface.word
+ }
+ stream.WriteVal(i)
+}
+
+func (codec *nonEmptyInterfaceCodec) EncodeInterface(val interface{}, stream *Stream) {
+ stream.WriteVal(val)
+}
+
+func (codec *nonEmptyInterfaceCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ nonEmptyInterface := (*nonEmptyInterface)(ptr)
+ return nonEmptyInterface.word == nil
+}
+
+type anyCodec struct {
+}
+
+func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *((*Any)(ptr)) = iter.ReadAny()
+}
+
+func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ (*((*Any)(ptr))).WriteTo(stream)
+}
+
+func (codec *anyCodec) EncodeInterface(val interface{}, stream *Stream) {
+ (val.(Any)).WriteTo(stream)
+}
+
+func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return (*((*Any)(ptr))).Size() == 0
+}
+
+type jsonNumberCodec struct {
+}
+
+func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ switch iter.WhatIsNext() {
+ case StringValue:
+ *((*json.Number)(ptr)) = json.Number(iter.ReadString())
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ *((*json.Number)(ptr)) = ""
+ default:
+ *((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
+ }
+}
+
+func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteRaw(string(*((*json.Number)(ptr))))
+}
+
+func (codec *jsonNumberCodec) EncodeInterface(val interface{}, stream *Stream) {
+ stream.WriteRaw(string(val.(json.Number)))
+}
+
+func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*json.Number)(ptr))) == 0
+}
+
+type jsoniterNumberCodec struct {
+}
+
+func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ switch iter.WhatIsNext() {
+ case StringValue:
+ *((*Number)(ptr)) = Number(iter.ReadString())
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ *((*Number)(ptr)) = ""
+ default:
+ *((*Number)(ptr)) = Number([]byte(iter.readNumberAsString()))
+ }
+}
+
+func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteRaw(string(*((*Number)(ptr))))
+}
+
+func (codec *jsoniterNumberCodec) EncodeInterface(val interface{}, stream *Stream) {
+ stream.WriteRaw(string(val.(Number)))
+}
+
+func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*Number)(ptr))) == 0
+}
+
+type jsonRawMessageCodec struct {
+}
+
+func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes())
+}
+
+func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
+}
+
+func (codec *jsonRawMessageCodec) EncodeInterface(val interface{}, stream *Stream) {
+ stream.WriteRaw(string(val.(json.RawMessage)))
+}
+
+func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*json.RawMessage)(ptr))) == 0
+}
+
+type jsoniterRawMessageCodec struct {
+}
+
+func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes())
+}
+
+func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteRaw(string(*((*RawMessage)(ptr))))
+}
+
+func (codec *jsoniterRawMessageCodec) EncodeInterface(val interface{}, stream *Stream) {
+ stream.WriteRaw(string(val.(RawMessage)))
+}
+
+func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*RawMessage)(ptr))) == 0
+}
+
+type base64Codec struct {
+ sliceDecoder ValDecoder
+}
+
+func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ ptrSlice := (*sliceHeader)(ptr)
+ ptrSlice.Len = 0
+ ptrSlice.Cap = 0
+ ptrSlice.Data = nil
+ return
+ }
+ switch iter.WhatIsNext() {
+ case StringValue:
+ encoding := base64.StdEncoding
+ src := iter.SkipAndReturnBytes()
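+		// The returned bytes include the surrounding quotes; strip them before base64-decoding.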
+ src = src[1 : len(src)-1]
+ decodedLen := encoding.DecodedLen(len(src))
+ dst := make([]byte, decodedLen)
+ len, err := encoding.Decode(dst, src)
+ if err != nil {
+ iter.ReportError("decode base64", err.Error())
+ } else {
+ dst = dst[:len]
+ dstSlice := (*sliceHeader)(unsafe.Pointer(&dst))
+ ptrSlice := (*sliceHeader)(ptr)
+ ptrSlice.Data = dstSlice.Data
+ ptrSlice.Cap = dstSlice.Cap
+ ptrSlice.Len = dstSlice.Len
+ }
+ case ArrayValue:
+ codec.sliceDecoder.Decode(ptr, iter)
+ default:
+ iter.ReportError("base64Codec", "invalid input")
+ }
+}
+
+func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ src := *((*[]byte)(ptr))
+ if len(src) == 0 {
+ stream.WriteNil()
+ return
+ }
+ encoding := base64.StdEncoding
+ stream.writeByte('"')
+ toGrow := encoding.EncodedLen(len(src))
+ stream.ensure(toGrow)
+ encoding.Encode(stream.buf[stream.n:], src)
+ stream.n += toGrow
+ stream.writeByte('"')
+}
+
+func (codec *base64Codec) EncodeInterface(val interface{}, stream *Stream) {
+ ptr := extractInterface(val).word
+ src := *((*[]byte)(ptr))
+ if len(src) == 0 {
+ stream.WriteNil()
+ return
+ }
+ encoding := base64.StdEncoding
+ stream.writeByte('"')
+ toGrow := encoding.EncodedLen(len(src))
+ stream.ensure(toGrow)
+ encoding.Encode(stream.buf[stream.n:], src)
+ stream.n += toGrow
+ stream.writeByte('"')
+}
+
+func (codec *base64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*[]byte)(ptr))) == 0
+}
+
+type stringModeNumberDecoder struct {
+ elemDecoder ValDecoder
+}
+
+func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ if c != '"' {
+ iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
+ return
+ }
+ decoder.elemDecoder.Decode(ptr, iter)
+ if iter.Error != nil {
+ return
+ }
+ c = iter.readByte()
+ if c != '"' {
+ iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
+ return
+ }
+}
+
+type stringModeStringDecoder struct {
+ elemDecoder ValDecoder
+ cfg *frozenConfig
+}
+
+func (decoder *stringModeStringDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.elemDecoder.Decode(ptr, iter)
+ str := *((*string)(ptr))
+ tempIter := decoder.cfg.BorrowIterator([]byte(str))
+ defer decoder.cfg.ReturnIterator(tempIter)
+ *((*string)(ptr)) = tempIter.ReadString()
+}
+
+type stringModeNumberEncoder struct {
+ elemEncoder ValEncoder
+}
+
+func (encoder *stringModeNumberEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.writeByte('"')
+ encoder.elemEncoder.Encode(ptr, stream)
+ stream.writeByte('"')
+}
+
+func (encoder *stringModeNumberEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *stringModeNumberEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.elemEncoder.IsEmpty(ptr)
+}
+
+type stringModeStringEncoder struct {
+ elemEncoder ValEncoder
+ cfg *frozenConfig
+}
+
+func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ tempStream := encoder.cfg.BorrowStream(nil)
+ defer encoder.cfg.ReturnStream(tempStream)
+ encoder.elemEncoder.Encode(ptr, tempStream)
+ stream.WriteString(string(tempStream.Buffer()))
+}
+
+func (encoder *stringModeStringEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *stringModeStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.elemEncoder.IsEmpty(ptr)
+}
+
+type marshalerEncoder struct {
+ templateInterface emptyInterface
+ checkIsEmpty checkIsEmpty
+}
+
+func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ templateInterface := encoder.templateInterface
+ templateInterface.word = ptr
+ realInterface := (*interface{})(unsafe.Pointer(&templateInterface))
+ marshaler, ok := (*realInterface).(json.Marshaler)
+ if !ok {
+ stream.WriteVal(nil)
+ return
+ }
+
+ bytes, err := marshaler.MarshalJSON()
+ if err != nil {
+ stream.Error = err
+ } else {
+ stream.Write(bytes)
+ }
+}
+func (encoder *marshalerEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type textMarshalerEncoder struct {
+ templateInterface emptyInterface
+ checkIsEmpty checkIsEmpty
+}
+
+func (encoder *textMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ templateInterface := encoder.templateInterface
+ templateInterface.word = ptr
+ realInterface := (*interface{})(unsafe.Pointer(&templateInterface))
+ marshaler := (*realInterface).(encoding.TextMarshaler)
+ bytes, err := marshaler.MarshalText()
+ if err != nil {
+ stream.Error = err
+ } else {
+ stream.WriteString(string(bytes))
+ }
+}
+
+func (encoder *textMarshalerEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *textMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type unmarshalerDecoder struct {
+ templateInterface emptyInterface
+}
+
+func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ templateInterface := decoder.templateInterface
+ templateInterface.word = ptr
+ realInterface := (*interface{})(unsafe.Pointer(&templateInterface))
+ unmarshaler := (*realInterface).(json.Unmarshaler)
+ iter.nextToken()
+ iter.unreadByte() // skip spaces
+ bytes := iter.SkipAndReturnBytes()
+ err := unmarshaler.UnmarshalJSON(bytes)
+ if err != nil {
+ iter.ReportError("unmarshalerDecoder", err.Error())
+ }
+}
+
+type textUnmarshalerDecoder struct {
+ templateInterface emptyInterface
+}
+
+func (decoder *textUnmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ templateInterface := decoder.templateInterface
+ templateInterface.word = ptr
+ realInterface := (*interface{})(unsafe.Pointer(&templateInterface))
+ unmarshaler := (*realInterface).(encoding.TextUnmarshaler)
+ str := iter.ReadString()
+ err := unmarshaler.UnmarshalText([]byte(str))
+ if err != nil {
+ iter.ReportError("textUnmarshalerDecoder", err.Error())
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/feature_reflect_object.go b/vendor/github.com/json-iterator/go/feature_reflect_object.go
new file mode 100644
index 000000000..59b1235c0
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_reflect_object.go
@@ -0,0 +1,196 @@
+package jsoniter
+
+import (
+ "fmt"
+ "io"
+ "reflect"
+ "strings"
+ "unsafe"
+)
+
+func encoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
+ type bindingTo struct {
+ binding *Binding
+ toName string
+ ignored bool
+ }
+ orderedBindings := []*bindingTo{}
+ structDescriptor, err := describeStruct(cfg, typ)
+ if err != nil {
+ return nil, err
+ }
+ for _, binding := range structDescriptor.Fields {
+ for _, toName := range binding.ToNames {
+ new := &bindingTo{
+ binding: binding,
+ toName: toName,
+ }
+ for _, old := range orderedBindings {
+ if old.toName != toName {
+ continue
+ }
+ old.ignored, new.ignored = resolveConflictBinding(cfg, old.binding, new.binding)
+ }
+ orderedBindings = append(orderedBindings, new)
+ }
+ }
+ if len(orderedBindings) == 0 {
+ return &emptyStructEncoder{}, nil
+ }
+ finalOrderedFields := []structFieldTo{}
+ for _, bindingTo := range orderedBindings {
+ if !bindingTo.ignored {
+ finalOrderedFields = append(finalOrderedFields, structFieldTo{
+ encoder: bindingTo.binding.Encoder.(*structFieldEncoder),
+ toName: bindingTo.toName,
+ })
+ }
+ }
+ return &structEncoder{structDescriptor.onePtrEmbedded, structDescriptor.onePtrOptimization, finalOrderedFields}, nil
+}
+
+func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) {
+ newTagged := new.Field.Tag.Get(cfg.getTagKey()) != ""
+ oldTagged := old.Field.Tag.Get(cfg.getTagKey()) != ""
+ if newTagged {
+ if oldTagged {
+ if len(old.levels) > len(new.levels) {
+ return true, false
+ } else if len(new.levels) > len(old.levels) {
+ return false, true
+ } else {
+ return true, true
+ }
+ } else {
+ return true, false
+ }
+ } else {
+ if oldTagged {
+ return true, false
+ }
+ if len(old.levels) > len(new.levels) {
+ return true, false
+ } else if len(new.levels) > len(old.levels) {
+ return false, true
+ } else {
+ return true, true
+ }
+ }
+}
+
+func decoderOfStruct(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
+ bindings := map[string]*Binding{}
+ structDescriptor, err := describeStruct(cfg, typ)
+ if err != nil {
+ return nil, err
+ }
+ for _, binding := range structDescriptor.Fields {
+ for _, fromName := range binding.FromNames {
+ old := bindings[fromName]
+ if old == nil {
+ bindings[fromName] = binding
+ continue
+ }
+ ignoreOld, ignoreNew := resolveConflictBinding(cfg, old, binding)
+ if ignoreOld {
+ delete(bindings, fromName)
+ }
+ if !ignoreNew {
+ bindings[fromName] = binding
+ }
+ }
+ }
+ fields := map[string]*structFieldDecoder{}
+ for k, binding := range bindings {
+ fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder)
+ }
+ return createStructDecoder(typ, fields)
+}
+
+type structFieldEncoder struct {
+ field *reflect.StructField
+ fieldEncoder ValEncoder
+ omitempty bool
+}
+
+func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ fieldPtr := unsafe.Pointer(uintptr(ptr) + encoder.field.Offset)
+ encoder.fieldEncoder.Encode(fieldPtr, stream)
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%s: %s", encoder.field.Name, stream.Error.Error())
+ }
+}
+
+func (encoder *structFieldEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ fieldPtr := unsafe.Pointer(uintptr(ptr) + encoder.field.Offset)
+ return encoder.fieldEncoder.IsEmpty(fieldPtr)
+}
+
+type structEncoder struct {
+ onePtrEmbedded bool
+ onePtrOptimization bool
+ fields []structFieldTo
+}
+
+type structFieldTo struct {
+ encoder *structFieldEncoder
+ toName string
+}
+
+func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteObjectStart()
+ isNotFirst := false
+ for _, field := range encoder.fields {
+ if field.encoder.omitempty && field.encoder.IsEmpty(ptr) {
+ continue
+ }
+ if isNotFirst {
+ stream.WriteMore()
+ }
+ stream.WriteObjectField(field.toName)
+ field.encoder.Encode(ptr, stream)
+ isNotFirst = true
+ }
+ stream.WriteObjectEnd()
+}
+
+func (encoder *structEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ e := (*emptyInterface)(unsafe.Pointer(&val))
+ if encoder.onePtrOptimization {
+ if e.word == nil && encoder.onePtrEmbedded {
+ stream.WriteObjectStart()
+ stream.WriteObjectEnd()
+ return
+ }
+ ptr := uintptr(e.word)
+ e.word = unsafe.Pointer(&ptr)
+ }
+ if reflect.TypeOf(val).Kind() == reflect.Ptr {
+ encoder.Encode(unsafe.Pointer(&e.word), stream)
+ } else {
+ encoder.Encode(e.word, stream)
+ }
+}
+
+func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type emptyStructEncoder struct {
+}
+
+func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteEmptyObject()
+}
+
+func (encoder *emptyStructEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
diff --git a/vendor/github.com/json-iterator/go/feature_reflect_slice.go b/vendor/github.com/json-iterator/go/feature_reflect_slice.go
new file mode 100644
index 000000000..51a8daecf
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_reflect_slice.go
@@ -0,0 +1,147 @@
+package jsoniter
+
+import (
+ "fmt"
+ "io"
+ "reflect"
+ "unsafe"
+)
+
+func decoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValDecoder, error) {
+ decoder, err := decoderOfType(cfg, typ.Elem())
+ if err != nil {
+ return nil, err
+ }
+ return &sliceDecoder{typ, typ.Elem(), decoder}, nil
+}
+
+func encoderOfSlice(cfg *frozenConfig, typ reflect.Type) (ValEncoder, error) {
+ encoder, err := encoderOfType(cfg, typ.Elem())
+ if err != nil {
+ return nil, err
+ }
+ if typ.Elem().Kind() == reflect.Map {
+ encoder = &OptionalEncoder{encoder}
+ }
+ return &sliceEncoder{typ, typ.Elem(), encoder}, nil
+}
+
+type sliceEncoder struct {
+ sliceType reflect.Type
+ elemType reflect.Type
+ elemEncoder ValEncoder
+}
+
+func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ slice := (*sliceHeader)(ptr)
+ if slice.Data == nil {
+ stream.WriteNil()
+ return
+ }
+ if slice.Len == 0 {
+ stream.WriteEmptyArray()
+ return
+ }
+ stream.WriteArrayStart()
+ elemPtr := unsafe.Pointer(slice.Data)
+ encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream)
+ for i := 1; i < slice.Len; i++ {
+ stream.WriteMore()
+ elemPtr = unsafe.Pointer(uintptr(elemPtr) + encoder.elemType.Size())
+ encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream)
+ }
+ stream.WriteArrayEnd()
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error())
+ }
+}
+
+func (encoder *sliceEncoder) EncodeInterface(val interface{}, stream *Stream) {
+ WriteToStream(val, stream, encoder)
+}
+
+func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ slice := (*sliceHeader)(ptr)
+ return slice.Len == 0
+}
+
+type sliceDecoder struct {
+ sliceType reflect.Type
+ elemType reflect.Type
+ elemDecoder ValDecoder
+}
+
+// sliceHeader is a safe version of SliceHeader used within this package.
+type sliceHeader struct {
+ Data unsafe.Pointer
+ Len int
+ Cap int
+}
+
+func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.doDecode(ptr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error())
+ }
+}
+
+func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
+ slice := (*sliceHeader)(ptr)
+ if iter.ReadNil() {
+ slice.Len = 0
+ slice.Cap = 0
+ slice.Data = nil
+ return
+ }
+ reuseSlice(slice, decoder.sliceType, 4)
+ slice.Len = 0
+ offset := uintptr(0)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ growOne(slice, decoder.sliceType, decoder.elemType)
+ decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter)
+ offset += decoder.elemType.Size()
+ return true
+ })
+}
+
+// growOne grows the slice so that it can hold one more element, allocating a larger
+// backing array (doubling, or +25% once past 1024 elements) when the capacity is exhausted.
+func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Type) {
+ newLen := slice.Len + 1
+ if newLen <= slice.Cap {
+ slice.Len = newLen
+ return
+ }
+ newCap := slice.Cap
+ if newCap == 0 {
+ newCap = 1
+ } else {
+ for newCap < newLen {
+ if slice.Len < 1024 {
+ newCap += newCap
+ } else {
+ newCap += newCap / 4
+ }
+ }
+ }
+ newVal := reflect.MakeSlice(sliceType, newLen, newCap)
+ dst := unsafe.Pointer(newVal.Pointer())
+ // copy old array into new array
+ originalBytesCount := slice.Len * int(elementType.Size())
+ srcSliceHeader := (unsafe.Pointer)(&sliceHeader{slice.Data, originalBytesCount, originalBytesCount})
+ dstSliceHeader := (unsafe.Pointer)(&sliceHeader{dst, originalBytesCount, originalBytesCount})
+ copy(*(*[]byte)(dstSliceHeader), *(*[]byte)(srcSliceHeader))
+ slice.Data = dst
+ slice.Len = newLen
+ slice.Cap = newCap
+}
+
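+// reuseSlice ensures the slice has at least expectedCap capacity, allocating a fresh
+// backing array when it does not.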
+func reuseSlice(slice *sliceHeader, sliceType reflect.Type, expectedCap int) {
+ if expectedCap <= slice.Cap {
+ return
+ }
+ newVal := reflect.MakeSlice(sliceType, 0, expectedCap)
+ dst := unsafe.Pointer(newVal.Pointer())
+ slice.Data = dst
+ slice.Cap = expectedCap
+}
diff --git a/vendor/github.com/json-iterator/go/feature_reflect_struct_decoder.go b/vendor/github.com/json-iterator/go/feature_reflect_struct_decoder.go
new file mode 100644
index 000000000..e6ced77c2
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_reflect_struct_decoder.go
@@ -0,0 +1,934 @@
+package jsoniter
+
+import (
+ "fmt"
+ "io"
+ "reflect"
+ "strings"
+ "unsafe"
+)
+
+func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) (ValDecoder, error) {
+ knownHash := map[int32]struct{}{
+ 0: {},
+ }
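+	// Hash value 0 is reserved as the "unused" marker below; any hash collision between
+	// field names falls back to the general map-based decoder.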
+ switch len(fields) {
+ case 0:
+ return &skipObjectDecoder{typ}, nil
+ case 1:
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName)
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields}, nil
+ }
+ knownHash[fieldHash] = struct{}{}
+ return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder}, nil
+ }
+ case 2:
+ var fieldHash1 int32
+ var fieldHash2 int32
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName)
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields}, nil
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldHash1 == 0 {
+ fieldHash1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else {
+ fieldHash2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ }
+ }
+ return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2}, nil
+ case 3:
+ var fieldName1 int32
+ var fieldName2 int32
+ var fieldName3 int32
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName)
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields}, nil
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ }
+ }
+ return &threeFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3}, nil
+ case 4:
+ var fieldName1 int32
+ var fieldName2 int32
+ var fieldName3 int32
+ var fieldName4 int32
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName)
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields}, nil
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ }
+ }
+ return &fourFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4}, nil
+ case 5:
+ var fieldName1 int32
+ var fieldName2 int32
+ var fieldName3 int32
+ var fieldName4 int32
+ var fieldName5 int32
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName)
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields}, nil
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ }
+ }
+ return &fiveFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4, fieldName5, fieldDecoder5}, nil
+ case 6:
+ var fieldName1 int32
+ var fieldName2 int32
+ var fieldName3 int32
+ var fieldName4 int32
+ var fieldName5 int32
+ var fieldName6 int32
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName)
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields}, nil
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ }
+ }
+ return &sixFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6}, nil
+ case 7:
+ var fieldName1 int32
+ var fieldName2 int32
+ var fieldName3 int32
+ var fieldName4 int32
+ var fieldName5 int32
+ var fieldName6 int32
+ var fieldName7 int32
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName)
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields}, nil
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ }
+ }
+ return &sevenFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7}, nil
+ case 8:
+ var fieldName1 int32
+ var fieldName2 int32
+ var fieldName3 int32
+ var fieldName4 int32
+ var fieldName5 int32
+ var fieldName6 int32
+ var fieldName7 int32
+ var fieldName8 int32
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName)
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields}, nil
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ }
+ }
+ return &eightFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7, fieldName8, fieldDecoder8}, nil
+ case 9:
+ var fieldName1 int32
+ var fieldName2 int32
+ var fieldName3 int32
+ var fieldName4 int32
+ var fieldName5 int32
+ var fieldName6 int32
+ var fieldName7 int32
+ var fieldName8 int32
+ var fieldName9 int32
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ var fieldDecoder9 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName)
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields}, nil
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else if fieldName8 == 0 {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ } else {
+ fieldName9 = fieldHash
+ fieldDecoder9 = fieldDecoder
+ }
+ }
+ return &nineFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9}, nil
+ case 10:
+ var fieldName1 int32
+ var fieldName2 int32
+ var fieldName3 int32
+ var fieldName4 int32
+ var fieldName5 int32
+ var fieldName6 int32
+ var fieldName7 int32
+ var fieldName8 int32
+ var fieldName9 int32
+ var fieldName10 int32
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ var fieldDecoder9 *structFieldDecoder
+ var fieldDecoder10 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName)
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields}, nil
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else if fieldName8 == 0 {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ } else if fieldName9 == 0 {
+ fieldName9 = fieldHash
+ fieldDecoder9 = fieldDecoder
+ } else {
+ fieldName10 = fieldHash
+ fieldDecoder10 = fieldDecoder
+ }
+ }
+ return &tenFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1, fieldName2, fieldDecoder2, fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4, fieldName5, fieldDecoder5, fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7, fieldName8, fieldDecoder8, fieldName9, fieldDecoder9,
+ fieldName10, fieldDecoder10}, nil
+ }
+ return &generalStructDecoder{typ, fields}, nil
+}
+
+type generalStructDecoder struct {
+ typ reflect.Type
+ fields map[string]*structFieldDecoder
+}
+
+func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ var fieldBytes []byte
+ var field string
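+	// With objectFieldMustBeSimpleString the raw field bytes can be used directly (no escape
+	// handling); otherwise the field is read as a full JSON string followed by an explicit ':' check.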
+ if iter.cfg.objectFieldMustBeSimpleString {
+ fieldBytes = iter.readObjectFieldAsBytes()
+ field = *(*string)(unsafe.Pointer(&fieldBytes))
+ } else {
+ field = iter.ReadString()
+ c := iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ }
+ fieldDecoder := decoder.fields[strings.ToLower(field)]
+ if fieldDecoder == nil {
+ iter.Skip()
+ } else {
+ fieldDecoder.Decode(ptr, iter)
+ }
+ for iter.nextToken() == ',' {
+ if iter.cfg.objectFieldMustBeSimpleString {
+ fieldBytes := iter.readObjectFieldAsBytes()
+ field = *(*string)(unsafe.Pointer(&fieldBytes))
+ } else {
+ field = iter.ReadString()
+ c := iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ }
+ fieldDecoder = decoder.fields[strings.ToLower(field)]
+ if fieldDecoder == nil {
+ iter.Skip()
+ } else {
+ fieldDecoder.Decode(ptr, iter)
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type skipObjectDecoder struct {
+ typ reflect.Type
+}
+
+func (decoder *skipObjectDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ valueType := iter.WhatIsNext()
+ if valueType != ObjectValue && valueType != NilValue {
+ iter.ReportError("skipObjectDecoder", "expect object or null")
+ return
+ }
+ iter.Skip()
+}
+
+type oneFieldStructDecoder struct {
+ typ reflect.Type
+ fieldHash int32
+ fieldDecoder *structFieldDecoder
+}
+
+func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ if iter.readFieldHash() == decoder.fieldHash {
+ decoder.fieldDecoder.Decode(ptr, iter)
+ } else {
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type twoFieldsStructDecoder struct {
+ typ reflect.Type
+ fieldHash1 int32
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int32
+ fieldDecoder2 *structFieldDecoder
+}
+
+func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type threeFieldsStructDecoder struct {
+ typ reflect.Type
+ fieldHash1 int32
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int32
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int32
+ fieldDecoder3 *structFieldDecoder
+}
+
+func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type fourFieldsStructDecoder struct {
+ typ reflect.Type
+ fieldHash1 int32
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int32
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int32
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int32
+ fieldDecoder4 *structFieldDecoder
+}
+
+func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type fiveFieldsStructDecoder struct {
+ typ reflect.Type
+ fieldHash1 int32
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int32
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int32
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int32
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int32
+ fieldDecoder5 *structFieldDecoder
+}
+
+func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type sixFieldsStructDecoder struct {
+ typ reflect.Type
+ fieldHash1 int32
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int32
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int32
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int32
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int32
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int32
+ fieldDecoder6 *structFieldDecoder
+}
+
+func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type sevenFieldsStructDecoder struct {
+ typ reflect.Type
+ fieldHash1 int32
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int32
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int32
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int32
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int32
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int32
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int32
+ fieldDecoder7 *structFieldDecoder
+}
+
+func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type eightFieldsStructDecoder struct {
+ typ reflect.Type
+ fieldHash1 int32
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int32
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int32
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int32
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int32
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int32
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int32
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int32
+ fieldDecoder8 *structFieldDecoder
+}
+
+func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type nineFieldsStructDecoder struct {
+ typ reflect.Type
+ fieldHash1 int32
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int32
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int32
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int32
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int32
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int32
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int32
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int32
+ fieldDecoder8 *structFieldDecoder
+ fieldHash9 int32
+ fieldDecoder9 *structFieldDecoder
+}
+
+func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ case decoder.fieldHash9:
+ decoder.fieldDecoder9.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type tenFieldsStructDecoder struct {
+ typ reflect.Type
+ fieldHash1 int32
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int32
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int32
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int32
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int32
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int32
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int32
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int32
+ fieldDecoder8 *structFieldDecoder
+ fieldHash9 int32
+ fieldDecoder9 *structFieldDecoder
+ fieldHash10 int32
+ fieldDecoder10 *structFieldDecoder
+}
+
+func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ case decoder.fieldHash9:
+ decoder.fieldDecoder9.Decode(ptr, iter)
+ case decoder.fieldHash10:
+ decoder.fieldDecoder10.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.typ, iter.Error.Error())
+ }
+}
+
+type structFieldDecoder struct {
+ field *reflect.StructField
+ fieldDecoder ValDecoder
+}
+
+func (decoder *structFieldDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ fieldPtr := unsafe.Pointer(uintptr(ptr) + decoder.field.Offset)
+ decoder.fieldDecoder.Decode(fieldPtr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%s: %s", decoder.field.Name, iter.Error.Error())
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/feature_stream.go b/vendor/github.com/json-iterator/go/feature_stream.go
new file mode 100644
index 000000000..97355eb5b
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_stream.go
@@ -0,0 +1,308 @@
+package jsoniter
+
+import (
+ "io"
+)
+
+// Stream is an io.Writer-like object with JSON-specific write functions.
+// Errors are not returned from the write methods; they are stored in the Error field of the stream instance.
+type Stream struct {
+ cfg *frozenConfig
+ out io.Writer
+ buf []byte
+ n int
+ Error error
+ indention int
+ Attachment interface{} // open for customized encoder
+}
+
+// NewStream creates a new Stream instance.
+// cfg can be jsoniter.ConfigDefault.
+// out can be nil when writing to the internal buffer.
+// bufSize is the initial size for the internal buffer in bytes.
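+//
+// A minimal usage sketch (assuming the package-level ConfigDefault):
+//
+//	stream := NewStream(ConfigDefault, nil, 64)
+//	stream.WriteObjectStart()
+//	stream.WriteObjectField("ok")
+//	stream.WriteBool(true)
+//	stream.WriteObjectEnd()
+//	result := stream.Buffer() // {"ok":true}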
+func NewStream(cfg API, out io.Writer, bufSize int) *Stream {
+ return &Stream{
+ cfg: cfg.(*frozenConfig),
+ out: out,
+ buf: make([]byte, bufSize),
+ n: 0,
+ Error: nil,
+ indention: 0,
+ }
+}
+
+// Pool returns a pool that can provide more streams with the same configuration.
+func (stream *Stream) Pool() StreamPool {
+ return stream.cfg
+}
+
+// Reset reuses this stream instance by assigning a new writer.
+func (stream *Stream) Reset(out io.Writer) {
+ stream.out = out
+ stream.n = 0
+}
+
+// Available returns how many bytes are unused in the buffer.
+func (stream *Stream) Available() int {
+ return len(stream.buf) - stream.n
+}
+
+// Buffered returns the number of bytes that have been written into the current buffer.
+func (stream *Stream) Buffered() int {
+ return stream.n
+}
+
+// Buffer returns the bytes written so far; if the writer is nil, use this method to take the result.
+func (stream *Stream) Buffer() []byte {
+ return stream.buf[:stream.n]
+}
+
+// Write writes the contents of p into the buffer.
+// It returns the number of bytes written.
+// If nn < len(p), it also returns an error explaining
+// why the write is short.
+func (stream *Stream) Write(p []byte) (nn int, err error) {
+ for len(p) > stream.Available() && stream.Error == nil {
+ if stream.out == nil {
+ stream.growAtLeast(len(p))
+ } else {
+ var n int
+ if stream.Buffered() == 0 {
+ // Large write, empty buffer.
+ // Write directly from p to avoid copy.
+ n, stream.Error = stream.out.Write(p)
+ } else {
+ n = copy(stream.buf[stream.n:], p)
+ stream.n += n
+ stream.Flush()
+ }
+ nn += n
+ p = p[n:]
+ }
+ }
+ if stream.Error != nil {
+ return nn, stream.Error
+ }
+ n := copy(stream.buf[stream.n:], p)
+ stream.n += n
+ nn += n
+ return nn, nil
+}
+
+// writeByte writes a single byte into the buffer.
+func (stream *Stream) writeByte(c byte) {
+ if stream.Error != nil {
+ return
+ }
+ if stream.Available() < 1 {
+ stream.growAtLeast(1)
+ }
+ stream.buf[stream.n] = c
+ stream.n++
+}
+
+func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) {
+ if stream.Error != nil {
+ return
+ }
+ if stream.Available() < 2 {
+ stream.growAtLeast(2)
+ }
+ stream.buf[stream.n] = c1
+ stream.buf[stream.n+1] = c2
+ stream.n += 2
+}
+
+func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
+ if stream.Error != nil {
+ return
+ }
+ if stream.Available() < 3 {
+ stream.growAtLeast(3)
+ }
+ stream.buf[stream.n] = c1
+ stream.buf[stream.n+1] = c2
+ stream.buf[stream.n+2] = c3
+ stream.n += 3
+}
+
+func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
+ if stream.Error != nil {
+ return
+ }
+ if stream.Available() < 4 {
+ stream.growAtLeast(4)
+ }
+ stream.buf[stream.n] = c1
+ stream.buf[stream.n+1] = c2
+ stream.buf[stream.n+2] = c3
+ stream.buf[stream.n+3] = c4
+ stream.n += 4
+}
+
+func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
+ if stream.Error != nil {
+ return
+ }
+ if stream.Available() < 5 {
+ stream.growAtLeast(5)
+ }
+ stream.buf[stream.n] = c1
+ stream.buf[stream.n+1] = c2
+ stream.buf[stream.n+2] = c3
+ stream.buf[stream.n+3] = c4
+ stream.buf[stream.n+4] = c5
+ stream.n += 5
+}
+
+// Flush writes any buffered data to the underlying io.Writer.
+func (stream *Stream) Flush() error {
+ if stream.out == nil {
+ return nil
+ }
+ if stream.Error != nil {
+ return stream.Error
+ }
+ if stream.n == 0 {
+ return nil
+ }
+ n, err := stream.out.Write(stream.buf[0:stream.n])
+ if n < stream.n && err == nil {
+ err = io.ErrShortWrite
+ }
+ if err != nil {
+ if n > 0 && n < stream.n {
+ copy(stream.buf[0:stream.n-n], stream.buf[n:stream.n])
+ }
+ stream.n -= n
+ stream.Error = err
+ return err
+ }
+ stream.n = 0
+ return nil
+}
+
+func (stream *Stream) ensure(minimal int) {
+ available := stream.Available()
+ if available < minimal {
+ stream.growAtLeast(minimal)
+ }
+}
+
+func (stream *Stream) growAtLeast(minimal int) {
+ if stream.out != nil {
+ stream.Flush()
+ if stream.Available() >= minimal {
+ return
+ }
+ }
+ toGrow := len(stream.buf)
+ if toGrow < minimal {
+ toGrow = minimal
+ }
+ newBuf := make([]byte, len(stream.buf)+toGrow)
+ copy(newBuf, stream.Buffer())
+ stream.buf = newBuf
+}
+
+// WriteRaw writes the string out without quotes, as raw bytes.
+func (stream *Stream) WriteRaw(s string) {
+ stream.ensure(len(s))
+ if stream.Error != nil {
+ return
+ }
+ n := copy(stream.buf[stream.n:], s)
+ stream.n += n
+}
+
+// WriteNil writes null to the stream.
+func (stream *Stream) WriteNil() {
+ stream.writeFourBytes('n', 'u', 'l', 'l')
+}
+
+// WriteTrue writes true to the stream.
+func (stream *Stream) WriteTrue() {
+ stream.writeFourBytes('t', 'r', 'u', 'e')
+}
+
+// WriteFalse writes false to the stream.
+func (stream *Stream) WriteFalse() {
+ stream.writeFiveBytes('f', 'a', 'l', 's', 'e')
+}
+
+// WriteBool writes true or false to the stream.
+func (stream *Stream) WriteBool(val bool) {
+ if val {
+ stream.WriteTrue()
+ } else {
+ stream.WriteFalse()
+ }
+}
+
+// WriteObjectStart writes '{' with optional indentation.
+func (stream *Stream) WriteObjectStart() {
+ stream.indention += stream.cfg.indentionStep
+ stream.writeByte('{')
+ stream.writeIndention(0)
+}
+
+// WriteObjectField writes "field": with optional indentation.
+func (stream *Stream) WriteObjectField(field string) {
+ stream.WriteString(field)
+ if stream.indention > 0 {
+ stream.writeTwoBytes(':', ' ')
+ } else {
+ stream.writeByte(':')
+ }
+}
+
+// WriteObjectEnd writes '}' with optional indentation.
+func (stream *Stream) WriteObjectEnd() {
+ stream.writeIndention(stream.cfg.indentionStep)
+ stream.indention -= stream.cfg.indentionStep
+ stream.writeByte('}')
+}
+
+// WriteEmptyObject writes {}.
+func (stream *Stream) WriteEmptyObject() {
+ stream.writeByte('{')
+ stream.writeByte('}')
+}
+
+// WriteMore writes ',' with optional indentation.
+func (stream *Stream) WriteMore() {
+ stream.writeByte(',')
+ stream.writeIndention(0)
+}
+
+// WriteArrayStart writes '[' with optional indentation.
+func (stream *Stream) WriteArrayStart() {
+ stream.indention += stream.cfg.indentionStep
+ stream.writeByte('[')
+ stream.writeIndention(0)
+}
+
+// WriteEmptyArray writes [].
+func (stream *Stream) WriteEmptyArray() {
+ stream.writeTwoBytes('[', ']')
+}
+
+// WriteArrayEnd writes ']' with optional indentation.
+func (stream *Stream) WriteArrayEnd() {
+ stream.writeIndention(stream.cfg.indentionStep)
+ stream.indention -= stream.cfg.indentionStep
+ stream.writeByte(']')
+}
+
+func (stream *Stream) writeIndention(delta int) {
+ if stream.indention == 0 {
+ return
+ }
+ stream.writeByte('\n')
+ toWrite := stream.indention - delta
+ stream.ensure(toWrite)
+ for i := 0; i < toWrite && stream.n < len(stream.buf); i++ {
+ stream.buf[stream.n] = ' '
+ stream.n++
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/feature_stream_float.go b/vendor/github.com/json-iterator/go/feature_stream_float.go
new file mode 100644
index 000000000..9a404e11d
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_stream_float.go
@@ -0,0 +1,96 @@
+package jsoniter
+
+import (
+ "math"
+ "strconv"
+)
+
+var pow10 []uint64
+
+func init() {
+ pow10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
+}
+
+// WriteFloat32 writes a float32 to the stream.
+func (stream *Stream) WriteFloat32(val float32) {
+ abs := math.Abs(float64(val))
+ fmt := byte('f')
+ // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
+ if abs != 0 {
+ if float32(abs) < 1e-6 || float32(abs) >= 1e21 {
+ fmt = 'e'
+ }
+ }
+ stream.WriteRaw(strconv.FormatFloat(float64(val), fmt, -1, 32))
+}
+
+// WriteFloat32Lossy writes a float32 to the stream with only 6 digits of fractional precision; it is much faster than WriteFloat32.
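+// The value is scaled by 1e6, rounded, and written as integer and fractional parts with trailing
+// zeros trimmed; magnitudes above 0x4ffffff fall back to WriteFloat32.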
+func (stream *Stream) WriteFloat32Lossy(val float32) {
+ if val < 0 {
+ stream.writeByte('-')
+ val = -val
+ }
+ if val > 0x4ffffff {
+ stream.WriteFloat32(val)
+ return
+ }
+ precision := 6
+ exp := uint64(1000000) // 6
+ lval := uint64(float64(val)*float64(exp) + 0.5)
+ stream.WriteUint64(lval / exp)
+ fval := lval % exp
+ if fval == 0 {
+ return
+ }
+ stream.writeByte('.')
+ stream.ensure(10)
+ for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
+ stream.writeByte('0')
+ }
+ stream.WriteUint64(fval)
+ for stream.buf[stream.n-1] == '0' {
+ stream.n--
+ }
+}
+
+// WriteFloat64 writes a float64 to the stream.
+func (stream *Stream) WriteFloat64(val float64) {
+ abs := math.Abs(val)
+ fmt := byte('f')
+	// Use exponent notation for very small or very large magnitudes (below 1e-6 or at least 1e21).
+ if abs != 0 {
+ if abs < 1e-6 || abs >= 1e21 {
+ fmt = 'e'
+ }
+ }
+ stream.WriteRaw(strconv.FormatFloat(float64(val), fmt, -1, 64))
+}
+
+// WriteFloat64Lossy writes a float64 to the stream with only 6 digits of fractional precision; it is much faster than WriteFloat64.
+func (stream *Stream) WriteFloat64Lossy(val float64) {
+ if val < 0 {
+ stream.writeByte('-')
+ val = -val
+ }
+ if val > 0x4ffffff {
+ stream.WriteFloat64(val)
+ return
+ }
+ precision := 6
+ exp := uint64(1000000) // 6
+ lval := uint64(val*float64(exp) + 0.5)
+ stream.WriteUint64(lval / exp)
+ fval := lval % exp
+ if fval == 0 {
+ return
+ }
+ stream.writeByte('.')
+ stream.ensure(10)
+ for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
+ stream.writeByte('0')
+ }
+ stream.WriteUint64(fval)
+ for stream.buf[stream.n-1] == '0' {
+ stream.n--
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/feature_stream_int.go b/vendor/github.com/json-iterator/go/feature_stream_int.go
new file mode 100644
index 000000000..7cfd522c1
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_stream_int.go
@@ -0,0 +1,320 @@
+package jsoniter
+
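+// digits caches the three-digit ASCII representation of 0..999 in the low 24 bits of each entry;
+// the high byte records how many leading digits to skip when the group is the most significant one.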
+var digits []uint32
+
+func init() {
+ digits = make([]uint32, 1000)
+ for i := uint32(0); i < 1000; i++ {
+ digits[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
+ if i < 10 {
+ digits[i] += 2 << 24
+ } else if i < 100 {
+ digits[i] += 1 << 24
+ }
+ }
+}
+
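+// writeFirstBuf writes the most significant group of up to three digits, skipping leading zeros
+// as recorded in the high byte of v, and returns the new buffer position.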
+func writeFirstBuf(buf []byte, v uint32, n int) int {
+ start := v >> 24
+ if start == 0 {
+ buf[n] = byte(v >> 16)
+ n++
+ buf[n] = byte(v >> 8)
+ n++
+ } else if start == 1 {
+ buf[n] = byte(v >> 8)
+ n++
+ }
+ buf[n] = byte(v)
+ n++
+ return n
+}
+
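+// writeBuf writes a full three-digit group (keeping leading zeros) at position n.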
+func writeBuf(buf []byte, v uint32, n int) {
+ buf[n] = byte(v >> 16)
+ buf[n+1] = byte(v >> 8)
+ buf[n+2] = byte(v)
+}
+
+// WriteUint8 writes a uint8 to the stream
+func (stream *Stream) WriteUint8(val uint8) {
+ stream.ensure(3)
+ stream.n = writeFirstBuf(stream.buf, digits[val], stream.n)
+}
+
+// WriteInt8 writes an int8 to the stream
+func (stream *Stream) WriteInt8(nval int8) {
+ stream.ensure(4)
+ n := stream.n
+ var val uint8
+ if nval < 0 {
+ val = uint8(-nval)
+ stream.buf[n] = '-'
+ n++
+ } else {
+ val = uint8(nval)
+ }
+ stream.n = writeFirstBuf(stream.buf, digits[val], n)
+}
+
+// WriteUint16 writes a uint16 to the stream
+func (stream *Stream) WriteUint16(val uint16) {
+ stream.ensure(5)
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.n = writeFirstBuf(stream.buf, digits[val], stream.n)
+ return
+ }
+ r1 := val - q1*1000
+ n := writeFirstBuf(stream.buf, digits[q1], stream.n)
+ writeBuf(stream.buf, digits[r1], n)
+ stream.n = n + 3
+ return
+}
+
+// WriteInt16 writes an int16 to the stream
+func (stream *Stream) WriteInt16(nval int16) {
+ stream.ensure(6)
+ n := stream.n
+ var val uint16
+ if nval < 0 {
+ val = uint16(-nval)
+ stream.buf[n] = '-'
+ n++
+ } else {
+ val = uint16(nval)
+ }
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.n = writeFirstBuf(stream.buf, digits[val], n)
+ return
+ }
+ r1 := val - q1*1000
+ n = writeFirstBuf(stream.buf, digits[q1], n)
+ writeBuf(stream.buf, digits[r1], n)
+ stream.n = n + 3
+ return
+}
+
+// WriteUint32 writes a uint32 to the stream
+func (stream *Stream) WriteUint32(val uint32) {
+ stream.ensure(10)
+ n := stream.n
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.n = writeFirstBuf(stream.buf, digits[val], n)
+ return
+ }
+ r1 := val - q1*1000
+ q2 := q1 / 1000
+ if q2 == 0 {
+ n := writeFirstBuf(stream.buf, digits[q1], n)
+ writeBuf(stream.buf, digits[r1], n)
+ stream.n = n + 3
+ return
+ }
+ r2 := q1 - q2*1000
+ q3 := q2 / 1000
+ if q3 == 0 {
+ n = writeFirstBuf(stream.buf, digits[q2], n)
+ } else {
+ r3 := q2 - q3*1000
+ stream.buf[n] = byte(q3 + '0')
+ n++
+ writeBuf(stream.buf, digits[r3], n)
+ n += 3
+ }
+ writeBuf(stream.buf, digits[r2], n)
+ writeBuf(stream.buf, digits[r1], n+3)
+ stream.n = n + 6
+}
+
+// WriteInt32 writes an int32 to the stream
+func (stream *Stream) WriteInt32(nval int32) {
+ stream.ensure(11)
+ n := stream.n
+ var val uint32
+ if nval < 0 {
+ val = uint32(-nval)
+ stream.buf[n] = '-'
+ n++
+ } else {
+ val = uint32(nval)
+ }
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.n = writeFirstBuf(stream.buf, digits[val], n)
+ return
+ }
+ r1 := val - q1*1000
+ q2 := q1 / 1000
+ if q2 == 0 {
+ n := writeFirstBuf(stream.buf, digits[q1], n)
+ writeBuf(stream.buf, digits[r1], n)
+ stream.n = n + 3
+ return
+ }
+ r2 := q1 - q2*1000
+ q3 := q2 / 1000
+ if q3 == 0 {
+ n = writeFirstBuf(stream.buf, digits[q2], n)
+ } else {
+ r3 := q2 - q3*1000
+ stream.buf[n] = byte(q3 + '0')
+ n++
+ writeBuf(stream.buf, digits[r3], n)
+ n += 3
+ }
+ writeBuf(stream.buf, digits[r2], n)
+ writeBuf(stream.buf, digits[r1], n+3)
+ stream.n = n + 6
+}
+
+// WriteUint64 writes a uint64 to the stream
+func (stream *Stream) WriteUint64(val uint64) {
+ stream.ensure(20)
+ n := stream.n
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.n = writeFirstBuf(stream.buf, digits[val], n)
+ return
+ }
+ r1 := val - q1*1000
+ q2 := q1 / 1000
+ if q2 == 0 {
+ n := writeFirstBuf(stream.buf, digits[q1], n)
+ writeBuf(stream.buf, digits[r1], n)
+ stream.n = n + 3
+ return
+ }
+ r2 := q1 - q2*1000
+ q3 := q2 / 1000
+ if q3 == 0 {
+ n = writeFirstBuf(stream.buf, digits[q2], n)
+ writeBuf(stream.buf, digits[r2], n)
+ writeBuf(stream.buf, digits[r1], n+3)
+ stream.n = n + 6
+ return
+ }
+ r3 := q2 - q3*1000
+ q4 := q3 / 1000
+ if q4 == 0 {
+ n = writeFirstBuf(stream.buf, digits[q3], n)
+ writeBuf(stream.buf, digits[r3], n)
+ writeBuf(stream.buf, digits[r2], n+3)
+ writeBuf(stream.buf, digits[r1], n+6)
+ stream.n = n + 9
+ return
+ }
+ r4 := q3 - q4*1000
+ q5 := q4 / 1000
+ if q5 == 0 {
+ n = writeFirstBuf(stream.buf, digits[q4], n)
+ writeBuf(stream.buf, digits[r4], n)
+ writeBuf(stream.buf, digits[r3], n+3)
+ writeBuf(stream.buf, digits[r2], n+6)
+ writeBuf(stream.buf, digits[r1], n+9)
+ stream.n = n + 12
+ return
+ }
+ r5 := q4 - q5*1000
+ q6 := q5 / 1000
+ if q6 == 0 {
+ n = writeFirstBuf(stream.buf, digits[q5], n)
+ } else {
+ n = writeFirstBuf(stream.buf, digits[q6], n)
+ r6 := q5 - q6*1000
+ writeBuf(stream.buf, digits[r6], n)
+ n += 3
+ }
+ writeBuf(stream.buf, digits[r5], n)
+ writeBuf(stream.buf, digits[r4], n+3)
+ writeBuf(stream.buf, digits[r3], n+6)
+ writeBuf(stream.buf, digits[r2], n+9)
+ writeBuf(stream.buf, digits[r1], n+12)
+ stream.n = n + 15
+}
+
+// WriteInt64 writes an int64 to the stream
+func (stream *Stream) WriteInt64(nval int64) {
+ stream.ensure(20)
+ n := stream.n
+ var val uint64
+ if nval < 0 {
+ val = uint64(-nval)
+ stream.buf[n] = '-'
+ n++
+ } else {
+ val = uint64(nval)
+ }
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.n = writeFirstBuf(stream.buf, digits[val], n)
+ return
+ }
+ r1 := val - q1*1000
+ q2 := q1 / 1000
+ if q2 == 0 {
+ n := writeFirstBuf(stream.buf, digits[q1], n)
+ writeBuf(stream.buf, digits[r1], n)
+ stream.n = n + 3
+ return
+ }
+ r2 := q1 - q2*1000
+ q3 := q2 / 1000
+ if q3 == 0 {
+ n = writeFirstBuf(stream.buf, digits[q2], n)
+ writeBuf(stream.buf, digits[r2], n)
+ writeBuf(stream.buf, digits[r1], n+3)
+ stream.n = n + 6
+ return
+ }
+ r3 := q2 - q3*1000
+ q4 := q3 / 1000
+ if q4 == 0 {
+ n = writeFirstBuf(stream.buf, digits[q3], n)
+ writeBuf(stream.buf, digits[r3], n)
+ writeBuf(stream.buf, digits[r2], n+3)
+ writeBuf(stream.buf, digits[r1], n+6)
+ stream.n = n + 9
+ return
+ }
+ r4 := q3 - q4*1000
+ q5 := q4 / 1000
+ if q5 == 0 {
+ n = writeFirstBuf(stream.buf, digits[q4], n)
+ writeBuf(stream.buf, digits[r4], n)
+ writeBuf(stream.buf, digits[r3], n+3)
+ writeBuf(stream.buf, digits[r2], n+6)
+ writeBuf(stream.buf, digits[r1], n+9)
+ stream.n = n + 12
+ return
+ }
+ r5 := q4 - q5*1000
+ q6 := q5 / 1000
+ if q6 == 0 {
+ n = writeFirstBuf(stream.buf, digits[q5], n)
+ } else {
+ stream.buf[n] = byte(q6 + '0')
+ n++
+ r6 := q5 - q6*1000
+ writeBuf(stream.buf, digits[r6], n)
+ n += 3
+ }
+ writeBuf(stream.buf, digits[r5], n)
+ writeBuf(stream.buf, digits[r4], n+3)
+ writeBuf(stream.buf, digits[r3], n+6)
+ writeBuf(stream.buf, digits[r2], n+9)
+ writeBuf(stream.buf, digits[r1], n+12)
+ stream.n = n + 15
+}
+
+// WriteInt writes an int to the stream
+func (stream *Stream) WriteInt(val int) {
+ stream.WriteInt64(int64(val))
+}
+
+// WriteUint writes a uint to the stream
+func (stream *Stream) WriteUint(val uint) {
+ stream.WriteUint64(uint64(val))
+}
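The digits table above packs three ASCII digits per entry, with the count of leading zeros stored in the top byte so writeFirstBuf can skip them. A small sketch of that packing and unpacking, using hypothetical helper names that mirror init and writeFirstBuf:

    package main

    import "fmt"

    // decode3 mirrors writeFirstBuf: the top byte of the packed entry says how
    // many leading zeros to drop (2 when the value is below 10, 1 when below 100).
    func decode3(v uint32) string {
        out := make([]byte, 0, 3)
        switch v >> 24 {
        case 0:
            out = append(out, byte(v>>16), byte(v>>8))
        case 1:
            out = append(out, byte(v>>8))
        }
        return string(append(out, byte(v)))
    }

    func main() {
        // Rebuild one entry the same way init() does, for i = 7.
        i := uint32(7)
        packed := ((i/100 + '0') << 16) + ((i/10%10 + '0') << 8) + (i%10 + '0') + (2 << 24)
        fmt.Println(decode3(packed)) // 7

        // And for i = 345: no leading zeros, so no length marker is added.
        i = 345
        packed = ((i/100 + '0') << 16) + ((i/10%10 + '0') << 8) + (i%10 + '0')
        fmt.Println(decode3(packed)) // 345
    }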
diff --git a/vendor/github.com/json-iterator/go/feature_stream_string.go b/vendor/github.com/json-iterator/go/feature_stream_string.go
new file mode 100644
index 000000000..334282f05
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/feature_stream_string.go
@@ -0,0 +1,396 @@
+package jsoniter
+
+import (
+ "unicode/utf8"
+)
+
+// htmlSafeSet holds the value true if the ASCII character with the given
+// array position can be safely represented inside a JSON string, embedded
+// inside of HTML <script> tags, without any additional escaping.
+//
+// All values are true except for the ASCII control characters (0-31), the
+// double quote ("), the backslash character ("\"), HTML opening and closing
+// tags ("<" and ">"), and the ampersand ("&").
+var htmlSafeSet = [utf8.RuneSelf]bool{
+ ' ': true,
+ '!': true,
+ '"': false,
+ '#': true,
+ '$': true,
+ '%': true,
+ '&': false,
+ '\'': true,
+ '(': true,
+ ')': true,
+ '*': true,
+ '+': true,
+ ',': true,
+ '-': true,
+ '.': true,
+ '/': true,
+ '0': true,
+ '1': true,
+ '2': true,
+ '3': true,
+ '4': true,
+ '5': true,
+ '6': true,
+ '7': true,
+ '8': true,
+ '9': true,
+ ':': true,
+ ';': true,
+ '<': false,
+ '=': true,
+ '>': false,
+ '?': true,
+ '@': true,
+ 'A': true,
+ 'B': true,
+ 'C': true,
+ 'D': true,
+ 'E': true,
+ 'F': true,
+ 'G': true,
+ 'H': true,
+ 'I': true,
+ 'J': true,
+ 'K': true,
+ 'L': true,
+ 'M': true,
+ 'N': true,
+ 'O': true,
+ 'P': true,
+ 'Q': true,
+ 'R': true,
+ 'S': true,
+ 'T': true,
+ 'U': true,
+ 'V': true,
+ 'W': true,
+ 'X': true,
+ 'Y': true,
+ 'Z': true,
+ '[': true,
+ '\\': false,
+ ']': true,
+ '^': true,
+ '_': true,
+ '`': true,
+ 'a': true,
+ 'b': true,
+ 'c': true,
+ 'd': true,
+ 'e': true,
+ 'f': true,
+ 'g': true,
+ 'h': true,
+ 'i': true,
+ 'j': true,
+ 'k': true,
+ 'l': true,
+ 'm': true,
+ 'n': true,
+ 'o': true,
+ 'p': true,
+ 'q': true,
+ 'r': true,
+ 's': true,
+ 't': true,
+ 'u': true,
+ 'v': true,
+ 'w': true,
+ 'x': true,
+ 'y': true,
+ 'z': true,
+ '{': true,
+ '|': true,
+ '}': true,
+ '~': true,
+ '\u007f': true,
+}
+
+// safeSet holds the value true if the ASCII character with the given array
+// position can be represented inside a JSON string without any further
+// escaping.
+//
+// All values are true except for the ASCII control characters (0-31), the
+// double quote ("), and the backslash character ("\").
+var safeSet = [utf8.RuneSelf]bool{
+ ' ': true,
+ '!': true,
+ '"': false,
+ '#': true,
+ '$': true,
+ '%': true,
+ '&': true,
+ '\'': true,
+ '(': true,
+ ')': true,
+ '*': true,
+ '+': true,
+ ',': true,
+ '-': true,
+ '.': true,
+ '/': true,
+ '0': true,
+ '1': true,
+ '2': true,
+ '3': true,
+ '4': true,
+ '5': true,
+ '6': true,
+ '7': true,
+ '8': true,
+ '9': true,
+ ':': true,
+ ';': true,
+ '<': true,
+ '=': true,
+ '>': true,
+ '?': true,
+ '@': true,
+ 'A': true,
+ 'B': true,
+ 'C': true,
+ 'D': true,
+ 'E': true,
+ 'F': true,
+ 'G': true,
+ 'H': true,
+ 'I': true,
+ 'J': true,
+ 'K': true,
+ 'L': true,
+ 'M': true,
+ 'N': true,
+ 'O': true,
+ 'P': true,
+ 'Q': true,
+ 'R': true,
+ 'S': true,
+ 'T': true,
+ 'U': true,
+ 'V': true,
+ 'W': true,
+ 'X': true,
+ 'Y': true,
+ 'Z': true,
+ '[': true,
+ '\\': false,
+ ']': true,
+ '^': true,
+ '_': true,
+ '`': true,
+ 'a': true,
+ 'b': true,
+ 'c': true,
+ 'd': true,
+ 'e': true,
+ 'f': true,
+ 'g': true,
+ 'h': true,
+ 'i': true,
+ 'j': true,
+ 'k': true,
+ 'l': true,
+ 'm': true,
+ 'n': true,
+ 'o': true,
+ 'p': true,
+ 'q': true,
+ 'r': true,
+ 's': true,
+ 't': true,
+ 'u': true,
+ 'v': true,
+ 'w': true,
+ 'x': true,
+ 'y': true,
+ 'z': true,
+ '{': true,
+ '|': true,
+ '}': true,
+ '~': true,
+ '\u007f': true,
+}
+
+var hex = "0123456789abcdef"
+
+// WriteStringWithHTMLEscaped writes a string to the stream with HTML special characters escaped
+func (stream *Stream) WriteStringWithHTMLEscaped(s string) {
+ stream.ensure(32)
+ valLen := len(s)
+ toWriteLen := valLen
+ bufLengthMinusTwo := len(stream.buf) - 2 // make room for the quotes
+ if stream.n+toWriteLen > bufLengthMinusTwo {
+ toWriteLen = bufLengthMinusTwo - stream.n
+ }
+ n := stream.n
+ stream.buf[n] = '"'
+ n++
+ // write string, the fast path, without utf8 and escape support
+ i := 0
+ for ; i < toWriteLen; i++ {
+ c := s[i]
+ if c < utf8.RuneSelf && htmlSafeSet[c] {
+ stream.buf[n] = c
+ n++
+ } else {
+ break
+ }
+ }
+ if i == valLen {
+ stream.buf[n] = '"'
+ n++
+ stream.n = n
+ return
+ }
+ stream.n = n
+ writeStringSlowPathWithHTMLEscaped(stream, i, s, valLen)
+}
+
+func writeStringSlowPathWithHTMLEscaped(stream *Stream, i int, s string, valLen int) {
+ start := i
+ // for the remaining parts, we process them char by char
+ for i < valLen {
+ if b := s[i]; b < utf8.RuneSelf {
+ if htmlSafeSet[b] {
+ i++
+ continue
+ }
+ if start < i {
+ stream.WriteRaw(s[start:i])
+ }
+ switch b {
+ case '\\', '"':
+ stream.writeTwoBytes('\\', b)
+ case '\n':
+ stream.writeTwoBytes('\\', 'n')
+ case '\r':
+ stream.writeTwoBytes('\\', 'r')
+ case '\t':
+ stream.writeTwoBytes('\\', 't')
+ default:
+ // This encodes bytes < 0x20 except for \t, \n and \r.
+ // If escapeHTML is set, it also escapes <, >, and &
+ // because they can lead to security holes when
+ // user-controlled strings are rendered into JSON
+ // and served to some browsers.
+ stream.WriteRaw(`\u00`)
+ stream.writeTwoBytes(hex[b>>4], hex[b&0xF])
+ }
+ i++
+ start = i
+ continue
+ }
+ c, size := utf8.DecodeRuneInString(s[i:])
+ if c == utf8.RuneError && size == 1 {
+ if start < i {
+ stream.WriteRaw(s[start:i])
+ }
+ stream.WriteRaw(`\ufffd`)
+ i++
+ start = i
+ continue
+ }
+ // U+2028 is LINE SEPARATOR.
+ // U+2029 is PARAGRAPH SEPARATOR.
+ // They are both technically valid characters in JSON strings,
+ // but don't work in JSONP, which has to be evaluated as JavaScript,
+ // and can lead to security holes there. It is valid JSON to
+ // escape them, so we do so unconditionally.
+ // See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
+ if c == '\u2028' || c == '\u2029' {
+ if start < i {
+ stream.WriteRaw(s[start:i])
+ }
+ stream.WriteRaw(`\u202`)
+ stream.writeByte(hex[c&0xF])
+ i += size
+ start = i
+ continue
+ }
+ i += size
+ }
+ if start < len(s) {
+ stream.WriteRaw(s[start:])
+ }
+ stream.writeByte('"')
+}
+
+// WriteString writes a string to the stream without HTML escaping
+func (stream *Stream) WriteString(s string) {
+ stream.ensure(32)
+ valLen := len(s)
+ toWriteLen := valLen
+ bufLengthMinusTwo := len(stream.buf) - 2 // make room for the quotes
+ if stream.n+toWriteLen > bufLengthMinusTwo {
+ toWriteLen = bufLengthMinusTwo - stream.n
+ }
+ n := stream.n
+ stream.buf[n] = '"'
+ n++
+ // write string, the fast path, without utf8 and escape support
+ i := 0
+ for ; i < toWriteLen; i++ {
+ c := s[i]
+ if c > 31 && c != '"' && c != '\\' {
+ stream.buf[n] = c
+ n++
+ } else {
+ break
+ }
+ }
+ if i == valLen {
+ stream.buf[n] = '"'
+ n++
+ stream.n = n
+ return
+ }
+ stream.n = n
+ writeStringSlowPath(stream, i, s, valLen)
+}
+
+func writeStringSlowPath(stream *Stream, i int, s string, valLen int) {
+ start := i
+ // for the remaining parts, we process them char by char
+ for i < valLen {
+ if b := s[i]; b < utf8.RuneSelf {
+ if safeSet[b] {
+ i++
+ continue
+ }
+ if start < i {
+ stream.WriteRaw(s[start:i])
+ }
+ switch b {
+ case '\\', '"':
+ stream.writeTwoBytes('\\', b)
+ case '\n':
+ stream.writeTwoBytes('\\', 'n')
+ case '\r':
+ stream.writeTwoBytes('\\', 'r')
+ case '\t':
+ stream.writeTwoBytes('\\', 't')
+ default:
+ // This encodes bytes < 0x20 except for \t, \n and \r.
+ // If escapeHTML is set, it also escapes <, >, and &
+ // because they can lead to security holes when
+ // user-controlled strings are rendered into JSON
+ // and served to some browsers.
+ stream.WriteRaw(`\u00`)
+ stream.writeTwoBytes(hex[b>>4], hex[b&0xF])
+ }
+ i++
+ start = i
+ continue
+ }
+ i++
+ continue
+ }
+ if start < len(s) {
+ stream.WriteRaw(s[start:])
+ }
+ stream.writeByte('"')
+}
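A sketch contrasting the two writers above on input containing HTML-significant characters; the expected outputs follow from the safeSet/htmlSafeSet tables and the slow paths, assuming the same Stream constructor used in the tests:

    package main

    import (
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    func main() {
        in := `<b>&"quote"`

        // safeSet leaves <, > and & alone; only the quotes need escaping.
        plain := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 64)
        plain.WriteString(in)
        fmt.Println(string(plain.Buffer())) // "<b>&\"quote\""

        // htmlSafeSet additionally forces <, > and & into \u00XX escapes.
        escaped := jsoniter.NewStream(jsoniter.ConfigDefault, nil, 64)
        escaped.WriteStringWithHTMLEscaped(in)
        fmt.Println(string(escaped.Buffer())) // "\u003cb\u003e\u0026\"quote\""
    }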
diff --git a/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md
new file mode 100644
index 000000000..3095662b0
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md
@@ -0,0 +1,7 @@
+| json type \ dest type | bool | int | uint | float | string |
+| --- | --- | --- | --- | --- | --- |
+| number | positive => true <br/> negative => true <br/> zero => false | 23.2 => 23 <br/> -32.1 => -32 | 12.1 => 12 <br/> -12.1 => 0 | as normal | same as original |
+| string | empty string => false <br/> string "0" => false <br/> other strings => true | "123.32" => 123 <br/> "-123.4" => -123 <br/> "123.23xxxw" => 123 <br/> "abcde12" => 0 <br/> "-32.1" => -32 | "13.2" => 13 <br/> "-1.1" => 0 | "12.1" => 12.1 <br/> "-12.3" => -12.3 <br/> "12.4xxa" => 12.4 <br/> "+1.1e2" => 110 | same as original |
+| bool | true => true <br/> false => false | true => 1 <br/> false => 0 | true => 1 <br/> false => 0 | true => 1 <br/> false => 0 | true => "true" <br/> false => "false" |
+| object | true | 0 | 0 | 0 | original json |
+| array | empty array => false <br/> nonempty array => true | [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 | original json | \ No newline at end of file
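The same conversion rules can be observed through the Any getters exercised by the tests later in this diff; a short sketch:

    package main

    import (
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    func main() {
        // A numeric prefix of a string is converted; the rest is ignored.
        fmt.Println(jsoniter.Get([]byte(`"123true"`)).ToInt()) // 123

        // Numbers convert to bool by truthiness: zero is false, non-zero is true.
        fmt.Println(jsoniter.Get([]byte(`0`)).ToBool())  // false
        fmt.Println(jsoniter.Get([]byte(`-1`)).ToBool()) // true

        // A non-empty array converts to int via its first element; [] gives 0.
        fmt.Println(jsoniter.Get([]byte(`[1,2]`)).ToInt()) // 1
    }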
diff --git a/vendor/github.com/json-iterator/go/jsoniter.go b/vendor/github.com/json-iterator/go/jsoniter.go
new file mode 100644
index 000000000..c2934f916
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter.go
@@ -0,0 +1,18 @@
+// Package jsoniter implements encoding and decoding of JSON as defined in
+// RFC 4627 and provides interfaces whose syntax is identical to that of the standard library's encoding/json.
+// Converting from encoding/json to jsoniter is usually no more than replacing the imported package
+// with jsoniter (and adjusting variable type declarations, if any).
+// The jsoniter interfaces give 100% compatibility with code that uses the standard library.
+//
+// "JSON and Go"
+// (https://golang.org/doc/articles/json_and_go.html)
+// gives a description of how Marshal/Unmarshal operate
+// between arbitrary or predefined json objects and bytes,
+// and it applies to jsoniter.Marshal/Unmarshal as well.
+//
+// In addition, jsoniter.Iterator provides a different set of interfaces
+// for iterating over given bytes/string/reader
+// and yielding parsed elements one by one.
+// This set of interfaces reads input only as required and gives
+// better performance.
+package jsoniter
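A minimal drop-in example of the package documented above, using ConfigCompatibleWithStandardLibrary (seen in the tests below) to mirror encoding/json behavior:

    package main

    import (
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    type user struct {
        Name string `json:"name"`
        Age  int    `json:"age"`
    }

    func main() {
        var json = jsoniter.ConfigCompatibleWithStandardLibrary

        out, err := json.Marshal(user{Name: "ann", Age: 3})
        if err != nil {
            panic(err)
        }
        fmt.Println(string(out)) // {"name":"ann","age":3}

        var u user
        if err := json.Unmarshal(out, &u); err != nil {
            panic(err)
        }
        fmt.Println(u.Name, u.Age) // ann 3
    }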
diff --git a/vendor/github.com/json-iterator/go/jsoniter_1dot8_only_test.go b/vendor/github.com/json-iterator/go/jsoniter_1dot8_only_test.go
new file mode 100644
index 000000000..1ac618653
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_1dot8_only_test.go
@@ -0,0 +1,45 @@
+// +build go1.8
+
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "testing"
+ "unicode/utf8"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_new_encoder(t *testing.T) {
+ should := require.New(t)
+ buf1 := &bytes.Buffer{}
+ encoder1 := json.NewEncoder(buf1)
+ encoder1.SetEscapeHTML(false)
+ encoder1.Encode([]int{1})
+ should.Equal("[1]\n", buf1.String())
+ buf2 := &bytes.Buffer{}
+ encoder2 := NewEncoder(buf2)
+ encoder2.SetEscapeHTML(false)
+ encoder2.Encode([]int{1})
+ should.Equal("[1]\n", buf2.String())
+}
+
+func Test_string_encode_with_std_without_html_escape(t *testing.T) {
+ api := Config{EscapeHTML: false}.Froze()
+ should := require.New(t)
+ for i := 0; i < utf8.RuneSelf; i++ {
+ input := string([]byte{byte(i)})
+ buf := &bytes.Buffer{}
+ encoder := json.NewEncoder(buf)
+ encoder.SetEscapeHTML(false)
+ err := encoder.Encode(input)
+ should.Nil(err)
+ stdOutput := buf.String()
+ stdOutput = stdOutput[:len(stdOutput)-1]
+ jsoniterOutputBytes, err := api.Marshal(input)
+ should.Nil(err)
+ jsoniterOutput := string(jsoniterOutputBytes)
+ should.Equal(stdOutput, jsoniterOutput)
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_adapter_test.go b/vendor/github.com/json-iterator/go/jsoniter_adapter_test.go
new file mode 100644
index 000000000..30f03deb6
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_adapter_test.go
@@ -0,0 +1,84 @@
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "github.com/stretchr/testify/require"
+ "io/ioutil"
+ "testing"
+)
+
+func Test_new_decoder(t *testing.T) {
+ should := require.New(t)
+ decoder1 := json.NewDecoder(bytes.NewBufferString(`[1][2]`))
+ decoder2 := NewDecoder(bytes.NewBufferString(`[1][2]`))
+ arr1 := []int{}
+ should.Nil(decoder1.Decode(&arr1))
+ should.Equal([]int{1}, arr1)
+ arr2 := []int{}
+ should.True(decoder1.More())
+ buffered, _ := ioutil.ReadAll(decoder1.Buffered())
+ should.Equal("[2]", string(buffered))
+ should.Nil(decoder2.Decode(&arr2))
+ should.Equal([]int{1}, arr2)
+ should.True(decoder2.More())
+ buffered, _ = ioutil.ReadAll(decoder2.Buffered())
+ should.Equal("[2]", string(buffered))
+
+ should.Nil(decoder1.Decode(&arr1))
+ should.Equal([]int{2}, arr1)
+ should.False(decoder1.More())
+ should.Nil(decoder2.Decode(&arr2))
+ should.Equal([]int{2}, arr2)
+ should.False(decoder2.More())
+}
+
+func Test_use_number(t *testing.T) {
+ should := require.New(t)
+ decoder1 := json.NewDecoder(bytes.NewBufferString(`123`))
+ decoder1.UseNumber()
+ decoder2 := NewDecoder(bytes.NewBufferString(`123`))
+ decoder2.UseNumber()
+ var obj1 interface{}
+ should.Nil(decoder1.Decode(&obj1))
+ should.Equal(json.Number("123"), obj1)
+ var obj2 interface{}
+ should.Nil(decoder2.Decode(&obj2))
+ should.Equal(json.Number("123"), obj2)
+}
+
+func Test_use_number_for_unmarshal(t *testing.T) {
+ should := require.New(t)
+ api := Config{UseNumber: true}.Froze()
+ var obj interface{}
+ should.Nil(api.UnmarshalFromString("123", &obj))
+ should.Equal(json.Number("123"), obj)
+}
+
+func Test_marshal_indent(t *testing.T) {
+ should := require.New(t)
+ obj := struct {
+ F1 int
+ F2 []int
+ }{1, []int{2, 3, 4}}
+ output, err := json.MarshalIndent(obj, "", " ")
+ should.Nil(err)
+ should.Equal("{\n \"F1\": 1,\n \"F2\": [\n 2,\n 3,\n 4\n ]\n}", string(output))
+ output, err = MarshalIndent(obj, "", " ")
+ should.Nil(err)
+ should.Equal("{\n \"F1\": 1,\n \"F2\": [\n 2,\n 3,\n 4\n ]\n}", string(output))
+}
+
+func Test_marshal_indent_map(t *testing.T) {
+ should := require.New(t)
+ obj := map[int]int{1: 2}
+ output, err := json.MarshalIndent(obj, "", " ")
+ should.Nil(err)
+ should.Equal("{\n \"1\": 2\n}", string(output))
+ output, err = MarshalIndent(obj, "", " ")
+ should.Nil(err)
+ should.Equal("{\n \"1\": 2\n}", string(output))
+ output, err = ConfigCompatibleWithStandardLibrary.MarshalIndent(obj, "", " ")
+ should.Nil(err)
+ should.Equal("{\n \"1\": 2\n}", string(output))
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_alias_test.go b/vendor/github.com/json-iterator/go/jsoniter_alias_test.go
new file mode 100644
index 000000000..246651f75
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_alias_test.go
@@ -0,0 +1,62 @@
+package jsoniter
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_alias(t *testing.T) {
+ should := require.New(t)
+ type myint int
+ type myint8 int8
+ type myint16 int16
+ type myint32 int32
+ type myint64 int64
+ type myuint uint
+ type myuint8 uint8
+ type myuint16 uint16
+ type myuint32 uint32
+ type myuint64 uint64
+ type myfloat32 float32
+ type myfloat64 float64
+ type mystring string
+ type mybool bool
+ type myuintptr uintptr
+ var a struct {
+ A myint8 `json:"a"`
+ B myint16 `json:"stream"`
+ C myint32 `json:"c"`
+ D myint64 `json:"d"`
+ E myuint8 `json:"e"`
+ F myuint16 `json:"f"`
+ G myuint32 `json:"g"`
+ H myuint64 `json:"h"`
+ I myfloat32 `json:"i"`
+ J myfloat64 `json:"j"`
+ K mystring `json:"k"`
+ L myint `json:"l"`
+ M myuint `json:"m"`
+ N mybool `json:"n"`
+ O myuintptr `json:"o"`
+ }
+
+ should.Nil(UnmarshalFromString(`{"a" : 1, "stream" : 1, "c": 1, "d" : 1, "e" : 1, "f" : 1, "g" : 1, "h": 1, "i" : 1, "j" : 1, "k" :"xxxx", "l" : 1, "m":1, "n": true, "o" : 1}`, &a))
+ should.Equal(myfloat32(1), a.I)
+ should.Equal(myfloat64(1), a.J)
+ should.Equal(myint8(1), a.A)
+ should.Equal(myint16(1), a.B)
+ should.Equal(myint32(1), a.C)
+ should.Equal(myint64(1), a.D)
+ should.Equal(myuint8(1), a.E)
+ should.Equal(myuint16(1), a.F)
+ should.Equal(myuint32(1), a.G)
+ should.Equal(myuint64(1), a.H)
+ should.Equal(mystring("xxxx"), a.K)
+ should.Equal(mybool(true), a.N)
+ should.Equal(myuintptr(1), a.O)
+ b, err := Marshal(a)
+ should.Nil(err)
+ should.Equal(`{"a":1,"stream":1,"c":1,"d":1,"e":1,"f":1,"g":1,"h":1,"i":1,"j":1,"k":"xxxx","l":1,"m":1,"n":true,"o":1}`, string(b))
+
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_any_array_test.go b/vendor/github.com/json-iterator/go/jsoniter_any_array_test.go
new file mode 100644
index 000000000..dbffd822d
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_any_array_test.go
@@ -0,0 +1,122 @@
+package jsoniter
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_read_empty_array_as_any(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte("[]"))
+ should.Equal(ArrayValue, any.Get().ValueType())
+ should.Equal(InvalidValue, any.Get(0.3).ValueType())
+ should.Equal(0, any.Size())
+ should.Equal(ArrayValue, any.ValueType())
+ should.Nil(any.LastError())
+ should.Equal(0, any.ToInt())
+ should.Equal(int32(0), any.ToInt32())
+ should.Equal(int64(0), any.ToInt64())
+ should.Equal(uint(0), any.ToUint())
+ should.Equal(uint32(0), any.ToUint32())
+ should.Equal(uint64(0), any.ToUint64())
+ should.Equal(float32(0), any.ToFloat32())
+ should.Equal(float64(0), any.ToFloat64())
+}
+
+func Test_read_one_element_array_as_any(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte("[1]"))
+ should.Equal(1, any.Size())
+}
+
+func Test_read_two_element_array_as_any(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte("[1,2]"))
+ should.Equal(1, any.Get(0).ToInt())
+ should.Equal(2, any.Size())
+ should.True(any.ToBool())
+ should.Equal(1, any.ToInt())
+ should.Equal([]interface{}{float64(1), float64(2)}, any.GetInterface())
+ stream := NewStream(ConfigDefault, nil, 32)
+ any.WriteTo(stream)
+ should.Equal("[1,2]", string(stream.Buffer()))
+ arr := []int{}
+ any.ToVal(&arr)
+ should.Equal([]int{1, 2}, arr)
+}
+
+func Test_wrap_array_and_convert_to_any(t *testing.T) {
+ should := require.New(t)
+ any := Wrap([]int{1, 2, 3})
+ any2 := Wrap([]int{})
+
+ should.Equal("[1,2,3]", any.ToString())
+ should.True(any.ToBool())
+ should.False(any2.ToBool())
+
+ should.Equal(1, any.ToInt())
+ should.Equal(0, any2.ToInt())
+ should.Equal(int32(1), any.ToInt32())
+ should.Equal(int32(0), any2.ToInt32())
+ should.Equal(int64(1), any.ToInt64())
+ should.Equal(int64(0), any2.ToInt64())
+ should.Equal(uint(1), any.ToUint())
+ should.Equal(uint(0), any2.ToUint())
+ should.Equal(uint32(1), any.ToUint32())
+ should.Equal(uint32(0), any2.ToUint32())
+ should.Equal(uint64(1), any.ToUint64())
+ should.Equal(uint64(0), any2.ToUint64())
+ should.Equal(float32(1), any.ToFloat32())
+ should.Equal(float32(0), any2.ToFloat32())
+ should.Equal(float64(1), any.ToFloat64())
+ should.Equal(float64(0), any2.ToFloat64())
+ should.Equal(3, any.Size())
+ should.Equal(0, any2.Size())
+
+ var i interface{} = []int{1, 2, 3}
+ should.Equal(i, any.GetInterface())
+}
+
+func Test_array_lazy_any_get(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte("[1,[2,3],4]"))
+ should.Equal(3, any.Get(1, 1).ToInt())
+ should.Equal("[1,[2,3],4]", any.ToString())
+}
+
+func Test_array_lazy_any_get_all(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte("[[1],[2],[3,4]]"))
+ should.Equal("[1,2,3]", any.Get('*', 0).ToString())
+ any = Get([]byte("[[[1],[2],[3,4]]]"), 0, '*', 0)
+ should.Equal("[1,2,3]", any.ToString())
+}
+
+func Test_array_wrapper_any_get_all(t *testing.T) {
+ should := require.New(t)
+ any := wrapArray([][]int{
+ {1, 2},
+ {3, 4},
+ {5, 6},
+ })
+ should.Equal("[1,3,5]", any.Get('*', 0).ToString())
+ should.Equal(ArrayValue, any.ValueType())
+ should.True(any.ToBool())
+ should.Equal(1, any.Get(0, 0).ToInt())
+}
+
+func Test_array_lazy_any_get_invalid(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte("[]"))
+ should.Equal(InvalidValue, any.Get(1, 1).ValueType())
+ should.NotNil(any.Get(1, 1).LastError())
+ should.Equal(InvalidValue, any.Get("1").ValueType())
+ should.NotNil(any.Get("1").LastError())
+}
+
+func Test_invalid_array(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte("["), 0)
+ should.Equal(InvalidValue, any.ValueType())
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_any_bool_test.go b/vendor/github.com/json-iterator/go/jsoniter_any_bool_test.go
new file mode 100644
index 000000000..a71743a64
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_any_bool_test.go
@@ -0,0 +1,64 @@
+package jsoniter
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+var boolConvertMap = map[string]bool{
+ "null": false,
+ "true": true,
+ "false": false,
+
+ `"true"`: true,
+ `"false"`: true,
+
+ "123": true,
+ `"123"`: true,
+ "0": false,
+ `"0"`: false,
+ "-1": true,
+ `"-1"`: true,
+
+ "1.1": true,
+ "0.0": false,
+ "-1.1": true,
+ `""`: false,
+ "[1,2]": true,
+ "[]": false,
+ "{}": true,
+ `{"abc":1}`: true,
+}
+
+func Test_read_bool_as_any(t *testing.T) {
+ should := require.New(t)
+
+ var any Any
+ for k, v := range boolConvertMap {
+ any = Get([]byte(k))
+ if v {
+ should.True(any.ToBool(), fmt.Sprintf("origin val is %v", k))
+ } else {
+ should.False(any.ToBool(), fmt.Sprintf("origin val is %v", k))
+ }
+ }
+
+}
+
+func Test_write_bool_to_stream(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte("true"))
+ stream := NewStream(ConfigDefault, nil, 32)
+ any.WriteTo(stream)
+ should.Equal("true", string(stream.Buffer()))
+ should.Equal(any.ValueType(), BoolValue)
+
+ any = Get([]byte("false"))
+ stream = NewStream(ConfigDefault, nil, 32)
+ any.WriteTo(stream)
+ should.Equal("false", string(stream.Buffer()))
+
+ should.Equal(any.ValueType(), BoolValue)
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_any_float_test.go b/vendor/github.com/json-iterator/go/jsoniter_any_float_test.go
new file mode 100644
index 000000000..391ac32a8
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_any_float_test.go
@@ -0,0 +1,102 @@
+package jsoniter
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+var floatConvertMap = map[string]float64{
+ "null": 0,
+ "true": 1,
+ "false": 0,
+
+ `"true"`: 0,
+ `"false"`: 0,
+
+ "1e1": 10,
+ "1e+1": 10,
+ "1e-1": .1,
+ "1E1": 10,
+ "1E+1": 10,
+ "1E-1": .1,
+
+ "-1e1": -10,
+ "-1e+1": -10,
+ "-1e-1": -.1,
+ "-1E1": -10,
+ "-1E+1": -10,
+ "-1E-1": -.1,
+
+ `"1e1"`: 10,
+ `"1e+1"`: 10,
+ `"1e-1"`: .1,
+ `"1E1"`: 10,
+ `"1E+1"`: 10,
+ `"1E-1"`: .1,
+
+ `"-1e1"`: -10,
+ `"-1e+1"`: -10,
+ `"-1e-1"`: -.1,
+ `"-1E1"`: -10,
+ `"-1E+1"`: -10,
+ `"-1E-1"`: -.1,
+
+ "123": 123,
+ `"123true"`: 123,
+ `"+"`: 0,
+ `"-"`: 0,
+
+ `"-123true"`: -123,
+ `"-99.9true"`: -99.9,
+ "0": 0,
+ `"0"`: 0,
+ "-1": -1,
+
+ "1.1": 1.1,
+ "0.0": 0,
+ "-1.1": -1.1,
+ `"+1.1"`: 1.1,
+ `""`: 0,
+ "[1,2]": 1,
+ "[]": 0,
+ "{}": 0,
+ `{"abc":1}`: 0,
+}
+
+func Test_read_any_to_float(t *testing.T) {
+ should := require.New(t)
+ for k, v := range floatConvertMap {
+ any := Get([]byte(k))
+ should.Equal(float64(v), any.ToFloat64(), "the original val is "+k)
+ }
+
+ for k, v := range floatConvertMap {
+ any := Get([]byte(k))
+ should.Equal(float32(v), any.ToFloat32(), "the original val is "+k)
+ }
+}
+
+func Test_read_float_to_any(t *testing.T) {
+ should := require.New(t)
+ any := WrapFloat64(12.3)
+ anyFloat64 := float64(12.3)
+ //negaAnyFloat64 := float64(-1.1)
+ any2 := WrapFloat64(-1.1)
+ should.Equal(float64(12.3), any.ToFloat64())
+ //should.Equal("12.3", any.ToString())
+ should.True(any.ToBool())
+ should.Equal(float32(anyFloat64), any.ToFloat32())
+ should.Equal(int(anyFloat64), any.ToInt())
+ should.Equal(int32(anyFloat64), any.ToInt32())
+ should.Equal(int64(anyFloat64), any.ToInt64())
+ should.Equal(uint(anyFloat64), any.ToUint())
+ should.Equal(uint32(anyFloat64), any.ToUint32())
+ should.Equal(uint64(anyFloat64), any.ToUint64())
+ should.Equal(uint(0), any2.ToUint())
+ should.Equal(uint32(0), any2.ToUint32())
+ should.Equal(uint64(0), any2.ToUint64())
+ should.Equal(any.ValueType(), NumberValue)
+
+ should.Equal("1.23E+01", any.ToString())
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_any_int_test.go b/vendor/github.com/json-iterator/go/jsoniter_any_int_test.go
new file mode 100644
index 000000000..232808528
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_any_int_test.go
@@ -0,0 +1,197 @@
+package jsoniter
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+var intConvertMap = map[string]int{
+ "null": 0,
+ "321.1": 321,
+ "-321.1": -321,
+ `"1.1"`: 1,
+ `"-321.1"`: -321,
+ "0.0": 0,
+ "0": 0,
+ `"0"`: 0,
+ `"0.0"`: 0,
+ "-1.1": -1,
+ "true": 1,
+ "false": 0,
+ `"true"`: 0,
+ `"false"`: 0,
+ `"true123"`: 0,
+ `"123true"`: 123,
+ `"-123true"`: -123,
+ `"1.2332e6"`: 1,
+ `""`: 0,
+ "+": 0,
+ "-": 0,
+ "[]": 0,
+ "[1,2]": 1,
+ `["1","2"]`: 1,
+ // objects do not convert to int (mirroring PHP-style fuzzy conversion)
+ "{}": 0,
+}
+
+func Test_read_any_to_int(t *testing.T) {
+ should := require.New(t)
+
+ // int
+ for k, v := range intConvertMap {
+ any := Get([]byte(k))
+ should.Equal(v, any.ToInt(), fmt.Sprintf("origin val %v", k))
+ }
+
+ // int32
+ for k, v := range intConvertMap {
+ any := Get([]byte(k))
+ should.Equal(int32(v), any.ToInt32(), fmt.Sprintf("original val is %v", k))
+ }
+
+ // int64
+ for k, v := range intConvertMap {
+ any := Get([]byte(k))
+ should.Equal(int64(v), any.ToInt64(), fmt.Sprintf("original val is %v", k))
+ }
+
+}
+
+var uintConvertMap = map[string]int{
+ "null": 0,
+ "321.1": 321,
+ `"1.1"`: 1,
+ `"-123.1"`: 0,
+ "0.0": 0,
+ "0": 0,
+ `"0"`: 0,
+ `"0.0"`: 0,
+ `"00.0"`: 0,
+ "true": 1,
+ "false": 0,
+ `"true"`: 0,
+ `"false"`: 0,
+ `"true123"`: 0,
+ `"+1"`: 1,
+ `"123true"`: 123,
+ `"-123true"`: 0,
+ `"1.2332e6"`: 1,
+ `""`: 0,
+ "+": 0,
+ "-": 0,
+ ".": 0,
+ "[]": 0,
+ "[1,2]": 1,
+ "{}": 0,
+ "{1,2}": 0,
+ "-1.1": 0,
+ "-321.1": 0,
+}
+
+func Test_read_any_to_uint(t *testing.T) {
+ should := require.New(t)
+
+ for k, v := range uintConvertMap {
+ any := Get([]byte(k))
+ should.Equal(uint64(v), any.ToUint64(), fmt.Sprintf("origin val %v", k))
+ }
+
+ for k, v := range uintConvertMap {
+ any := Get([]byte(k))
+ should.Equal(uint32(v), any.ToUint32(), fmt.Sprintf("origin val %v", k))
+ }
+
+ for k, v := range uintConvertMap {
+ any := Get([]byte(k))
+ should.Equal(uint(v), any.ToUint(), fmt.Sprintf("origin val %v", k))
+ }
+
+}
+
+func Test_read_int64_to_any(t *testing.T) {
+ should := require.New(t)
+ any := WrapInt64(12345)
+ should.Equal(12345, any.ToInt())
+ should.Equal(int32(12345), any.ToInt32())
+ should.Equal(int64(12345), any.ToInt64())
+ should.Equal(uint(12345), any.ToUint())
+ should.Equal(uint32(12345), any.ToUint32())
+ should.Equal(uint64(12345), any.ToUint64())
+ should.Equal(float32(12345), any.ToFloat32())
+ should.Equal(float64(12345), any.ToFloat64())
+ should.Equal("12345", any.ToString())
+ should.Equal(true, any.ToBool())
+ should.Equal(any.ValueType(), NumberValue)
+ stream := NewStream(ConfigDefault, nil, 32)
+ any.WriteTo(stream)
+ should.Equal("12345", string(stream.Buffer()))
+}
+func Test_read_int32_to_any(t *testing.T) {
+ should := require.New(t)
+ any := WrapInt32(12345)
+ should.Equal(12345, any.ToInt())
+ should.Equal(int32(12345), any.ToInt32())
+ should.Equal(int64(12345), any.ToInt64())
+ should.Equal(uint(12345), any.ToUint())
+ should.Equal(uint32(12345), any.ToUint32())
+ should.Equal(uint64(12345), any.ToUint64())
+ should.Equal(float32(12345), any.ToFloat32())
+ should.Equal(float64(12345), any.ToFloat64())
+ should.Equal("12345", any.ToString())
+ should.Equal(true, any.ToBool())
+ should.Equal(any.ValueType(), NumberValue)
+ stream := NewStream(ConfigDefault, nil, 32)
+ any.WriteTo(stream)
+ should.Equal("12345", string(stream.Buffer()))
+}
+
+func Test_read_uint32_to_any(t *testing.T) {
+ should := require.New(t)
+ any := WrapUint32(12345)
+ should.Equal(12345, any.ToInt())
+ should.Equal(int32(12345), any.ToInt32())
+ should.Equal(int64(12345), any.ToInt64())
+ should.Equal(uint(12345), any.ToUint())
+ should.Equal(uint32(12345), any.ToUint32())
+ should.Equal(uint64(12345), any.ToUint64())
+ should.Equal(float32(12345), any.ToFloat32())
+ should.Equal(float64(12345), any.ToFloat64())
+ should.Equal("12345", any.ToString())
+ should.Equal(true, any.ToBool())
+ should.Equal(any.ValueType(), NumberValue)
+ stream := NewStream(ConfigDefault, nil, 32)
+ any.WriteTo(stream)
+ should.Equal("12345", string(stream.Buffer()))
+}
+
+func Test_read_uint64_to_any(t *testing.T) {
+ should := require.New(t)
+ any := WrapUint64(12345)
+ should.Equal(12345, any.ToInt())
+ should.Equal(int32(12345), any.ToInt32())
+ should.Equal(int64(12345), any.ToInt64())
+ should.Equal(uint(12345), any.ToUint())
+ should.Equal(uint32(12345), any.ToUint32())
+ should.Equal(uint64(12345), any.ToUint64())
+ should.Equal(float32(12345), any.ToFloat32())
+ should.Equal(float64(12345), any.ToFloat64())
+ should.Equal("12345", any.ToString())
+ should.Equal(true, any.ToBool())
+ should.Equal(any.ValueType(), NumberValue)
+ stream := NewStream(ConfigDefault, nil, 32)
+ any.WriteTo(stream)
+ should.Equal("12345", string(stream.Buffer()))
+ stream = NewStream(ConfigDefault, nil, 32)
+ stream.WriteUint(uint(123))
+ should.Equal("123", string(stream.Buffer()))
+}
+
+func Test_int_lazy_any_get(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte("1234"))
+ // panic!!
+ //should.Equal(any.LastError(), io.EOF)
+ should.Equal(InvalidValue, any.Get(1, "2").ValueType())
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_any_map_test.go b/vendor/github.com/json-iterator/go/jsoniter_any_map_test.go
new file mode 100644
index 000000000..f93ea4e61
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_any_map_test.go
@@ -0,0 +1,14 @@
+package jsoniter
+
+import (
+ "github.com/stretchr/testify/require"
+ "testing"
+)
+
+func Test_wrap_map(t *testing.T) {
+ should := require.New(t)
+ any := Wrap(map[string]string{"Field1": "hello"})
+ should.Equal("hello", any.Get("Field1").ToString())
+ any = Wrap(map[string]string{"Field1": "hello"})
+ should.Equal(1, any.Size())
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_any_null_test.go b/vendor/github.com/json-iterator/go/jsoniter_any_null_test.go
new file mode 100644
index 000000000..eb09497b9
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_any_null_test.go
@@ -0,0 +1,15 @@
+package jsoniter
+
+import (
+ "github.com/stretchr/testify/require"
+ "testing"
+)
+
+func Test_read_null_as_any(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte(`null`))
+ should.Equal(0, any.ToInt())
+ should.Equal(float64(0), any.ToFloat64())
+ should.Equal("", any.ToString())
+ should.False(any.ToBool())
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_any_object_test.go b/vendor/github.com/json-iterator/go/jsoniter_any_object_test.go
new file mode 100644
index 000000000..9b215f95e
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_any_object_test.go
@@ -0,0 +1,107 @@
+package jsoniter
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_read_object_as_any(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte(`{"a":"stream","c":"d"}`))
+ should.Equal(`{"a":"stream","c":"d"}`, any.ToString())
+ // partial parse
+ should.Equal("stream", any.Get("a").ToString())
+ should.Equal("d", any.Get("c").ToString())
+ should.Equal(2, len(any.Keys()))
+ any = Get([]byte(`{"a":"stream","c":"d"}`))
+ // full parse
+ should.Equal(2, len(any.Keys()))
+ should.Equal(2, any.Size())
+ should.True(any.ToBool())
+ should.Equal(0, any.ToInt())
+ should.Equal(ObjectValue, any.ValueType())
+ should.Nil(any.LastError())
+ obj := struct {
+ A string
+ }{}
+ any.ToVal(&obj)
+ should.Equal("stream", obj.A)
+}
+
+func Test_object_lazy_any_get(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte(`{"a":{"stream":{"c":"d"}}}`))
+ should.Equal("d", any.Get("a", "stream", "c").ToString())
+}
+
+func Test_object_lazy_any_get_all(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte(`{"a":[0],"stream":[1]}`))
+ should.Contains(any.Get('*', 0).ToString(), `"a":0`)
+}
+
+func Test_object_lazy_any_get_invalid(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte(`{}`))
+ should.Equal(InvalidValue, any.Get("a", "stream", "c").ValueType())
+ should.Equal(InvalidValue, any.Get(1).ValueType())
+}
+
+func Test_wrap_map_and_convert_to_any(t *testing.T) {
+ should := require.New(t)
+ any := Wrap(map[string]interface{}{"a": 1})
+ should.True(any.ToBool())
+ should.Equal(0, any.ToInt())
+ should.Equal(int32(0), any.ToInt32())
+ should.Equal(int64(0), any.ToInt64())
+ should.Equal(float32(0), any.ToFloat32())
+ should.Equal(float64(0), any.ToFloat64())
+ should.Equal(uint(0), any.ToUint())
+ should.Equal(uint32(0), any.ToUint32())
+ should.Equal(uint64(0), any.ToUint64())
+}
+
+func Test_wrap_object_and_convert_to_any(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ field2 string
+ }
+ any := Wrap(TestObject{"hello", "world"})
+ should.Equal("hello", any.Get("Field1").ToString())
+ any = Wrap(TestObject{"hello", "world"})
+ should.Equal(2, any.Size())
+ should.Equal(`{"Field1":"hello"}`, any.Get('*').ToString())
+
+ should.Equal(0, any.ToInt())
+ should.Equal(int32(0), any.ToInt32())
+ should.Equal(int64(0), any.ToInt64())
+ should.Equal(float32(0), any.ToFloat32())
+ should.Equal(float64(0), any.ToFloat64())
+ should.Equal(uint(0), any.ToUint())
+ should.Equal(uint32(0), any.ToUint32())
+ should.Equal(uint64(0), any.ToUint64())
+ should.True(any.ToBool())
+ should.Equal(`{"Field1":"hello"}`, any.ToString())
+
+ // cannot pass!
+ //stream := NewStream(ConfigDefault, nil, 32)
+ //any.WriteTo(stream)
+ //should.Equal(`{"Field1":"hello"}`, string(stream.Buffer()))
+ // cannot pass!
+
+}
+
+func Test_any_within_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 Any
+ Field2 Any
+ }
+ obj := TestObject{}
+ err := UnmarshalFromString(`{"Field1": "hello", "Field2": [1,2,3]}`, &obj)
+ should.Nil(err)
+ should.Equal("hello", obj.Field1.ToString())
+ should.Equal("[1,2,3]", obj.Field2.ToString())
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_any_string_test.go b/vendor/github.com/json-iterator/go/jsoniter_any_string_test.go
new file mode 100644
index 000000000..119f7d27d
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_any_string_test.go
@@ -0,0 +1,56 @@
+package jsoniter
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+var stringConvertMap = map[string]string{
+ "null": "",
+ "321.1": "321.1",
+ `"1.1"`: "1.1",
+ `"-123.1"`: "-123.1",
+ "0.0": "0.0",
+ "0": "0",
+ `"0"`: "0",
+ `"0.0"`: "0.0",
+ `"00.0"`: "00.0",
+ "true": "true",
+ "false": "false",
+ `"true"`: "true",
+ `"false"`: "false",
+ `"true123"`: "true123",
+ `"+1"`: "+1",
+ "[]": "[]",
+ "[1,2]": "[1,2]",
+ "{}": "{}",
+ `{"a":1, "stream":true}`: `{"a":1, "stream":true}`,
+}
+
+func Test_read_any_to_string(t *testing.T) {
+ should := require.New(t)
+ for k, v := range stringConvertMap {
+ any := Get([]byte(k))
+ should.Equal(v, any.ToString(), "original val "+k)
+ }
+}
+
+func Test_read_string_as_any(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte(`"hello"`))
+ should.Equal("hello", any.ToString())
+ should.True(any.ToBool())
+ any = Get([]byte(`" "`))
+ should.False(any.ToBool())
+ any = Get([]byte(`"false"`))
+ should.True(any.ToBool())
+ any = Get([]byte(`"123"`))
+ should.Equal(123, any.ToInt())
+}
+
+func Test_wrap_string(t *testing.T) {
+ should := require.New(t)
+ any := WrapString("123")
+ should.Equal(123, any.ToInt())
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_array_test.go b/vendor/github.com/json-iterator/go/jsoniter_array_test.go
new file mode 100644
index 000000000..0f71ecb68
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_array_test.go
@@ -0,0 +1,213 @@
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "github.com/stretchr/testify/require"
+ "testing"
+)
+
+func Test_empty_array(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `[]`)
+ cont := iter.ReadArray()
+ should.False(cont)
+ iter = ParseString(ConfigDefault, `[]`)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ should.FailNow("should not call")
+ return true
+ })
+}
+
+func Test_one_element(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `[1]`)
+ should.True(iter.ReadArray())
+ should.Equal(1, iter.ReadInt())
+ should.False(iter.ReadArray())
+ iter = ParseString(ConfigDefault, `[1]`)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ should.Equal(1, iter.ReadInt())
+ return true
+ })
+}
+
+func Test_two_elements(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `[1,2]`)
+ should.True(iter.ReadArray())
+ should.Equal(int64(1), iter.ReadInt64())
+ should.True(iter.ReadArray())
+ should.Equal(int64(2), iter.ReadInt64())
+ should.False(iter.ReadArray())
+ iter = ParseString(ConfigDefault, `[1,2]`)
+ should.Equal([]interface{}{float64(1), float64(2)}, iter.Read())
+}
+
+func Test_whitespace_in_head(t *testing.T) {
+ iter := ParseString(ConfigDefault, ` [1]`)
+ cont := iter.ReadArray()
+ if cont != true {
+ t.FailNow()
+ }
+ if iter.ReadUint64() != 1 {
+ t.FailNow()
+ }
+}
+
+func Test_whitespace_after_array_start(t *testing.T) {
+ iter := ParseString(ConfigDefault, `[ 1]`)
+ cont := iter.ReadArray()
+ if cont != true {
+ t.FailNow()
+ }
+ if iter.ReadUint64() != 1 {
+ t.FailNow()
+ }
+}
+
+func Test_whitespace_before_array_end(t *testing.T) {
+ iter := ParseString(ConfigDefault, `[1 ]`)
+ cont := iter.ReadArray()
+ if cont != true {
+ t.FailNow()
+ }
+ if iter.ReadUint64() != 1 {
+ t.FailNow()
+ }
+ cont = iter.ReadArray()
+ if cont != false {
+ t.FailNow()
+ }
+}
+
+func Test_whitespace_before_comma(t *testing.T) {
+ iter := ParseString(ConfigDefault, `[1 ,2]`)
+ cont := iter.ReadArray()
+ if cont != true {
+ t.FailNow()
+ }
+ if iter.ReadUint64() != 1 {
+ t.FailNow()
+ }
+ cont = iter.ReadArray()
+ if cont != true {
+ t.FailNow()
+ }
+ if iter.ReadUint64() != 2 {
+ t.FailNow()
+ }
+ cont = iter.ReadArray()
+ if cont != false {
+ t.FailNow()
+ }
+}
+
+func Test_write_array(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(Config{IndentionStep: 2}.Froze(), buf, 4096)
+ stream.WriteArrayStart()
+ stream.WriteInt(1)
+ stream.WriteMore()
+ stream.WriteInt(2)
+ stream.WriteArrayEnd()
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("[\n 1,\n 2\n]", buf.String())
+}
+
+func Test_write_val_array(t *testing.T) {
+ should := require.New(t)
+ val := []int{1, 2, 3}
+ str, err := MarshalToString(&val)
+ should.Nil(err)
+ should.Equal("[1,2,3]", str)
+}
+
+func Test_write_val_empty_array(t *testing.T) {
+ should := require.New(t)
+ val := []int{}
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal("[]", str)
+}
+
+func Test_write_array_of_interface_in_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field []interface{}
+ Field2 string
+ }
+ val := TestObject{[]interface{}{1, 2}, ""}
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Contains(str, `"Field":[1,2]`)
+ should.Contains(str, `"Field2":""`)
+}
+
+func Test_encode_byte_array(t *testing.T) {
+ should := require.New(t)
+ bytes, err := json.Marshal([]byte{1, 2, 3})
+ should.Nil(err)
+ should.Equal(`"AQID"`, string(bytes))
+ bytes, err = Marshal([]byte{1, 2, 3})
+ should.Nil(err)
+ should.Equal(`"AQID"`, string(bytes))
+}
+
+func Test_decode_byte_array_from_base64(t *testing.T) {
+ should := require.New(t)
+ data := []byte{}
+ err := json.Unmarshal([]byte(`"AQID"`), &data)
+ should.Nil(err)
+ should.Equal([]byte{1, 2, 3}, data)
+ err = Unmarshal([]byte(`"AQID"`), &data)
+ should.Nil(err)
+ should.Equal([]byte{1, 2, 3}, data)
+}
+
+func Test_decode_byte_array_from_array(t *testing.T) {
+ should := require.New(t)
+ data := []byte{}
+ err := json.Unmarshal([]byte(`[1,2,3]`), &data)
+ should.Nil(err)
+ should.Equal([]byte{1, 2, 3}, data)
+ err = Unmarshal([]byte(`[1,2,3]`), &data)
+ should.Nil(err)
+ should.Equal([]byte{1, 2, 3}, data)
+}
+
+func Test_decode_slice(t *testing.T) {
+ should := require.New(t)
+ slice := make([]string, 0, 5)
+ UnmarshalFromString(`["hello", "world"]`, &slice)
+ should.Equal([]string{"hello", "world"}, slice)
+}
+
+func Test_decode_large_slice(t *testing.T) {
+ should := require.New(t)
+ slice := make([]int, 0, 1)
+ UnmarshalFromString(`[1,2,3,4,5,6,7,8,9]`, &slice)
+ should.Equal([]int{1, 2, 3, 4, 5, 6, 7, 8, 9}, slice)
+}
+
+func Benchmark_jsoniter_array(b *testing.B) {
+ b.ReportAllocs()
+ input := []byte(`[1,2,3,4,5,6,7,8,9]`)
+ iter := ParseBytes(ConfigDefault, input)
+ b.ResetTimer()
+ for n := 0; n < b.N; n++ {
+ iter.ResetBytes(input)
+ for iter.ReadArray() {
+ iter.ReadUint64()
+ }
+ }
+}
+
+func Benchmark_json_array(b *testing.B) {
+ for n := 0; n < b.N; n++ {
+ result := []interface{}{}
+ json.Unmarshal([]byte(`[1,2,3]`), &result)
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_bool_test.go b/vendor/github.com/json-iterator/go/jsoniter_bool_test.go
new file mode 100644
index 000000000..461b88ba3
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_bool_test.go
@@ -0,0 +1,113 @@
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_true(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `true`)
+ should.True(iter.ReadBool())
+ iter = ParseString(ConfigDefault, `true`)
+ should.Equal(true, iter.Read())
+}
+
+func Test_false(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `false`)
+ should.False(iter.ReadBool())
+}
+
+func Test_write_true_false(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteTrue()
+ stream.WriteFalse()
+ stream.WriteBool(false)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("truefalsefalse", buf.String())
+}
+
+func Test_write_val_bool(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(true)
+ should.Equal(stream.Buffered(), 4)
+ stream.Flush()
+ should.Equal(stream.Buffered(), 0)
+ should.Nil(stream.Error)
+ should.Equal("true", buf.String())
+}
+
+func Test_encode_string_bool(t *testing.T) {
+ type TestObject struct {
+ Field bool `json:",omitempty,string"`
+ }
+ should := require.New(t)
+ output, err := json.Marshal(TestObject{true})
+ should.Nil(err)
+ should.Equal(`{"Field":"true"}`, string(output))
+ output, err = Marshal(TestObject{true})
+ should.Nil(err)
+ should.Equal(`{"Field":"true"}`, string(output))
+}
+
+func Test_decode_string_bool(t *testing.T) {
+ type TestObject struct {
+ Field bool `json:",omitempty,string"`
+ }
+ should := require.New(t)
+ obj := TestObject{}
+ err := json.Unmarshal([]byte(`{"Field":"true"}`), &obj)
+ should.Nil(err)
+ should.True(obj.Field)
+
+ obj = TestObject{}
+ err = json.Unmarshal([]byte(`{"Field":true}`), &obj)
+ should.NotNil(err)
+
+ obj = TestObject{}
+ err = Unmarshal([]byte(`{"Field":"true"}`), &obj)
+ should.Nil(err)
+ should.True(obj.Field)
+
+ obj = TestObject{}
+ err = Unmarshal([]byte(`{"Field":true}`), &obj)
+ should.NotNil(err)
+}
+
+func Test_bool_can_be_null(t *testing.T) {
+ type TestData struct {
+ Field bool `json:"field"`
+ }
+ should := require.New(t)
+
+ obj := TestData{}
+ data1 := []byte(`{"field": true}`)
+ err := Unmarshal(data1, &obj)
+ should.NoError(err)
+ should.Equal(true, obj.Field)
+
+ data2 := []byte(`{"field": null}`)
+ err = Unmarshal(data2, &obj)
+ should.NoError(err)
+ // Same behavior as stdlib, not touching the existing value.
+ should.Equal(true, obj.Field)
+
+ // Checking stdlib behavior as well
+ obj2 := TestData{}
+ err = json.Unmarshal(data1, &obj2)
+ should.NoError(err)
+ should.Equal(true, obj2.Field)
+
+ err = json.Unmarshal(data2, &obj2)
+ should.NoError(err)
+ should.Equal(true, obj2.Field)
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_customize_test.go b/vendor/github.com/json-iterator/go/jsoniter_customize_test.go
new file mode 100644
index 000000000..628f89e45
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_customize_test.go
@@ -0,0 +1,341 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "strconv"
+ "testing"
+ "time"
+ "unsafe"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_customize_type_decoder(t *testing.T) {
+ RegisterTypeDecoderFunc("time.Time", func(ptr unsafe.Pointer, iter *Iterator) {
+ t, err := time.ParseInLocation("2006-01-02 15:04:05", iter.ReadString(), time.UTC)
+ if err != nil {
+ iter.Error = err
+ return
+ }
+ *((*time.Time)(ptr)) = t
+ })
+ defer ConfigDefault.(*frozenConfig).cleanDecoders()
+ val := time.Time{}
+ err := Unmarshal([]byte(`"2016-12-05 08:43:28"`), &val)
+ if err != nil {
+ t.Fatal(err)
+ }
+ year, month, day := val.Date()
+ if year != 2016 || month != 12 || day != 5 {
+ t.Fatal(val)
+ }
+}
+
+func Test_customize_type_encoder(t *testing.T) {
+ should := require.New(t)
+ RegisterTypeEncoderFunc("time.Time", func(ptr unsafe.Pointer, stream *Stream) {
+ t := *((*time.Time)(ptr))
+ stream.WriteString(t.UTC().Format("2006-01-02 15:04:05"))
+ }, nil)
+ defer ConfigDefault.(*frozenConfig).cleanEncoders()
+ val := time.Unix(0, 0)
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`"1970-01-01 00:00:00"`, str)
+}
+
+func Test_customize_byte_array_encoder(t *testing.T) {
+ ConfigDefault.(*frozenConfig).cleanEncoders()
+ should := require.New(t)
+ RegisterTypeEncoderFunc("[]uint8", func(ptr unsafe.Pointer, stream *Stream) {
+ t := *((*[]byte)(ptr))
+ stream.WriteString(string(t))
+ }, nil)
+ defer ConfigDefault.(*frozenConfig).cleanEncoders()
+ val := []byte("abc")
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`"abc"`, str)
+}
+
+func Test_customize_float_marshal(t *testing.T) {
+ should := require.New(t)
+ json := Config{MarshalFloatWith6Digits: true}.Froze()
+ str, err := json.MarshalToString(float32(1.23456789))
+ should.Nil(err)
+ should.Equal("1.234568", str)
+}
+
+type Tom struct {
+ field1 string
+}
+
+func Test_customize_field_decoder(t *testing.T) {
+ RegisterFieldDecoderFunc("jsoniter.Tom", "field1", func(ptr unsafe.Pointer, iter *Iterator) {
+ *((*string)(ptr)) = strconv.Itoa(iter.ReadInt())
+ })
+ defer ConfigDefault.(*frozenConfig).cleanDecoders()
+ tom := Tom{}
+ err := Unmarshal([]byte(`{"field1": 100}`), &tom)
+ if err != nil {
+ t.Fatal(err)
+ }
+}
+
+type TestObject1 struct {
+ Field1 string
+}
+
+type testExtension struct {
+ DummyExtension
+}
+
+func (extension *testExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
+ if structDescriptor.Type.String() != "jsoniter.TestObject1" {
+ return
+ }
+ binding := structDescriptor.GetField("Field1")
+ binding.Encoder = &funcEncoder{fun: func(ptr unsafe.Pointer, stream *Stream) {
+ str := *((*string)(ptr))
+ val, _ := strconv.Atoi(str)
+ stream.WriteInt(val)
+ }}
+ binding.Decoder = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
+ *((*string)(ptr)) = strconv.Itoa(iter.ReadInt())
+ }}
+ binding.ToNames = []string{"field-1"}
+ binding.FromNames = []string{"field-1"}
+}
+
+func Test_customize_field_by_extension(t *testing.T) {
+ should := require.New(t)
+ RegisterExtension(&testExtension{})
+ obj := TestObject1{}
+ err := UnmarshalFromString(`{"field-1": 100}`, &obj)
+ should.Nil(err)
+ should.Equal("100", obj.Field1)
+ str, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{"field-1":100}`, str)
+}
+
+type timeImplementedMarshaler time.Time
+
+func (obj timeImplementedMarshaler) MarshalJSON() ([]byte, error) {
+ seconds := time.Time(obj).Unix()
+ return []byte(strconv.FormatInt(seconds, 10)), nil
+}
+
+func Test_marshaler(t *testing.T) {
+ type TestObject struct {
+ Field timeImplementedMarshaler
+ }
+ should := require.New(t)
+ val := timeImplementedMarshaler(time.Unix(123, 0))
+ obj := TestObject{val}
+ bytes, err := json.Marshal(obj)
+ should.Nil(err)
+ should.Equal(`{"Field":123}`, string(bytes))
+ str, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{"Field":123}`, str)
+}
+
+func Test_marshaler_and_encoder(t *testing.T) {
+ type TestObject struct {
+ Field *timeImplementedMarshaler
+ }
+ ConfigDefault.(*frozenConfig).cleanEncoders()
+ should := require.New(t)
+ RegisterTypeEncoderFunc("jsoniter.timeImplementedMarshaler", func(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteString("hello from encoder")
+ }, nil)
+ val := timeImplementedMarshaler(time.Unix(123, 0))
+ obj := TestObject{&val}
+ bytes, err := json.Marshal(obj)
+ should.Nil(err)
+ should.Equal(`{"Field":123}`, string(bytes))
+ str, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{"Field":"hello from encoder"}`, str)
+}
+
+type ObjectImplementedUnmarshaler int
+
+func (obj *ObjectImplementedUnmarshaler) UnmarshalJSON(s []byte) error {
+ val, _ := strconv.ParseInt(string(s[1:len(s)-1]), 10, 64)
+ *obj = ObjectImplementedUnmarshaler(val)
+ return nil
+}
+
+func Test_unmarshaler(t *testing.T) {
+ should := require.New(t)
+ var obj ObjectImplementedUnmarshaler
+ err := json.Unmarshal([]byte(` "100" `), &obj)
+ should.Nil(err)
+ should.Equal(100, int(obj))
+ iter := ParseString(ConfigDefault, ` "100" `)
+ iter.ReadVal(&obj)
+ should.Nil(err)
+ should.Equal(100, int(obj))
+}
+
+func Test_unmarshaler_and_decoder(t *testing.T) {
+ type TestObject struct {
+ Field *ObjectImplementedUnmarshaler
+ Field2 string
+ }
+ ConfigDefault.(*frozenConfig).cleanDecoders()
+ should := require.New(t)
+ RegisterTypeDecoderFunc("jsoniter.ObjectImplementedUnmarshaler", func(ptr unsafe.Pointer, iter *Iterator) {
+ *(*ObjectImplementedUnmarshaler)(ptr) = 10
+ iter.Skip()
+ })
+ obj := TestObject{}
+ val := ObjectImplementedUnmarshaler(0)
+ obj.Field = &val
+ err := json.Unmarshal([]byte(`{"Field":"100"}`), &obj)
+ should.Nil(err)
+ should.Equal(100, int(*obj.Field))
+ err = Unmarshal([]byte(`{"Field":"100"}`), &obj)
+ should.Nil(err)
+ should.Equal(10, int(*obj.Field))
+}
+
+type tmString string
+type tmStruct struct {
+ String tmString
+}
+
+func (s tmStruct) MarshalJSON() ([]byte, error) {
+ var b []byte
+ b = append(b, '"')
+ b = append(b, s.String...)
+ b = append(b, '"')
+ return b, nil
+}
+
+func Test_marshaler_on_struct(t *testing.T) {
+ fixed := tmStruct{"hello"}
+ //json.Marshal(fixed)
+ Marshal(fixed)
+}
+
+type withChan struct {
+ F2 chan []byte
+}
+
+func (q withChan) MarshalJSON() ([]byte, error) {
+ return []byte(`""`), nil
+}
+
+func (q *withChan) UnmarshalJSON(value []byte) error {
+ return nil
+}
+
+func Test_marshal_json_with_chan(t *testing.T) {
+ type TestObject struct {
+ F1 withChan
+ }
+ should := require.New(t)
+ output, err := MarshalToString(TestObject{})
+ should.Nil(err)
+ should.Equal(`{"F1":""}`, output)
+}
+
+type withTime struct {
+ time.Time
+}
+
+func (t *withTime) UnmarshalJSON(b []byte) error {
+ return nil
+}
+func (t withTime) MarshalJSON() ([]byte, error) {
+ return []byte(`"fake"`), nil
+}
+
+func Test_marshal_json_with_time(t *testing.T) {
+ type S1 struct {
+ F1 withTime
+ F2 *withTime
+ }
+ type TestObject struct {
+ TF1 S1
+ }
+ should := require.New(t)
+ obj := TestObject{
+ S1{
+ F1: withTime{
+ time.Unix(0, 0),
+ },
+ F2: &withTime{
+ time.Unix(0, 0),
+ },
+ },
+ }
+ output, err := json.Marshal(obj)
+ should.Nil(err)
+ should.Equal(`{"TF1":{"F1":"fake","F2":"fake"}}`, string(output))
+ output, err = Marshal(obj)
+ should.Nil(err)
+ should.Equal(`{"TF1":{"F1":"fake","F2":"fake"}}`, string(output))
+ obj = TestObject{}
+ should.Nil(json.Unmarshal([]byte(`{"TF1":{"F1":"fake","F2":"fake"}}`), &obj))
+ should.NotNil(obj.TF1.F2)
+ obj = TestObject{}
+ should.Nil(Unmarshal([]byte(`{"TF1":{"F1":"fake","F2":"fake"}}`), &obj))
+ should.NotNil(obj.TF1.F2)
+}
+
+func Test_customize_tag_key(t *testing.T) {
+
+ type TestObject struct {
+ Field string `orm:"field"`
+ }
+
+ should := require.New(t)
+ json := Config{TagKey: "orm"}.Froze()
+ str, err := json.MarshalToString(TestObject{"hello"})
+ should.Nil(err)
+ should.Equal(`{"field":"hello"}`, str)
+}
+
+func Test_recursive_empty_interface_customization(t *testing.T) {
+ t.Skip()
+ var obj interface{}
+ RegisterTypeDecoderFunc("interface {}", func(ptr unsafe.Pointer, iter *Iterator) {
+ switch iter.WhatIsNext() {
+ case NumberValue:
+ *(*interface{})(ptr) = iter.ReadInt64()
+ default:
+ *(*interface{})(ptr) = iter.Read()
+ }
+ })
+ should := require.New(t)
+ Unmarshal([]byte("[100]"), &obj)
+ should.Equal([]interface{}{int64(100)}, obj)
+}
+
+type GeoLocation struct {
+ Id string `json:"id,omitempty" db:"id"`
+}
+
+func (p *GeoLocation) MarshalJSON() ([]byte, error) {
+ return []byte(`{}`), nil
+}
+
+func (p *GeoLocation) UnmarshalJSON(input []byte) error {
+ p.Id = "hello"
+ return nil
+}
+
+func Test_marshal_and_unmarshal_on_non_pointer(t *testing.T) {
+ should := require.New(t)
+ locations := []GeoLocation{{"000"}}
+ bytes, err := Marshal(locations)
+ should.Nil(err)
+ should.Equal("[{}]", string(bytes))
+ err = Unmarshal([]byte("[1]"), &locations)
+ should.Nil(err)
+ should.Equal("hello", locations[0].Id)
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_demo_test.go b/vendor/github.com/json-iterator/go/jsoniter_demo_test.go
new file mode 100644
index 000000000..85718d1b2
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_demo_test.go
@@ -0,0 +1,87 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_bind_api_demo(t *testing.T) {
+ should := require.New(t)
+ val := []int{}
+ err := UnmarshalFromString(`[0,1,2,3] `, &val)
+ should.Nil(err)
+ should.Equal([]int{0, 1, 2, 3}, val)
+}
+
+func Test_iterator_api_demo(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `[0,1,2,3]`)
+ total := 0
+ for iter.ReadArray() {
+ total += iter.ReadInt()
+ }
+ should.Equal(6, total)
+}
+
+type People struct {
+ Name string
+ Gender string
+ Age int
+ Address string
+ Mobile string
+ Country string
+ Height int
+}
+
+func jsoniterMarshal(p *People) error {
+ _, err := Marshal(p)
+ if nil != err {
+ return err
+ }
+ return nil
+}
+func stdMarshal(p *People) error {
+ _, err := json.Marshal(p)
+ if nil != err {
+ return err
+ }
+ return nil
+}
+
+func BenchmarkJsoniterMarshal(b *testing.B) {
+ var p People
+ p.Address = "上海市徐汇区漕宝路"
+ p.Age = 30
+ p.Country = "中国"
+ p.Gender = "male"
+ p.Height = 170
+ p.Mobile = "18502120533"
+ p.Name = "Elvin"
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ err := jsoniterMarshal(&p)
+ if nil != err {
+ b.Error(err)
+ }
+ }
+}
+
+func BenchmarkStdMarshal(b *testing.B) {
+ var p People
+ p.Address = "上海市徐汇区漕宝路"
+ p.Age = 30
+ p.Country = "中国"
+ p.Gender = "male"
+ p.Height = 170
+ p.Mobile = "18502120533"
+ p.Name = "Elvin"
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ err := stdMarshal(&p)
+ if nil != err {
+ b.Error(err)
+ }
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_encode_interface_test.go b/vendor/github.com/json-iterator/go/jsoniter_encode_interface_test.go
new file mode 100644
index 000000000..14b7add9a
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_encode_interface_test.go
@@ -0,0 +1,42 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_encode_interface(t *testing.T) {
+ should := require.New(t)
+ var a interface{}
+ a = int8(10)
+ str, err := MarshalToString(a)
+ should.Nil(err)
+ should.Equal(str, "10")
+ a = float32(3)
+ str, err = MarshalToString(a)
+ should.Nil(err)
+ should.Equal(str, "3")
+ a = map[string]interface{}{"abc": 1}
+ str, err = MarshalToString(a)
+ should.Nil(err)
+ should.Equal(str, `{"abc":1}`)
+ a = uintptr(1)
+ str, err = MarshalToString(a)
+ should.Nil(err)
+ should.Equal(str, "1")
+ a = uint(1)
+ str, err = MarshalToString(a)
+ should.Nil(err)
+ should.Equal(str, "1")
+ a = uint8(1)
+ str, err = MarshalToString(a)
+ should.Nil(err)
+ should.Equal(str, "1")
+ a = json.RawMessage("abc")
+ MarshalToString(a)
+ str, err = MarshalToString(a)
+ should.Nil(err)
+ should.Equal(str, "abc")
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_enum_marshaler_test.go b/vendor/github.com/json-iterator/go/jsoniter_enum_marshaler_test.go
new file mode 100644
index 000000000..69e1e8cd3
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_enum_marshaler_test.go
@@ -0,0 +1,50 @@
+package jsoniter
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+type MyEnum int64
+
+const (
+ MyEnumA MyEnum = iota
+ MyEnumB
+)
+
+func (m *MyEnum) MarshalJSON() ([]byte, error) {
+ return []byte(fmt.Sprintf(`"foo-%d"`, int(*m))), nil
+}
+
+func (m *MyEnum) UnmarshalJSON(jb []byte) error {
+ switch string(jb) {
+ case `"foo-1"`:
+ *m = MyEnumB
+ default:
+ *m = MyEnumA
+ }
+ return nil
+}
+
+func Test_custom_marshaler_on_enum(t *testing.T) {
+ type Wrapper struct {
+ Payload interface{}
+ }
+ type Wrapper2 struct {
+ Payload MyEnum
+ }
+ should := require.New(t)
+
+ w := Wrapper{Payload: MyEnumB}
+
+ jb, err := Marshal(w)
+ should.NoError(err)
+ should.Equal(`{"Payload":"foo-1"}`, string(jb))
+
+ var w2 Wrapper2
+ err = Unmarshal(jb, &w2)
+ should.NoError(err)
+ should.Equal(MyEnumB, w2.Payload)
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_fixed_array_test.go b/vendor/github.com/json-iterator/go/jsoniter_fixed_array_test.go
new file mode 100644
index 000000000..6824b1196
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_fixed_array_test.go
@@ -0,0 +1,37 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "github.com/stretchr/testify/require"
+ "testing"
+)
+
+func Test_encode_fixed_array(t *testing.T) {
+ should := require.New(t)
+ type FixedArray [2]float64
+ fixed := FixedArray{0.1, 1.0}
+ output, err := MarshalToString(fixed)
+ should.Nil(err)
+ should.Equal("[0.1,1]", output)
+}
+
+func Test_encode_fixed_array_of_map(t *testing.T) {
+ should := require.New(t)
+ type FixedArray [2]map[string]string
+ fixed := FixedArray{map[string]string{"1": "2"}, map[string]string{"3": "4"}}
+ output, err := MarshalToString(fixed)
+ should.Nil(err)
+ should.Equal(`[{"1":"2"},{"3":"4"}]`, output)
+}
+
+func Test_decode_fixed_array(t *testing.T) {
+ should := require.New(t)
+ type FixedArray [2]float64
+ var fixed FixedArray
+ should.Nil(json.Unmarshal([]byte("[1,2,3]"), &fixed))
+ should.Equal(float64(1), fixed[0])
+ should.Equal(float64(2), fixed[1])
+ should.Nil(Unmarshal([]byte("[1,2,3]"), &fixed))
+ should.Equal(float64(1), fixed[0])
+ should.Equal(float64(2), fixed[1])
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_float_test.go b/vendor/github.com/json-iterator/go/jsoniter_float_test.go
new file mode 100644
index 000000000..d90ff2bd5
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_float_test.go
@@ -0,0 +1,210 @@
+// +build go1.8
+
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "strconv"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_read_big_float(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `12.3`)
+ val := iter.ReadBigFloat()
+ val64, _ := val.Float64()
+ should.Equal(12.3, val64)
+}
+
+func Test_read_big_int(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `92233720368547758079223372036854775807`)
+ val := iter.ReadBigInt()
+ should.NotNil(val)
+ should.Equal(`92233720368547758079223372036854775807`, val.String())
+}
+
+func Test_read_float(t *testing.T) {
+ inputs := []string{
+ `1.1`, `1000`, `9223372036854775807`, `12.3`, `-12.3`, `720368.54775807`, `720368.547758075`,
+ `1e1`, `1e+1`, `1e-1`, `1E1`, `1E+1`, `1E-1`, `-1e1`, `-1e+1`, `-1e-1`,
+ }
+ for _, input := range inputs {
+ // non-streaming
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, input+",")
+ expected, err := strconv.ParseFloat(input, 32)
+ should.Nil(err)
+ should.Equal(float32(expected), iter.ReadFloat32())
+ })
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, input+",")
+ expected, err := strconv.ParseFloat(input, 64)
+ should.Nil(err)
+ should.Equal(expected, iter.ReadFloat64())
+ })
+ // streaming
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := Parse(ConfigDefault, bytes.NewBufferString(input+","), 2)
+ expected, err := strconv.ParseFloat(input, 32)
+ should.Nil(err)
+ should.Equal(float32(expected), iter.ReadFloat32())
+ })
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := Parse(ConfigDefault, bytes.NewBufferString(input+","), 2)
+ val := float64(0)
+ err := json.Unmarshal([]byte(input), &val)
+ should.Nil(err)
+ should.Equal(val, iter.ReadFloat64())
+ })
+ }
+}
+
+func Test_read_float_as_interface(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `12.3`)
+ should.Equal(float64(12.3), iter.Read())
+}
+
+func Test_wrap_float(t *testing.T) {
+ should := require.New(t)
+ str, err := MarshalToString(WrapFloat64(12.3))
+ should.Nil(err)
+ should.Equal("12.3", str)
+}
+
+func Test_write_float32(t *testing.T) {
+ vals := []float32{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
+ -0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
+ for _, val := range vals {
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteFloat32Lossy(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ output, err := json.Marshal(val)
+ should.Nil(err)
+ should.Equal(string(output), buf.String())
+ })
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ output, err := json.Marshal(val)
+ should.Nil(err)
+ should.Equal(string(output), buf.String())
+ })
+ }
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 10)
+ stream.WriteRaw("abcdefg")
+ stream.WriteFloat32Lossy(1.123456)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("abcdefg1.123456", buf.String())
+
+ stream = NewStream(ConfigDefault, nil, 0)
+ stream.WriteFloat32(float32(0.0000001))
+ should.Equal("1e-07", string(stream.buf))
+}
+
+func Test_write_float64(t *testing.T) {
+ vals := []float64{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff,
+ -0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001}
+ for _, val := range vals {
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteFloat64Lossy(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatFloat(val, 'f', -1, 64), buf.String())
+ })
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatFloat(val, 'f', -1, 64), buf.String())
+ })
+ }
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 10)
+ stream.WriteRaw("abcdefg")
+ stream.WriteFloat64Lossy(1.123456)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("abcdefg1.123456", buf.String())
+
+ stream = NewStream(ConfigDefault, nil, 0)
+ stream.WriteFloat64(float64(0.0000001))
+ should.Equal("1e-07", string(stream.buf))
+}
+
+func Test_read_float64_cursor(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, "[1.23456789\n,2,3]")
+ should.True(iter.ReadArray())
+ should.Equal(1.23456789, iter.Read())
+ should.True(iter.ReadArray())
+ should.Equal(float64(2), iter.Read())
+}
+
+func Test_read_float_scientific(t *testing.T) {
+ should := require.New(t)
+ var obj interface{}
+ should.Nil(UnmarshalFromString(`1e1`, &obj))
+ should.Equal(float64(10), obj)
+ should.Nil(json.Unmarshal([]byte(`1e1`), &obj))
+ should.Equal(float64(10), obj)
+ should.Nil(UnmarshalFromString(`1.0e1`, &obj))
+ should.Equal(float64(10), obj)
+ should.Nil(json.Unmarshal([]byte(`1.0e1`), &obj))
+ should.Equal(float64(10), obj)
+}
+
+func Test_lossy_float_marshal(t *testing.T) {
+ should := require.New(t)
+ api := Config{MarshalFloatWith6Digits: true}.Froze()
+ output, err := api.MarshalToString(float64(0.1234567))
+ should.Nil(err)
+ should.Equal("0.123457", output)
+ output, err = api.MarshalToString(float32(0.1234567))
+ should.Nil(err)
+ should.Equal("0.123457", output)
+}
+
+func Benchmark_jsoniter_float(b *testing.B) {
+ b.ReportAllocs()
+ input := []byte(`1.1123,`)
+ iter := NewIterator(ConfigDefault)
+ for n := 0; n < b.N; n++ {
+ iter.ResetBytes(input)
+ iter.ReadFloat64()
+ }
+}
+
+func Benchmark_json_float(b *testing.B) {
+ for n := 0; n < b.N; n++ {
+ result := float64(0)
+ json.Unmarshal([]byte(`1.1`), &result)
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_int_test.go b/vendor/github.com/json-iterator/go/jsoniter_int_test.go
new file mode 100644
index 000000000..378947ca4
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_int_test.go
@@ -0,0 +1,567 @@
+// +build go1.8
+
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "strconv"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_read_uint64_invalid(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, ",")
+ iter.ReadUint64()
+ should.NotNil(iter.Error)
+}
+
+func Test_read_int_from_null(t *testing.T) {
+
+ type TestObject struct {
+ F1 int8
+ F2 int16
+ F3 int32
+ F4 int64
+ F5 int
+ F6 uint8
+ F7 uint16
+ F8 uint32
+ F9 uint64
+ F10 uint
+ F11 float32
+ F12 float64
+ F13 uintptr
+ }
+
+ should := require.New(t)
+ obj := TestObject{}
+ err := Unmarshal([]byte(`{
+ "f1":null,
+ "f2":null,
+ "f3":null,
+ "f4":null,
+ "f5":null,
+ "f6":null,
+ "f7":null,
+ "f8":null,
+ "f9":null,
+ "f10":null,
+ "f11":null,
+ "f12":null,
+ "f13":null
+ }`), &obj)
+ should.Nil(err)
+}
+
+func _int8(t *testing.T) {
+ inputs := []string{`127`, `-128`}
+ for _, input := range inputs {
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, input)
+ expected, err := strconv.ParseInt(input, 10, 8)
+ should.Nil(err)
+ should.Equal(int8(expected), iter.ReadInt8())
+ })
+ }
+}
+
+func Test_read_int16(t *testing.T) {
+ inputs := []string{`32767`, `-32768`}
+ for _, input := range inputs {
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, input)
+ expected, err := strconv.ParseInt(input, 10, 16)
+ should.Nil(err)
+ should.Equal(int16(expected), iter.ReadInt16())
+ })
+ }
+}
+
+func Test_read_int32(t *testing.T) {
+ inputs := []string{`1`, `12`, `123`, `1234`, `12345`, `123456`, `2147483647`, `-2147483648`}
+ for _, input := range inputs {
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, input)
+ expected, err := strconv.ParseInt(input, 10, 32)
+ should.Nil(err)
+ should.Equal(int32(expected), iter.ReadInt32())
+ })
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := Parse(ConfigDefault, bytes.NewBufferString(input), 2)
+ expected, err := strconv.ParseInt(input, 10, 32)
+ should.Nil(err)
+ should.Equal(int32(expected), iter.ReadInt32())
+ })
+ }
+}
+
+func Test_read_int32_array(t *testing.T) {
+ should := require.New(t)
+ input := `[123,456,789]`
+ val := make([]int32, 0)
+ UnmarshalFromString(input, &val)
+ should.Equal(3, len(val))
+}
+
+func Test_read_int64_array(t *testing.T) {
+ should := require.New(t)
+ input := `[123,456,789]`
+ val := make([]int64, 0)
+ UnmarshalFromString(input, &val)
+ should.Equal(3, len(val))
+}
+
+func Test_read_int_overflow(t *testing.T) {
+ should := require.New(t)
+ inputArr := []string{"123451", "-123451"}
+ for _, s := range inputArr {
+ iter := ParseString(ConfigDefault, s)
+ iter.ReadInt8()
+ should.NotNil(iter.Error)
+
+ iterU := ParseString(ConfigDefault, s)
+ iterU.ReadUint8()
+ should.NotNil(iterU.Error)
+
+ }
+
+ inputArr = []string{"12345678912", "-12345678912"}
+ for _, s := range inputArr {
+ iter := ParseString(ConfigDefault, s)
+ iter.ReadInt16()
+ should.NotNil(iter.Error)
+
+ iterUint := ParseString(ConfigDefault, s)
+ iterUint.ReadUint16()
+ should.NotNil(iterUint.Error)
+ }
+
+ inputArr = []string{"3111111111", "-3111111111", "1234232323232323235678912", "-1234567892323232323212"}
+ for _, s := range inputArr {
+ iter := ParseString(ConfigDefault, s)
+ iter.ReadInt32()
+ should.NotNil(iter.Error)
+
+ iterUint := ParseString(ConfigDefault, s)
+ iterUint.ReadUint32()
+ should.NotNil(iterUint.Error)
+ }
+
+ inputArr = []string{"9223372036854775811", "-9523372036854775807", "1234232323232323235678912", "-1234567892323232323212"}
+ for _, s := range inputArr {
+ iter := ParseString(ConfigDefault, s)
+ iter.ReadInt64()
+ should.NotNil(iter.Error)
+
+ iterUint := ParseString(ConfigDefault, s)
+ iterUint.ReadUint64()
+ should.NotNil(iterUint.Error)
+ }
+}
+
+func Test_read_int64(t *testing.T) {
+ inputs := []string{`1`, `12`, `123`, `1234`, `12345`, `123456`, `9223372036854775807`, `-9223372036854775808`}
+ for _, input := range inputs {
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, input)
+ expected, err := strconv.ParseInt(input, 10, 64)
+ should.Nil(err)
+ should.Equal(expected, iter.ReadInt64())
+ })
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := Parse(ConfigDefault, bytes.NewBufferString(input), 2)
+ expected, err := strconv.ParseInt(input, 10, 64)
+ should.Nil(err)
+ should.Equal(expected, iter.ReadInt64())
+ })
+ }
+}
+
+func Test_read_int64_overflow(t *testing.T) {
+ should := require.New(t)
+ input := "123456789123456789123456789123456789,"
+ iter := ParseString(ConfigDefault, input)
+ iter.ReadInt64()
+ should.NotNil(iter.Error)
+}
+
+func Test_wrap_int(t *testing.T) {
+ should := require.New(t)
+ str, err := MarshalToString(WrapInt64(100))
+ should.Nil(err)
+ should.Equal("100", str)
+}
+
+func Test_write_uint8(t *testing.T) {
+ vals := []uint8{0, 1, 11, 111, 255}
+ for _, val := range vals {
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteUint8(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatUint(uint64(val), 10), buf.String())
+ })
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatUint(uint64(val), 10), buf.String())
+ })
+ }
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 3)
+ stream.WriteRaw("a")
+ stream.WriteUint8(100) // should clear buffer
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("a100", buf.String())
+}
+
+func Test_write_int8(t *testing.T) {
+ vals := []int8{0, 1, -1, 99, 0x7f, -0x80}
+ for _, val := range vals {
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteInt8(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatInt(int64(val), 10), buf.String())
+ })
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatInt(int64(val), 10), buf.String())
+ })
+ }
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4)
+ stream.WriteRaw("a")
+ stream.WriteInt8(-100) // should clear buffer
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("a-100", buf.String())
+}
+
+func Test_write_uint16(t *testing.T) {
+ vals := []uint16{0, 1, 11, 111, 255, 0xfff, 0xffff}
+ for _, val := range vals {
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteUint16(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatUint(uint64(val), 10), buf.String())
+ })
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatUint(uint64(val), 10), buf.String())
+ })
+ }
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 5)
+ stream.WriteRaw("a")
+ stream.WriteUint16(10000) // should clear buffer
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("a10000", buf.String())
+}
+
+func Test_write_int16(t *testing.T) {
+ vals := []int16{0, 1, 11, 111, 255, 0xfff, 0x7fff, -0x8000}
+ for _, val := range vals {
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteInt16(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatInt(int64(val), 10), buf.String())
+ })
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatInt(int64(val), 10), buf.String())
+ })
+ }
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 6)
+ stream.WriteRaw("a")
+ stream.WriteInt16(-10000) // should clear buffer
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("a-10000", buf.String())
+}
+
+func Test_write_uint32(t *testing.T) {
+ vals := []uint32{0, 1, 11, 111, 255, 999999, 0xfff, 0xffff, 0xfffff, 0xffffff, 0xfffffff, 0xffffffff}
+ for _, val := range vals {
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteUint32(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatUint(uint64(val), 10), buf.String())
+ })
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatUint(uint64(val), 10), buf.String())
+ })
+ }
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 10)
+ stream.WriteRaw("a")
+ stream.WriteUint32(0xffffffff) // should clear buffer
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("a4294967295", buf.String())
+}
+
+func Test_write_int32(t *testing.T) {
+ vals := []int32{0, 1, 11, 111, 255, 999999, 0xfff, 0xffff, 0xfffff, 0xffffff, 0xfffffff, 0x7fffffff, -0x80000000}
+ for _, val := range vals {
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteInt32(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatInt(int64(val), 10), buf.String())
+ })
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatInt(int64(val), 10), buf.String())
+ })
+ }
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 11)
+ stream.WriteRaw("a")
+ stream.WriteInt32(-0x7fffffff) // should clear buffer
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("a-2147483647", buf.String())
+}
+
+func Test_write_uint64(t *testing.T) {
+ vals := []uint64{0, 1, 11, 111, 255, 999999, 0xfff, 0xffff, 0xfffff, 0xffffff, 0xfffffff, 0xffffffff,
+ 0xfffffffff, 0xffffffffff, 0xfffffffffff, 0xffffffffffff, 0xfffffffffffff, 0xffffffffffffff,
+ 0xfffffffffffffff, 0xffffffffffffffff}
+ for _, val := range vals {
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteUint64(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatUint(uint64(val), 10), buf.String())
+ })
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatUint(uint64(val), 10), buf.String())
+ })
+ }
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 10)
+ stream.WriteRaw("a")
+ stream.WriteUint64(0xffffffff) // should clear buffer
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("a4294967295", buf.String())
+}
+
+func Test_write_int64(t *testing.T) {
+ vals := []int64{0, 1, 11, 111, 255, 999999, 0xfff, 0xffff, 0xfffff, 0xffffff, 0xfffffff, 0xffffffff,
+ 0xfffffffff, 0xffffffffff, 0xfffffffffff, 0xffffffffffff, 0xfffffffffffff, 0xffffffffffffff,
+ 0xfffffffffffffff, 0x7fffffffffffffff, -0x8000000000000000}
+ for _, val := range vals {
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteInt64(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatInt(val, 10), buf.String())
+ })
+ t.Run(fmt.Sprintf("%v", val), func(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(strconv.FormatInt(val, 10), buf.String())
+ })
+ }
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 10)
+ stream.WriteRaw("a")
+ stream.WriteInt64(0xffffffff) // should clear buffer
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("a4294967295", buf.String())
+}
+
+func Test_write_val_int(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal(1001)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("1001", buf.String())
+}
+
+func Test_write_val_int_ptr(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ val := 1001
+ stream.WriteVal(&val)
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("1001", buf.String())
+}
+
+func Test_json_number(t *testing.T) {
+ should := require.New(t)
+ var arr []json.Number
+ err := Unmarshal([]byte(`[1]`), &arr)
+ should.Nil(err)
+ should.Equal(json.Number("1"), arr[0])
+ str, err := MarshalToString(arr)
+ should.Nil(err)
+ should.Equal(`[1]`, str)
+}
+
+func Test_jsoniter_number(t *testing.T) {
+ should := require.New(t)
+ var arr []Number
+ err := Unmarshal([]byte(`[1]`), &arr)
+ should.Nil(err)
+ should.Equal(Number("1"), arr[0])
+ str, isNumber := CastJsonNumber(arr[0])
+ should.True(isNumber)
+ should.Equal("1", str)
+}
+
+func Test_non_numeric_as_number(t *testing.T) {
+ should := require.New(t)
+ var v1 json.Number
+ err := Unmarshal([]byte(`"500"`), &v1)
+ should.Nil(err)
+ should.Equal("500", string(v1))
+ var v2 Number
+ err = Unmarshal([]byte(`"500"`), &v2)
+ should.Nil(err)
+ should.Equal("500", string(v2))
+}
+
+func Test_null_as_number(t *testing.T) {
+ should := require.New(t)
+ var v1 json.Number
+ err := json.Unmarshal([]byte(`null`), &v1)
+ should.Nil(err)
+ should.Equal("", string(v1))
+ var v2 Number
+ err = Unmarshal([]byte(`null`), &v2)
+ should.Nil(err)
+ should.Equal("", string(v2))
+}
+
+func Test_float_as_int(t *testing.T) {
+ should := require.New(t)
+ var i int
+ should.NotNil(Unmarshal([]byte(`1.1`), &i))
+}
+
+func Benchmark_jsoniter_encode_int(b *testing.B) {
+ stream := NewStream(ConfigDefault, ioutil.Discard, 64)
+ for n := 0; n < b.N; n++ {
+ stream.n = 0
+ stream.WriteUint64(0xffffffff)
+ }
+}
+
+func Benchmark_itoa(b *testing.B) {
+ for n := 0; n < b.N; n++ {
+ strconv.FormatInt(0xffffffff, 10)
+ }
+}
+
+func Benchmark_jsoniter_int(b *testing.B) {
+ iter := NewIterator(ConfigDefault)
+ input := []byte(`100`)
+ for n := 0; n < b.N; n++ {
+ iter.ResetBytes(input)
+ iter.ReadInt64()
+ }
+}
+
+func Benchmark_json_int(b *testing.B) {
+ for n := 0; n < b.N; n++ {
+ result := int64(0)
+ json.Unmarshal([]byte(`-100`), &result)
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_interface_test.go b/vendor/github.com/json-iterator/go/jsoniter_interface_test.go
new file mode 100644
index 000000000..869429a12
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_interface_test.go
@@ -0,0 +1,576 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "fmt"
+ "testing"
+ "unsafe"
+
+ "github.com/stretchr/testify/require"
+ "reflect"
+)
+
+func Test_write_empty_interface_via_placeholder(t *testing.T) {
+ fmt.Println(^uint(0) >> 1)
+ should := require.New(t)
+ m := map[uint32]interface{}{1: "hello"}
+ inf := reflect.ValueOf(m).MapIndex(reflect.ValueOf(uint32(1))).Interface()
+ encoder := &placeholderEncoder{
+ cfg: ConfigFastest.(*frozenConfig),
+ cacheKey: reflect.TypeOf(m).Elem(),
+ }
+ stream := ConfigFastest.BorrowStream(nil)
+ encoderOfType(ConfigFastest.(*frozenConfig), reflect.TypeOf(m).Elem())
+ encoder.EncodeInterface(inf, stream)
+ should.Equal(`"hello"`, string(stream.Buffer()))
+}
+
+func Test_write_array_of_interface(t *testing.T) {
+ should := require.New(t)
+ array := []interface{}{"hello"}
+ str, err := MarshalToString(array)
+ should.Nil(err)
+ should.Equal(`["hello"]`, str)
+}
+
+func Test_write_map_of_interface(t *testing.T) {
+ should := require.New(t)
+ val := map[string]interface{}{"hello": "world"}
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`{"hello":"world"}`, str)
+}
+
+func Test_write_map_of_interface_in_struct(t *testing.T) {
+ type TestObject struct {
+ Field map[string]interface{}
+ }
+ should := require.New(t)
+ val := TestObject{map[string]interface{}{"hello": "world"}}
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`{"Field":{"hello":"world"}}`, str)
+}
+
+func Test_write_map_of_interface_in_struct_with_two_fields(t *testing.T) {
+ type TestObject struct {
+ Field map[string]interface{}
+ Field2 string
+ }
+ should := require.New(t)
+ val := TestObject{map[string]interface{}{"hello": "world"}, ""}
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Contains(str, `"Field":{"hello":"world"}`)
+}
+
+type MyInterface interface {
+ Hello() string
+}
+
+type MyString string
+
+func (ms MyString) Hello() string {
+ return string(ms)
+}
+
+func Test_write_map_of_custom_interface(t *testing.T) {
+ should := require.New(t)
+ myStr := MyString("world")
+ should.Equal("world", myStr.Hello())
+ val := map[string]MyInterface{"hello": myStr}
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`{"hello":"world"}`, str)
+}
+
+func Test_write_interface(t *testing.T) {
+ should := require.New(t)
+ var val interface{}
+ val = "hello"
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`"hello"`, str)
+}
+
+func Test_read_interface(t *testing.T) {
+ should := require.New(t)
+ var val interface{}
+ err := UnmarshalFromString(`"hello"`, &val)
+ should.Nil(err)
+ should.Equal("hello", val)
+ err = UnmarshalFromString(`1e1`, &val)
+ should.Nil(err)
+ should.Equal(float64(10), val)
+ err = UnmarshalFromString(`1.0e1`, &val)
+ should.Nil(err)
+ should.Equal(float64(10), val)
+ err = json.Unmarshal([]byte(`1.0e1`), &val)
+ should.Nil(err)
+ should.Equal(float64(10), val)
+}
+
+func Test_read_custom_interface(t *testing.T) {
+ should := require.New(t)
+ var val MyInterface
+ RegisterTypeDecoderFunc("jsoniter.MyInterface", func(ptr unsafe.Pointer, iter *Iterator) {
+ *((*MyInterface)(ptr)) = MyString(iter.ReadString())
+ })
+ err := UnmarshalFromString(`"hello"`, &val)
+ should.Nil(err)
+ should.Equal("hello", val.Hello())
+}
+
+func Test_decode_object_contain_empty_interface(t *testing.T) {
+ type TestObject struct {
+ Field interface{}
+ }
+ should := require.New(t)
+ obj := TestObject{}
+ obj.Field = 1024
+ should.Nil(UnmarshalFromString(`{"Field": "hello"}`, &obj))
+ should.Equal("hello", obj.Field)
+}
+
+func Test_decode_object_contain_non_empty_interface(t *testing.T) {
+ type TestObject struct {
+ Field MyInterface
+ }
+ should := require.New(t)
+ obj := TestObject{}
+ obj.Field = MyString("abc")
+ should.Nil(UnmarshalFromString(`{"Field": "hello"}`, &obj))
+ should.Equal(MyString("hello"), obj.Field)
+}
+
+func Test_encode_object_contain_empty_interface(t *testing.T) {
+ type TestObject struct {
+ Field interface{}
+ }
+ should := require.New(t)
+ obj := TestObject{}
+ obj.Field = 1024
+ str, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{"Field":1024}`, str)
+}
+
+func Test_encode_object_contain_non_empty_interface(t *testing.T) {
+ type TestObject struct {
+ Field MyInterface
+ }
+ should := require.New(t)
+ obj := TestObject{}
+ obj.Field = MyString("hello")
+ str, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{"Field":"hello"}`, str)
+}
+
+func Test_nil_non_empty_interface(t *testing.T) {
+ ConfigDefault.(*frozenConfig).cleanEncoders()
+ ConfigDefault.(*frozenConfig).cleanDecoders()
+ type TestObject struct {
+ Field []MyInterface
+ }
+ should := require.New(t)
+ obj := TestObject{}
+ b := []byte(`{"Field":["AAA"]}`)
+ should.NotNil(json.Unmarshal(b, &obj))
+ should.NotNil(Unmarshal(b, &obj))
+}
+
+func Test_read_large_number_as_interface(t *testing.T) {
+ should := require.New(t)
+ var val interface{}
+ err := Config{UseNumber: true}.Froze().UnmarshalFromString(`123456789123456789123456789`, &val)
+ should.Nil(err)
+ output, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`123456789123456789123456789`, output)
+}
+
+func Test_nested_one_field_struct(t *testing.T) {
+ should := require.New(t)
+ type YetYetAnotherObject struct {
+ Field string
+ }
+ type YetAnotherObject struct {
+ Field *YetYetAnotherObject
+ }
+ type AnotherObject struct {
+ Field *YetAnotherObject
+ }
+ type TestObject struct {
+ Me *AnotherObject
+ }
+ obj := TestObject{&AnotherObject{&YetAnotherObject{&YetYetAnotherObject{"abc"}}}}
+ str, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{"Me":{"Field":{"Field":{"Field":"abc"}}}}`, str)
+ str, err = MarshalToString(&obj)
+ should.Nil(err)
+ should.Equal(`{"Me":{"Field":{"Field":{"Field":"abc"}}}}`, str)
+}
+
+func Test_struct_with_embedded_ptr_with_tag(t *testing.T) {
+ type O1 struct {
+ O1F string
+ }
+
+ type Option struct {
+ O1 *O1
+ }
+
+ type T struct {
+ Option `json:","`
+ }
+ var obj T
+ should := require.New(t)
+ output, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{"O1":null}`, output)
+}
+
+func Test_struct_with_one_nil(t *testing.T) {
+ type TestObject struct {
+ F *float64
+ }
+ var obj TestObject
+ should := require.New(t)
+ output, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{"F":null}`, output)
+}
+
+func Test_struct_with_one_nil_embedded(t *testing.T) {
+ type Parent struct {
+ Field1 string
+ Field2 string
+ }
+ type TestObject struct {
+ *Parent
+ }
+ obj := TestObject{}
+ should := require.New(t)
+ bytes, err := json.Marshal(obj)
+ should.Nil(err)
+ should.Equal("{}", string(bytes))
+ output, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{}`, output)
+}
+
+func Test_struct_with_not_nil_embedded(t *testing.T) {
+ type Parent struct {
+ Field0 string
+ Field1 []string
+ Field2 map[string]interface{}
+ }
+ type TestObject struct {
+ *Parent
+ }
+ should := require.New(t)
+ var obj TestObject
+ err := UnmarshalFromString(`{"Field0":"1","Field1":null,"Field2":{"K":"V"}}`, &obj)
+ should.Nil(err)
+ should.Nil(obj.Field1)
+ should.Equal(map[string]interface{}{"K": "V"}, obj.Field2)
+ should.Equal("1", obj.Field0)
+}
+
+func Test_array_with_one_nil_ptr(t *testing.T) {
+ obj := [1]*float64{nil}
+ should := require.New(t)
+ output, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`[null]`, output)
+}
+
+func Test_array_with_one_not_nil_ptr(t *testing.T) {
+ two := float64(2)
+ obj := [1]*float64{&two}
+ should := require.New(t)
+ output, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`[2]`, output)
+}
+
+func Test_embedded_array_with_one_nil(t *testing.T) {
+ type TestObject struct {
+ Field1 int
+ Field2 [1]*float64
+ }
+ var obj TestObject
+ should := require.New(t)
+ output, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Contains(output, `"Field2":[null]`)
+}
+
+func Test_array_with_nothing(t *testing.T) {
+ var obj [2]*float64
+ should := require.New(t)
+ output, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`[null,null]`, output)
+}
+
+func Test_unmarshal_ptr_to_interface(t *testing.T) {
+ type TestData struct {
+ Name string `json:"name"`
+ }
+ should := require.New(t)
+ var obj interface{} = &TestData{}
+ err := json.Unmarshal([]byte(`{"name":"value"}`), &obj)
+ should.Nil(err)
+ should.Equal("&{value}", fmt.Sprintf("%v", obj))
+ obj = interface{}(&TestData{})
+ err = Unmarshal([]byte(`{"name":"value"}`), &obj)
+ should.Nil(err)
+ should.Equal("&{value}", fmt.Sprintf("%v", obj))
+}
+
+func Test_nil_out_null_interface(t *testing.T) {
+ type TestData struct {
+ Field interface{} `json:"field"`
+ }
+ should := require.New(t)
+
+ var boolVar bool
+ obj := TestData{
+ Field: &boolVar,
+ }
+
+ data1 := []byte(`{"field": true}`)
+
+ err := Unmarshal(data1, &obj)
+ should.NoError(err)
+ should.Equal(true, *(obj.Field.(*bool)))
+
+ data2 := []byte(`{"field": null}`)
+
+ err = Unmarshal(data2, &obj)
+ should.NoError(err)
+ should.Equal(nil, obj.Field)
+
+ // Checking stdlib behavior matches.
+ obj2 := TestData{
+ Field: &boolVar,
+ }
+
+ err = json.Unmarshal(data1, &obj2)
+ should.NoError(err)
+ should.Equal(true, *(obj2.Field.(*bool)))
+
+ err = json.Unmarshal(data2, &obj2)
+ should.NoError(err)
+ should.Equal(nil, obj2.Field)
+}
+
+func Test_omitempty_nil_interface(t *testing.T) {
+ type TestData struct {
+ Field interface{} `json:"field,omitempty"`
+ }
+ should := require.New(t)
+
+ obj := TestData{
+ Field: nil,
+ }
+
+ js, err := json.Marshal(obj)
+ should.NoError(err)
+ should.Equal("{}", string(js))
+
+ str, err := MarshalToString(obj)
+ should.NoError(err)
+ should.Equal(string(js), str)
+}
+
+func Test_omitempty_nil_nonempty_interface(t *testing.T) {
+ type TestData struct {
+ Field MyInterface `json:"field,omitempty"`
+ }
+ should := require.New(t)
+
+ obj := TestData{
+ Field: nil,
+ }
+
+ js, err := json.Marshal(obj)
+ should.NoError(err)
+ should.Equal("{}", string(js))
+
+ str, err := MarshalToString(obj)
+ should.NoError(err)
+ should.Equal(string(js), str)
+
+ obj.Field = MyString("hello")
+ err = UnmarshalFromString(`{"field":null}`, &obj)
+ should.NoError(err)
+ should.Equal(nil, obj.Field)
+}
+
+func Test_marshal_nil_marshaler_interface(t *testing.T) {
+ type TestData struct {
+ Field json.Marshaler `json:"field"`
+ }
+ should := require.New(t)
+
+ obj := TestData{
+ Field: nil,
+ }
+
+ js, err := json.Marshal(obj)
+ should.NoError(err)
+ should.Equal(`{"field":null}`, string(js))
+
+ str, err := MarshalToString(obj)
+ should.NoError(err)
+ should.Equal(string(js), str)
+}
+
+func Test_marshal_nil_nonempty_interface(t *testing.T) {
+ type TestData struct {
+ Field MyInterface `json:"field"`
+ }
+ should := require.New(t)
+
+ obj := TestData{
+ Field: nil,
+ }
+
+ js, err := json.Marshal(obj)
+ should.NoError(err)
+ should.Equal(`{"field":null}`, string(js))
+
+ str, err := MarshalToString(obj)
+ should.NoError(err)
+ should.Equal(string(js), str)
+
+ obj.Field = MyString("hello")
+ err = Unmarshal(js, &obj)
+ should.NoError(err)
+ should.Equal(nil, obj.Field)
+}
+
+func Test_overwrite_interface_ptr_value_with_nil(t *testing.T) {
+ type Wrapper struct {
+ Payload interface{} `json:"payload,omitempty"`
+ }
+ type Payload struct {
+ Value int `json:"val,omitempty"`
+ }
+
+ should := require.New(t)
+
+ payload := &Payload{}
+ wrapper := &Wrapper{
+ Payload: &payload,
+ }
+
+ err := json.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
+ should.Equal(nil, err)
+ should.Equal(&payload, wrapper.Payload)
+ should.Equal(42, (*(wrapper.Payload.(**Payload))).Value)
+
+ err = json.Unmarshal([]byte(`{"payload": null}`), &wrapper)
+ should.Equal(nil, err)
+ should.Equal(&payload, wrapper.Payload)
+ should.Equal((*Payload)(nil), payload)
+
+ payload = &Payload{}
+ wrapper = &Wrapper{
+ Payload: &payload,
+ }
+
+ err = Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
+ should.Equal(nil, err)
+ should.Equal(&payload, wrapper.Payload)
+ should.Equal(42, (*(wrapper.Payload.(**Payload))).Value)
+
+ err = Unmarshal([]byte(`{"payload": null}`), &wrapper)
+ should.Equal(nil, err)
+ should.Equal(&payload, wrapper.Payload)
+ should.Equal((*Payload)(nil), payload)
+}
+
+func Test_overwrite_interface_value_with_nil(t *testing.T) {
+ type Wrapper struct {
+ Payload interface{} `json:"payload,omitempty"`
+ }
+ type Payload struct {
+ Value int `json:"val,omitempty"`
+ }
+
+ should := require.New(t)
+
+ payload := &Payload{}
+ wrapper := &Wrapper{
+ Payload: payload,
+ }
+
+ err := json.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
+ should.Equal(nil, err)
+ should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
+
+ err = json.Unmarshal([]byte(`{"payload": null}`), &wrapper)
+ should.Equal(nil, err)
+ should.Equal(nil, wrapper.Payload)
+ should.Equal(42, payload.Value)
+
+ payload = &Payload{}
+ wrapper = &Wrapper{
+ Payload: payload,
+ }
+
+ err = Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
+ should.Equal(nil, err)
+ should.Equal(42, (*(wrapper.Payload.(*Payload))).Value)
+
+ err = Unmarshal([]byte(`{"payload": null}`), &wrapper)
+ should.Equal(nil, err)
+ should.Equal(nil, wrapper.Payload)
+ should.Equal(42, payload.Value)
+}
+
+func Test_unmarshal_into_nil(t *testing.T) {
+ type Payload struct {
+ Value int `json:"val,omitempty"`
+ }
+ type Wrapper struct {
+ Payload interface{} `json:"payload,omitempty"`
+ }
+
+ should := require.New(t)
+
+ var payload *Payload
+ wrapper := &Wrapper{
+ Payload: payload,
+ }
+
+ err := json.Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
+ should.Nil(err)
+ should.NotNil(wrapper.Payload)
+ should.Nil(payload)
+
+ err = json.Unmarshal([]byte(`{"payload": null}`), &wrapper)
+ should.Nil(err)
+ should.Nil(wrapper.Payload)
+ should.Nil(payload)
+
+ payload = nil
+ wrapper = &Wrapper{
+ Payload: payload,
+ }
+
+ err = Unmarshal([]byte(`{"payload": {"val": 42}}`), &wrapper)
+ should.Nil(err)
+ should.NotNil(wrapper.Payload)
+ should.Nil(payload)
+
+ err = Unmarshal([]byte(`{"payload": null}`), &wrapper)
+ should.Nil(err)
+ should.Nil(wrapper.Payload)
+ should.Nil(payload)
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_invalid_test.go b/vendor/github.com/json-iterator/go/jsoniter_invalid_test.go
new file mode 100644
index 000000000..69be4dc53
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_invalid_test.go
@@ -0,0 +1,138 @@
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "github.com/stretchr/testify/require"
+ "io"
+ "testing"
+)
+
+func Test_missing_object_end(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Metric string `json:"metric"`
+ Tags map[string]interface{} `json:"tags"`
+ }
+ obj := TestObject{}
+ should.NotNil(UnmarshalFromString(`{"metric": "sys.777","tags": {"a":"123"}`, &obj))
+}
+
+func Test_missing_array_end(t *testing.T) {
+ should := require.New(t)
+ should.NotNil(UnmarshalFromString(`[1,2,3`, &[]int{}))
+}
+
+func Test_invalid_any(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte("[]"))
+ should.Equal(InvalidValue, any.Get(0.3).ValueType())
+ // is nil correct ?
+ should.Equal(nil, any.Get(0.3).GetInterface())
+
+ any = any.Get(0.3)
+ should.Equal(false, any.ToBool())
+ should.Equal(int(0), any.ToInt())
+ should.Equal(int32(0), any.ToInt32())
+ should.Equal(int64(0), any.ToInt64())
+ should.Equal(uint(0), any.ToUint())
+ should.Equal(uint32(0), any.ToUint32())
+ should.Equal(uint64(0), any.ToUint64())
+ should.Equal(float32(0), any.ToFloat32())
+ should.Equal(float64(0), any.ToFloat64())
+ should.Equal("", any.ToString())
+
+ should.Equal(InvalidValue, any.Get(0.1).Get(1).ValueType())
+}
+
+func Test_invalid_struct_input(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct{}
+ input := []byte{54, 141, 30}
+ obj := TestObject{}
+ should.NotNil(Unmarshal(input, &obj))
+}
+
+func Test_invalid_slice_input(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct{}
+ input := []byte{93}
+ obj := []string{}
+ should.NotNil(Unmarshal(input, &obj))
+}
+
+func Test_invalid_array_input(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct{}
+ input := []byte{93}
+ obj := [0]string{}
+ should.NotNil(Unmarshal(input, &obj))
+}
+
+func Test_invalid_float(t *testing.T) {
+ inputs := []string{
+ `1.e1`, // dot without following digit
+ `1.`, // dot can not be the last char
+ ``, // empty number
+ `01`, // extra leading zero
+ `-`, // negative without digit
+ `--`, // double negative
+ `--2`, // double negative
+ }
+ for _, input := range inputs {
+ t.Run(input, func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, input+",")
+ iter.Skip()
+ should.NotEqual(io.EOF, iter.Error)
+ should.NotNil(iter.Error)
+ v := float64(0)
+ should.NotNil(json.Unmarshal([]byte(input), &v))
+ iter = ParseString(ConfigDefault, input+",")
+ iter.ReadFloat64()
+ should.NotEqual(io.EOF, iter.Error)
+ should.NotNil(iter.Error)
+ iter = ParseString(ConfigDefault, input+",")
+ iter.ReadFloat32()
+ should.NotEqual(io.EOF, iter.Error)
+ should.NotNil(iter.Error)
+ })
+ }
+}
+
+func Test_chan(t *testing.T) {
+ t.Skip("do not support chan")
+
+ type TestObject struct {
+ MyChan chan bool
+ MyField int
+ }
+
+ should := require.New(t)
+ obj := TestObject{}
+ str, err := json.Marshal(obj)
+ should.Nil(err)
+ should.Equal(``, str)
+}
+
+func Test_invalid_number(t *testing.T) {
+ type Message struct {
+ Number int `json:"number"`
+ }
+ obj := Message{}
+ decoder := ConfigCompatibleWithStandardLibrary.NewDecoder(bytes.NewBufferString(`{"number":"5"}`))
+ err := decoder.Decode(&obj)
+ invalidStr := err.Error()
+ result, err := ConfigCompatibleWithStandardLibrary.Marshal(invalidStr)
+ should := require.New(t)
+ should.Nil(err)
+ result2, err := json.Marshal(invalidStr)
+ should.Nil(err)
+ should.Equal(string(result2), string(result))
+}
+
+func Test_valid(t *testing.T) {
+ should := require.New(t)
+ should.True(Valid([]byte(`{}`)))
+ should.False(Valid([]byte(`{`)))
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_io_test.go b/vendor/github.com/json-iterator/go/jsoniter_io_test.go
new file mode 100644
index 000000000..5046ccc40
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_io_test.go
@@ -0,0 +1,65 @@
+package jsoniter
+
+import (
+ "bytes"
+ "github.com/stretchr/testify/require"
+ "io"
+ "testing"
+)
+
+func Test_read_by_one(t *testing.T) {
+ iter := Parse(ConfigDefault, bytes.NewBufferString("abc"), 1)
+ b := iter.readByte()
+ if iter.Error != nil {
+ t.Fatal(iter.Error)
+ }
+ if b != 'a' {
+ t.Fatal(b)
+ }
+ iter.unreadByte()
+ if iter.Error != nil {
+ t.Fatal(iter.Error)
+ }
+ b = iter.readByte()
+ if iter.Error != nil {
+ t.Fatal(iter.Error)
+ }
+ if b != 'a' {
+ t.Fatal(b)
+ }
+}
+
+func Test_read_by_two(t *testing.T) {
+ should := require.New(t)
+ iter := Parse(ConfigDefault, bytes.NewBufferString("abc"), 2)
+ b := iter.readByte()
+ should.Nil(iter.Error)
+ should.Equal(byte('a'), b)
+ b = iter.readByte()
+ should.Nil(iter.Error)
+ should.Equal(byte('b'), b)
+ iter.unreadByte()
+ should.Nil(iter.Error)
+ iter.unreadByte()
+ should.Nil(iter.Error)
+ b = iter.readByte()
+ should.Nil(iter.Error)
+ should.Equal(byte('a'), b)
+}
+
+func Test_read_until_eof(t *testing.T) {
+ iter := Parse(ConfigDefault, bytes.NewBufferString("abc"), 2)
+ iter.readByte()
+ iter.readByte()
+ b := iter.readByte()
+ if iter.Error != nil {
+ t.Fatal(iter.Error)
+ }
+ if b != 'c' {
+ t.Fatal(b)
+ }
+ iter.readByte()
+ if iter.Error != io.EOF {
+ t.Fatal(iter.Error)
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_iterator_test.go b/vendor/github.com/json-iterator/go/jsoniter_iterator_test.go
new file mode 100644
index 000000000..951574d8d
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_iterator_test.go
@@ -0,0 +1,66 @@
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "strconv"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_bad_case(t *testing.T) {
+ // field := *(*string)(unsafe.Pointer(&str))
+ // caused this issue
+ iter := Parse(ConfigDefault, bytes.NewBufferString(`{"Info":{"InfoHash":"4lzhP/fwlVLRgalC31YbfBSiqwo=","InfoHashstr":"E25CE13FF7F09552D181A942DF561B7C14A2AB0A","AnnounceList":["http://tracker.nwps.ws:6969/announce","http://tracker.nwps.ws:6969/announce","http://tracker.winglai.com/announce","http://fr33dom.h33t.com:3310/announce","http://exodus.desync.com:6969/announce","http://torrent.gresille.org/announce","http://tracker.trackerfix.com/announce","udp://tracker.btzoo.eu:80/announce","http://tracker.windsormetalbattery.com/announce","udp://10.rarbg.me:80/announce","udp://ipv4.tracker.harry.lu:80/announce","udp://tracker.ilibr.org:6969/announce","udp://tracker.zond.org:80/announce","http://torrent-tracker.ru/announce.php","http://bigfoot1942.sektori.org:6969/announce","http://tracker.best-torrents.net:6969/announce","http://announce.torrentsmd.com:6969/announce","udp://tracker.token.ro:80/announce","udp://tracker.coppersurfer.tk:80","http://tracker.thepiratebay.org/announce","udp://9.rarbg.com:2710/announce","udp://open.demonii.com:1337/announce","udp://tracker.ccc.de:80/announce","udp://tracker.istole.it:80/announce","udp://tracker.publicbt.com:80/announce","udp://tracker.openbittorrent.com:80/announce","udp://tracker.istole.it:80/announce","http://tracker.istole.it/announce","udp://tracker.publicbt.com:80/announce","http://tracker.publicbt.com/announce","udp://open.demonii.com:1337/announce"],"Length":2434793890,"PieceSize":524288,"PieceNum":4645},"InfoHashstr":"E25CE13FF7F09552D181A942DF561B7C14A2AB0A","SectionNum":32,"PieceNum":4645,"PieceSize":16384,"Finished":false,"SparseSize":104857600,"Bit":[{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="},{"Size":4645,"EndIndex":580,"EndMask":248,"Good":0,"Flush":false,"Bit":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="}]}`), 4096)
+ count := 0
+ for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
+ if field == "Bit" {
+ for iter.ReadArray() {
+ for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
+ if field == "Bit" {
+ iter.ReadStringAsSlice()
+ } else {
+ if field != "Size" && field != "EndIndex" && field != "EndMask" && field != "Good" && field != "Flush" {
+ t.Fatal(field)
+ }
+ iter.Skip()
+ }
+ }
+ count++
+ }
+ } else {
+ iter.Skip()
+ }
+ }
+ if count != 32 {
+ t.Fatal(count)
+ }
+}
+
+func Test_iterator_use_number(t *testing.T) {
+ // Test UseNumber with iterator Read()
+ inputs := []string{`2147483647`, `-2147483648`}
+ for _, input := range inputs {
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(Config{UseNumber: true}.Froze(), input)
+ expected := json.Number(input)
+ should.Equal(expected, iter.Read())
+ })
+ }
+}
+
+func Test_iterator_without_number(t *testing.T) {
+ inputs := []string{`2147483647`, `-2147483648`}
+ for _, input := range inputs {
+ t.Run(fmt.Sprintf("%v", input), func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, input)
+ expected, err := strconv.ParseInt(input, 10, 32)
+ should.Nil(err)
+ should.Equal(float64(expected), iter.Read())
+ })
+ }
+}
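
The two tests just above pivot on the UseNumber switch. As an editorial sketch (not part of the vendored diff; it assumes the canonical import path github.com/json-iterator/go), the standalone program below shows the same behaviour through the frozen API: the default config decodes numbers into float64 exactly like encoding/json, while UseNumber preserves the literal as a json.Number.

package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	input := []byte(`2147483647`)

	// Default behaviour mirrors encoding/json: numbers decode into float64.
	var asFloat interface{}
	if err := jsoniter.Unmarshal(input, &asFloat); err != nil {
		panic(err)
	}
	fmt.Printf("%T %v\n", asFloat, asFloat) // float64 2.147483647e+09

	// With UseNumber the frozen config yields json.Number, so the exact
	// integer literal survives and can be converted without float rounding.
	api := jsoniter.Config{UseNumber: true}.Froze()
	var asNumber interface{}
	if err := api.Unmarshal(input, &asNumber); err != nil {
		panic(err)
	}
	n := asNumber.(json.Number)
	i, err := n.Int64()
	fmt.Println(i, err) // 2147483647 <nil>
}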
diff --git a/vendor/github.com/json-iterator/go/jsoniter_large_file_test.go b/vendor/github.com/json-iterator/go/jsoniter_large_file_test.go
new file mode 100644
index 000000000..29eb58b16
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_large_file_test.go
@@ -0,0 +1,157 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "io/ioutil"
+ "os"
+ "testing"
+)
+
+//func Test_large_file(t *testing.T) {
+// file, err := os.Open("/tmp/large-file.json")
+// if err != nil {
+// t.Fatal(err)
+// }
+// iter := Parse(file, 4096)
+// count := 0
+// for iter.ReadArray() {
+// iter.Skip()
+// count++
+// }
+// if count != 11351 {
+// t.Fatal(count)
+// }
+//}
+
+func init() {
+ ioutil.WriteFile("/tmp/large-file.json", []byte(`[{
+ "person": {
+ "id": "d50887ca-a6ce-4e59-b89f-14f0b5d03b03",
+ "name": {
+ "fullName": "Leonid Bugaev",
+ "givenName": "Leonid",
+ "familyName": "Bugaev"
+ },
+ "email": "leonsbox@gmail.com",
+ "gender": "male",
+ "location": "Saint Petersburg, Saint Petersburg, RU",
+ "geo": {
+ "city": "Saint Petersburg",
+ "state": "Saint Petersburg",
+ "country": "Russia",
+ "lat": 59.9342802,
+ "lng": 30.3350986
+ },
+ "bio": "Senior engineer at Granify.com",
+ "site": "http://flickfaver.com",
+ "avatar": "https://d1ts43dypk8bqh.cloudfront.net/v1/avatars/d50887ca-a6ce-4e59-b89f-14f0b5d03b03",
+ "employment": {
+ "name": "www.latera.ru",
+ "title": "Software Engineer",
+ "domain": "gmail.com"
+ },
+ "facebook": {
+ "handle": "leonid.bugaev"
+ },
+ "github": {
+ "handle": "buger",
+ "id": 14009,
+ "avatar": "https://avatars.githubusercontent.com/u/14009?v=3",
+ "company": "Granify",
+ "blog": "http://leonsbox.com",
+ "followers": 95,
+ "following": 10
+ },
+ "twitter": {
+ "handle": "flickfaver",
+ "id": 77004410,
+ "bio": null,
+ "followers": 2,
+ "following": 1,
+ "statuses": 5,
+ "favorites": 0,
+ "location": "",
+ "site": "http://flickfaver.com",
+ "avatar": null
+ },
+ "linkedin": {
+ "handle": "in/leonidbugaev"
+ },
+ "googleplus": {
+ "handle": null
+ },
+ "angellist": {
+ "handle": "leonid-bugaev",
+ "id": 61541,
+ "bio": "Senior engineer at Granify.com",
+ "blog": "http://buger.github.com",
+ "site": "http://buger.github.com",
+ "followers": 41,
+ "avatar": "https://d1qb2nb5cznatu.cloudfront.net/users/61541-medium_jpg?1405474390"
+ },
+ "klout": {
+ "handle": null,
+ "score": null
+ },
+ "foursquare": {
+ "handle": null
+ },
+ "aboutme": {
+ "handle": "leonid.bugaev",
+ "bio": null,
+ "avatar": null
+ },
+ "gravatar": {
+ "handle": "buger",
+ "urls": [
+ ],
+ "avatar": "http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510",
+ "avatars": [
+ {
+ "url": "http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510",
+ "type": "thumbnail"
+ }
+ ]
+ },
+ "fuzzy": false
+ },
+ "company": "hello"
+}]`), 0666)
+}
+
+/*
+200000 8886 ns/op 4336 B/op 6 allocs/op
+50000 34244 ns/op 6744 B/op 14 allocs/op
+*/
+func Benchmark_jsoniter_large_file(b *testing.B) {
+ b.ReportAllocs()
+ for n := 0; n < b.N; n++ {
+ file, _ := os.Open("/tmp/large-file.json")
+ iter := Parse(ConfigDefault, file, 4096)
+ count := 0
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ // Skip() is strict by default; build with --tags jsoniter-sloppy to skip without validation
+ iter.Skip()
+ count++
+ return true
+ })
+ file.Close()
+ if iter.Error != nil {
+ b.Error(iter.Error)
+ }
+ }
+}
+
+func Benchmark_json_large_file(b *testing.B) {
+ b.ReportAllocs()
+ for n := 0; n < b.N; n++ {
+ file, _ := os.Open("/tmp/large-file.json")
+ bytes, _ := ioutil.ReadAll(file)
+ file.Close()
+ result := []struct{}{}
+ err := json.Unmarshal(bytes, &result)
+ if err != nil {
+ b.Error(err)
+ }
+ }
+}
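
The pair of benchmarks above contrasts streaming iteration against slurping the whole file into memory for encoding/json. As a hedged editorial sketch (it reuses the /tmp/large-file.json fixture and the "company" field written by the init function above), the same iterator API can pull out a single field per array element while skipping everything else, so memory use stays bounded by the read buffer rather than by the document size.

package main

import (
	"fmt"
	"io"
	"os"

	jsoniter "github.com/json-iterator/go"
)

// collectCompanies streams the top-level array, keeps one field per element
// and skips the rest, so only the 4 KB read buffer is held at any time.
func collectCompanies(path string) ([]string, error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer file.Close()

	iter := jsoniter.Parse(jsoniter.ConfigDefault, file, 4096)
	var companies []string
	iter.ReadArrayCB(func(it *jsoniter.Iterator) bool {
		it.ReadObjectCB(func(it *jsoniter.Iterator, field string) bool {
			if field == "company" {
				companies = append(companies, it.ReadString())
			} else {
				it.Skip()
			}
			return true
		})
		return true
	})
	if iter.Error != nil && iter.Error != io.EOF {
		return nil, iter.Error
	}
	return companies, nil
}

func main() {
	companies, err := collectCompanies("/tmp/large-file.json")
	fmt.Println(companies, err) // [hello] <nil> with the fixture written above
}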
diff --git a/vendor/github.com/json-iterator/go/jsoniter_map_test.go b/vendor/github.com/json-iterator/go/jsoniter_map_test.go
new file mode 100644
index 000000000..0865428f5
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_map_test.go
@@ -0,0 +1,160 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "math/big"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+ "strings"
+)
+
+func Test_read_map(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `{"hello": "world"}`)
+ m := map[string]string{"1": "2"}
+ iter.ReadVal(&m)
+ copy(iter.buf, []byte{0, 0, 0, 0, 0, 0})
+ should.Equal(map[string]string{"1": "2", "hello": "world"}, m)
+}
+
+func Test_read_map_of_interface(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `{"hello": "world"}`)
+ m := map[string]interface{}{"1": "2"}
+ iter.ReadVal(&m)
+ should.Equal(map[string]interface{}{"1": "2", "hello": "world"}, m)
+ iter = ParseString(ConfigDefault, `{"hello": "world"}`)
+ should.Equal(map[string]interface{}{"hello": "world"}, iter.Read())
+}
+
+func Test_map_wrapper_any_get_all(t *testing.T) {
+ should := require.New(t)
+ any := Wrap(map[string][]int{"Field1": {1, 2}})
+ should.Equal(`{"Field1":1}`, any.Get('*', 0).ToString())
+ should.Contains(any.Keys(), "Field1")
+
+ // map write to
+ stream := NewStream(ConfigDefault, nil, 0)
+ any.WriteTo(stream)
+ // TODO cannot pass
+ //should.Equal(string(stream.buf), "")
+}
+
+func Test_write_val_map(t *testing.T) {
+ should := require.New(t)
+ val := map[string]string{"1": "2"}
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`{"1":"2"}`, str)
+}
+
+func Test_slice_of_map(t *testing.T) {
+ should := require.New(t)
+ val := []map[string]string{{"1": "2"}}
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`[{"1":"2"}]`, str)
+ val = []map[string]string{}
+ should.Nil(UnmarshalFromString(str, &val))
+ should.Equal("2", val[0]["1"])
+}
+
+func Test_encode_int_key_map(t *testing.T) {
+ should := require.New(t)
+ val := map[int]string{1: "2"}
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`{"1":"2"}`, str)
+}
+
+func Test_decode_int_key_map(t *testing.T) {
+ should := require.New(t)
+ var val map[int]string
+ should.Nil(UnmarshalFromString(`{"1":"2"}`, &val))
+ should.Equal(map[int]string{1: "2"}, val)
+}
+
+func Test_encode_TextMarshaler_key_map(t *testing.T) {
+ should := require.New(t)
+ f, _, _ := big.ParseFloat("1", 10, 64, big.ToZero)
+ val := map[*big.Float]string{f: "2"}
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`{"1":"2"}`, str)
+}
+
+func Test_decode_TextMarshaler_key_map(t *testing.T) {
+ should := require.New(t)
+ var val map[*big.Float]string
+ should.Nil(UnmarshalFromString(`{"1":"2"}`, &val))
+ str, err := MarshalToString(val)
+ should.Nil(err)
+ should.Equal(`{"1":"2"}`, str)
+}
+
+func Test_map_key_with_escaped_char(t *testing.T) {
+ type Ttest struct {
+ Map map[string]string
+ }
+ var jsonBytes = []byte(`
+ {
+ "Map":{
+ "k\"ey": "val"
+ }
+ }`)
+ should := require.New(t)
+ {
+ var obj Ttest
+ should.Nil(json.Unmarshal(jsonBytes, &obj))
+ should.Equal(map[string]string{"k\"ey": "val"}, obj.Map)
+ }
+ {
+ var obj Ttest
+ should.Nil(Unmarshal(jsonBytes, &obj))
+ should.Equal(map[string]string{"k\"ey": "val"}, obj.Map)
+ }
+}
+
+func Test_encode_map_with_sorted_keys(t *testing.T) {
+ should := require.New(t)
+ m := map[string]interface{}{
+ "3": 3,
+ "1": 1,
+ "2": 2,
+ }
+ bytes, err := json.Marshal(m)
+ should.Nil(err)
+ output, err := ConfigCompatibleWithStandardLibrary.MarshalToString(m)
+ should.Nil(err)
+ should.Equal(string(bytes), output)
+}
+
+func Test_encode_map_uint_keys(t *testing.T) {
+ should := require.New(t)
+ m := map[uint64]interface{}{
+ uint64(1): "a",
+ uint64(2): "a",
+ uint64(4): "a",
+ }
+
+ bytes, err := json.Marshal(m)
+ should.Nil(err)
+
+ output, err := ConfigCompatibleWithStandardLibrary.MarshalToString(m)
+ should.Nil(err)
+ should.Equal(string(bytes), output)
+}
+
+func Test_read_map_with_reader(t *testing.T) {
+ should := require.New(t)
+ input := `{"branch":"beta","change_log":"add the rows{10}","channel":"fros","create_time":"2017-06-13 16:39:08","firmware_list":"","md5":"80dee2bf7305bcf179582088e29fd7b9","note":{"CoreServices":{"md5":"d26975c0a8c7369f70ed699f2855cc2e","package_name":"CoreServices","version_code":"76","version_name":"1.0.76"},"FrDaemon":{"md5":"6b1f0626673200bc2157422cd2103f5d","package_name":"FrDaemon","version_code":"390","version_name":"1.0.390"},"FrGallery":{"md5":"90d767f0f31bcd3c1d27281ec979ba65","package_name":"FrGallery","version_code":"349","version_name":"1.0.349"},"FrLocal":{"md5":"f15a215b2c070a80a01f07bde4f219eb","package_name":"FrLocal","version_code":"791","version_name":"1.0.791"}},"pack_region_urls":{"CN":"https://s3.cn-north-1.amazonaws.com.cn/xxx-os/ttt_xxx_android_1.5.3.344.393.zip","default":"http://192.168.8.78/ttt_xxx_android_1.5.3.344.393.zip","local":"http://192.168.8.78/ttt_xxx_android_1.5.3.344.393.zip"},"pack_version":"1.5.3.344.393","pack_version_code":393,"region":"all","release_flag":0,"revision":62,"size":38966875,"status":3}`
+ reader := strings.NewReader(input)
+ decoder := ConfigCompatibleWithStandardLibrary.NewDecoder(reader)
+ m1 := map[string]interface{}{}
+ should.Nil(decoder.Decode(&m1))
+ m2 := map[string]interface{}{}
+ should.Nil(json.Unmarshal([]byte(input), &m2))
+ should.Equal(m2, m1)
+ should.Equal("1.0.76", m1["note"].(map[string]interface{})["CoreServices"].(map[string]interface{})["version_name"])
+}
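
Test_encode_map_with_sorted_keys and Test_encode_map_uint_keys rely on ConfigCompatibleWithStandardLibrary emitting map keys in sorted order, the way encoding/json does, while the default config makes no ordering promise. A small editorial sketch of that difference (not part of the diff):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	m := map[string]int{"3": 3, "1": 1, "2": 2}

	// ConfigCompatibleWithStandardLibrary sorts map keys, matching
	// encoding/json byte for byte, which is what the tests above assert.
	sorted, _ := jsoniter.ConfigCompatibleWithStandardLibrary.MarshalToString(m)
	fmt.Println(sorted) // {"1":1,"2":2,"3":3}

	// The default config trades that guarantee for speed: keys may come out
	// in any order, so use it only when byte-level compatibility is not needed.
	fast, _ := jsoniter.Marshal(m)
	fmt.Println(string(fast))
}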
diff --git a/vendor/github.com/json-iterator/go/jsoniter_must_be_valid_test.go b/vendor/github.com/json-iterator/go/jsoniter_must_be_valid_test.go
new file mode 100644
index 000000000..01a91c90f
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_must_be_valid_test.go
@@ -0,0 +1,71 @@
+package jsoniter
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+// if MustBeValid turns out to be useless, just drop this test
+func Test_must_be_valid(t *testing.T) {
+ should := require.New(t)
+ any := Get([]byte("123"))
+ should.Equal(any.MustBeValid().ToInt(), 123)
+
+ any = Wrap(int8(10))
+ should.Equal(any.MustBeValid().ToInt(), 10)
+
+ any = Wrap(int16(10))
+ should.Equal(any.MustBeValid().ToInt(), 10)
+
+ any = Wrap(int32(10))
+ should.Equal(any.MustBeValid().ToInt(), 10)
+
+ any = Wrap(int64(10))
+ should.Equal(any.MustBeValid().ToInt(), 10)
+
+ any = Wrap(uint(10))
+ should.Equal(any.MustBeValid().ToInt(), 10)
+
+ any = Wrap(uint8(10))
+ should.Equal(any.MustBeValid().ToInt(), 10)
+
+ any = Wrap(uint16(10))
+ should.Equal(any.MustBeValid().ToInt(), 10)
+
+ any = Wrap(uint32(10))
+ should.Equal(any.MustBeValid().ToInt(), 10)
+
+ any = Wrap(uint64(10))
+ should.Equal(any.MustBeValid().ToInt(), 10)
+
+ any = Wrap(float32(10))
+ should.Equal(any.MustBeValid().ToFloat64(), float64(10))
+
+ any = Wrap(float64(10))
+ should.Equal(any.MustBeValid().ToFloat64(), float64(10))
+
+ any = Wrap(true)
+ should.Equal(any.MustBeValid().ToFloat64(), float64(1))
+
+ any = Wrap(false)
+ should.Equal(any.MustBeValid().ToFloat64(), float64(0))
+
+ any = Wrap(nil)
+ should.Equal(any.MustBeValid().ToFloat64(), float64(0))
+
+ any = Wrap(struct{ age int }{age: 1})
+ should.Equal(any.MustBeValid().ToFloat64(), float64(0))
+
+ any = Wrap(map[string]interface{}{"abc": 1})
+ should.Equal(any.MustBeValid().ToFloat64(), float64(0))
+
+ any = Wrap("abc")
+ should.Equal(any.MustBeValid().ToFloat64(), float64(0))
+
+ any = Wrap([]int{})
+ should.Equal(any.MustBeValid().ToFloat64(), float64(0))
+
+ any = Wrap([]int{1, 2})
+ should.Equal(any.MustBeValid().ToFloat64(), float64(1))
+}
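
Test_must_be_valid drives MustBeValid across every wrapped type. The editorial sketch below (assuming the canonical import path) shows the Any API in its usual role: lazy path lookup over raw bytes, with MustBeValid as the fail-fast variant and ValueType/LastError as the non-fatal checks.

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"colors": ["red", "green"], "count": 2}`)

	// Get parses lazily: only the parts touched by the path are decoded.
	first := jsoniter.Get(data, "colors", 0)
	fmt.Println(first.MustBeValid().ToString()) // red

	// A lookup that misses returns an invalid Any instead of an error value;
	// ValueType and LastError report the failure without stopping the program,
	// whereas calling MustBeValid on it is the fail-fast path.
	missing := jsoniter.Get(data, "colors", 9)
	fmt.Println(missing.ValueType() == jsoniter.InvalidValue) // true
	fmt.Println(missing.LastError() != nil)                   // true
}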
diff --git a/vendor/github.com/json-iterator/go/jsoniter_nested_test.go b/vendor/github.com/json-iterator/go/jsoniter_nested_test.go
new file mode 100644
index 000000000..29f8cee84
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_nested_test.go
@@ -0,0 +1,88 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "reflect"
+ "testing"
+)
+
+type Level1 struct {
+ Hello []Level2
+}
+
+type Level2 struct {
+ World string
+}
+
+func Test_nested(t *testing.T) {
+ iter := ParseString(ConfigDefault, `{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
+ l1 := Level1{}
+ for l1Field := iter.ReadObject(); l1Field != ""; l1Field = iter.ReadObject() {
+ switch l1Field {
+ case "hello":
+ l2Array := []Level2{}
+ for iter.ReadArray() {
+ l2 := Level2{}
+ for l2Field := iter.ReadObject(); l2Field != ""; l2Field = iter.ReadObject() {
+ switch l2Field {
+ case "world":
+ l2.World = iter.ReadString()
+ default:
+ iter.ReportError("bind l2", "unexpected field: "+l2Field)
+ }
+ }
+ l2Array = append(l2Array, l2)
+ }
+ l1.Hello = l2Array
+ default:
+ iter.ReportError("bind l1", "unexpected field: "+l1Field)
+ }
+ }
+ if !reflect.DeepEqual(l1, Level1{
+ Hello: []Level2{
+ {World: "value1"},
+ {World: "value2"},
+ },
+ }) {
+ t.Fatal(l1)
+ }
+}
+
+func Benchmark_jsoniter_nested(b *testing.B) {
+ for n := 0; n < b.N; n++ {
+ iter := ParseString(ConfigDefault, `{"hello": [{"world": "value1"}, {"world": "value2"}]}`)
+ l1 := Level1{}
+ for l1Field := iter.ReadObject(); l1Field != ""; l1Field = iter.ReadObject() {
+ switch l1Field {
+ case "hello":
+ l1.Hello = readLevel1Hello(iter)
+ default:
+ iter.Skip()
+ }
+ }
+ }
+}
+
+func readLevel1Hello(iter *Iterator) []Level2 {
+ l2Array := make([]Level2, 0, 2)
+ for iter.ReadArray() {
+ l2 := Level2{}
+ for l2Field := iter.ReadObject(); l2Field != ""; l2Field = iter.ReadObject() {
+ switch l2Field {
+ case "world":
+ l2.World = iter.ReadString()
+ default:
+ iter.Skip()
+ }
+ }
+ l2Array = append(l2Array, l2)
+ }
+ return l2Array
+}
+
+func Benchmark_json_nested(b *testing.B) {
+ for n := 0; n < b.N; n++ {
+ l1 := Level1{}
+ json.Unmarshal([]byte(`{"hello": [{"world": "value1"}, {"world": "value2"}]}`), &l1)
+ }
+}
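
The nested tests bind Level1/Level2 by hand with ReadObject/ReadArray; ReadVal performs the same binding reflectively. A short editorial sketch of that trade-off (same shapes as above, assuming the canonical import path):

package main

import (
	"fmt"
	"io"

	jsoniter "github.com/json-iterator/go"
)

type Level2 struct {
	World string
}

type Level1 struct {
	Hello []Level2
}

func main() {
	input := `{"hello": [{"world": "value1"}, {"world": "value2"}]}`

	// ReadVal hands the binding to the reflection-based decoder: far less code
	// than the hand-written loop in Test_nested, at the cost of some of the
	// streaming version's speed. Field matching is case-insensitive by default,
	// just as Test_decode_nested later in this diff relies on.
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, input)
	var l1 Level1
	iter.ReadVal(&l1)
	if iter.Error != nil && iter.Error != io.EOF {
		panic(iter.Error)
	}
	fmt.Printf("%+v\n", l1) // {Hello:[{World:value1} {World:value2}]}
}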
diff --git a/vendor/github.com/json-iterator/go/jsoniter_null_test.go b/vendor/github.com/json-iterator/go/jsoniter_null_test.go
new file mode 100644
index 000000000..8c891470f
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_null_test.go
@@ -0,0 +1,168 @@
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "io"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_read_null(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `null`)
+ should.True(iter.ReadNil())
+ iter = ParseString(ConfigDefault, `null`)
+ should.Nil(iter.Read())
+ iter = ParseString(ConfigDefault, `navy`)
+ iter.Read()
+ should.True(iter.Error != nil && iter.Error != io.EOF)
+ iter = ParseString(ConfigDefault, `navy`)
+ iter.ReadNil()
+ should.True(iter.Error != nil && iter.Error != io.EOF)
+}
+
+func Test_write_null(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteNil()
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("null", buf.String())
+}
+
+func Test_encode_null(t *testing.T) {
+ should := require.New(t)
+ str, err := MarshalToString(nil)
+ should.Nil(err)
+ should.Equal("null", str)
+}
+
+func Test_decode_null_object_field(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `[null,"a"]`)
+ iter.ReadArray()
+ if iter.ReadObject() != "" {
+ t.FailNow()
+ }
+ iter.ReadArray()
+ if iter.ReadString() != "a" {
+ t.FailNow()
+ }
+ type TestObject struct {
+ Field string
+ }
+ objs := []TestObject{}
+ should.Nil(UnmarshalFromString("[null]", &objs))
+ should.Len(objs, 1)
+}
+
+func Test_decode_null_array_element(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `[null,"a"]`)
+ should.True(iter.ReadArray())
+ should.True(iter.ReadNil())
+ should.True(iter.ReadArray())
+ should.Equal("a", iter.ReadString())
+}
+
+func Test_decode_null_array(t *testing.T) {
+ should := require.New(t)
+ arr := []string{}
+ should.Nil(UnmarshalFromString("null", &arr))
+ should.Nil(arr)
+}
+
+func Test_decode_null_map(t *testing.T) {
+ should := require.New(t)
+ arr := map[string]string{}
+ should.Nil(UnmarshalFromString("null", &arr))
+ should.Nil(arr)
+}
+
+func Test_decode_null_string(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `[null,"a"]`)
+ should.True(iter.ReadArray())
+ should.Equal("", iter.ReadString())
+ should.True(iter.ReadArray())
+ should.Equal("a", iter.ReadString())
+}
+
+func Test_decode_null_skip(t *testing.T) {
+ iter := ParseString(ConfigDefault, `[null,"a"]`)
+ iter.ReadArray()
+ iter.Skip()
+ iter.ReadArray()
+ if iter.ReadString() != "a" {
+ t.FailNow()
+ }
+}
+
+func Test_encode_nil_map(t *testing.T) {
+ should := require.New(t)
+ type Ttest map[string]string
+ var obj1 Ttest
+ output, err := json.Marshal(obj1)
+ should.Nil(err)
+ should.Equal("null", string(output))
+ output, err = json.Marshal(&obj1)
+ should.Nil(err)
+ should.Equal("null", string(output))
+ output, err = Marshal(obj1)
+ should.Nil(err)
+ should.Equal("null", string(output))
+ output, err = Marshal(&obj1)
+ should.Nil(err)
+ should.Equal("null", string(output))
+}
+
+func Test_encode_nil_array(t *testing.T) {
+ should := require.New(t)
+ type Ttest []string
+ var obj1 Ttest
+ output, err := json.Marshal(obj1)
+ should.Nil(err)
+ should.Equal("null", string(output))
+ output, err = json.Marshal(&obj1)
+ should.Nil(err)
+ should.Equal("null", string(output))
+ output, err = Marshal(obj1)
+ should.Nil(err)
+ should.Equal("null", string(output))
+ output, err = Marshal(&obj1)
+ should.Nil(err)
+ should.Equal("null", string(output))
+}
+
+func Test_decode_nil_num(t *testing.T) {
+ type TestData struct {
+ Field int `json:"field"`
+ }
+ should := require.New(t)
+
+ data1 := []byte(`{"field": 42}`)
+ data2 := []byte(`{"field": null}`)
+
+ // Checking stdlib behavior as well
+ obj2 := TestData{}
+ err := json.Unmarshal(data1, &obj2)
+ should.Equal(nil, err)
+ should.Equal(42, obj2.Field)
+
+ err = json.Unmarshal(data2, &obj2)
+ should.Equal(nil, err)
+ should.Equal(42, obj2.Field)
+
+ obj := TestData{}
+
+ err = Unmarshal(data1, &obj)
+ should.Equal(nil, err)
+ should.Equal(42, obj.Field)
+
+ err = Unmarshal(data2, &obj)
+ should.Equal(nil, err)
+ should.Equal(42, obj.Field)
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_object_test.go b/vendor/github.com/json-iterator/go/jsoniter_object_test.go
new file mode 100644
index 000000000..9c9c53d02
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_object_test.go
@@ -0,0 +1,342 @@
+package jsoniter
+
+import (
+ "bytes"
+ "fmt"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_empty_object(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `{}`)
+ field := iter.ReadObject()
+ should.Equal("", field)
+ iter = ParseString(ConfigDefault, `{}`)
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ should.FailNow("should not call")
+ return true
+ })
+}
+
+func Test_one_field(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `{"a": "stream"}`)
+ field := iter.ReadObject()
+ should.Equal("a", field)
+ value := iter.ReadString()
+ should.Equal("stream", value)
+ field = iter.ReadObject()
+ should.Equal("", field)
+ iter = ParseString(ConfigDefault, `{"a": "stream"}`)
+ should.True(iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ should.Equal("a", field)
+ iter.Skip()
+ return true
+ }))
+
+}
+
+func Test_two_field(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `{ "a": "stream" , "c": "d" }`)
+ field := iter.ReadObject()
+ should.Equal("a", field)
+ value := iter.ReadString()
+ should.Equal("stream", value)
+ field = iter.ReadObject()
+ should.Equal("c", field)
+ value = iter.ReadString()
+ should.Equal("d", value)
+ field = iter.ReadObject()
+ should.Equal("", field)
+ iter = ParseString(ConfigDefault, `{"field1": "1", "field2": 2}`)
+ for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
+ switch field {
+ case "field1":
+ iter.ReadString()
+ case "field2":
+ iter.ReadInt64()
+ default:
+ iter.ReportError("bind object", "unexpected field")
+ }
+ }
+}
+
+func Test_object_wrapper_any_get_all(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 []int
+ Field2 []int
+ }
+ any := Wrap(TestObject{[]int{1, 2}, []int{3, 4}})
+ should.Contains(any.Get('*', 0).ToString(), `"Field2":3`)
+ should.Contains(any.Keys(), "Field1")
+ should.Contains(any.Keys(), "Field2")
+ should.NotContains(any.Keys(), "Field3")
+
+ //should.Contains(any.GetObject()["Field1"].GetArray()[0], 1)
+}
+
+func Test_write_object(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(Config{IndentionStep: 2}.Froze(), buf, 4096)
+ stream.WriteObjectStart()
+ stream.WriteObjectField("hello")
+ stream.WriteInt(1)
+ stream.WriteMore()
+ stream.WriteObjectField("world")
+ stream.WriteInt(2)
+ stream.WriteObjectEnd()
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal("{\n \"hello\": 1,\n \"world\": 2\n}", buf.String())
+}
+
+func Test_write_val_zero_field_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ }
+ obj := TestObject{}
+ str, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{}`, str)
+}
+
+func Test_write_val_one_field_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string `json:"field-1"`
+ }
+ obj := TestObject{"hello"}
+ str, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{"field-1":"hello"}`, str)
+}
+
+func Test_mixed(t *testing.T) {
+ should := require.New(t)
+ type AA struct {
+ ID int `json:"id"`
+ Payload map[string]interface{} `json:"payload"`
+ buf *bytes.Buffer
+ }
+ aa := AA{}
+ err := UnmarshalFromString(` {"id":1, "payload":{"account":"123","password":"456"}}`, &aa)
+ should.Nil(err)
+ should.Equal(1, aa.ID)
+ should.Equal("123", aa.Payload["account"])
+}
+
+func Test_omit_empty(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string `json:"field-1,omitempty"`
+ Field2 string `json:"field-2,omitempty"`
+ Field3 string `json:"field-3,omitempty"`
+ }
+ obj := TestObject{}
+ obj.Field2 = "hello"
+ str, err := MarshalToString(&obj)
+ should.Nil(err)
+ should.Equal(`{"field-2":"hello"}`, str)
+}
+
+func Test_ignore_field_on_not_valid_type(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string `json:"field-1,omitempty"`
+ Field2 func() `json:"-"`
+ }
+ obj := TestObject{}
+ obj.Field1 = "hello world"
+ obj.Field2 = func() {}
+ str, err := MarshalToString(&obj)
+ should.Nil(err)
+ should.Equal(`{"field-1":"hello world"}`, str)
+}
+
+func Test_recursive_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ Me *TestObject
+ }
+ obj := TestObject{}
+ str, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Contains(str, `"Field1":""`)
+ should.Contains(str, `"Me":null`)
+ err = UnmarshalFromString(str, &obj)
+ should.Nil(err)
+}
+
+func Test_encode_anonymous_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field string
+ }
+ str, err := MarshalToString(struct {
+ TestObject
+ Field int
+ }{
+ Field: 100,
+ })
+ should.Nil(err)
+ should.Equal(`{"Field":100}`, str)
+}
+
+func Test_decode_anonymous_struct(t *testing.T) {
+ should := require.New(t)
+ type Inner struct {
+ Key string `json:"key"`
+ }
+
+ type Outer struct {
+ Inner
+ }
+ var outer Outer
+ j := []byte("{\"key\":\"value\"}")
+ should.Nil(Unmarshal(j, &outer))
+ should.Equal("value", outer.Key)
+}
+
+func Test_multiple_level_anonymous_struct(t *testing.T) {
+ type Level1 struct {
+ Field1 string
+ }
+ type Level2 struct {
+ Level1
+ Field2 string
+ }
+ type Level3 struct {
+ Level2
+ Field3 string
+ }
+ should := require.New(t)
+ obj := Level3{Level2{Level1{"1"}, "2"}, "3"}
+ output, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Equal(`{"Field1":"1","Field2":"2","Field3":"3"}`, output)
+}
+
+func Test_multiple_level_anonymous_struct_with_ptr(t *testing.T) {
+ type Level1 struct {
+ Field1 string
+ Field2 string
+ Field4 string
+ }
+ type Level2 struct {
+ *Level1
+ Field2 string
+ Field3 string
+ }
+ type Level3 struct {
+ *Level2
+ Field3 string
+ }
+ should := require.New(t)
+ obj := Level3{&Level2{&Level1{"1", "", "4"}, "2", ""}, "3"}
+ output, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Contains(output, `"Field1":"1"`)
+ should.Contains(output, `"Field2":"2"`)
+ should.Contains(output, `"Field3":"3"`)
+ should.Contains(output, `"Field4":"4"`)
+}
+
+func Test_shadow_struct_field(t *testing.T) {
+ should := require.New(t)
+ type omit *struct{}
+ type CacheItem struct {
+ Key string `json:"key"`
+ MaxAge int `json:"cacheAge"`
+ }
+ output, err := MarshalToString(struct {
+ *CacheItem
+
+ // Omit bad keys
+ OmitMaxAge omit `json:"cacheAge,omitempty"`
+
+ // Add nice keys
+ MaxAge int `json:"max_age"`
+ }{
+ CacheItem: &CacheItem{
+ Key: "value",
+ MaxAge: 100,
+ },
+ MaxAge: 20,
+ })
+ should.Nil(err)
+ should.Contains(output, `"key":"value"`)
+ should.Contains(output, `"max_age":20`)
+}
+
+func Test_embedded_order(t *testing.T) {
+ type A struct {
+ Field2 string
+ }
+
+ type C struct {
+ Field5 string
+ }
+
+ type B struct {
+ Field4 string
+ C
+ Field6 string
+ }
+
+ type TestObject struct {
+ Field1 string
+ A
+ Field3 string
+ B
+ Field7 string
+ }
+ should := require.New(t)
+ s := TestObject{}
+ output, err := MarshalToString(s)
+ should.Nil(err)
+ should.Equal(`{"Field1":"","Field2":"","Field3":"","Field4":"","Field5":"","Field6":"","Field7":""}`, output)
+}
+
+func Test_decode_nested(t *testing.T) {
+ type StructOfString struct {
+ Field1 string
+ Field2 string
+ }
+ iter := ParseString(ConfigDefault, `[{"field1": "hello"}, null, {"field2": "world"}]`)
+ slice := []*StructOfString{}
+ iter.ReadVal(&slice)
+ if len(slice) != 3 {
+ fmt.Println(iter.Error)
+ t.Fatal(len(slice))
+ }
+ if slice[0].Field1 != "hello" {
+ fmt.Println(iter.Error)
+ t.Fatal(slice[0])
+ }
+ if slice[1] != nil {
+ fmt.Println(iter.Error)
+ t.Fatal(slice[1])
+ }
+ if slice[2].Field2 != "world" {
+ fmt.Println(iter.Error)
+ t.Fatal(slice[2])
+ }
+}
+
+func Test_decode_field_with_escape(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ }
+ var obj TestObject
+ should.Nil(ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"Field\"1":"hello"}`), &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(ConfigCompatibleWithStandardLibrary.Unmarshal([]byte(`{"\u0046ield1":"hello"}`), &obj))
+ should.Equal("hello", obj.Field1)
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_optional_test.go b/vendor/github.com/json-iterator/go/jsoniter_optional_test.go
new file mode 100644
index 000000000..82b789b50
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_optional_test.go
@@ -0,0 +1,46 @@
+package jsoniter
+
+import (
+ "github.com/stretchr/testify/require"
+ "testing"
+)
+
+func Test_encode_optional_int_pointer(t *testing.T) {
+ should := require.New(t)
+ var ptr *int
+ str, err := MarshalToString(ptr)
+ should.Nil(err)
+ should.Equal("null", str)
+ val := 100
+ ptr = &val
+ str, err = MarshalToString(ptr)
+ should.Nil(err)
+ should.Equal("100", str)
+}
+
+func Test_decode_struct_with_optional_field(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 *string
+ Field2 *string
+ }
+ obj := TestObject{}
+ UnmarshalFromString(`{"field1": null, "field2": "world"}`, &obj)
+ should.Nil(obj.Field1)
+ should.Equal("world", *obj.Field2)
+}
+
+func Test_encode_struct_with_optional_field(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 *string
+ Field2 *string
+ }
+ obj := TestObject{}
+ world := "world"
+ obj.Field2 = &world
+ str, err := MarshalToString(obj)
+ should.Nil(err)
+ should.Contains(str, `"Field1":null`)
+ should.Contains(str, `"Field2":"world"`)
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_raw_message_test.go b/vendor/github.com/json-iterator/go/jsoniter_raw_message_test.go
new file mode 100644
index 000000000..4fba33f89
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_raw_message_test.go
@@ -0,0 +1,114 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "github.com/stretchr/testify/require"
+ "strings"
+ "testing"
+)
+
+func Test_json_RawMessage(t *testing.T) {
+ should := require.New(t)
+ var data json.RawMessage
+ should.Nil(Unmarshal([]byte(`[1,2,3]`), &data))
+ should.Equal(`[1,2,3]`, string(data))
+ str, err := MarshalToString(data)
+ should.Nil(err)
+ should.Equal(`[1,2,3]`, str)
+}
+
+func Test_jsoniter_RawMessage(t *testing.T) {
+ should := require.New(t)
+ var data RawMessage
+ should.Nil(Unmarshal([]byte(`[1,2,3]`), &data))
+ should.Equal(`[1,2,3]`, string(data))
+ str, err := MarshalToString(data)
+ should.Nil(err)
+ should.Equal(`[1,2,3]`, str)
+}
+
+func Test_json_RawMessage_in_struct(t *testing.T) {
+ type TestObject struct {
+ Field1 string
+ Field2 json.RawMessage
+ }
+ should := require.New(t)
+ var data TestObject
+ should.Nil(Unmarshal([]byte(`{"field1": "hello", "field2": [1,2,3]}`), &data))
+ should.Equal(` [1,2,3]`, string(data.Field2))
+ should.Equal(`hello`, data.Field1)
+}
+
+func Test_decode_map_of_raw_message(t *testing.T) {
+ should := require.New(t)
+ type RawMap map[string]*json.RawMessage
+ b := []byte("{\"test\":[{\"key\":\"value\"}]}")
+ var rawMap RawMap
+ should.Nil(Unmarshal(b, &rawMap))
+ should.Equal(`[{"key":"value"}]`, string(*rawMap["test"]))
+ type Inner struct {
+ Key string `json:"key"`
+ }
+ var inner []Inner
+ Unmarshal(*rawMap["test"], &inner)
+ should.Equal("value", inner[0].Key)
+}
+
+func Test_encode_map_of_raw_message(t *testing.T) {
+ should := require.New(t)
+ type RawMap map[string]*json.RawMessage
+ value := json.RawMessage("[]")
+ rawMap := RawMap{"hello": &value}
+ output, err := MarshalToString(rawMap)
+ should.Nil(err)
+ should.Equal(`{"hello":[]}`, output)
+}
+
+func Test_encode_map_of_jsoniter_raw_message(t *testing.T) {
+ should := require.New(t)
+ type RawMap map[string]*RawMessage
+ value := RawMessage("[]")
+ rawMap := RawMap{"hello": &value}
+ output, err := MarshalToString(rawMap)
+ should.Nil(err)
+ should.Equal(`{"hello":[]}`, output)
+}
+
+func Test_marshal_invalid_json_raw_message(t *testing.T) {
+ type A struct {
+ Raw json.RawMessage `json:"raw"`
+ }
+ message := []byte(`{}`)
+
+ a := A{}
+ should := require.New(t)
+ should.Nil(ConfigCompatibleWithStandardLibrary.Unmarshal(message, &a))
+ aout, aouterr := ConfigCompatibleWithStandardLibrary.Marshal(&a)
+ should.Equal(`{"raw":null}`, string(aout))
+ should.Nil(aouterr)
+}
+
+func Test_raw_message_memory_not_copied_issue(t *testing.T) {
+ jsonStream := `{"name":"xxxxx","bundle_id":"com.zonst.majiang","app_platform":"ios","app_category":"100103", "budget_day":1000,"bidding_min":1,"bidding_max":2,"bidding_type":"CPM", "freq":{"open":true,"type":"day","num":100},"speed":1, "targeting":{"vendor":{"open":true,"list":["zonst"]}, "geo_code":{"open":true,"list":["156110100"]},"app_category":{"open":true,"list":["100101"]}, "day_parting":{"open":true,"list":["100409","100410"]},"device_type":{"open":true,"list":["ipad"]}, "os_version":{"open":true,"list":[10]},"carrier":{"open":true,"list":["mobile"]}, "network":{"open":true,"list":["4G"]}},"url":{"tracking_imp_url":"http://www.baidu.com", "tracking_clk_url":"http://www.baidu.com","jump_url":"http://www.baidu.com","deep_link_url":"http://www.baidu.com"}}`
+ type IteratorObject struct {
+ Name *string `json:"name"`
+ BundleId *string `json:"bundle_id"`
+ AppCategory *string `json:"app_category"`
+ AppPlatform *string `json:"app_platform"`
+ BudgetDay *float32 `json:"budget_day"`
+ BiddingMax *float32 `json:"bidding_max"`
+ BiddingMin *float32 `json:"bidding_min"`
+ BiddingType *string `json:"bidding_type"`
+ Freq *RawMessage `json:"freq"`
+ Targeting *RawMessage `json:"targeting"`
+ Url *RawMessage `json:"url"`
+ Speed *int `json:"speed" db:"speed"`
+ }
+
+ obj := &IteratorObject{}
+ decoder := NewDecoder(strings.NewReader(jsonStream))
+ err := decoder.Decode(obj)
+ should := require.New(t)
+ should.Nil(err)
+ should.Equal(`{"open":true,"type":"day","num":100}`, string(*obj.Freq))
+}
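
The RawMessage tests above guard two properties: raw fragments round-trip untouched, and the bytes handed back are copies rather than views into the decoder's buffer. The usual reason to reach for RawMessage is two-phase decoding, sketched below as an editorial example (the Envelope and Click types are illustrative, not from the diff):

package main

import (
	"encoding/json"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// Envelope defers decoding of its payload until the type tag has been read,
// which is the pattern the RawMessage tests above are protecting.
type Envelope struct {
	Type    string          `json:"type"`
	Payload json.RawMessage `json:"payload"`
}

// Click is a hypothetical payload type used only for this sketch.
type Click struct {
	X, Y int
}

func main() {
	data := []byte(`{"type":"click","payload":{"X":10,"Y":20}}`)

	var env Envelope
	if err := jsoniter.Unmarshal(data, &env); err != nil {
		panic(err)
	}

	switch env.Type {
	case "click":
		var c Click
		if err := jsoniter.Unmarshal(env.Payload, &c); err != nil {
			panic(err)
		}
		fmt.Println(c.X, c.Y) // 10 20
	default:
		fmt.Println("unhandled payload:", string(env.Payload))
	}
}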
diff --git a/vendor/github.com/json-iterator/go/jsoniter_reader_test.go b/vendor/github.com/json-iterator/go/jsoniter_reader_test.go
new file mode 100644
index 000000000..b3b3588e1
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_reader_test.go
@@ -0,0 +1,57 @@
+package jsoniter
+
+import (
+ "github.com/stretchr/testify/require"
+ "strings"
+ "testing"
+ "time"
+)
+
+func Test_reader_and_load_more(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ CreatedAt time.Time
+ }
+ reader := strings.NewReader(`
+{
+ "agency": null,
+ "candidateId": 0,
+ "candidate": "Blah Blah",
+ "bookingId": 0,
+ "shiftId": 1,
+ "shiftTypeId": 0,
+ "shift": "Standard",
+ "bonus": 0,
+ "bonusNI": 0,
+ "days": [],
+ "totalHours": 27,
+ "expenses": [],
+ "weekEndingDateSystem": "2016-10-09",
+ "weekEndingDateClient": "2016-10-09",
+ "submittedAt": null,
+ "submittedById": null,
+ "approvedAt": "2016-10-10T18:38:04Z",
+ "approvedById": 0,
+ "authorisedAt": "2016-10-10T18:38:04Z",
+ "authorisedById": 0,
+ "invoicedAt": "2016-10-10T20:00:00Z",
+ "revokedAt": null,
+ "revokedById": null,
+ "revokeReason": null,
+ "rejectedAt": null,
+ "rejectedById": null,
+ "rejectReasonCode": null,
+ "rejectReason": null,
+ "createdAt": "2016-10-03T00:00:00Z",
+ "updatedAt": "2016-11-09T10:26:13Z",
+ "updatedById": null,
+ "overrides": [],
+ "bookingApproverId": null,
+ "bookingApprover": null,
+ "status": "approved"
+}
+ `)
+ decoder := ConfigCompatibleWithStandardLibrary.NewDecoder(reader)
+ obj := TestObject{}
+ should.Nil(decoder.Decode(&obj))
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_reflect_native_test.go b/vendor/github.com/json-iterator/go/jsoniter_reflect_native_test.go
new file mode 100644
index 000000000..0e2d68bba
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_reflect_native_test.go
@@ -0,0 +1,154 @@
+package jsoniter
+
+import (
+ "fmt"
+ "testing"
+)
+
+func Test_reflect_str(t *testing.T) {
+ iter := ParseString(ConfigDefault, `"hello"`)
+ str := ""
+ iter.ReadVal(&str)
+ if str != "hello" {
+ fmt.Println(iter.Error)
+ t.Fatal(str)
+ }
+}
+
+func Test_reflect_ptr_str(t *testing.T) {
+ iter := ParseString(ConfigDefault, `"hello"`)
+ var str *string
+ iter.ReadVal(&str)
+ if *str != "hello" {
+ t.Fatal(str)
+ }
+}
+
+func Test_reflect_int(t *testing.T) {
+ iter := ParseString(ConfigDefault, `123`)
+ val := int(0)
+ iter.ReadVal(&val)
+ if val != 123 {
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_int8(t *testing.T) {
+ iter := ParseString(ConfigDefault, `123`)
+ val := int8(0)
+ iter.ReadVal(&val)
+ if val != 123 {
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_int16(t *testing.T) {
+ iter := ParseString(ConfigDefault, `123`)
+ val := int16(0)
+ iter.ReadVal(&val)
+ if val != 123 {
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_int32(t *testing.T) {
+ iter := ParseString(ConfigDefault, `123`)
+ val := int32(0)
+ iter.ReadVal(&val)
+ if val != 123 {
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_int64(t *testing.T) {
+ iter := ParseString(ConfigDefault, `123`)
+ val := int64(0)
+ iter.ReadVal(&val)
+ if val != 123 {
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_uint(t *testing.T) {
+ iter := ParseString(ConfigDefault, `123`)
+ val := uint(0)
+ iter.ReadVal(&val)
+ if val != 123 {
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_uint8(t *testing.T) {
+ iter := ParseString(ConfigDefault, `123`)
+ val := uint8(0)
+ iter.ReadVal(&val)
+ if val != 123 {
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_uint16(t *testing.T) {
+ iter := ParseString(ConfigDefault, `123`)
+ val := uint16(0)
+ iter.ReadVal(&val)
+ if val != 123 {
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_uint32(t *testing.T) {
+ iter := ParseString(ConfigDefault, `123`)
+ val := uint32(0)
+ iter.ReadVal(&val)
+ if val != 123 {
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_uint64(t *testing.T) {
+ iter := ParseString(ConfigDefault, `123`)
+ val := uint64(0)
+ iter.ReadVal(&val)
+ if val != 123 {
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_byte(t *testing.T) {
+ iter := ParseString(ConfigDefault, `123`)
+ val := byte(0)
+ iter.ReadVal(&val)
+ if val != 123 {
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_float32(t *testing.T) {
+ iter := ParseString(ConfigDefault, `1.23`)
+ val := float32(0)
+ iter.ReadVal(&val)
+ if val != 1.23 {
+ fmt.Println(iter.Error)
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_float64(t *testing.T) {
+ iter := ParseString(ConfigDefault, `1.23`)
+ val := float64(0)
+ iter.ReadVal(&val)
+ if val != 1.23 {
+ fmt.Println(iter.Error)
+ t.Fatal(val)
+ }
+}
+
+func Test_reflect_bool(t *testing.T) {
+ iter := ParseString(ConfigDefault, `true`)
+ val := false
+ iter.ReadVal(&val)
+	if !val {
+ fmt.Println(iter.Error)
+ t.Fatal(val)
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_skip_test.go b/vendor/github.com/json-iterator/go/jsoniter_skip_test.go
new file mode 100644
index 000000000..cb13e507c
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_skip_test.go
@@ -0,0 +1,184 @@
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_skip_number_in_array(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `[-0.12, "stream"]`)
+ iter.ReadArray()
+ iter.Skip()
+ iter.ReadArray()
+ should.Nil(iter.Error)
+ should.Equal("stream", iter.ReadString())
+}
+
+func Test_skip_string_in_array(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `["hello", "stream"]`)
+ iter.ReadArray()
+ iter.Skip()
+ iter.ReadArray()
+ should.Nil(iter.Error)
+ should.Equal("stream", iter.ReadString())
+}
+
+func Test_skip_null(t *testing.T) {
+ iter := ParseString(ConfigDefault, `[null , "stream"]`)
+ iter.ReadArray()
+ iter.Skip()
+ iter.ReadArray()
+ if iter.ReadString() != "stream" {
+ t.FailNow()
+ }
+}
+
+func Test_skip_true(t *testing.T) {
+ iter := ParseString(ConfigDefault, `[true , "stream"]`)
+ iter.ReadArray()
+ iter.Skip()
+ iter.ReadArray()
+ if iter.ReadString() != "stream" {
+ t.FailNow()
+ }
+}
+
+func Test_skip_false(t *testing.T) {
+ iter := ParseString(ConfigDefault, `[false , "stream"]`)
+ iter.ReadArray()
+ iter.Skip()
+ iter.ReadArray()
+ if iter.ReadString() != "stream" {
+ t.FailNow()
+ }
+}
+
+func Test_skip_array(t *testing.T) {
+ iter := ParseString(ConfigDefault, `[[1, [2, [3], 4]], "stream"]`)
+ iter.ReadArray()
+ iter.Skip()
+ iter.ReadArray()
+ if iter.ReadString() != "stream" {
+ t.FailNow()
+ }
+}
+
+func Test_skip_empty_array(t *testing.T) {
+ iter := ParseString(ConfigDefault, `[ [ ], "stream"]`)
+ iter.ReadArray()
+ iter.Skip()
+ iter.ReadArray()
+ if iter.ReadString() != "stream" {
+ t.FailNow()
+ }
+}
+
+func Test_skip_nested(t *testing.T) {
+ iter := ParseString(ConfigDefault, `[ {"a" : [{"stream": "c"}], "d": 102 }, "stream"]`)
+ iter.ReadArray()
+ iter.Skip()
+ iter.ReadArray()
+ if iter.ReadString() != "stream" {
+ t.FailNow()
+ }
+}
+
+func Test_skip_and_return_bytes(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `[ {"a" : [{"stream": "c"}], "d": 102 }, "stream"]`)
+ iter.ReadArray()
+ skipped := iter.SkipAndReturnBytes()
+ should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped))
+}
+
+func Test_skip_and_return_bytes_with_reader(t *testing.T) {
+ should := require.New(t)
+ iter := Parse(ConfigDefault, bytes.NewBufferString(`[ {"a" : [{"stream": "c"}], "d": 102 }, "stream"]`), 4)
+ iter.ReadArray()
+ skipped := iter.SkipAndReturnBytes()
+ should.Equal(`{"a" : [{"stream": "c"}], "d": 102 }`, string(skipped))
+}
+
+func Test_skip_empty(t *testing.T) {
+ should := require.New(t)
+ should.NotNil(Get([]byte("")).LastError())
+}
+
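+// TestResp is shared by the skip benchmarks below; everything except "code" is skipped.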
+type TestResp struct {
+ Code uint64
+}
+
+func Benchmark_jsoniter_skip(b *testing.B) {
+ input := []byte(`
+{
+ "_shards":{
+ "total" : 5,
+ "successful" : 5,
+ "failed" : 0
+ },
+ "hits":{
+ "total" : 1,
+ "hits" : [
+ {
+ "_index" : "twitter",
+ "_type" : "tweet",
+ "_id" : "1",
+ "_source" : {
+ "user" : "kimchy",
+ "postDate" : "2009-11-15T14:12:12",
+ "message" : "trying out Elasticsearch"
+ }
+ }
+ ]
+ },
+ "code": 200
+}`)
+ for n := 0; n < b.N; n++ {
+ result := TestResp{}
+ iter := ParseBytes(ConfigDefault, input)
+ for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
+ switch field {
+ case "code":
+ result.Code = iter.ReadUint64()
+ default:
+ iter.Skip()
+ }
+ }
+ }
+}
+
+func Benchmark_json_skip(b *testing.B) {
+ input := []byte(`
+{
+ "_shards":{
+ "total" : 5,
+ "successful" : 5,
+ "failed" : 0
+ },
+ "hits":{
+ "total" : 1,
+ "hits" : [
+ {
+ "_index" : "twitter",
+ "_type" : "tweet",
+ "_id" : "1",
+ "_source" : {
+ "user" : "kimchy",
+ "postDate" : "2009-11-15T14:12:12",
+ "message" : "trying out Elasticsearch"
+ }
+ }
+ ]
+ },
+ "code": 200
+}`)
+ for n := 0; n < b.N; n++ {
+ result := TestResp{}
+ json.Unmarshal(input, &result)
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_sloppy_test.go b/vendor/github.com/json-iterator/go/jsoniter_sloppy_test.go
new file mode 100644
index 000000000..487713ae7
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_sloppy_test.go
@@ -0,0 +1,162 @@
+// +build jsoniter-sloppy
+
+package jsoniter
+
+import (
+ "github.com/stretchr/testify/require"
+ "io"
+ "testing"
+)
+
+func Test_string_end(t *testing.T) {
+ end, escaped := ParseString(ConfigDefault, `abc"`).findStringEnd()
+ if end != 4 {
+ t.Fatal(end)
+ }
+ if escaped != false {
+ t.Fatal(escaped)
+ }
+ end, escaped = ParseString(ConfigDefault, `abc\\"`).findStringEnd()
+ if end != 6 {
+ t.Fatal(end)
+ }
+ if escaped != true {
+ t.Fatal(escaped)
+ }
+ end, escaped = ParseString(ConfigDefault, `abc\\\\"`).findStringEnd()
+ if end != 8 {
+ t.Fatal(end)
+ }
+ if escaped != true {
+ t.Fatal(escaped)
+ }
+ end, escaped = ParseString(ConfigDefault, `abc\"`).findStringEnd()
+ if end != -1 {
+ t.Fatal(end)
+ }
+ if escaped != false {
+ t.Fatal(escaped)
+ }
+ end, escaped = ParseString(ConfigDefault, `abc\`).findStringEnd()
+ if end != -1 {
+ t.Fatal(end)
+ }
+ if escaped != true {
+ t.Fatal(escaped)
+ }
+ end, escaped = ParseString(ConfigDefault, `abc\\`).findStringEnd()
+ if end != -1 {
+ t.Fatal(end)
+ }
+ if escaped != false {
+ t.Fatal(escaped)
+ }
+ end, escaped = ParseString(ConfigDefault, `\\`).findStringEnd()
+ if end != -1 {
+ t.Fatal(end)
+ }
+ if escaped != false {
+ t.Fatal(escaped)
+ }
+ end, escaped = ParseString(ConfigDefault, `\`).findStringEnd()
+ if end != -1 {
+ t.Fatal(end)
+ }
+ if escaped != true {
+ t.Fatal(escaped)
+ }
+}
+
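+// StagedReader returns r1, r2 and r3 on successive Read calls and then io.EOF,
+// simulating input that arrives from the underlying reader in several chunks.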
+type StagedReader struct {
+ r1 string
+ r2 string
+ r3 string
+ r int
+}
+
+func (reader *StagedReader) Read(p []byte) (n int, err error) {
+ reader.r++
+ switch reader.r {
+ case 1:
+ copy(p, []byte(reader.r1))
+ return len(reader.r1), nil
+ case 2:
+ copy(p, []byte(reader.r2))
+ return len(reader.r2), nil
+ case 3:
+ copy(p, []byte(reader.r3))
+ return len(reader.r3), nil
+ default:
+ return 0, io.EOF
+ }
+}
+
+func Test_skip_string(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `"abc`)
+ iter.skipString()
+ should.Equal(1, iter.head)
+ iter = ParseString(ConfigDefault, `\""abc`)
+ iter.skipString()
+ should.Equal(3, iter.head)
+ reader := &StagedReader{
+ r1: `abc`,
+ r2: `"`,
+ }
+ iter = Parse(ConfigDefault, reader, 4096)
+ iter.skipString()
+ should.Equal(1, iter.head)
+ reader = &StagedReader{
+ r1: `abc`,
+ r2: `1"`,
+ }
+ iter = Parse(ConfigDefault, reader, 4096)
+ iter.skipString()
+ should.Equal(2, iter.head)
+ reader = &StagedReader{
+ r1: `abc\`,
+ r2: `"`,
+ }
+ iter = Parse(ConfigDefault, reader, 4096)
+ iter.skipString()
+ should.NotNil(iter.Error)
+ reader = &StagedReader{
+ r1: `abc\`,
+ r2: `""`,
+ }
+ iter = Parse(ConfigDefault, reader, 4096)
+ iter.skipString()
+ should.Equal(2, iter.head)
+}
+
+func Test_skip_object(t *testing.T) {
+ iter := ParseString(ConfigDefault, `}`)
+ iter.skipObject()
+ if iter.head != 1 {
+ t.Fatal(iter.head)
+ }
+ iter = ParseString(ConfigDefault, `a}`)
+ iter.skipObject()
+ if iter.head != 2 {
+ t.Fatal(iter.head)
+ }
+ iter = ParseString(ConfigDefault, `{}}a`)
+ iter.skipObject()
+ if iter.head != 3 {
+ t.Fatal(iter.head)
+ }
+ reader := &StagedReader{
+ r1: `{`,
+ r2: `}}a`,
+ }
+ iter = Parse(ConfigDefault, reader, 4096)
+ iter.skipObject()
+ if iter.head != 2 {
+ t.Fatal(iter.head)
+ }
+ iter = ParseString(ConfigDefault, `"}"}a`)
+ iter.skipObject()
+ if iter.head != 4 {
+ t.Fatal(iter.head)
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_stream_test.go b/vendor/github.com/json-iterator/go/jsoniter_stream_test.go
new file mode 100644
index 000000000..8df96b9f3
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_stream_test.go
@@ -0,0 +1,69 @@
+package jsoniter
+
+import (
+ "github.com/stretchr/testify/require"
+ "testing"
+)
+
+func Test_writeByte_should_grow_buffer(t *testing.T) {
+ should := require.New(t)
+ stream := NewStream(ConfigDefault, nil, 1)
+ stream.writeByte('1')
+ should.Equal("1", string(stream.Buffer()))
+ should.Equal(1, len(stream.buf))
+ stream.writeByte('2')
+ should.Equal("12", string(stream.Buffer()))
+ should.Equal(2, len(stream.buf))
+ stream.writeThreeBytes('3', '4', '5')
+ should.Equal("12345", string(stream.Buffer()))
+}
+
+func Test_writeBytes_should_grow_buffer(t *testing.T) {
+ should := require.New(t)
+ stream := NewStream(ConfigDefault, nil, 1)
+ stream.Write([]byte{'1', '2'})
+ should.Equal("12", string(stream.Buffer()))
+ should.Equal(3, len(stream.buf))
+ stream.Write([]byte{'3', '4', '5', '6', '7'})
+ should.Equal("1234567", string(stream.Buffer()))
+ should.Equal(8, len(stream.buf))
+}
+
+func Test_writeIndention_should_grow_buffer(t *testing.T) {
+ should := require.New(t)
+ stream := NewStream(Config{IndentionStep: 2}.Froze(), nil, 1)
+ stream.WriteVal([]int{1, 2, 3})
+ should.Equal("[\n 1,\n 2,\n 3\n]", string(stream.Buffer()))
+}
+
+func Test_writeRaw_should_grow_buffer(t *testing.T) {
+ should := require.New(t)
+ stream := NewStream(ConfigDefault, nil, 1)
+ stream.WriteRaw("123")
+ should.Nil(stream.Error)
+ should.Equal("123", string(stream.Buffer()))
+}
+
+func Test_writeString_should_grow_buffer(t *testing.T) {
+ should := require.New(t)
+ stream := NewStream(ConfigDefault, nil, 0)
+ stream.WriteString("123")
+ should.Nil(stream.Error)
+ should.Equal(`"123"`, string(stream.Buffer()))
+}
+
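+// NopWriter discards everything written to it and records the capacity of the buffer
+// it was handed, letting the test observe how large the stream buffer grew.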
+type NopWriter struct {
+ bufferSize int
+}
+
+func (w *NopWriter) Write(p []byte) (n int, err error) {
+ w.bufferSize = cap(p)
+ return len(p), nil
+}
+
+func Test_flush_buffer_should_stop_grow_buffer(t *testing.T) {
+ writer := new(NopWriter)
+ NewEncoder(writer).Encode(make([]int, 10000000))
+ should := require.New(t)
+ should.Equal(512, writer.bufferSize)
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_string_test.go b/vendor/github.com/json-iterator/go/jsoniter_string_test.go
new file mode 100644
index 000000000..61c0f33b3
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_string_test.go
@@ -0,0 +1,261 @@
+// +build go1.8
+
+package jsoniter
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "testing"
+ "unicode/utf8"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_read_string(t *testing.T) {
+ badInputs := []string{
+ ``,
+ `"`,
+ `"\"`,
+ `"\\\"`,
+ "\"\n\"",
+ `"\U0001f64f"`,
+ `"\uD83D\u00"`,
+ }
+ for i := 0; i < 32; i++ {
+ // control characters are invalid
+ badInputs = append(badInputs, string([]byte{'"', byte(i), '"'}))
+ }
+
+ for _, input := range badInputs {
+ testReadString(t, input, "", true, "json.Unmarshal", json.Unmarshal)
+ testReadString(t, input, "", true, "jsoniter.Unmarshal", Unmarshal)
+ testReadString(t, input, "", true, "jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal", ConfigCompatibleWithStandardLibrary.Unmarshal)
+ }
+
+ goodInputs := []struct {
+ input string
+ expectValue string
+ }{
+ {`""`, ""},
+ {`"a"`, "a"},
+ {`null`, ""},
+ {`"Iñtërnâtiônàlizætiøn,💝🐹🌇⛔"`, "Iñtërnâtiônàlizætiøn,💝🐹🌇⛔"},
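+		// unpaired or malformed surrogate escapes decode to U+FFFD (bytes 239 191 189),
+		// matching encoding/json; a well-formed surrogate pair decodes to the real rune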
+ {`"\uD83D"`, string([]byte{239, 191, 189})},
+ {`"\uD83D\\"`, string([]byte{239, 191, 189, '\\'})},
+ {`"\uD83D\ub000"`, string([]byte{239, 191, 189, 235, 128, 128})},
+ {`"\uD83D\ude04"`, "😄"},
+ {`"\uDEADBEEF"`, string([]byte{239, 191, 189, 66, 69, 69, 70})},
+ }
+
+ for _, tc := range goodInputs {
+ testReadString(t, tc.input, tc.expectValue, false, "json.Unmarshal", json.Unmarshal)
+ testReadString(t, tc.input, tc.expectValue, false, "jsoniter.Unmarshal", Unmarshal)
+ testReadString(t, tc.input, tc.expectValue, false, "jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal", ConfigCompatibleWithStandardLibrary.Unmarshal)
+ }
+}
+
+func testReadString(t *testing.T, input string, expectValue string, expectError bool, unmarshalerName string, unmarshaler func([]byte, interface{}) error) {
+	var value string
+	err := unmarshaler([]byte(input), &value)
+	if expectError != (err != nil) {
+		t.Errorf("%q: %s: expected error %v, got %v", input, unmarshalerName, expectError, err)
+		return
+	}
+	if value != expectValue {
+		t.Errorf("%q: %s: expected %q, got %q", input, unmarshalerName, expectValue, value)
+		return
+	}
+}
+
+func Test_read_normal_string(t *testing.T) {
+ cases := map[string]string{
+ `"0123456789012345678901234567890123456789"`: `0123456789012345678901234567890123456789`,
+ `""`: ``,
+ `"hello"`: `hello`,
+ }
+ for input, output := range cases {
+ t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, input)
+ should.Equal(output, iter.ReadString())
+ })
+ t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
+ should := require.New(t)
+ iter := Parse(ConfigDefault, bytes.NewBufferString(input), 2)
+ should.Equal(output, iter.ReadString())
+ })
+ t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, input)
+ should.Equal(output, string(iter.ReadStringAsSlice()))
+ })
+ t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
+ should := require.New(t)
+ iter := Parse(ConfigDefault, bytes.NewBufferString(input), 2)
+ should.Equal(output, string(iter.ReadStringAsSlice()))
+ })
+ }
+}
+
+func Test_read_exotic_string(t *testing.T) {
+ cases := map[string]string{
+ `"hel\"lo"`: `hel"lo`,
+ `"hel\\\/lo"`: `hel\/lo`,
+ `"hel\\blo"`: `hel\blo`,
+ `"hel\\\blo"`: "hel\\\blo",
+ `"hel\\nlo"`: `hel\nlo`,
+ `"hel\\\nlo"`: "hel\\\nlo",
+ `"hel\\tlo"`: `hel\tlo`,
+ `"hel\\flo"`: `hel\flo`,
+ `"hel\\\flo"`: "hel\\\flo",
+ `"hel\\\rlo"`: "hel\\\rlo",
+ `"hel\\\tlo"`: "hel\\\tlo",
+ `"\u4e2d\u6587"`: "中文",
+ `"\ud83d\udc4a"`: "\xf0\x9f\x91\x8a", // surrogate
+ }
+ for input, output := range cases {
+ t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, input)
+ var v string
+ should.Nil(json.Unmarshal([]byte(input), &v))
+ should.Equal(v, iter.ReadString())
+ })
+ t.Run(fmt.Sprintf("%v:%v", input, output), func(t *testing.T) {
+ should := require.New(t)
+ iter := Parse(ConfigDefault, bytes.NewBufferString(input), 2)
+ should.Equal(output, iter.ReadString())
+ })
+ }
+}
+
+func Test_read_string_as_interface(t *testing.T) {
+ should := require.New(t)
+ iter := ParseString(ConfigDefault, `"hello"`)
+ should.Equal("hello", iter.Read())
+}
+
+func Test_write_string(t *testing.T) {
+ should := require.New(t)
+ str, err := MarshalToString("hello")
+ should.Equal(`"hello"`, str)
+ should.Nil(err)
+ str, err = MarshalToString(`hel"lo`)
+ should.Equal(`"hel\"lo"`, str)
+ should.Nil(err)
+}
+
+func Test_write_val_string(t *testing.T) {
+ should := require.New(t)
+ buf := &bytes.Buffer{}
+ stream := NewStream(ConfigDefault, buf, 4096)
+ stream.WriteVal("hello")
+ stream.Flush()
+ should.Nil(stream.Error)
+ should.Equal(`"hello"`, buf.String())
+}
+
+func Test_decode_slash(t *testing.T) {
+ should := require.New(t)
+ var obj interface{}
+ should.NotNil(json.Unmarshal([]byte("\\"), &obj))
+ should.NotNil(UnmarshalFromString("\\", &obj))
+}
+
+func Test_html_escape(t *testing.T) {
+ should := require.New(t)
+ output, err := json.Marshal(`>`)
+ should.Nil(err)
+ should.Equal(`"\u003e"`, string(output))
+ output, err = ConfigCompatibleWithStandardLibrary.Marshal(`>`)
+ should.Nil(err)
+ should.Equal(`"\u003e"`, string(output))
+ type MyString string
+ output, err = ConfigCompatibleWithStandardLibrary.Marshal(MyString(`>`))
+ should.Nil(err)
+ should.Equal(`"\u003e"`, string(output))
+}
+
+func Test_string_encode_with_std(t *testing.T) {
+ should := require.New(t)
+ for i := 0; i < utf8.RuneSelf; i++ {
+ input := string([]byte{byte(i)})
+ stdOutputBytes, err := json.Marshal(input)
+ should.Nil(err)
+ stdOutput := string(stdOutputBytes)
+ jsoniterOutputBytes, err := ConfigCompatibleWithStandardLibrary.Marshal(input)
+ should.Nil(err)
+ jsoniterOutput := string(jsoniterOutputBytes)
+ should.Equal(stdOutput, jsoniterOutput)
+ }
+}
+
+func Test_unicode(t *testing.T) {
+ should := require.New(t)
+ output, _ := MarshalToString(map[string]interface{}{"a": "数字山谷"})
+ should.Equal(`{"a":"数字山谷"}`, output)
+ output, _ = Config{EscapeHTML: false}.Froze().MarshalToString(map[string]interface{}{"a": "数字山谷"})
+ should.Equal(`{"a":"数字山谷"}`, output)
+}
+
+func Test_unicode_and_escape(t *testing.T) {
+ should := require.New(t)
+ output, err := MarshalToString(`"数字山谷"`)
+ should.Nil(err)
+ should.Equal(`"\"数字山谷\""`, output)
+ output, err = ConfigFastest.MarshalToString(`"数字山谷"`)
+ should.Nil(err)
+ should.Equal(`"\"数字山谷\""`, output)
+}
+
+func Test_unsafe_unicode(t *testing.T) {
+ ConfigDefault.(*frozenConfig).cleanEncoders()
+ should := require.New(t)
+ output, err := ConfigDefault.MarshalToString("he\u2029\u2028he")
+ should.Nil(err)
+ should.Equal(`"he\u2029\u2028he"`, output)
+ output, err = ConfigFastest.MarshalToString("he\u2029\u2028he")
+ should.Nil(err)
+ should.Equal("\"he\u2029\u2028he\"", output)
+}
+
+func Benchmark_jsoniter_unicode(b *testing.B) {
+ for n := 0; n < b.N; n++ {
+ iter := ParseString(ConfigDefault, `"\ud83d\udc4a"`)
+ iter.ReadString()
+ }
+}
+
+func Benchmark_jsoniter_ascii(b *testing.B) {
+ iter := NewIterator(ConfigDefault)
+ input := []byte(`"hello, world! hello, world!"`)
+ b.ResetTimer()
+ for n := 0; n < b.N; n++ {
+ iter.ResetBytes(input)
+ iter.ReadString()
+ }
+}
+
+func Benchmark_jsoniter_string_as_bytes(b *testing.B) {
+ iter := ParseString(ConfigDefault, `"hello, world!"`)
+ b.ResetTimer()
+ for n := 0; n < b.N; n++ {
+ iter.ResetBytes(iter.buf)
+ iter.ReadStringAsSlice()
+ }
+}
+
+func Benchmark_json_unicode(b *testing.B) {
+ for n := 0; n < b.N; n++ {
+ result := ""
+ json.Unmarshal([]byte(`"\ud83d\udc4a"`), &result)
+ }
+}
+
+func Benchmark_json_ascii(b *testing.B) {
+ for n := 0; n < b.N; n++ {
+ result := ""
+ json.Unmarshal([]byte(`"hello"`), &result)
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_struct_decoder_test.go b/vendor/github.com/json-iterator/go/jsoniter_struct_decoder_test.go
new file mode 100644
index 000000000..257bbdb65
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_struct_decoder_test.go
@@ -0,0 +1,267 @@
+package jsoniter
+
+import (
+ "github.com/stretchr/testify/require"
+ "testing"
+)
+
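+// The tests below cover struct decoding for one through ten fields, for more than
+// ten fields, and for fields carrying json struct tags.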
+func Test_decode_one_field_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ }
+ obj := TestObject{}
+ should.Nil(UnmarshalFromString(`{}`, &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(UnmarshalFromString(`{"field1": "hello"}`, &obj))
+ should.Equal("hello", obj.Field1)
+}
+
+func Test_decode_two_fields_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ Field2 string
+ }
+ obj := TestObject{}
+ should.Nil(UnmarshalFromString(`{}`, &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "stream"}`, &obj))
+ should.Equal("a", obj.Field1)
+ should.Equal("stream", obj.Field2)
+}
+
+func Test_decode_three_fields_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ Field2 string
+ Field3 string
+ }
+ obj := TestObject{}
+ should.Nil(UnmarshalFromString(`{}`, &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "stream", "Field3": "c"}`, &obj))
+ should.Equal("a", obj.Field1)
+ should.Equal("stream", obj.Field2)
+ should.Equal("c", obj.Field3)
+}
+
+func Test_decode_four_fields_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ Field2 string
+ Field3 string
+ Field4 string
+ }
+ obj := TestObject{}
+ should.Nil(UnmarshalFromString(`{}`, &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "stream", "Field3": "c", "Field4": "d"}`, &obj))
+ should.Equal("a", obj.Field1)
+ should.Equal("stream", obj.Field2)
+ should.Equal("c", obj.Field3)
+ should.Equal("d", obj.Field4)
+}
+
+func Test_decode_five_fields_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ Field2 string
+ Field3 string
+ Field4 string
+ Field5 string
+ }
+ obj := TestObject{}
+ should.Nil(UnmarshalFromString(`{}`, &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "stream", "Field3": "c", "Field4": "d", "Field5": "e"}`, &obj))
+ should.Equal("a", obj.Field1)
+ should.Equal("stream", obj.Field2)
+ should.Equal("c", obj.Field3)
+ should.Equal("d", obj.Field4)
+ should.Equal("e", obj.Field5)
+}
+
+func Test_decode_six_fields_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ Field2 string
+ Field3 string
+ Field4 string
+ Field5 string
+ Field6 string
+ }
+ obj := TestObject{}
+ should.Nil(UnmarshalFromString(`{}`, &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "stream", "Field3": "c", "Field4": "d", "Field5": "e", "Field6": "x"}`, &obj))
+ should.Equal("a", obj.Field1)
+ should.Equal("stream", obj.Field2)
+ should.Equal("c", obj.Field3)
+ should.Equal("d", obj.Field4)
+ should.Equal("e", obj.Field5)
+ should.Equal("x", obj.Field6)
+}
+
+func Test_decode_seven_fields_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ Field2 string
+ Field3 string
+ Field4 string
+ Field5 string
+ Field6 string
+ Field7 string
+ }
+ obj := TestObject{}
+ should.Nil(UnmarshalFromString(`{}`, &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(UnmarshalFromString(`{"Field1": "a", "Field2": "stream", "Field3": "c", "Field4": "d", "Field5": "e", "Field6": "x", "Field7":"y"}`, &obj))
+ should.Equal("a", obj.Field1)
+ should.Equal("stream", obj.Field2)
+ should.Equal("c", obj.Field3)
+ should.Equal("d", obj.Field4)
+ should.Equal("e", obj.Field5)
+ should.Equal("x", obj.Field6)
+ should.Equal("y", obj.Field7)
+}
+
+func Test_decode_eight_fields_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ Field2 string
+ Field3 string
+ Field4 string
+ Field5 string
+ Field6 string
+ Field7 string
+ Field8 string
+ }
+ obj := TestObject{}
+ should.Nil(UnmarshalFromString(`{}`, &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(UnmarshalFromString(`{"Field8":"1", "Field1": "a", "Field2": "stream", "Field3": "c", "Field4": "d", "Field5": "e", "Field6": "x", "Field7":"y"}`, &obj))
+ should.Equal("a", obj.Field1)
+ should.Equal("stream", obj.Field2)
+ should.Equal("c", obj.Field3)
+ should.Equal("d", obj.Field4)
+ should.Equal("e", obj.Field5)
+ should.Equal("x", obj.Field6)
+ should.Equal("y", obj.Field7)
+ should.Equal("1", obj.Field8)
+}
+
+func Test_decode_nine_fields_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ Field2 string
+ Field3 string
+ Field4 string
+ Field5 string
+ Field6 string
+ Field7 string
+ Field8 string
+ Field9 string
+ }
+ obj := TestObject{}
+ should.Nil(UnmarshalFromString(`{}`, &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(UnmarshalFromString(`{"Field8" : "zzzzzzzzzzz", "Field7": "zz", "Field6" : "xx", "Field1": "a", "Field2": "stream", "Field3": "c", "Field4": "d", "Field5": "e", "Field9":"f"}`, &obj))
+ should.Equal("a", obj.Field1)
+ should.Equal("stream", obj.Field2)
+ should.Equal("c", obj.Field3)
+ should.Equal("d", obj.Field4)
+ should.Equal("e", obj.Field5)
+ should.Equal("xx", obj.Field6)
+ should.Equal("zz", obj.Field7)
+ should.Equal("zzzzzzzzzzz", obj.Field8)
+ should.Equal("f", obj.Field9)
+}
+
+func Test_decode_ten_fields_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ Field2 string
+ Field3 string
+ Field4 string
+ Field5 string
+ Field6 string
+ Field7 string
+ Field8 string
+ Field9 string
+ Field10 string
+ }
+ obj := TestObject{}
+ should.Nil(UnmarshalFromString(`{}`, &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(UnmarshalFromString(`{"Field10":"x", "Field9": "x", "Field8":"x", "Field7":"x", "Field6":"x", "Field1": "a", "Field2": "stream", "Field3": "c", "Field4": "d", "Field5": "e"}`, &obj))
+ should.Equal("a", obj.Field1)
+ should.Equal("stream", obj.Field2)
+ should.Equal("c", obj.Field3)
+ should.Equal("d", obj.Field4)
+ should.Equal("e", obj.Field5)
+ should.Equal("x", obj.Field6)
+ should.Equal("x", obj.Field7)
+ should.Equal("x", obj.Field8)
+ should.Equal("x", obj.Field9)
+ should.Equal("x", obj.Field10)
+}
+
+func Test_decode_more_than_ten_fields_struct(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string
+ Field2 string
+ Field3 string
+ Field4 string
+ Field5 string
+ Field6 string
+ Field7 string
+ Field8 string
+ Field9 string
+ Field10 string
+ Field11 int
+ }
+ obj := TestObject{}
+ should.Nil(UnmarshalFromString(`{}`, &obj))
+ should.Equal("", obj.Field1)
+ should.Nil(UnmarshalFromString(`{"field11":1, "field1": "a", "Field2": "stream", "Field3": "c", "Field4": "d", "Field5": "e"}`, &obj))
+ should.Equal("a", obj.Field1)
+ should.Equal("stream", obj.Field2)
+ should.Equal("c", obj.Field3)
+ should.Equal("d", obj.Field4)
+ should.Equal("e", obj.Field5)
+ should.Equal(1, obj.Field11)
+}
+
+func Test_decode_struct_field_with_tag(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 string `json:"field-1"`
+ Field2 string `json:"-"`
+ Field3 int `json:",string"`
+ }
+ obj := TestObject{Field2: "world"}
+	should.Nil(UnmarshalFromString(`{"field-1": "hello", "field2": "", "Field3": "100"}`, &obj))
+ should.Equal("hello", obj.Field1)
+ should.Equal("world", obj.Field2)
+ should.Equal(100, obj.Field3)
+}
+
+func Test_decode_struct_field_with_tag_string(t *testing.T) {
+ should := require.New(t)
+ type TestObject struct {
+ Field1 int `json:",string"`
+ }
+ obj := TestObject{Field1: 100}
+ should.Nil(UnmarshalFromString(`{"Field1": "100"}`, &obj))
+ should.Equal(100, obj.Field1)
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_struct_encoder_test.go b/vendor/github.com/json-iterator/go/jsoniter_struct_encoder_test.go
new file mode 100644
index 000000000..0e3e54188
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_struct_encoder_test.go
@@ -0,0 +1,52 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_encode_unexported_field(t *testing.T) {
+ type TestData struct {
+ a int
+ b <-chan int
+ C int
+ d *time.Timer
+ }
+
+ should := require.New(t)
+
+ testChan := make(<-chan int, 10)
+ testTimer := time.NewTimer(10 * time.Second)
+
+ obj := &TestData{
+ a: 42,
+ b: testChan,
+ C: 21,
+ d: testTimer,
+ }
+
+ jb, err := json.Marshal(obj)
+ should.NoError(err)
+ should.Equal([]byte(`{"C":21}`), jb)
+
+ err = json.Unmarshal([]byte(`{"a": 444, "b":"bad", "C":55, "d":{"not": "a timer"}}`), obj)
+ should.NoError(err)
+ should.Equal(42, obj.a)
+ should.Equal(testChan, obj.b)
+ should.Equal(55, obj.C)
+ should.Equal(testTimer, obj.d)
+
+ jb, err = Marshal(obj)
+ should.NoError(err)
+ should.Equal(jb, []byte(`{"C":55}`))
+
+ err = Unmarshal([]byte(`{"a": 444, "b":"bad", "C":256, "d":{"not":"a timer"}}`), obj)
+ should.NoError(err)
+ should.Equal(42, obj.a)
+ should.Equal(testChan, obj.b)
+ should.Equal(256, obj.C)
+ should.Equal(testTimer, obj.d)
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter_wrap_test.go b/vendor/github.com/json-iterator/go/jsoniter_wrap_test.go
new file mode 100644
index 000000000..b300fb591
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter_wrap_test.go
@@ -0,0 +1,118 @@
+package jsoniter
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_wrap_and_valuetype_everything(t *testing.T) {
+ should := require.New(t)
+ var i interface{}
+ any := Get([]byte("123"))
+ // default of number type is float64
+ i = float64(123)
+ should.Equal(i, any.GetInterface())
+
+ any = Wrap(int8(10))
+ should.Equal(any.ValueType(), NumberValue)
+ should.Equal(any.LastError(), nil)
+ // get interface is not int8 interface
+ // i = int8(10)
+ // should.Equal(i, any.GetInterface())
+
+ any = Wrap(int16(10))
+ should.Equal(any.ValueType(), NumberValue)
+ should.Equal(any.LastError(), nil)
+ //i = int16(10)
+ //should.Equal(i, any.GetInterface())
+
+ any = Wrap(int32(10))
+ should.Equal(any.ValueType(), NumberValue)
+ should.Equal(any.LastError(), nil)
+ i = int32(10)
+ should.Equal(i, any.GetInterface())
+ any = Wrap(int64(10))
+ should.Equal(any.ValueType(), NumberValue)
+ should.Equal(any.LastError(), nil)
+ i = int64(10)
+ should.Equal(i, any.GetInterface())
+
+ any = Wrap(uint(10))
+ should.Equal(any.ValueType(), NumberValue)
+ should.Equal(any.LastError(), nil)
+ // not equal
+ //i = uint(10)
+ //should.Equal(i, any.GetInterface())
+ any = Wrap(uint8(10))
+ should.Equal(any.ValueType(), NumberValue)
+ should.Equal(any.LastError(), nil)
+ // not equal
+ // i = uint8(10)
+ // should.Equal(i, any.GetInterface())
+ any = Wrap(uint16(10))
+ should.Equal(any.ValueType(), NumberValue)
+ should.Equal(any.LastError(), nil)
+ any = Wrap(uint32(10))
+ should.Equal(any.ValueType(), NumberValue)
+ should.Equal(any.LastError(), nil)
+ i = uint32(10)
+ should.Equal(i, any.GetInterface())
+ any = Wrap(uint64(10))
+ should.Equal(any.ValueType(), NumberValue)
+ should.Equal(any.LastError(), nil)
+ i = uint64(10)
+ should.Equal(i, any.GetInterface())
+
+ any = Wrap(float32(10))
+ should.Equal(any.ValueType(), NumberValue)
+ should.Equal(any.LastError(), nil)
+ // not equal
+ //i = float32(10)
+ //should.Equal(i, any.GetInterface())
+ any = Wrap(float64(10))
+ should.Equal(any.ValueType(), NumberValue)
+ should.Equal(any.LastError(), nil)
+ i = float64(10)
+ should.Equal(i, any.GetInterface())
+
+ any = Wrap(true)
+ should.Equal(any.ValueType(), BoolValue)
+ should.Equal(any.LastError(), nil)
+ i = true
+ should.Equal(i, any.GetInterface())
+ any = Wrap(false)
+ should.Equal(any.ValueType(), BoolValue)
+ should.Equal(any.LastError(), nil)
+ i = false
+ should.Equal(i, any.GetInterface())
+
+ any = Wrap(nil)
+ should.Equal(any.ValueType(), NilValue)
+ should.Equal(any.LastError(), nil)
+ i = nil
+ should.Equal(i, any.GetInterface())
+
+ stream := NewStream(ConfigDefault, nil, 32)
+ any.WriteTo(stream)
+ should.Equal("null", string(stream.Buffer()))
+ should.Equal(any.LastError(), nil)
+
+ any = Wrap(struct{ age int }{age: 1})
+ should.Equal(any.ValueType(), ObjectValue)
+ should.Equal(any.LastError(), nil)
+ i = struct{ age int }{age: 1}
+ should.Equal(i, any.GetInterface())
+
+ any = Wrap(map[string]interface{}{"abc": 1})
+ should.Equal(any.ValueType(), ObjectValue)
+ should.Equal(any.LastError(), nil)
+ i = map[string]interface{}{"abc": 1}
+ should.Equal(i, any.GetInterface())
+
+ any = Wrap("abc")
+ i = "abc"
+ should.Equal(i, any.GetInterface())
+ should.Equal(nil, any.LastError())
+
+}
diff --git a/vendor/github.com/json-iterator/go/test.sh b/vendor/github.com/json-iterator/go/test.sh
new file mode 100755
index 000000000..466f1141a
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/test.sh
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+set -e
+echo "" > coverage.txt
+
+for d in $(go list ./... | grep -v vendor); do
+ go test -coverprofile=profile.out $d
+ if [ -f profile.out ]; then
+ cat profile.out >> coverage.txt
+ rm profile.out
+ fi
+done
diff --git a/vendor/github.com/json-iterator/go/unmarshal_input_test.go b/vendor/github.com/json-iterator/go/unmarshal_input_test.go
new file mode 100644
index 000000000..9d7b99c5f
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/unmarshal_input_test.go
@@ -0,0 +1,72 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "reflect"
+ "testing"
+
+ fuzz "github.com/google/gofuzz"
+)
+
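+// The fuzz tests below feed random bytes and strings to Unmarshal and expect an
+// error unless encoding/json accepts the same input (see testRandomInputTo).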
+func Test_NilInput(t *testing.T) {
+ var jb []byte // nil
+ var out string
+ err := Unmarshal(jb, &out)
+ if err == nil {
+ t.Errorf("Expected error")
+ }
+}
+
+func Test_EmptyInput(t *testing.T) {
+ jb := []byte("")
+ var out string
+ err := Unmarshal(jb, &out)
+ if err == nil {
+ t.Errorf("Expected error")
+ }
+}
+
+func Test_RandomInput_Bytes(t *testing.T) {
+ fz := fuzz.New().NilChance(0)
+ for i := 0; i < 10000; i++ {
+ var jb []byte
+ fz.Fuzz(&jb)
+ testRandomInput(t, jb)
+ }
+}
+
+func Test_RandomInput_String(t *testing.T) {
+ fz := fuzz.New().NilChance(0)
+ for i := 0; i < 10000; i++ {
+ var js string
+ fz.Fuzz(&js)
+ jb := []byte(js)
+ testRandomInput(t, jb)
+ }
+}
+
+func testRandomInput(t *testing.T, jb []byte) {
+ var outString string
+ testRandomInputTo(t, jb, &outString)
+
+ var outInt int
+ testRandomInputTo(t, jb, &outInt)
+
+ var outStruct struct{}
+ testRandomInputTo(t, jb, &outStruct)
+
+ var outSlice []string
+ testRandomInputTo(t, jb, &outSlice)
+}
+
+func testRandomInputTo(t *testing.T, jb []byte, out interface{}) {
+ err := Unmarshal(jb, out)
+ if err == nil {
+ // Cross-check stdlib to see if we just happened to fuzz a legit value.
+ err := json.Unmarshal(jb, out)
+ if err != nil {
+ t.Fatalf("Expected error unmarshaling as %s:\nas string: %q\nas bytes: %v",
+ reflect.TypeOf(out).Elem().Kind(), string(jb), jb)
+ }
+ }
+}