Mirror of https://github.com/datarhei/core.git (synced 2025-10-05 07:57:13 +08:00)

Commit: Use faster JSON and gzip packages
@@ -50,8 +50,8 @@ import (
 	"github.com/datarhei/core/v16/srt"
 	srturl "github.com/datarhei/core/v16/srt/url"
 	"github.com/datarhei/core/v16/update"
-	"github.com/google/gops/agent"
 
+	"github.com/google/gops/agent"
 	"github.com/lestrrat-go/strftime"
 	"go.uber.org/automaxprocs/maxprocs"
 )

@@ -4,7 +4,6 @@ package main
 // TODO: import von internal RTMP (external.stream), wenn jemand z.B. von OBS reinschiesst
 
 import (
-	gojson "encoding/json"
 	"fmt"
 	"math"
 	"net/url"
@@ -511,7 +510,7 @@ func importV1(fs fs.Filesystem, path string, cfg importConfig) (store.Data, erro
 
 	var v1data storeDataV1
 
-	if err := gojson.Unmarshal(jsondata, &v1data); err != nil {
+	if err := json.Unmarshal(jsondata, &v1data); err != nil {
 		return r, json.FormatError(jsondata, err)
 	}
 
@@ -1196,9 +1195,9 @@ func importV1(fs fs.Filesystem, path string, cfg importConfig) (store.Data, erro
 			Metadata: map[string]interface{}{},
 		}
 
-		if metadata, err := gojson.Marshal(ui); err == nil {
+		if metadata, err := json.Marshal(ui); err == nil {
 			m := map[string]interface{}{}
-			gojson.Unmarshal(metadata, &m)
+			json.Unmarshal(metadata, &m)
 			p.Metadata["restreamer-ui"] = m
 		}
 
@@ -1426,9 +1425,9 @@ func importV1(fs fs.Filesystem, path string, cfg importConfig) (store.Data, erro
 			Metadata: map[string]interface{}{},
 		}
 
-		if metadata, err := gojson.Marshal(egress); err == nil {
+		if metadata, err := json.Marshal(egress); err == nil {
 			m := map[string]interface{}{}
-			gojson.Unmarshal(metadata, &m)
+			json.Unmarshal(metadata, &m)
 			p.Metadata["restreamer-ui"] = m
 		}
 

@@ -21,6 +21,7 @@ import (
 	"strings"
 
 	"github.com/datarhei/core/v16/cluster/client"
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/http/handler/util"
 	httplog "github.com/datarhei/core/v16/http/log"
 	mwlog "github.com/datarhei/core/v16/http/middleware/log"
@@ -73,7 +74,7 @@ func NewAPI(config APIConfig) (API, error) {
 	a.address = address
 
 	a.router = echo.New()
-	a.router.Debug = true
+	a.router.JSONSerializer = &GoJSONSerializer{}
 	a.router.HTTPErrorHandler = ErrorHandler
 	a.router.Validator = validator.New()
 	a.router.HideBanner = true
@@ -965,3 +966,27 @@ func ErrorHandler(err error, c echo.Context) {
 		}
 	}
 }
+
+// GoJSONSerializer implements JSON encoding using encoding/json.
+type GoJSONSerializer struct{}
+
+// Serialize converts an interface into a json and writes it to the response.
+// You can optionally use the indent parameter to produce pretty JSONs.
+func (d GoJSONSerializer) Serialize(c echo.Context, i interface{}, indent string) error {
+	enc := json.NewEncoder(c.Response())
+	if indent != "" {
+		enc.SetIndent("", indent)
+	}
+	return enc.Encode(i)
+}
+
+// Deserialize reads a JSON from a request body and converts it into an interface.
+func (d GoJSONSerializer) Deserialize(c echo.Context, i interface{}) error {
+	err := json.NewDecoder(c.Request().Body).Decode(i)
+	if ute, ok := err.(*json.UnmarshalTypeError); ok {
+		return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Unmarshal type error: expected=%v, got=%v, field=%v, offset=%v", ute.Type, ute.Value, ute.Field, ute.Offset)).SetInternal(err)
+	} else if se, ok := err.(*json.SyntaxError); ok {
+		return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Syntax error: offset=%v, error=%v", se.Offset, se.Error())).SetInternal(err)
+	}
+	return err
+}

@@ -2,7 +2,6 @@ package client
 
 import (
 	"bytes"
-	"encoding/json"
 	"fmt"
 	"io"
 	"io/fs"
@@ -11,6 +10,7 @@ import (
 	"time"
 
 	"github.com/datarhei/core/v16/config"
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/ffmpeg/skills"
 	httpapi "github.com/datarhei/core/v16/http/api"
 	iamaccess "github.com/datarhei/core/v16/iam/access"

@@ -2,7 +2,6 @@ package cluster
 
 import (
 	"context"
-	"encoding/json"
 	"errors"
 	"fmt"
 	"io"
@@ -23,6 +22,7 @@ import (
 	"github.com/datarhei/core/v16/cluster/raft"
 	"github.com/datarhei/core/v16/cluster/store"
 	"github.com/datarhei/core/v16/config"
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/ffmpeg/skills"
 	"github.com/datarhei/core/v16/iam"
 	iamaccess "github.com/datarhei/core/v16/iam/access"

@@ -3,7 +3,6 @@ package cluster
 import (
 	"bytes"
 	"context"
-	"encoding/json"
 	"errors"
 	"fmt"
 	"sort"
@@ -12,6 +11,7 @@ import (
 
 	"github.com/datarhei/core/v16/cluster/proxy"
 	"github.com/datarhei/core/v16/cluster/store"
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/log"
 	"github.com/datarhei/core/v16/maps"
 	"github.com/datarhei/core/v16/restream/app"

@@ -3,7 +3,6 @@ package raft
 import (
 	"bytes"
 	"encoding/base64"
-	"encoding/json"
 	"fmt"
 	"io"
 	gonet "net"
@@ -15,6 +14,7 @@ import (
 
 	raftlogger "github.com/datarhei/core/v16/cluster/logger"
 	"github.com/datarhei/core/v16/cluster/store"
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/log"
 	"go.etcd.io/bbolt"
 

@@ -1,12 +1,12 @@
 package store
 
 import (
-	"encoding/json"
 	"fmt"
 	"io"
 	"sync"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/iam/access"
 	"github.com/datarhei/core/v16/iam/identity"
 	"github.com/datarhei/core/v16/log"

@@ -1,11 +1,11 @@
 package store
 
 import (
-	"encoding/json"
 	"testing"
 
-	"github.com/hashicorp/raft"
+	"github.com/datarhei/core/v16/encoding/json"
 
+	"github.com/hashicorp/raft"
 	"github.com/stretchr/testify/require"
 )
 

@@ -1,7 +1,6 @@
 package store
 
 import (
-	gojson "encoding/json"
 	"errors"
 	"fmt"
 	"path/filepath"
@@ -155,7 +154,7 @@ func (c *jsonStore) store(data *config.Config) error {
 		return nil
 	}
 
-	jsondata, err := gojson.MarshalIndent(data, "", " ")
+	jsondata, err := json.MarshalIndent(data, "", " ")
 	if err != nil {
 		return err
 	}
@@ -169,14 +168,14 @@ func migrate(jsondata []byte) (*config.Data, error) {
 	data := &config.Data{}
 	version := DataVersion{}
 
-	if err := gojson.Unmarshal(jsondata, &version); err != nil {
+	if err := json.Unmarshal(jsondata, &version); err != nil {
 		return nil, json.FormatError(jsondata, err)
 	}
 
 	if version.Version == 1 {
 		dataV1 := &v1.New(nil).Data
 
-		if err := gojson.Unmarshal(jsondata, dataV1); err != nil {
+		if err := json.Unmarshal(jsondata, dataV1); err != nil {
 			return nil, json.FormatError(jsondata, err)
 		}
 
@@ -194,7 +193,7 @@ func migrate(jsondata []byte) (*config.Data, error) {
 	} else if version.Version == 2 {
 		dataV2 := &v2.New(nil).Data
 
-		if err := gojson.Unmarshal(jsondata, dataV2); err != nil {
+		if err := json.Unmarshal(jsondata, dataV2); err != nil {
 			return nil, json.FormatError(jsondata, err)
 		}
 
@@ -207,7 +206,7 @@ func migrate(jsondata []byte) (*config.Data, error) {
 	} else if version.Version == 3 {
 		dataV3 := &config.New(nil).Data
 
-		if err := gojson.Unmarshal(jsondata, dataV3); err != nil {
+		if err := json.Unmarshal(jsondata, dataV3); err != nil {
 			return nil, json.FormatError(jsondata, err)
 		}
 

@@ -1,12 +1,12 @@
 package store
 
 import (
-	"encoding/json"
 	"os"
 	"testing"
 	"time"
 
 	"github.com/datarhei/core/v16/config"
+	"github.com/datarhei/core/v16/encoding/json"
 
 	"github.com/stretchr/testify/require"
 )

@@ -2,10 +2,11 @@ package value
 
 import (
 	"encoding/base64"
-	"encoding/json"
 	"fmt"
 	"net/url"
 	"strings"
+
+	"github.com/datarhei/core/v16/encoding/json"
 )
 
 // array of auth0 tenants

@@ -2,15 +2,39 @@
 package json
 
 import (
-	"encoding/json"
 	"fmt"
 	"io"
+
+	"github.com/goccy/go-json"
 )
 
 type UnmarshalTypeError = json.UnmarshalTypeError
 type SyntaxError = json.SyntaxError
 type Number = json.Number
 
+// Unmarshal is a wrapper for json.Unmarshal
+func Unmarshal(data []byte, v interface{}) error {
+	return json.Unmarshal(data, v)
+}
+
+// Marshal is a wrapper for json.Marshal
+func Marshal(v interface{}) ([]byte, error) {
+	return json.Marshal(v)
+}
+
+// MarshalIndent is a wrapper for json.MarshalIndent
+func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+	return json.MarshalIndent(v, prefix, indent)
+}
+
+func NewDecoder(r io.Reader) *json.Decoder {
+	return json.NewDecoder(r)
+}
+
+func NewEncoder(w io.Writer) *json.Encoder {
+	return json.NewEncoder(w)
+}
+
+// FormatError takes the marshalled data and the error from Unmarshal and returns a detailed
+// error message where the error was and what the error is.
+func FormatError(input []byte, err error) error {
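The wrapper above keeps the call surface of the standard library's encoding/json (Unmarshal, Marshal, MarshalIndent, NewDecoder, NewEncoder, plus FormatError and the re-exported error types) while delegating to github.com/goccy/go-json, which is why the rest of this commit only has to swap import paths. A minimal sketch of what a call site looks like after the switch — the Config struct here is a made-up example, not a type from the repository:

```go
package main

import (
	"fmt"

	"github.com/datarhei/core/v16/encoding/json"
)

// Config is a hypothetical example struct, not one from the repository.
type Config struct {
	Name    string `json:"name"`
	Address string `json:"address"`
}

func main() {
	data := []byte(`{"name": "core", "address": ":8080"}`)

	var cfg Config
	if err := json.Unmarshal(data, &cfg); err != nil {
		// FormatError annotates the error with the offending position in the input.
		fmt.Println(json.FormatError(data, err))
		return
	}

	out, err := json.MarshalIndent(cfg, "", "  ")
	if err != nil {
		return
	}
	fmt.Println(string(out))
}
```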
@@ -2,7 +2,6 @@ package parse
 
 import (
 	"container/ring"
-	"encoding/json"
 	"fmt"
 	"regexp"
 	"strconv"
@@ -10,6 +9,7 @@ import (
 	"sync"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/ffmpeg/prelude"
 	"github.com/datarhei/core/v16/log"
 	"github.com/datarhei/core/v16/net/url"

@@ -1,9 +1,10 @@
 package parse
 
 import (
-	"encoding/json"
 	"errors"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 )
 
 // Duration represents a time.Duration

@@ -1,10 +1,10 @@
 package probe
 
 import (
-	"encoding/json"
 	"strings"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/ffmpeg/prelude"
 	"github.com/datarhei/core/v16/log"
 	"github.com/datarhei/core/v16/process"

5  go.mod
@@ -9,7 +9,7 @@ require (
 	github.com/atrox/haikunatorgo/v2 v2.0.1
 	github.com/caddyserver/certmagic v0.19.2
 	github.com/casbin/casbin/v2 v2.77.2
-	github.com/datarhei/core-client-go/v16 v16.11.1-0.20240415125433-2e78b4319e8e
+	github.com/datarhei/core-client-go/v16 v16.11.1-0.20240424105158-86a7f261b92c
 	github.com/datarhei/gosrt v0.5.4
 	github.com/datarhei/joy4 v0.0.0-20230505074825-fde05957445a
 	github.com/fujiwara/shapeio v1.0.0
@@ -23,6 +23,7 @@ require (
 	github.com/hashicorp/raft-boltdb/v2 v2.2.2
 	github.com/invopop/jsonschema v0.4.0
 	github.com/joho/godotenv v1.5.1
+	github.com/klauspost/compress v1.16.7
 	github.com/klauspost/cpuid/v2 v2.2.5
 	github.com/labstack/echo/v4 v4.11.1
 	github.com/lestrrat-go/strftime v1.0.6
@@ -69,6 +70,7 @@ require (
 	github.com/go-openapi/swag v0.22.4 // indirect
 	github.com/go-playground/locales v0.14.1 // indirect
 	github.com/go-playground/universal-translator v0.18.1 // indirect
+	github.com/goccy/go-json v0.10.2 // indirect
 	github.com/golang-jwt/jwt v3.2.2+incompatible // indirect
 	github.com/golang-jwt/jwt/v4 v4.5.0 // indirect
 	github.com/golang/protobuf v1.5.3 // indirect
@@ -80,7 +82,6 @@ require (
 	github.com/iancoleman/orderedmap v0.2.0 // indirect
 	github.com/josharian/intern v1.0.0 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
-	github.com/klauspost/compress v1.16.7 // indirect
 	github.com/labstack/gommon v0.4.0 // indirect
 	github.com/leodido/go-urn v1.2.4 // indirect
 	github.com/libdns/libdns v0.2.1 // indirect

6  go.sum
@@ -46,8 +46,8 @@ github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp
 github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
 github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
-github.com/datarhei/core-client-go/v16 v16.11.1-0.20240415125433-2e78b4319e8e h1:3O75rKCLZLe8323DZCYOL9+eyyWxw50/qxHctDdBTss=
-github.com/datarhei/core-client-go/v16 v16.11.1-0.20240415125433-2e78b4319e8e/go.mod h1:3eKfwhPKoW7faTn+luShRVNMqcIskvlIKjRJ7ShjyL8=
+github.com/datarhei/core-client-go/v16 v16.11.1-0.20240424105158-86a7f261b92c h1:RIzMclqmSYwpMZxyW7nLg0XyKjY6prQWcuIdgm97U8o=
+github.com/datarhei/core-client-go/v16 v16.11.1-0.20240424105158-86a7f261b92c/go.mod h1:7XrUOUlB165Gs8JUE4lzVuNve6HW90Yz3/+lTY2EV4A=
 github.com/datarhei/gosrt v0.5.4 h1:dE3mmSB+n1GeviGM8xQAW3+UD3mKeFmd84iefDul5Vs=
 github.com/datarhei/gosrt v0.5.4/go.mod h1:MiUCwCG+LzFMzLM/kTA+3wiTtlnkVvGbW/F0XzyhtG8=
 github.com/datarhei/joy4 v0.0.0-20230505074825-fde05957445a h1:Tf4DSHY1xruBglr+yYP5Wct7czM86GKMYgbXH8a7OFo=
@@ -101,6 +101,8 @@ github.com/go-playground/validator/v10 v10.15.3/go.mod h1:9iXMNT7sEkjXb0I+enO7QX
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
 github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
 github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
+github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
+github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
 github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
 github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
 github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=

@@ -1,11 +1,11 @@
 package api
 
 import (
-	"encoding/json"
 	"fmt"
 	"regexp"
 	"strings"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/log"
 )
 

@@ -1,8 +1,9 @@
 package api
 
 import (
-	"encoding/json"
 	"fmt"
+
+	"github.com/datarhei/core/v16/encoding/json"
 )
 
 func ToNumber(f float64) json.Number {

@@ -1,10 +1,10 @@
 package api
 
 import (
-	"encoding/json"
 	"fmt"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/monitor"
 )
 

@@ -1,8 +1,7 @@
 package api
 
 import (
-	"encoding/json"
 
 	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/restream/app"
 )
 

@@ -1,8 +1,7 @@
 package api
 
 import (
-	"encoding/json"
 
 	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/restream/app"
 
 	"github.com/lithammer/shortuuid/v4"

@@ -1,9 +1,9 @@
 package api
 
 import (
-	"encoding/json"
 	"fmt"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/restream/app"
 )
 

@@ -1,8 +1,7 @@
 package api
 
 import (
-	"encoding/json"
 
 	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/session"
 )
 

@@ -6,10 +6,10 @@ package resolver
 
 import (
 	"context"
-	"encoding/json"
 	"fmt"
 	"net/http"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/http/graph/models"
 	"github.com/datarhei/core/v16/playout"
 	"github.com/datarhei/core/v16/restream/app"

@@ -2,7 +2,6 @@ package api
 
 import (
 	"bytes"
-	"encoding/json"
 	"fmt"
 	"net/http"
 	"strconv"
@@ -11,6 +10,7 @@ import (
 	clientapi "github.com/datarhei/core-client-go/v16/api"
 	"github.com/datarhei/core/v16/cluster/proxy"
 	"github.com/datarhei/core/v16/cluster/store"
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/glob"
 	"github.com/datarhei/core/v16/http/api"
 	"github.com/datarhei/core/v16/http/handler/util"

@@ -2,7 +2,6 @@ package api
 
 import (
 	"bytes"
-	"encoding/json"
 	"net/http"
 	"strings"
 	"testing"
@@ -10,6 +9,7 @@ import (
 	"github.com/datarhei/core/v16/config"
 	"github.com/datarhei/core/v16/config/store"
 	v1 "github.com/datarhei/core/v16/config/v1"
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/http/mock"
 	"github.com/datarhei/core/v16/io/fs"
 	"github.com/labstack/echo/v4"

@@ -1,11 +1,11 @@
 package api
 
 import (
-	"encoding/json"
 	"net/http"
 	"strings"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/http/api"
 	"github.com/datarhei/core/v16/http/handler/util"
 	"github.com/datarhei/core/v16/log"

@@ -2,7 +2,6 @@ package api
 
 import (
 	"bytes"
-	"encoding/json"
 	"io"
 	"net/http"
 	"strconv"
@@ -10,6 +9,7 @@ import (
 	"testing"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/http/api"
 	"github.com/datarhei/core/v16/http/cache"
 	httpfs "github.com/datarhei/core/v16/http/fs"

@@ -2,12 +2,12 @@ package api
 
 import (
 	"bytes"
-	"encoding/json"
 	"io"
 	"net/http"
 	"strings"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/http/api"
 	"github.com/datarhei/core/v16/http/handler/util"
 	"github.com/datarhei/core/v16/playout"

@@ -2,13 +2,13 @@ package api
 
 import (
 	"bytes"
-	"encoding/json"
 	"fmt"
 	"net/http"
 	"strconv"
 	"testing"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/http/api"
 	"github.com/datarhei/core/v16/http/mock"
 	"github.com/datarhei/core/v16/iam"

@@ -1,11 +1,11 @@
 package api
 
 import (
-	"encoding/json"
 	"io"
 	"net/http"
 	"testing"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/http/api"
 	"github.com/datarhei/core/v16/http/mock"
 	"github.com/datarhei/core/v16/restream"

34  http/json.go  Normal file
@@ -0,0 +1,34 @@
+package http
+
+import (
+	"fmt"
+	"net/http"
+
+	"github.com/datarhei/core/v16/encoding/json"
+
+	"github.com/labstack/echo/v4"
+)
+
+// GoJSONSerializer implements JSON encoding using encoding/json.
+type GoJSONSerializer struct{}
+
+// Serialize converts an interface into a json and writes it to the response.
+// You can optionally use the indent parameter to produce pretty JSONs.
+func (d GoJSONSerializer) Serialize(c echo.Context, i interface{}, indent string) error {
+	enc := json.NewEncoder(c.Response())
+	if indent != "" {
+		enc.SetIndent("", indent)
+	}
+	return enc.Encode(i)
+}
+
+// Deserialize reads a JSON from a request body and converts it into an interface.
+func (d GoJSONSerializer) Deserialize(c echo.Context, i interface{}) error {
+	err := json.NewDecoder(c.Request().Body).Decode(i)
+	if ute, ok := err.(*json.UnmarshalTypeError); ok {
+		return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Unmarshal type error: expected=%v, got=%v, field=%v, offset=%v", ute.Type, ute.Value, ute.Field, ute.Offset)).SetInternal(err)
+	} else if se, ok := err.(*json.SyntaxError); ok {
+		return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Syntax error: offset=%v, error=%v", se.Offset, se.Error())).SetInternal(err)
+	}
+	return err
+}
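This new file is the serializer the commit plugs into the echo routers elsewhere in the diff (a.router.JSONSerializer = &GoJSONSerializer{} and s.router.JSONSerializer = &GoJSONSerializer{}). A minimal sketch of that wiring in isolation — the route and handler are illustrative only, and the import path assumes the exported type remains in the core http package:

```go
package main

import (
	"net/http"

	corehttp "github.com/datarhei/core/v16/http"

	"github.com/labstack/echo/v4"
)

func newRouter() *echo.Echo {
	e := echo.New()
	e.HideBanner = true

	// Route JSON responses and request-body decoding through the
	// goccy/go-json backed serializer instead of echo's default one.
	e.JSONSerializer = &corehttp.GoJSONSerializer{}

	e.GET("/ping", func(c echo.Context) error {
		return c.JSON(http.StatusOK, map[string]string{"status": "ok"})
	})

	return e
}

func main() {
	e := newRouter()
	e.Logger.Fatal(e.Start(":8080"))
}
```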
@@ -1,9 +1,10 @@
 package log
 
 import (
-	"encoding/json"
 	"io"
 	"strings"
+
+	"github.com/datarhei/core/v16/encoding/json"
 )
 
 type logwrapper struct {

16703  http/middleware/gzip/fixtures/processList.json  Normal file  (File diff suppressed because it is too large)

@@ -3,13 +3,13 @@ package gzip
 import (
 	"bufio"
 	"bytes"
-	"compress/gzip"
 	"io"
 	"net"
 	"net/http"
 	"strings"
 	"sync"
 
+	"github.com/klauspost/compress/gzip"
 	"github.com/labstack/echo/v4"
 	"github.com/labstack/echo/v4/middleware"
 )

@@ -2,15 +2,16 @@ package gzip
 
 import (
 	"bytes"
-	"compress/gzip"
 	"io"
 	"net/http"
 	"net/http/httptest"
+	"os"
 	"testing"
 
+	"github.com/klauspost/compress/gzip"
 	"github.com/labstack/echo/v4"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
 
 func TestGzip(t *testing.T) {
@@ -238,3 +239,28 @@ func BenchmarkGzip(b *testing.B) {
 		h(c)
 	}
 }
+
+func BenchmarkGzipLarge(b *testing.B) {
+	data, err := os.ReadFile("./fixtures/processList.json")
+	require.NoError(b, err)
+
+	e := echo.New()
+
+	req := httptest.NewRequest(http.MethodGet, "/", nil)
+	req.Header.Set(echo.HeaderAcceptEncoding, gzipScheme)
+
+	h := New()(func(c echo.Context) error {
+		c.Response().Write(data) // For Content-Type sniffing
+		return nil
+	})
+
+	b.ReportAllocs()
+	b.ResetTimer()
+
+	for i := 0; i < b.N; i++ {
+		// Gzip
+		rec := httptest.NewRecorder()
+		c := e.NewContext(req, rec)
+		h(c)
+	}
+}
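The new fixture and BenchmarkGzipLarge above exercise the middleware against a large JSON payload (runnable with something like `go test -bench=Gzip -benchmem` inside the middleware package). The swap itself works because github.com/klauspost/compress/gzip is generally a drop-in for the standard library's compress/gzip. A standalone sketch of that equivalence, independent of the middleware and not taken from the repository:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/klauspost/compress/gzip"
)

// compress gzips a byte slice; NewWriter, Write and Close behave like their
// stdlib counterparts, so existing compress/gzip code keeps working after
// the import swap.
func compress(data []byte) ([]byte, error) {
	var buf bytes.Buffer

	zw := gzip.NewWriter(&buf)
	if _, err := zw.Write(data); err != nil {
		return nil, err
	}
	if err := zw.Close(); err != nil {
		return nil, err
	}

	return buf.Bytes(), nil
}

func main() {
	out, err := compress([]byte("hello, hello, hello, world"))
	if err != nil {
		panic(err)
	}
	fmt.Printf("compressed to %d bytes\n", len(out))
}
```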
@@ -3,13 +3,13 @@ package iam
 import (
 	"bytes"
 	"encoding/base64"
-	"encoding/json"
 	"io"
 	"net/http"
 	"net/http/httptest"
 	"strings"
 	"testing"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/http/api"
 	apihandler "github.com/datarhei/core/v16/http/handler/api"
 	"github.com/datarhei/core/v16/http/validator"

@@ -1,9 +1,10 @@
 package session
 
 import (
-	"encoding/json"
 	"testing"
 
+	"github.com/datarhei/core/v16/encoding/json"
+
 	"github.com/stretchr/testify/require"
 )
 

@@ -2,7 +2,6 @@ package mock
 
 import (
 	"bytes"
-	"encoding/json"
 	"fmt"
 	"io"
 	"net/http"
@@ -12,6 +11,7 @@ import (
 	"strings"
 	"testing"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/ffmpeg"
 	"github.com/datarhei/core/v16/http/api"
 	"github.com/datarhei/core/v16/http/errorhandler"

@@ -383,6 +383,7 @@ func NewServer(config Config) (serverhandler.Server, error) {
 	}
 
 	s.router = echo.New()
+	s.router.JSONSerializer = &GoJSONSerializer{}
 	s.router.HTTPErrorHandler = errorhandler.HTTPErrorHandler
 	s.router.Validator = validator.New()
 	s.router.Use(s.middleware.log)

@@ -1,13 +1,13 @@
 package access
 
 import (
-	"encoding/json"
 	"errors"
 	"fmt"
 	"sort"
 	"strings"
 	"sync"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/io/fs"
 	"github.com/datarhei/core/v16/log"
 

@@ -1,10 +1,11 @@
 package access
 
 import (
-	"encoding/json"
 	"testing"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/io/fs"
 
 	"github.com/stretchr/testify/require"
 )
 

@@ -1,11 +1,11 @@
 package identity
 
 import (
-	"encoding/json"
 	"errors"
 	"fmt"
 	"sync"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/io/fs"
 	"github.com/datarhei/core/v16/log"
 )

@@ -4,12 +4,13 @@ import (
 	"bytes"
 	"context"
 	"crypto"
-	"encoding/json"
 	"errors"
 	"io"
 	"net/http"
 	"sync"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 )
 
 var (

@@ -1,11 +1,12 @@
 package log
 
 import (
-	"encoding/json"
 	"fmt"
 	"sort"
 	"strconv"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 )
 
 type Formatter interface {

@@ -2,13 +2,14 @@
 package log
 
 import (
-	"encoding/json"
 	"fmt"
 	"reflect"
 	"runtime"
 	"runtime/debug"
 	"strings"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 )
 
 // LogLevel represents a log level

@@ -1,7 +1,6 @@
 package json
 
 import (
-	gojson "encoding/json"
 	"errors"
 	"fmt"
 	"sync"
@@ -158,7 +157,7 @@ func (s *jsonStore) store(filepath string, data Data) error {
 		return fmt.Errorf("invalid version (have: %d, want: %d)", data.Version, version)
 	}
 
-	jsondata, err := gojson.MarshalIndent(&data, "", " ")
+	jsondata, err := json.MarshalIndent(&data, "", " ")
 	if err != nil {
 		return err
 	}
@@ -195,12 +194,12 @@ func (s *jsonStore) load(filepath string, version uint64) (Data, error) {
 
 	var db storeVersion
 
-	if err = gojson.Unmarshal(jsondata, &db); err != nil {
+	if err = json.Unmarshal(jsondata, &db); err != nil {
 		return r, json.FormatError(jsondata, err)
 	}
 
 	if db.Version == version {
-		if err = gojson.Unmarshal(jsondata, &r); err != nil {
+		if err = json.Unmarshal(jsondata, &r); err != nil {
 			return r, json.FormatError(jsondata, err)
 		}
 	} else {

@@ -2,7 +2,6 @@ package api
 
 import (
 	"bytes"
-	"encoding/json"
 	"errors"
 	"fmt"
 	"io"
@@ -10,6 +9,7 @@ import (
 	"strings"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/log"
 )
 

@@ -2,11 +2,11 @@ package service
 
 import (
 	"context"
-	"encoding/json"
 	"fmt"
 	"sync"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/log"
 	"github.com/datarhei/core/v16/monitor/metric"
 	"github.com/datarhei/core/v16/service/api"

@@ -1,13 +1,13 @@
 package session
 
 import (
-	"encoding/json"
 	"fmt"
 	"io"
 	"sort"
 	"sync"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/log"
 	"github.com/datarhei/core/v16/net"
 

@@ -3,13 +3,13 @@ package session
 import (
 	"bytes"
 	"context"
-	"encoding/json"
 	"fmt"
 	"regexp"
 	"strconv"
 	"sync"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/io/fs"
 	"github.com/datarhei/core/v16/log"
 	"github.com/lestrrat-go/strftime"

@@ -3,7 +3,6 @@ package update
 import (
 	"bytes"
 	"context"
-	"encoding/json"
 	"fmt"
 	"io"
 	"net/http"
@@ -11,6 +10,7 @@ import (
 	"sync"
 	"time"
 
+	"github.com/datarhei/core/v16/encoding/json"
 	"github.com/datarhei/core/v16/log"
 	"github.com/datarhei/core/v16/monitor/metric"
 	"golang.org/x/mod/semver"
3  vendor/github.com/datarhei/core-client-go/v16/api/metrics.go  generated  vendored
@@ -1,8 +1,9 @@
 package api
 
 import (
-	"encoding/json"
 	"time"
+
+	"github.com/goccy/go-json"
 )
 
 type MetricsDescription struct {

3  vendor/github.com/datarhei/core-client-go/v16/client.go  generated  vendored
@@ -3,7 +3,6 @@ package coreclient
 import (
 	"bytes"
 	"context"
-	"encoding/json"
 	"fmt"
 	"io"
 	"math"
@@ -13,6 +12,8 @@ import (
 	"sync"
 	"time"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 
 	"github.com/Masterminds/semver/v3"

3  vendor/github.com/datarhei/core-client-go/v16/cluster.go  generated  vendored
@@ -2,12 +2,13 @@ package coreclient
 
 import (
 	"context"
-	"encoding/json"
 	"fmt"
 	"io"
 	"net/url"
 	"strings"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

3  vendor/github.com/datarhei/core-client-go/v16/cluster_db.go  generated  vendored
@@ -1,9 +1,10 @@
 package coreclient
 
 import (
-	"encoding/json"
 	"net/url"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

3  vendor/github.com/datarhei/core-client-go/v16/cluster_fs.go  generated  vendored
@@ -1,9 +1,10 @@
 package coreclient
 
 import (
-	"encoding/json"
 	"net/url"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

3  vendor/github.com/datarhei/core-client-go/v16/cluster_node.go  generated  vendored
@@ -2,11 +2,12 @@ package coreclient
 
 import (
 	"context"
-	"encoding/json"
 	"io"
 	"net/url"
 	"path/filepath"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

3  vendor/github.com/datarhei/core-client-go/v16/config.go  generated  vendored
@@ -2,7 +2,8 @@ package coreclient
 
 import (
 	"bytes"
-	"encoding/json"
+
+	"github.com/goccy/go-json"
 
 	"github.com/datarhei/core-client-go/v16/api"
 )

3  vendor/github.com/datarhei/core-client-go/v16/events.go  generated  vendored
@@ -3,10 +3,11 @@ package coreclient
 import (
 	"bytes"
 	"context"
-	"encoding/json"
 	"io"
 	"net/http"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

3  vendor/github.com/datarhei/core-client-go/v16/fs.go  generated  vendored
@@ -2,13 +2,14 @@ package coreclient
 
 import (
 	"context"
-	"encoding/json"
 	"io"
 	"net/http"
 	"net/url"
 	"path/filepath"
 	"strconv"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

3  vendor/github.com/datarhei/core-client-go/v16/graph.go  generated  vendored
@@ -2,7 +2,8 @@ package coreclient
 
 import (
 	"bytes"
-	"encoding/json"
+
+	"github.com/goccy/go-json"
 
 	"github.com/datarhei/core-client-go/v16/api"
 )

3  vendor/github.com/datarhei/core-client-go/v16/iam.go  generated  vendored
@@ -2,9 +2,10 @@ package coreclient
 
 import (
 	"bytes"
-	"encoding/json"
 	"net/url"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

3  vendor/github.com/datarhei/core-client-go/v16/log.go  generated  vendored
@@ -1,9 +1,10 @@
 package coreclient
 
 import (
-	"encoding/json"
 	"net/url"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

3  vendor/github.com/datarhei/core-client-go/v16/metadata.go  generated  vendored
@@ -2,9 +2,10 @@ package coreclient
 
 import (
 	"bytes"
-	"encoding/json"
 	"net/url"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

3  vendor/github.com/datarhei/core-client-go/v16/metrics.go  generated  vendored
@@ -2,7 +2,8 @@ package coreclient
 
 import (
 	"bytes"
-	"encoding/json"
+
+	"github.com/goccy/go-json"
 
 	"github.com/datarhei/core-client-go/v16/api"
 )

3  vendor/github.com/datarhei/core-client-go/v16/playout.go  generated  vendored
@@ -1,9 +1,10 @@
 package coreclient
 
 import (
-	"encoding/json"
 	"net/url"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

3  vendor/github.com/datarhei/core-client-go/v16/process.go  generated  vendored
@@ -2,10 +2,11 @@ package coreclient
 
 import (
 	"bytes"
-	"encoding/json"
 	"net/url"
 	"strings"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

2  vendor/github.com/datarhei/core-client-go/v16/rtmp.go  generated  vendored
@@ -1,7 +1,7 @@
 package coreclient
 
 import (
-	"encoding/json"
+	"github.com/goccy/go-json"
 
 	"github.com/datarhei/core-client-go/v16/api"
 )

3  vendor/github.com/datarhei/core-client-go/v16/session.go  generated  vendored
@@ -2,10 +2,11 @@ package coreclient
 
 import (
 	"bytes"
-	"encoding/json"
 	"net/url"
 	"strings"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 

2  vendor/github.com/datarhei/core-client-go/v16/skills.go  generated  vendored
@@ -1,7 +1,7 @@
 package coreclient
 
 import (
-	"encoding/json"
+	"github.com/goccy/go-json"
 
 	"github.com/datarhei/core-client-go/v16/api"
 )

2  vendor/github.com/datarhei/core-client-go/v16/srt.go  generated  vendored
@@ -1,7 +1,7 @@
 package coreclient
 
 import (
-	"encoding/json"
+	"github.com/goccy/go-json"
 
 	"github.com/datarhei/core-client-go/v16/api"
 )

3  vendor/github.com/datarhei/core-client-go/v16/widget.go  generated  vendored
@@ -1,9 +1,10 @@
 package coreclient
 
 import (
-	"encoding/json"
 	"net/url"
 
+	"github.com/goccy/go-json"
+
 	"github.com/datarhei/core-client-go/v16/api"
 )
 
32  vendor/github.com/goccy/go-json/.codecov.yml  generated  vendored  Normal file
@@ -0,0 +1,32 @@
+codecov:
+  require_ci_to_pass: yes
+
+coverage:
+  precision: 2
+  round: down
+  range: "70...100"
+
+  status:
+    project:
+      default:
+        target: 70%
+        threshold: 2%
+    patch: off
+    changes: no
+
+parsers:
+  gcov:
+    branch_detection:
+      conditional: yes
+      loop: yes
+      method: no
+      macro: no
+
+comment:
+  layout: "header,diff"
+  behavior: default
+  require_changes: no
+
+ignore:
+  - internal/encoder/vm_color
+  - internal/encoder/vm_color_indent

2  vendor/github.com/goccy/go-json/.gitignore  generated  vendored  Normal file
@@ -0,0 +1,2 @@
+cover.html
+cover.out
83  vendor/github.com/goccy/go-json/.golangci.yml  generated  vendored  Normal file
@@ -0,0 +1,83 @@
+run:
+  skip-files:
+    - encode_optype.go
+    - ".*_test\\.go$"
+
+linters-settings:
+  govet:
+    enable-all: true
+    disable:
+      - shadow
+
+linters:
+  enable-all: true
+  disable:
+    - dogsled
+    - dupl
+    - exhaustive
+    - exhaustivestruct
+    - errorlint
+    - forbidigo
+    - funlen
+    - gci
+    - gochecknoglobals
+    - gochecknoinits
+    - gocognit
+    - gocritic
+    - gocyclo
+    - godot
+    - godox
+    - goerr113
+    - gofumpt
+    - gomnd
+    - gosec
+    - ifshort
+    - lll
+    - makezero
+    - nakedret
+    - nestif
+    - nlreturn
+    - paralleltest
+    - testpackage
+    - thelper
+    - wrapcheck
+    - interfacer
+    - lll
+    - nakedret
+    - nestif
+    - nlreturn
+    - testpackage
+    - wsl
+    - varnamelen
+    - nilnil
+    - ireturn
+    - govet
+    - forcetypeassert
+    - cyclop
+    - containedctx
+    - revive
+
+issues:
+  exclude-rules:
+    # not needed
+    - path: /*.go
+      text: "ST1003: should not use underscores in package names"
+      linters:
+        - stylecheck
+    - path: /*.go
+      text: "don't use an underscore in package name"
+      linters:
+        - golint
+    - path: rtype.go
+      linters:
+        - golint
+        - stylecheck
+    - path: error.go
+      linters:
+        - staticcheck
+
+  # Maximum issues count per one linter. Set to 0 to disable. Default is 50.
+  max-issues-per-linter: 0
+
+  # Maximum count of issues with the same text. Set to 0 to disable. Default is 3.
+  max-same-issues: 0
425  vendor/github.com/goccy/go-json/CHANGELOG.md  generated  vendored  Normal file
@@ -0,0 +1,425 @@
(Vendored copy of the upstream goccy/go-json CHANGELOG.md: release notes from v0.4.7 through v0.10.2, covering encoder/decoder bug fixes, performance work, and new features such as context-aware MarshalJSON/UnmarshalJSON, JSON Path support, the Colorize and Debug options, and struct-field filtering.)
21  vendor/github.com/goccy/go-json/LICENSE  generated  vendored  Normal file
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Masaaki Goshima
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

39  vendor/github.com/goccy/go-json/Makefile  generated  vendored  Normal file
@@ -0,0 +1,39 @@
+PKG := github.com/goccy/go-json
+
+BIN_DIR := $(CURDIR)/bin
+PKGS := $(shell go list ./... | grep -v internal/cmd|grep -v test)
+COVER_PKGS := $(foreach pkg,$(PKGS),$(subst $(PKG),.,$(pkg)))
+
+COMMA := ,
+EMPTY :=
+SPACE := $(EMPTY) $(EMPTY)
+COVERPKG_OPT := $(subst $(SPACE),$(COMMA),$(COVER_PKGS))
+
+$(BIN_DIR):
+	@mkdir -p $(BIN_DIR)
+
+.PHONY: cover
+cover:
+	go test -coverpkg=$(COVERPKG_OPT) -coverprofile=cover.out ./...
+
+.PHONY: cover-html
+cover-html: cover
+	go tool cover -html=cover.out
+
+.PHONY: lint
+lint: golangci-lint
+	$(BIN_DIR)/golangci-lint run
+
+golangci-lint: | $(BIN_DIR)
+	@{ \
+		set -e; \
+		GOLANGCI_LINT_TMP_DIR=$$(mktemp -d); \
+		cd $$GOLANGCI_LINT_TMP_DIR; \
+		go mod init tmp; \
+		GOBIN=$(BIN_DIR) go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.48.0; \
+		rm -rf $$GOLANGCI_LINT_TMP_DIR; \
+	}
+
+.PHONY: generate
+generate:
+	go generate ./internal/...
529
vendor/github.com/goccy/go-json/README.md
generated
vendored
Normal file
@@ -0,0 +1,529 @@
|
||||
# go-json
|
||||
|
||||

|
||||
[](https://pkg.go.dev/github.com/goccy/go-json?tab=doc)
|
||||
[](https://codecov.io/gh/goccy/go-json)
|
||||
|
||||
Fast JSON encoder/decoder compatible with encoding/json for Go
|
||||
|
||||
<img width="400px" src="https://user-images.githubusercontent.com/209884/92572337-42b42900-f2bf-11ea-973a-c74a359553a5.png"></img>
|
||||
|
||||
# Roadmap
|
||||
|
||||
```
* version ( expected release date )

* v0.9.0
 |
 | while maintaining compatibility with encoding/json, we will add convenient APIs
 |
 v
* v1.0.0
```
|
||||
|
||||
We are accepting requests for features that will be implemented between v0.9.0 and v1.0.0.
|
||||
If you have the API you need, please submit your issue [here](https://github.com/goccy/go-json/issues).
|
||||
|
||||
# Features
|
||||
|
||||
- Drop-in replacement of `encoding/json`
|
||||
- Fast ( See [Benchmark section](https://github.com/goccy/go-json#benchmarks) )
|
||||
- Flexible customization with options
|
||||
- Coloring the encoded string
|
||||
- Can propagate context.Context to `MarshalJSON` or `UnmarshalJSON`
|
||||
- Can dynamically filter the fields of the structure type-safely
|
||||
|
||||
# Installation
|
||||
|
||||
```
|
||||
go get github.com/goccy/go-json
|
||||
```
|
||||
|
||||
# How to use
|
||||
|
||||
Replace import statement from `encoding/json` to `github.com/goccy/go-json`
|
||||
|
||||
```
|
||||
-import "encoding/json"
|
||||
+import "github.com/goccy/go-json"
|
||||
```
|
||||
|
||||
# JSON library comparison
|
||||
|
||||
| name | encoder | decoder | compatible with `encoding/json` |
| :----: | :------: | :-----: | :-----------------------------: |
| encoding/json | yes | yes | N/A |
| [json-iterator/go](https://github.com/json-iterator/go) | yes | yes | partial |
| [easyjson](https://github.com/mailru/easyjson) | yes | yes | no |
| [gojay](https://github.com/francoispqt/gojay) | yes | yes | no |
| [segmentio/encoding/json](https://github.com/segmentio/encoding/tree/master/json) | yes | yes | partial |
| [jettison](https://github.com/wI2L/jettison) | yes | no | no |
| [simdjson-go](https://github.com/minio/simdjson-go) | no | yes | no |
| goccy/go-json | yes | yes | yes |
|
||||
|
||||
- `json-iterator/go` is incompatible with `encoding/json` in many ways (e.g. https://github.com/json-iterator/go/issues/229 ), and these incompatibilities have gone unaddressed for a long time.
- `segmentio/encoding/json` has good encoder support, but some decoder APIs, such as `Token` ( streaming decode ), are not supported.
|
||||
|
||||
## Other libraries
|
||||
|
||||
- [jingo](https://github.com/bet365/jingo)
|
||||
|
||||
I tried to benchmark it, but it didn't work.
It also seems to panic when it receives an unexpected value, because there is no error handling...
|
||||
|
||||
- [ffjson](https://github.com/pquerna/ffjson)
|
||||
|
||||
Benchmarking gave very slow results.
It seems to assume that the user manages the buffer pool properly.
Also, development appears to have already stopped.
|
||||
|
||||
# Benchmarks
|
||||
|
||||
```
|
||||
$ cd benchmarks
|
||||
$ go test -bench .
|
||||
```
|
||||
|
||||
## Encode
|
||||
|
||||
<img width="700px" src="https://user-images.githubusercontent.com/209884/107126758-0845cb00-68f5-11eb-8db7-086fcf9bcfaa.png"></img>
|
||||
<img width="700px" src="https://user-images.githubusercontent.com/209884/107126757-07ad3480-68f5-11eb-87aa-858cc5eacfcb.png"></img>
|
||||
|
||||
## Decode
|
||||
|
||||
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979944-bd1d6d80-7002-11eb-944b-9d17b6674e3f.png">
|
||||
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979931-b989e680-7002-11eb-87a0-66fc22d90dd4.png">
|
||||
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979940-bc84d700-7002-11eb-9647-869bbc25c9d9.png">
|
||||
|
||||
|
||||
# Fuzzing
|
||||
|
||||
[go-json-fuzz](https://github.com/goccy/go-json-fuzz) is the repository for fuzzing tests.
|
||||
If you run the tests in this repository and find a bug, please commit the corpus to go-json-fuzz and report the issue to [go-json](https://github.com/goccy/go-json/issues).
|
||||
|
||||
# How it works
|
||||
|
||||
`go-json` is very fast in both encoding and decoding compared to other libraries.
|
||||
It would be easier to gain performance by relying on automatic code generation or a dedicated interface, but `go-json` deliberately sticks to compatibility with `encoding/json` and its simple interface. Despite this, we are developing it with the aim of being the fastest library.
|
||||
|
||||
Here, we explain the various speed-up techniques implemented by `go-json`.
|
||||
|
||||
## Basic technique
|
||||
|
||||
The techniques listed here are the ones used by most of the libraries listed above.
|
||||
|
||||
### Buffer reuse
|
||||
|
||||
Since the only value required for the result of `json.Marshal(interface{}) ([]byte, error)` is `[]byte`, the only value that must be allocated during encoding is the return value `[]byte` .
|
||||
|
||||
Also, as the number of allocations increases, the performance will be affected, so the number of allocations should be kept as low as possible when creating `[]byte`.
|
||||
|
||||
Therefore, there is a technique to reduce the number of times a new buffer must be allocated by reusing the buffer used for the previous encoding by using `sync.Pool`.
|
||||
|
||||
Finally, by allocating a buffer as long as the result and copying the contents into it, in theory only one allocation is needed.
|
||||
|
||||
```go
type buffer struct {
    data []byte
}

var bufPool = sync.Pool{
    New: func() interface{} {
        return &buffer{data: make([]byte, 0, 1024)}
    },
}

buf := bufPool.Get().(*buffer)
data := encode(buf.data) // reuse buf.data

newBuf := make([]byte, len(data))
copy(newBuf, data)

buf.data = data
bufPool.Put(buf)
```
|
||||
|
||||
### Elimination of reflection
|
||||
|
||||
As you know, the reflection operation is very slow.
|
||||
|
||||
Therefore, using the fact that the address where the type information is stored is fixed for each binary ( we call this `typeptr` ),
we can use that address to dispatch to a pre-built optimized process.
|
||||
|
||||
For example, you can get the address of the type information from an `interface{}` as follows, and use it to call a process that does not use reflection.

To process without reflection, pass a pointer (`unsafe.Pointer`) to where the value is stored.
|
||||
|
||||
```go
type emptyInterface struct {
    typ unsafe.Pointer
    ptr unsafe.Pointer
}

var typeToEncoder = map[uintptr]func(unsafe.Pointer) ([]byte, error){}

func Marshal(v interface{}) ([]byte, error) {
    iface := (*emptyInterface)(unsafe.Pointer(&v))
    typeptr := uintptr(iface.typ)
    if enc, exists := typeToEncoder[typeptr]; exists {
        return enc(iface.ptr)
    }
    ...
}
```
|
||||
|
||||
※ In reality, `typeToEncoder` can be referenced by multiple goroutines, so exclusive control is required.
|
||||
|
||||
## Unique speed-up technique
|
||||
|
||||
## Encoder
|
||||
|
||||
### Do not escape arguments of `Marshal`
|
||||
|
||||
`json.Marshal` and `json.Unmarshal` receive an `interface{}` value and determine its type dynamically in order to process it.
Normally you would use the `reflect` package to determine the type dynamically, but since `reflect.Type` is defined as an `interface`, calling a method on a `reflect.Type` causes its argument to escape.
|
||||
|
||||
Therefore, the arguments for `Marshal` and `Unmarshal` are always escaped to the heap.
|
||||
However, `go-json` can use the feature of `reflect.Type` while avoiding escaping.
|
||||
|
||||
`reflect.Type` is defined as `interface`, but in reality `reflect.Type` is implemented only by the structure `rtype` defined in the `reflect` package.
|
||||
For this reason, in practice a `reflect.Type` value is always a `*reflect.rtype`.
|
||||
|
||||
Therefore, by directly handling `*reflect.rtype`, which is the implementation of `reflect.Type`, it is possible to avoid escaping, because the call goes through a concrete `struct` instead of an `interface`.
|
||||
|
||||
The technique for working with `*reflect.rtype` directly from `go-json` is implemented in [rtype.go](https://github.com/goccy/go-json/blob/master/internal/runtime/rtype.go).

The same technique has also been extracted into its own library ( https://github.com/goccy/go-reflect ).
|
||||
|
||||
Initially this feature was the default behavior of `go-json`.
However, after careful testing I found that when a large value is passed to `json.Marshal()` and the argument cannot be kept on the stack, it is not properly escaped to the heap (a bug in the Go compiler).
|
||||
|
||||
Therefore, this feature is provided as an **optional API** until this issue is resolved.
|
||||
|
||||
To use it, call the `NoEscape` variants, such as `MarshalNoEscape()`.
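
A minimal usage sketch, assuming the `NoEscape` variant mirrors the signature of `Marshal`:

```go
package main

import (
	"fmt"

	"github.com/goccy/go-json"
)

func main() {
	v := struct {
		Name string `json:"name"`
	}{Name: "go-json"}

	// Opt-in variant: avoids forcing v to escape to the heap, with the
	// caveat about very large arguments described above.
	b, err := json.MarshalNoEscape(v)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"name":"go-json"}
}
```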
|
||||
|
||||
### Encoding using opcode sequence
|
||||
|
||||
I explained that you can use `typeptr` to call a pre-built process from type information.
|
||||
|
||||
In other libraries, this dedicated process is implemented as a function call (for example, an anonymous function), but function calls are inherently slow and should be avoided as much as possible.
|
||||
|
||||
Therefore, `go-json` adopted an instruction-based execution model, which is also used to implement virtual machines for programming languages.
|
||||
|
||||
The first time a type is encoded, the opcode ( instruction ) sequence required for encoding is created.
From the second time onward, `typeptr` is used to fetch the cached, pre-built opcode sequence, and encoding is driven by it. An example of an opcode sequence is shown below.
|
||||
|
||||
```go
|
||||
json.Marshal(struct{
|
||||
X int `json:"x"`
|
||||
Y string `json:"y"`
|
||||
}{X: 1, Y: "hello"})
|
||||
```
|
||||
|
||||
When encoding a structure like the one above, create a sequence of opcodes like this:
|
||||
|
||||
```
|
||||
- opStructFieldHead ( `{` )
|
||||
- opStructFieldInt ( `"x": 1,` )
|
||||
- opStructFieldString ( `"y": "hello"` )
|
||||
- opStructEnd ( `}` )
|
||||
- opEnd
|
||||
```
|
||||
|
||||
※ Each operation, when processed, emits the characters shown on the right.
|
||||
|
||||
In addition, each opcode is managed by the following structure ( pseudocode ).
|
||||
|
||||
```go
type opType int

const (
    opStructFieldHead opType = iota
    opStructFieldInt
    opStructFieldString
    opStructEnd
    opEnd
)

type opcode struct {
    op     opType
    key    []byte
    offset uintptr // offset of the field within the struct
    next   *opcode
}
```
|
||||
|
||||
The process of encoding using the opcode sequence is roughly implemented as follows.
|
||||
|
||||
```go
func encode(code *opcode, b []byte, p unsafe.Pointer) ([]byte, error) {
    for {
        switch code.op {
        case opStructFieldHead:
            b = append(b, '{')
            code = code.next
        case opStructFieldInt:
            b = append(b, code.key...)
            b = appendInt(b, (*int)(unsafe.Pointer(uintptr(p)+code.offset)))
            code = code.next
        case opStructFieldString:
            b = append(b, code.key...)
            b = appendString(b, (*string)(unsafe.Pointer(uintptr(p)+code.offset)))
            code = code.next
        case opStructEnd:
            b = append(b, '}')
            code = code.next
        case opEnd:
            goto END
        }
    }
END:
    return b, nil
}
```
|
||||
|
||||
In this way, one large `switch-case` walks the linked list of opcodes to encode the value while avoiding unnecessary function calls.
|
||||
|
||||
### Opcode sequence optimization
|
||||
|
||||
One of the advantages of encoding using the opcode sequence is the ease of optimization.
|
||||
The opcode sequence mentioned above is actually converted into the following optimized operations and used.
|
||||
|
||||
```
|
||||
- opStructFieldHeadInt ( `{"x": 1,` )
|
||||
- opStructEndString ( `"y": "hello"}` )
|
||||
- opEnd
|
||||
```
|
||||
|
||||
It has been reduced from 5 opcodes to 3 opcodes!
Reducing the number of opcodes means reducing the number of `switch-case` branches.
In other words, the closer the number of operations is to 1, the faster the processing can be performed.
|
||||
|
||||
`go-json` both reduces the number of opcodes as shown above and speeds things up by preparing opcodes for optimized paths.
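
The following is a self-contained sketch (not the library's actual opcode set) of how merged opcodes cut down dispatches in the same style of VM loop shown earlier; `opStructFieldHeadInt` and `opStructEndString` are the illustrative names from the example above:

```go
package main

import (
	"fmt"
	"strconv"
	"unsafe"
)

type opType int

const (
	opStructFieldHeadInt opType = iota // emits `{"x":1,`
	opStructEndString                  // emits `"y":"hello"}`
	opEnd
)

type opcode struct {
	op     opType
	key    []byte
	offset uintptr
	next   *opcode
}

// encode walks the opcode list; each merged opcode emits what previously
// required two separate dispatches.
func encode(code *opcode, b []byte, p unsafe.Pointer) []byte {
	for {
		switch code.op {
		case opStructFieldHeadInt:
			b = append(b, '{')
			b = append(b, code.key...)
			b = strconv.AppendInt(b, int64(*(*int)(unsafe.Pointer(uintptr(p)+code.offset))), 10)
			b = append(b, ',')
			code = code.next
		case opStructEndString:
			b = append(b, code.key...)
			b = strconv.AppendQuote(b, *(*string)(unsafe.Pointer(uintptr(p)+code.offset)))
			b = append(b, '}')
			code = code.next
		case opEnd:
			return b
		}
	}
}

func main() {
	v := struct {
		X int
		Y string
	}{X: 1, Y: "hello"}

	end := &opcode{op: opEnd}
	second := &opcode{op: opStructEndString, key: []byte(`"y":`), offset: unsafe.Offsetof(v.Y), next: end}
	first := &opcode{op: opStructFieldHeadInt, key: []byte(`"x":`), offset: unsafe.Offsetof(v.X), next: second}

	fmt.Println(string(encode(first, nil, unsafe.Pointer(&v)))) // {"x":1,"y":"hello"}
}
```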
|
||||
|
||||
### Change recursive call from CALL to JMP
|
||||
|
||||
Recursive processing is required during encoding if the type is defined recursively as follows:
|
||||
|
||||
```go
|
||||
type T struct {
|
||||
X int
|
||||
U *U
|
||||
}
|
||||
|
||||
type U struct {
|
||||
T *T
|
||||
}
|
||||
|
||||
b, err := json.Marshal(&T{
|
||||
X: 1,
|
||||
U: &U{
|
||||
T: &T{
|
||||
X: 2,
|
||||
},
|
||||
},
|
||||
})
|
||||
fmt.Println(string(b)) // {"X":1,"U":{"T":{"X":2,"U":null}}}
|
||||
```
|
||||
|
||||
In `go-json`, recursive processing is handled by the `opStructFieldRecursive` operation type.
|
||||
|
||||
In this operation, after acquiring the opcode sequence used for the recursion, the encoder does **not** make a recursive function call; instead it saves the necessary state itself and simply moves on to the next operation.
|
||||
|
||||
Implementing recursion with a `JMP` operation instead of a `CALL` is a well-known technique for building fast virtual machines.
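
As a conceptual sketch only (not the library's implementation), the same idea can be shown on the recursive `T`/`U` example: instead of a recursive call, pending work is pushed onto an explicit stack and the loop simply continues:

```go
package main

import "fmt"

type T struct {
	X int
	U *U
}

type U struct {
	T *T
}

// collectX visits every nested *T without recursion: the explicit stack holds
// the state a recursive CALL would otherwise keep on the call stack, and
// continuing the loop plays the role of a JMP back to the dispatch point.
func collectX(root *T) []int {
	var xs []int
	stack := []*T{root}
	for len(stack) > 0 {
		t := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		xs = append(xs, t.X)
		if t.U != nil && t.U.T != nil {
			stack = append(stack, t.U.T) // "JMP": keep looping instead of calling collectX again
		}
	}
	return xs
}

func main() {
	v := &T{X: 1, U: &U{T: &T{X: 2}}}
	fmt.Println(collectX(v)) // [1 2]
}
```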
|
||||
|
||||
For more details, please refer to [the article](https://engineering.mercari.com/blog/entry/1599563768-081104c850) ( Japanese only ).
|
||||
|
||||
### Dispatch by typeptr from map to slice
|
||||
|
||||
When retrieving the data cached from the type information by `typeptr`, a map is normally used.
A map requires exclusive control, so a naive implementation would use `sync.Map`.
|
||||
|
||||
However, this is slow, so it's a good idea to use the `atomic` package for exclusive control as implemented by `segmentio/encoding/json` ( https://github.com/segmentio/encoding/blob/master/json/codec.go#L41-L55 ).
|
||||
|
||||
This implementation makes writes slower in exchange for faster reads, which works well given the nature of the library: the same type is encoded far more often than new types are registered.
|
||||
|
||||
However, as a result of profiling, I noticed that `runtime.mapaccess2` accounts for a significant percentage of the execution time. So I wondered whether the lookup could be changed from a map to a slice.
|
||||
|
||||
There is an API named `typelinks` defined in the `runtime` package that the `reflect` package uses internally.
|
||||
This allows you to get all the type information defined in the binary at runtime.
|
||||
|
||||
Since all type information can be obtained, a slice can be constructed in advance that is large enough for every type, so a lookup by `typeptr` never risks out-of-range access.
|
||||
|
||||
However, if there is a lot of type information this uses a lot of memory, so by default this optimization is only used when the slice fits within **2 MiB**.
|
||||
|
||||
If this approach is not available, it will fall back to the `atomic` based process described above.
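
A simplified, self-contained sketch of the slice-based dispatch (illustrative only: the real implementation derives the base address and slice length from `runtime.typelinks` and indexes with actual type pointers):

```go
package main

import "fmt"

type encoderFunc func() string

func main() {
	// Assume the smallest type address in the binary; in reality this comes
	// from runtime.typelinks, which lists every type in the binary.
	const baseTypeAddr = 0x1000

	// One slot per possible type; index = typeptr - baseTypeAddr.
	encoders := make([]encoderFunc, 64)

	intTypePtr := uintptr(0x1008) // hypothetical typeptr of int
	encoders[intTypePtr-baseTypeAddr] = func() string { return "encode int" }

	// Lookup is a plain slice access: no hashing and no lock on the read path,
	// unlike a map guarded by sync.Map or atomics.
	if enc := encoders[intTypePtr-baseTypeAddr]; enc != nil {
		fmt.Println(enc()) // encode int
	}
}
```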
|
||||
|
||||
If you want to know more, please refer to the implementation [here](https://github.com/goccy/go-json/blob/master/internal/runtime/type.go#L36-L100)
|
||||
|
||||
## Decoder
|
||||
|
||||
### Dispatch by typeptr from map to slice
|
||||
|
||||
Like the encoder, the decoder also uses typeptr to call the dedicated process.
|
||||
|
||||
### Faster termination character inspection using NUL character
|
||||
|
||||
In order to decode, the input buffer has to be traversed character by character.
Checking at every step whether the cursor has reached the end of the buffer is very slow.
|
||||
|
||||
`buf` : a `[]byte` variable that holds the data passed to the decoder
`cursor` : an `int64` variable that holds the current read position
|
||||
|
||||
```go
buflen := len(buf)
for ; cursor < buflen; cursor++ { // comparing cursor with buflen on every iteration is slow
    switch buf[cursor] {
    case ' ', '\n', '\r', '\t':
    }
}
```
|
||||
|
||||
Therefore, by adding the `NUL` (`\000`) character to the end of the read buffer as shown below, it is possible to check the termination character at the same time as other characters.
|
||||
|
||||
```go
for {
    switch buf[cursor] {
    case ' ', '\n', '\r', '\t':
    case '\000':
        return nil
    }
    cursor++
}
```
|
||||
|
||||
### Use Boundary Check Elimination
|
||||
|
||||
Due to the `NUL` character optimization, the Go compiler inserts a bounds check every time, even though `buf[cursor]` can never go out of range.
|
||||
|
||||
Therefore, `go-json` eliminates the bounds check in hot spots by fetching characters through pointer arithmetic, as in the following code.
|
||||
|
||||
```go
func char(ptr unsafe.Pointer, offset int64) byte {
    return *(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(offset)))
}

p := (*sliceHeader)(unsafe.Pointer(&buf)).data
for {
    switch char(p, cursor) {
    case ' ', '\n', '\r', '\t':
    case '\000':
        return nil
    }
    cursor++
}
```
|
||||
|
||||
### Checking the existence of fields of struct using Bitmaps
|
||||
|
||||
Profiling showed that, when decoding structs, the field lookup was taking a long time.
|
||||
|
||||
For example, consider decoding a string like `{"a":1,"b":2,"c":3}` into the following structure:
|
||||
|
||||
```go
|
||||
type T struct {
|
||||
A int `json:"a"`
|
||||
B int `json:"b"`
|
||||
C int `json:"c"`
|
||||
}
|
||||
```
|
||||
|
||||
Specifically, looking up the decoder that corresponds to a field from its name, as shown below, takes a lot of time during decoding.
|
||||
|
||||
```go
|
||||
fieldName := decodeKey(buf, cursor) // "a" or "b" or "c"
|
||||
decoder, exists := fieldToDecoderMap[fieldName] // so slow
|
||||
if exists {
|
||||
decoder(buf, cursor)
|
||||
} else {
|
||||
skipValue(buf, cursor)
|
||||
}
|
||||
```
|
||||
|
||||
To improve this, `json-iterator/go` is optimized to branch with a switch-case when the structure has 10 or fewer fields (switch-case is faster than a map); however, since the value hashed with the FNV algorithm is used for the branch, there is a risk of hash collisions. `gojay`, on the other hand, handles this part quickly by having the library user write the `switch-case` themselves.
|
||||
|
||||
|
||||
`go-json` takes a new approach that differs from both. I call it **bitmap field optimization**.
|
||||
|
||||
The range of values a single character can take can be represented by `[256]byte`. Also, if the structure has 8 or fewer fields, an `int8` can represent the state of every field.
In other words, it has the following structure.
|
||||
|
||||
- Base ( 8bit ): `00000000`
|
||||
- Key "a": `00000001` ( assign key "a" to the first bit )
|
||||
- Key "b": `00000010` ( assign key "b" to the second bit )
|
||||
- Key "c": `00000100` ( assign key "c" to the third bit )
|
||||
|
||||
The bitmap structure is the following:
|
||||
|
||||
```
|
||||
| key index(0) |
|
||||
------------------------
|
||||
0 | 00000000 |
|
||||
1 | 00000000 |
|
||||
~~ | |
|
||||
97 (a) | 00000001 |
|
||||
98 (b) | 00000010 |
|
||||
99 (c) | 00000100 |
|
||||
~~ | |
|
||||
255 | 00000000 |
|
||||
```
|
||||
|
||||
You can think of this as a bitmap with a height of `256` and a width of the maximum field-name length.
In other words, it can be represented by the following type.
|
||||
|
||||
```go
|
||||
[maxFieldKeyLength][256]int8
|
||||
```
|
||||
|
||||
When decoding a field key character by character, the pre-built bitmap is consulted as follows to check whether the character can still match a field.
|
||||
|
||||
```go
var curBit int8 = math.MaxInt8 // 01111111

c := char(buf, cursor)
bit := bitmap[keyIdx][c]
curBit &= bit
if curBit == 0 {
    // not found field
}
```
|
||||
|
||||
If `curBit` is still not `0` when the end of the key string is reached, the key may have matched one of the fields.
However, if the decoded key is shorter than the field name, this can be a false hit.
|
||||
|
||||
- input: `{"a":1}`
|
||||
```go
|
||||
type T struct {
|
||||
X int `json:"abc"`
|
||||
}
|
||||
```
|
||||
※ Since `a` is shorter than `abc`, the key can be decoded to its end without `curBit` ever becoming 0.
|
||||
|
||||
Rest assured: in this case it does not matter, because comparing the length of `a` with the length of `abc` tells you whether it was a real hit.
|
||||
|
||||
Finally, calculate the position of the bit where `1` is set and get the corresponding value, and you're done.
|
||||
|
||||
Using this technique, field lookups are possible with only bitwise operations and access to slices.
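
A self-contained sketch of the whole idea for the three fields above (illustrative only; the real decoder builds this table when compiling the decoder for a type and also applies the length check described earlier):

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	fields := []string{"a", "b", "c"} // 8 or fewer fields, so int8 is enough

	maxKeyLen := 0
	for _, f := range fields {
		if len(f) > maxKeyLen {
			maxKeyLen = len(f)
		}
	}

	// bitmap[position][character] has bit i set if field i has that character
	// at that position.
	bitmap := make([][256]int8, maxKeyLen)
	for i, f := range fields {
		for pos := 0; pos < len(f); pos++ {
			bitmap[pos][f[pos]] |= 1 << i
		}
	}

	// Decode the key "b": AND the per-character masks together.
	key := "b"
	var curBit int8 = math.MaxInt8 // 01111111
	for pos := 0; pos < len(key); pos++ {
		curBit &= bitmap[pos][key[pos]]
		if curBit == 0 {
			fmt.Println("unknown field")
			return
		}
	}

	// The lowest set bit gives the field index; the final length check guards
	// against the short-key false hit described above.
	idx := 0
	for curBit&1 == 0 {
		curBit >>= 1
		idx++
	}
	if len(key) == len(fields[idx]) {
		fmt.Println("matched field:", fields[idx]) // matched field: b
	}
}
```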
|
||||
|
||||
`go-json` uses the same technique for structures with 9 to 16 fields; in that case the bitmap is built as a `[maxKeyLen][256]int16`.
|
||||
|
||||
Currently, to save memory, this optimization is skipped not only when there are too many fields but also when the longest field name is long (specifically, 64 bytes or more).
|
||||
|
||||
### Others
|
||||
|
||||
There are many other optimizations; I will find time to write about them. If you have any questions about what is written here or about other optimizations, please visit the `#go-json` channel on `gophers.slack.com`.
|
||||
|
||||
## Reference
|
||||
|
||||
The following articles (Japanese only) tell the story of go-json.
|
||||
|
||||
- https://speakerdeck.com/goccy/zui-su-falsejsonraiburariwoqiu-mete
|
||||
- https://engineering.mercari.com/blog/entry/1599563768-081104c850/
|
||||
|
||||
# Looking for Sponsors
|
||||
|
||||
I'm looking for sponsors for this library. This library is being developed as a personal project in my spare time. If you want a quick response or problem resolution when using this library in your project, please register as a [sponsor](https://github.com/sponsors/goccy). I will cooperate as much as possible. Of course, this library is released under the MIT license, so you can use it freely, free of charge.
|
||||
|
||||
# License
|
||||
|
||||
MIT
|
68
vendor/github.com/goccy/go-json/color.go
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
package json
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
)
|
||||
|
||||
type (
|
||||
ColorFormat = encoder.ColorFormat
|
||||
ColorScheme = encoder.ColorScheme
|
||||
)
|
||||
|
||||
const escape = "\x1b"
|
||||
|
||||
type colorAttr int
|
||||
|
||||
//nolint:deadcode,varcheck
|
||||
const (
|
||||
fgBlackColor colorAttr = iota + 30
|
||||
fgRedColor
|
||||
fgGreenColor
|
||||
fgYellowColor
|
||||
fgBlueColor
|
||||
fgMagentaColor
|
||||
fgCyanColor
|
||||
fgWhiteColor
|
||||
)
|
||||
|
||||
//nolint:deadcode,varcheck
|
||||
const (
|
||||
fgHiBlackColor colorAttr = iota + 90
|
||||
fgHiRedColor
|
||||
fgHiGreenColor
|
||||
fgHiYellowColor
|
||||
fgHiBlueColor
|
||||
fgHiMagentaColor
|
||||
fgHiCyanColor
|
||||
fgHiWhiteColor
|
||||
)
|
||||
|
||||
func createColorFormat(attr colorAttr) ColorFormat {
|
||||
return ColorFormat{
|
||||
Header: wrapColor(attr),
|
||||
Footer: resetColor(),
|
||||
}
|
||||
}
|
||||
|
||||
func wrapColor(attr colorAttr) string {
|
||||
return fmt.Sprintf("%s[%dm", escape, attr)
|
||||
}
|
||||
|
||||
func resetColor() string {
|
||||
return wrapColor(colorAttr(0))
|
||||
}
|
||||
|
||||
var (
|
||||
DefaultColorScheme = &ColorScheme{
|
||||
Int: createColorFormat(fgHiMagentaColor),
|
||||
Uint: createColorFormat(fgHiMagentaColor),
|
||||
Float: createColorFormat(fgHiMagentaColor),
|
||||
Bool: createColorFormat(fgHiYellowColor),
|
||||
String: createColorFormat(fgHiGreenColor),
|
||||
Binary: createColorFormat(fgHiRedColor),
|
||||
ObjectKey: createColorFormat(fgHiCyanColor),
|
||||
Null: createColorFormat(fgBlueColor),
|
||||
}
|
||||
)
|
263
vendor/github.com/goccy/go-json/decode.go
generated
vendored
Normal file
@@ -0,0 +1,263 @@
|
||||
package json
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/decoder"
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type Decoder struct {
|
||||
s *decoder.Stream
|
||||
}
|
||||
|
||||
const (
|
||||
nul = '\000'
|
||||
)
|
||||
|
||||
type emptyInterface struct {
|
||||
typ *runtime.Type
|
||||
ptr unsafe.Pointer
|
||||
}
|
||||
|
||||
func unmarshal(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||
copy(src, data)
|
||||
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
|
||||
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||
return err
|
||||
}
|
||||
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ctx := decoder.TakeRuntimeContext()
|
||||
ctx.Buf = src
|
||||
ctx.Option.Flags = 0
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
cursor, err := dec.Decode(ctx, 0, 0, header.ptr)
|
||||
if err != nil {
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return err
|
||||
}
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return validateEndBuf(src, cursor)
|
||||
}
|
||||
|
||||
func unmarshalContext(ctx context.Context, data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||
copy(src, data)
|
||||
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
|
||||
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||
return err
|
||||
}
|
||||
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rctx := decoder.TakeRuntimeContext()
|
||||
rctx.Buf = src
|
||||
rctx.Option.Flags = 0
|
||||
rctx.Option.Flags |= decoder.ContextOption
|
||||
rctx.Option.Context = ctx
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(rctx.Option)
|
||||
}
|
||||
cursor, err := dec.Decode(rctx, 0, 0, header.ptr)
|
||||
if err != nil {
|
||||
decoder.ReleaseRuntimeContext(rctx)
|
||||
return err
|
||||
}
|
||||
decoder.ReleaseRuntimeContext(rctx)
|
||||
return validateEndBuf(src, cursor)
|
||||
}
|
||||
|
||||
var (
|
||||
pathDecoder = decoder.NewPathDecoder()
|
||||
)
|
||||
|
||||
func extractFromPath(path *Path, data []byte, optFuncs ...DecodeOptionFunc) ([][]byte, error) {
|
||||
if path.path.RootSelectorOnly {
|
||||
return [][]byte{data}, nil
|
||||
}
|
||||
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||
copy(src, data)
|
||||
|
||||
ctx := decoder.TakeRuntimeContext()
|
||||
ctx.Buf = src
|
||||
ctx.Option.Flags = 0
|
||||
ctx.Option.Flags |= decoder.PathOption
|
||||
ctx.Option.Path = path.path
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
paths, cursor, err := pathDecoder.DecodePath(ctx, 0, 0)
|
||||
if err != nil {
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return nil, err
|
||||
}
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
if err := validateEndBuf(src, cursor); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return paths, nil
|
||||
}
|
||||
|
||||
func unmarshalNoEscape(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||
copy(src, data)
|
||||
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
|
||||
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||
return err
|
||||
}
|
||||
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ctx := decoder.TakeRuntimeContext()
|
||||
ctx.Buf = src
|
||||
ctx.Option.Flags = 0
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
cursor, err := dec.Decode(ctx, 0, 0, noescape(header.ptr))
|
||||
if err != nil {
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return err
|
||||
}
|
||||
decoder.ReleaseRuntimeContext(ctx)
|
||||
return validateEndBuf(src, cursor)
|
||||
}
|
||||
|
||||
func validateEndBuf(src []byte, cursor int64) error {
|
||||
for {
|
||||
switch src[cursor] {
|
||||
case ' ', '\t', '\n', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case nul:
|
||||
return nil
|
||||
}
|
||||
return errors.ErrSyntax(
|
||||
fmt.Sprintf("invalid character '%c' after top-level value", src[cursor]),
|
||||
cursor+1,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
//nolint:staticcheck
|
||||
//go:nosplit
|
||||
func noescape(p unsafe.Pointer) unsafe.Pointer {
|
||||
x := uintptr(p)
|
||||
return unsafe.Pointer(x ^ 0)
|
||||
}
|
||||
|
||||
func validateType(typ *runtime.Type, p uintptr) error {
|
||||
if typ == nil || typ.Kind() != reflect.Ptr || p == 0 {
|
||||
return &InvalidUnmarshalError{Type: runtime.RType2Type(typ)}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// NewDecoder returns a new decoder that reads from r.
|
||||
//
|
||||
// The decoder introduces its own buffering and may
|
||||
// read data from r beyond the JSON values requested.
|
||||
func NewDecoder(r io.Reader) *Decoder {
|
||||
s := decoder.NewStream(r)
|
||||
return &Decoder{
|
||||
s: s,
|
||||
}
|
||||
}
|
||||
|
||||
// Buffered returns a reader of the data remaining in the Decoder's
|
||||
// buffer. The reader is valid until the next call to Decode.
|
||||
func (d *Decoder) Buffered() io.Reader {
|
||||
return d.s.Buffered()
|
||||
}
|
||||
|
||||
// Decode reads the next JSON-encoded value from its
|
||||
// input and stores it in the value pointed to by v.
|
||||
//
|
||||
// See the documentation for Unmarshal for details about
|
||||
// the conversion of JSON into a Go value.
|
||||
func (d *Decoder) Decode(v interface{}) error {
|
||||
return d.DecodeWithOption(v)
|
||||
}
|
||||
|
||||
// DecodeContext reads the next JSON-encoded value from its
|
||||
// input and stores it in the value pointed to by v with context.Context.
|
||||
func (d *Decoder) DecodeContext(ctx context.Context, v interface{}) error {
|
||||
d.s.Option.Flags |= decoder.ContextOption
|
||||
d.s.Option.Context = ctx
|
||||
return d.DecodeWithOption(v)
|
||||
}
|
||||
|
||||
func (d *Decoder) DecodeWithOption(v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
ptr := uintptr(header.ptr)
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
// noescape trick for header.typ ( reflect.*rtype )
|
||||
copiedType := *(**runtime.Type)(unsafe.Pointer(&typeptr))
|
||||
|
||||
if err := validateType(copiedType, ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
dec, err := decoder.CompileToGetDecoder(typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := d.s.PrepareForDecode(); err != nil {
|
||||
return err
|
||||
}
|
||||
s := d.s
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(s.Option)
|
||||
}
|
||||
if err := dec.DecodeStream(s, 0, header.ptr); err != nil {
|
||||
return err
|
||||
}
|
||||
s.Reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Decoder) More() bool {
|
||||
return d.s.More()
|
||||
}
|
||||
|
||||
func (d *Decoder) Token() (Token, error) {
|
||||
return d.s.Token()
|
||||
}
|
||||
|
||||
// DisallowUnknownFields causes the Decoder to return an error when the destination
|
||||
// is a struct and the input contains object keys which do not match any
|
||||
// non-ignored, exported fields in the destination.
|
||||
func (d *Decoder) DisallowUnknownFields() {
|
||||
d.s.DisallowUnknownFields = true
|
||||
}
|
||||
|
||||
func (d *Decoder) InputOffset() int64 {
|
||||
return d.s.TotalOffset()
|
||||
}
|
||||
|
||||
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
|
||||
// Number instead of as a float64.
|
||||
func (d *Decoder) UseNumber() {
|
||||
d.s.UseNumber = true
|
||||
}
|
13
vendor/github.com/goccy/go-json/docker-compose.yml
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
version: '2'
|
||||
services:
|
||||
go-json:
|
||||
image: golang:1.18
|
||||
volumes:
|
||||
- '.:/go/src/go-json'
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 620M
|
||||
working_dir: /go/src/go-json
|
||||
command: |
|
||||
sh -c "go test -c . && ls go-json.test"
|
326
vendor/github.com/goccy/go-json/encode.go
generated
vendored
Normal file
@@ -0,0 +1,326 @@
|
||||
package json
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"os"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/encoder"
|
||||
"github.com/goccy/go-json/internal/encoder/vm"
|
||||
"github.com/goccy/go-json/internal/encoder/vm_color"
|
||||
"github.com/goccy/go-json/internal/encoder/vm_color_indent"
|
||||
"github.com/goccy/go-json/internal/encoder/vm_indent"
|
||||
)
|
||||
|
||||
// An Encoder writes JSON values to an output stream.
|
||||
type Encoder struct {
|
||||
w io.Writer
|
||||
enabledIndent bool
|
||||
enabledHTMLEscape bool
|
||||
prefix string
|
||||
indentStr string
|
||||
}
|
||||
|
||||
// NewEncoder returns a new encoder that writes to w.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{w: w, enabledHTMLEscape: true}
|
||||
}
|
||||
|
||||
// Encode writes the JSON encoding of v to the stream, followed by a newline character.
|
||||
//
|
||||
// See the documentation for Marshal for details about the conversion of Go values to JSON.
|
||||
func (e *Encoder) Encode(v interface{}) error {
|
||||
return e.EncodeWithOption(v)
|
||||
}
|
||||
|
||||
// EncodeWithOption call Encode with EncodeOption.
|
||||
func (e *Encoder) EncodeWithOption(v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
ctx.Option.Flag = 0
|
||||
|
||||
err := e.encodeWithOption(ctx, v, optFuncs...)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// EncodeContext call Encode with context.Context and EncodeOption.
|
||||
func (e *Encoder) EncodeContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||
rctx := encoder.TakeRuntimeContext()
|
||||
rctx.Option.Flag = 0
|
||||
rctx.Option.Flag |= encoder.ContextOption
|
||||
rctx.Option.Context = ctx
|
||||
|
||||
err := e.encodeWithOption(rctx, v, optFuncs...)
|
||||
|
||||
encoder.ReleaseRuntimeContext(rctx)
|
||||
return err
|
||||
}
|
||||
|
||||
func (e *Encoder) encodeWithOption(ctx *encoder.RuntimeContext, v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||
if e.enabledHTMLEscape {
|
||||
ctx.Option.Flag |= encoder.HTMLEscapeOption
|
||||
}
|
||||
ctx.Option.Flag |= encoder.NormalizeUTF8Option
|
||||
ctx.Option.DebugOut = os.Stdout
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
var (
|
||||
buf []byte
|
||||
err error
|
||||
)
|
||||
if e.enabledIndent {
|
||||
buf, err = encodeIndent(ctx, v, e.prefix, e.indentStr)
|
||||
} else {
|
||||
buf, err = encode(ctx, v)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if e.enabledIndent {
|
||||
buf = buf[:len(buf)-2]
|
||||
} else {
|
||||
buf = buf[:len(buf)-1]
|
||||
}
|
||||
buf = append(buf, '\n')
|
||||
if _, err := e.w.Write(buf); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// SetEscapeHTML specifies whether problematic HTML characters should be escaped inside JSON quoted strings.
|
||||
// The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e to avoid certain safety problems that can arise when embedding JSON in HTML.
|
||||
//
|
||||
// In non-HTML settings where the escaping interferes with the readability of the output, SetEscapeHTML(false) disables this behavior.
|
||||
func (e *Encoder) SetEscapeHTML(on bool) {
|
||||
e.enabledHTMLEscape = on
|
||||
}
|
||||
|
||||
// SetIndent instructs the encoder to format each subsequent encoded value as if indented by the package-level function Indent(dst, src, prefix, indent).
|
||||
// Calling SetIndent("", "") disables indentation.
|
||||
func (e *Encoder) SetIndent(prefix, indent string) {
|
||||
if prefix == "" && indent == "" {
|
||||
e.enabledIndent = false
|
||||
return
|
||||
}
|
||||
e.prefix = prefix
|
||||
e.indentStr = indent
|
||||
e.enabledIndent = true
|
||||
}
|
||||
|
||||
func marshalContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||
rctx := encoder.TakeRuntimeContext()
|
||||
rctx.Option.Flag = 0
|
||||
rctx.Option.Flag = encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option | encoder.ContextOption
|
||||
rctx.Option.Context = ctx
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(rctx.Option)
|
||||
}
|
||||
|
||||
buf, err := encode(rctx, v)
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(rctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// this line exists to escape call of `runtime.makeslicecopy` .
|
||||
// if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
|
||||
// dst buffer size and src buffer size are different.
|
||||
// in this case, compiler uses `runtime.makeslicecopy`, but it is slow.
|
||||
buf = buf[:len(buf)-1]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(rctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func marshal(v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
|
||||
ctx.Option.Flag = 0
|
||||
ctx.Option.Flag |= (encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option)
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
|
||||
buf, err := encode(ctx, v)
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// this line exists to escape call of `runtime.makeslicecopy` .
|
||||
// if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
|
||||
// dst buffer size and src buffer size are different.
|
||||
// in this case, compiler uses `runtime.makeslicecopy`, but it is slow.
|
||||
buf = buf[:len(buf)-1]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func marshalNoEscape(v interface{}) ([]byte, error) {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
|
||||
ctx.Option.Flag = 0
|
||||
ctx.Option.Flag |= (encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option)
|
||||
|
||||
buf, err := encodeNoEscape(ctx, v)
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// this line exists to escape call of `runtime.makeslicecopy` .
|
||||
// if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
|
||||
// dst buffer size and src buffer size are different.
|
||||
// in this case, compiler uses `runtime.makeslicecopy`, but it is slow.
|
||||
buf = buf[:len(buf)-1]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func marshalIndent(v interface{}, prefix, indent string, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||
ctx := encoder.TakeRuntimeContext()
|
||||
|
||||
ctx.Option.Flag = 0
|
||||
ctx.Option.Flag |= (encoder.HTMLEscapeOption | encoder.NormalizeUTF8Option | encoder.IndentOption)
|
||||
for _, optFunc := range optFuncs {
|
||||
optFunc(ctx.Option)
|
||||
}
|
||||
|
||||
buf, err := encodeIndent(ctx, v, prefix, indent)
|
||||
if err != nil {
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
buf = buf[:len(buf)-2]
|
||||
copied := make([]byte, len(buf))
|
||||
copy(copied, buf)
|
||||
|
||||
encoder.ReleaseRuntimeContext(ctx)
|
||||
return copied, nil
|
||||
}
|
||||
|
||||
func encode(ctx *encoder.RuntimeContext, v interface{}) ([]byte, error) {
|
||||
b := ctx.Buf[:0]
|
||||
if v == nil {
|
||||
b = encoder.AppendNull(ctx, b)
|
||||
b = encoder.AppendComma(ctx, b)
|
||||
return b, nil
|
||||
}
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
codeSet, err := encoder.CompileToGetCodeSet(ctx, typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := uintptr(header.ptr)
|
||||
ctx.Init(p, codeSet.CodeLength)
|
||||
ctx.KeepRefs = append(ctx.KeepRefs, header.ptr)
|
||||
|
||||
buf, err := encodeRunCode(ctx, b, codeSet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ctx.Buf = buf
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func encodeNoEscape(ctx *encoder.RuntimeContext, v interface{}) ([]byte, error) {
|
||||
b := ctx.Buf[:0]
|
||||
if v == nil {
|
||||
b = encoder.AppendNull(ctx, b)
|
||||
b = encoder.AppendComma(ctx, b)
|
||||
return b, nil
|
||||
}
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
codeSet, err := encoder.CompileToGetCodeSet(ctx, typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := uintptr(header.ptr)
|
||||
ctx.Init(p, codeSet.CodeLength)
|
||||
buf, err := encodeRunCode(ctx, b, codeSet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ctx.Buf = buf
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func encodeIndent(ctx *encoder.RuntimeContext, v interface{}, prefix, indent string) ([]byte, error) {
|
||||
b := ctx.Buf[:0]
|
||||
if v == nil {
|
||||
b = encoder.AppendNull(ctx, b)
|
||||
b = encoder.AppendCommaIndent(ctx, b)
|
||||
return b, nil
|
||||
}
|
||||
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||
typ := header.typ
|
||||
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
codeSet, err := encoder.CompileToGetCodeSet(ctx, typeptr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := uintptr(header.ptr)
|
||||
ctx.Init(p, codeSet.CodeLength)
|
||||
buf, err := encodeRunIndentCode(ctx, b, codeSet, prefix, indent)
|
||||
|
||||
ctx.KeepRefs = append(ctx.KeepRefs, header.ptr)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ctx.Buf = buf
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func encodeRunCode(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
|
||||
if (ctx.Option.Flag & encoder.DebugOption) != 0 {
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
return vm.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color.Run(ctx, b, codeSet)
|
||||
}
|
||||
return vm.Run(ctx, b, codeSet)
|
||||
}
|
||||
|
||||
func encodeRunIndentCode(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet, prefix, indent string) ([]byte, error) {
|
||||
ctx.Prefix = []byte(prefix)
|
||||
ctx.IndentStr = []byte(indent)
|
||||
if (ctx.Option.Flag & encoder.DebugOption) != 0 {
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color_indent.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
return vm_indent.DebugRun(ctx, b, codeSet)
|
||||
}
|
||||
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||
return vm_color_indent.Run(ctx, b, codeSet)
|
||||
}
|
||||
return vm_indent.Run(ctx, b, codeSet)
|
||||
}
|
41
vendor/github.com/goccy/go-json/error.go
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
package json
|
||||
|
||||
import (
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
// Before Go 1.2, an InvalidUTF8Error was returned by Marshal when
|
||||
// attempting to encode a string value with invalid UTF-8 sequences.
|
||||
// As of Go 1.2, Marshal instead coerces the string to valid UTF-8 by
|
||||
// replacing invalid bytes with the Unicode replacement rune U+FFFD.
|
||||
//
|
||||
// Deprecated: No longer used; kept for compatibility.
|
||||
type InvalidUTF8Error = errors.InvalidUTF8Error
|
||||
|
||||
// An InvalidUnmarshalError describes an invalid argument passed to Unmarshal.
|
||||
// (The argument to Unmarshal must be a non-nil pointer.)
|
||||
type InvalidUnmarshalError = errors.InvalidUnmarshalError
|
||||
|
||||
// A MarshalerError represents an error from calling a MarshalJSON or MarshalText method.
|
||||
type MarshalerError = errors.MarshalerError
|
||||
|
||||
// A SyntaxError is a description of a JSON syntax error.
|
||||
type SyntaxError = errors.SyntaxError
|
||||
|
||||
// An UnmarshalFieldError describes a JSON object key that
|
||||
// led to an unexported (and therefore unwritable) struct field.
|
||||
//
|
||||
// Deprecated: No longer used; kept for compatibility.
|
||||
type UnmarshalFieldError = errors.UnmarshalFieldError
|
||||
|
||||
// An UnmarshalTypeError describes a JSON value that was
|
||||
// not appropriate for a value of a specific Go type.
|
||||
type UnmarshalTypeError = errors.UnmarshalTypeError
|
||||
|
||||
// An UnsupportedTypeError is returned by Marshal when attempting
|
||||
// to encode an unsupported value type.
|
||||
type UnsupportedTypeError = errors.UnsupportedTypeError
|
||||
|
||||
type UnsupportedValueError = errors.UnsupportedValueError
|
||||
|
||||
type PathError = errors.PathError
|
41
vendor/github.com/goccy/go-json/internal/decoder/anonymous_field.go
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type anonymousFieldDecoder struct {
|
||||
structType *runtime.Type
|
||||
offset uintptr
|
||||
dec Decoder
|
||||
}
|
||||
|
||||
func newAnonymousFieldDecoder(structType *runtime.Type, offset uintptr, dec Decoder) *anonymousFieldDecoder {
|
||||
return &anonymousFieldDecoder{
|
||||
structType: structType,
|
||||
offset: offset,
|
||||
dec: dec,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *anonymousFieldDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
if *(*unsafe.Pointer)(p) == nil {
|
||||
*(*unsafe.Pointer)(p) = unsafe_New(d.structType)
|
||||
}
|
||||
p = *(*unsafe.Pointer)(p)
|
||||
return d.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+d.offset))
|
||||
}
|
||||
|
||||
func (d *anonymousFieldDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
if *(*unsafe.Pointer)(p) == nil {
|
||||
*(*unsafe.Pointer)(p) = unsafe_New(d.structType)
|
||||
}
|
||||
p = *(*unsafe.Pointer)(p)
|
||||
return d.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+d.offset))
|
||||
}
|
||||
|
||||
func (d *anonymousFieldDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return d.dec.DecodePath(ctx, cursor, depth)
|
||||
}
|
176
vendor/github.com/goccy/go-json/internal/decoder/array.go
generated
vendored
Normal file
@@ -0,0 +1,176 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type arrayDecoder struct {
|
||||
elemType *runtime.Type
|
||||
size uintptr
|
||||
valueDecoder Decoder
|
||||
alen int
|
||||
structName string
|
||||
fieldName string
|
||||
zeroValue unsafe.Pointer
|
||||
}
|
||||
|
||||
func newArrayDecoder(dec Decoder, elemType *runtime.Type, alen int, structName, fieldName string) *arrayDecoder {
|
||||
// workaround to avoid checkptr errors. cannot use `*(*unsafe.Pointer)(unsafe_New(elemType))` directly.
|
||||
zeroValuePtr := unsafe_New(elemType)
|
||||
zeroValue := **(**unsafe.Pointer)(unsafe.Pointer(&zeroValuePtr))
|
||||
return &arrayDecoder{
|
||||
valueDecoder: dec,
|
||||
elemType: elemType,
|
||||
size: elemType.Size(),
|
||||
alen: alen,
|
||||
structName: structName,
|
||||
fieldName: fieldName,
|
||||
zeroValue: zeroValue,
|
||||
}
|
||||
}
|
||||
|
||||
func (d *arrayDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||
}
|
||||
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case '[':
|
||||
idx := 0
|
||||
s.cursor++
|
||||
if s.skipWhiteSpace() == ']' {
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
}
|
||||
for {
|
||||
if idx < d.alen {
|
||||
if err := d.valueDecoder.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+uintptr(idx)*d.size)); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
idx++
|
||||
switch s.skipWhiteSpace() {
|
||||
case ']':
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
s.cursor++
|
||||
return nil
|
||||
case ',':
|
||||
s.cursor++
|
||||
continue
|
||||
case nul:
|
||||
if s.read() {
|
||||
s.cursor++
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
s.cursor++
|
||||
}
|
||||
ERROR:
|
||||
return errors.ErrUnexpectedEndOfJSON("array", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *arrayDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
case '[':
|
||||
idx := 0
|
||||
cursor++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
if buf[cursor] == ']' {
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
cursor++
|
||||
return cursor, nil
|
||||
}
|
||||
for {
|
||||
if idx < d.alen {
|
||||
c, err := d.valueDecoder.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+uintptr(idx)*d.size))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor = c
|
||||
} else {
|
||||
c, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor = c
|
||||
}
|
||||
idx++
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
switch buf[cursor] {
|
||||
case ']':
|
||||
for idx < d.alen {
|
||||
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||
idx++
|
||||
}
|
||||
cursor++
|
||||
return cursor, nil
|
||||
case ',':
|
||||
cursor++
|
||||
continue
|
||||
default:
|
||||
return 0, errors.ErrInvalidCharacter(buf[cursor], "array", cursor)
|
||||
}
|
||||
}
|
||||
default:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("array", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (d *arrayDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return nil, 0, fmt.Errorf("json: array decoder does not support decode path")
|
||||
}
|
438
vendor/github.com/goccy/go-json/internal/decoder/assign.go
generated
vendored
Normal file
@@ -0,0 +1,438 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
var (
|
||||
nilValue = reflect.ValueOf(nil)
|
||||
)
|
||||
|
||||
func AssignValue(src, dst reflect.Value) error {
|
||||
if dst.Type().Kind() != reflect.Ptr {
|
||||
return fmt.Errorf("invalid dst type. required pointer type: %T", dst.Type())
|
||||
}
|
||||
casted, err := castValue(dst.Elem().Type(), src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
dst.Elem().Set(casted)
|
||||
return nil
|
||||
}
|
||||
|
||||
func castValue(t reflect.Type, v reflect.Value) (reflect.Value, error) {
|
||||
switch t.Kind() {
|
||||
case reflect.Int:
|
||||
vv, err := castInt(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(int(vv.Int())), nil
|
||||
case reflect.Int8:
|
||||
vv, err := castInt(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(int8(vv.Int())), nil
|
||||
case reflect.Int16:
|
||||
vv, err := castInt(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(int16(vv.Int())), nil
|
||||
case reflect.Int32:
|
||||
vv, err := castInt(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(int32(vv.Int())), nil
|
||||
case reflect.Int64:
|
||||
return castInt(v)
|
||||
case reflect.Uint:
|
||||
vv, err := castUint(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(uint(vv.Uint())), nil
|
||||
case reflect.Uint8:
|
||||
vv, err := castUint(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(uint8(vv.Uint())), nil
|
||||
case reflect.Uint16:
|
||||
vv, err := castUint(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(uint16(vv.Uint())), nil
|
||||
case reflect.Uint32:
|
||||
vv, err := castUint(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(uint32(vv.Uint())), nil
|
||||
case reflect.Uint64:
|
||||
return castUint(v)
|
||||
case reflect.Uintptr:
|
||||
vv, err := castUint(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(uintptr(vv.Uint())), nil
|
||||
case reflect.String:
|
||||
return castString(v)
|
||||
case reflect.Bool:
|
||||
return castBool(v)
|
||||
case reflect.Float32:
|
||||
vv, err := castFloat(v)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(float32(vv.Float())), nil
|
||||
case reflect.Float64:
|
||||
return castFloat(v)
|
||||
case reflect.Array:
|
||||
return castArray(t, v)
|
||||
case reflect.Slice:
|
||||
return castSlice(t, v)
|
||||
case reflect.Map:
|
||||
return castMap(t, v)
|
||||
case reflect.Struct:
|
||||
return castStruct(t, v)
|
||||
}
|
||||
return v, nil
|
||||
}
|
||||
|
||||
func castInt(v reflect.Value) (reflect.Value, error) {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return v, nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return reflect.ValueOf(int64(v.Uint())), nil
|
||||
case reflect.String:
|
||||
i64, err := strconv.ParseInt(v.String(), 10, 64)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(i64), nil
|
||||
case reflect.Bool:
|
||||
if v.Bool() {
|
||||
return reflect.ValueOf(int64(1)), nil
|
||||
}
|
||||
return reflect.ValueOf(int64(0)), nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return reflect.ValueOf(int64(v.Float())), nil
|
||||
case reflect.Array:
|
||||
if v.Len() > 0 {
|
||||
return castInt(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to int64 from empty array")
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castInt(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to int64 from empty slice")
|
||||
case reflect.Interface:
|
||||
return castInt(reflect.ValueOf(v.Interface()))
|
||||
case reflect.Map:
|
||||
return nilValue, fmt.Errorf("failed to cast to int64 from map")
|
||||
case reflect.Struct:
|
||||
return nilValue, fmt.Errorf("failed to cast to int64 from struct")
|
||||
case reflect.Ptr:
|
||||
return castInt(v.Elem())
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to int64 from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castUint(v reflect.Value) (reflect.Value, error) {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return reflect.ValueOf(uint64(v.Int())), nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return v, nil
|
||||
case reflect.String:
|
||||
u64, err := strconv.ParseUint(v.String(), 10, 64)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(u64), nil
|
||||
case reflect.Bool:
|
||||
if v.Bool() {
|
||||
return reflect.ValueOf(uint64(1)), nil
|
||||
}
|
||||
return reflect.ValueOf(uint64(0)), nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return reflect.ValueOf(uint64(v.Float())), nil
|
||||
case reflect.Array:
|
||||
if v.Len() > 0 {
|
||||
return castUint(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to uint64 from empty array")
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castUint(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to uint64 from empty slice")
|
||||
case reflect.Interface:
|
||||
return castUint(reflect.ValueOf(v.Interface()))
|
||||
case reflect.Map:
|
||||
return nilValue, fmt.Errorf("failed to cast to uint64 from map")
|
||||
case reflect.Struct:
|
||||
return nilValue, fmt.Errorf("failed to cast to uint64 from struct")
|
||||
case reflect.Ptr:
|
||||
return castUint(v.Elem())
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to uint64 from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castString(v reflect.Value) (reflect.Value, error) {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return reflect.ValueOf(fmt.Sprint(v.Int())), nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return reflect.ValueOf(fmt.Sprint(v.Uint())), nil
|
||||
case reflect.String:
|
||||
return v, nil
|
||||
case reflect.Bool:
|
||||
if v.Bool() {
|
||||
return reflect.ValueOf("true"), nil
|
||||
}
|
||||
return reflect.ValueOf("false"), nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return reflect.ValueOf(fmt.Sprint(v.Float())), nil
|
||||
case reflect.Array:
|
||||
if v.Len() > 0 {
|
||||
return castString(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to string from empty array")
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castString(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to string from empty slice")
|
||||
case reflect.Interface:
|
||||
return castString(reflect.ValueOf(v.Interface()))
|
||||
case reflect.Map:
|
||||
return nilValue, fmt.Errorf("failed to cast to string from map")
|
||||
case reflect.Struct:
|
||||
return nilValue, fmt.Errorf("failed to cast to string from struct")
|
||||
case reflect.Ptr:
|
||||
return castString(v.Elem())
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to string from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castBool(v reflect.Value) (reflect.Value, error) {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
switch v.Int() {
|
||||
case 0:
|
||||
return reflect.ValueOf(false), nil
|
||||
case 1:
|
||||
return reflect.ValueOf(true), nil
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to bool from %d", v.Int())
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
switch v.Uint() {
|
||||
case 0:
|
||||
return reflect.ValueOf(false), nil
|
||||
case 1:
|
||||
return reflect.ValueOf(true), nil
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to bool from %d", v.Uint())
|
||||
case reflect.String:
|
||||
b, err := strconv.ParseBool(v.String())
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(b), nil
|
||||
case reflect.Bool:
|
||||
return v, nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
switch v.Float() {
|
||||
case 0:
|
||||
return reflect.ValueOf(false), nil
|
||||
case 1:
|
||||
return reflect.ValueOf(true), nil
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to bool from %f", v.Float())
|
||||
case reflect.Array:
|
||||
if v.Len() > 0 {
|
||||
return castBool(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to string from empty array")
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castBool(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to string from empty slice")
|
||||
case reflect.Interface:
|
||||
return castBool(reflect.ValueOf(v.Interface()))
|
||||
case reflect.Map:
|
||||
return nilValue, fmt.Errorf("failed to cast to string from map")
|
||||
case reflect.Struct:
|
||||
return nilValue, fmt.Errorf("failed to cast to string from struct")
|
||||
case reflect.Ptr:
|
||||
return castBool(v.Elem())
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to bool from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castFloat(v reflect.Value) (reflect.Value, error) {
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return reflect.ValueOf(float64(v.Int())), nil
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return reflect.ValueOf(float64(v.Uint())), nil
|
||||
case reflect.String:
|
||||
f64, err := strconv.ParseFloat(v.String(), 64)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
return reflect.ValueOf(f64), nil
|
||||
case reflect.Bool:
|
||||
if v.Bool() {
|
||||
return reflect.ValueOf(float64(1)), nil
|
||||
}
|
||||
return reflect.ValueOf(float64(0)), nil
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return v, nil
|
||||
case reflect.Array:
|
||||
if v.Len() > 0 {
|
||||
return castFloat(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to float64 from empty array")
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castFloat(v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to float64 from empty slice")
|
||||
case reflect.Interface:
|
||||
return castFloat(reflect.ValueOf(v.Interface()))
|
||||
case reflect.Map:
|
||||
return nilValue, fmt.Errorf("failed to cast to float64 from map")
|
||||
case reflect.Struct:
|
||||
return nilValue, fmt.Errorf("failed to cast to float64 from struct")
|
||||
case reflect.Ptr:
|
||||
return castFloat(v.Elem())
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to float64 from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castArray(t reflect.Type, v reflect.Value) (reflect.Value, error) {
|
||||
kind := v.Type().Kind()
|
||||
if kind == reflect.Interface {
|
||||
return castArray(t, reflect.ValueOf(v.Interface()))
|
||||
}
|
||||
if kind != reflect.Slice && kind != reflect.Array {
|
||||
return nilValue, fmt.Errorf("failed to cast to array from %s", kind)
|
||||
}
|
||||
if t.Elem() == v.Type().Elem() {
|
||||
return v, nil
|
||||
}
|
||||
if t.Len() != v.Len() {
|
||||
return nilValue, fmt.Errorf("failed to cast [%d]array from slice of %d length", t.Len(), v.Len())
|
||||
}
|
||||
ret := reflect.New(t).Elem()
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
vv, err := castValue(t.Elem(), v.Index(i))
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
ret.Index(i).Set(vv)
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func castSlice(t reflect.Type, v reflect.Value) (reflect.Value, error) {
|
||||
kind := v.Type().Kind()
|
||||
if kind == reflect.Interface {
|
||||
return castSlice(t, reflect.ValueOf(v.Interface()))
|
||||
}
|
||||
if kind != reflect.Slice && kind != reflect.Array {
|
||||
return nilValue, fmt.Errorf("failed to cast to slice from %s", kind)
|
||||
}
|
||||
if t.Elem() == v.Type().Elem() {
|
||||
return v, nil
|
||||
}
|
||||
ret := reflect.MakeSlice(t, v.Len(), v.Len())
|
||||
for i := 0; i < v.Len(); i++ {
|
||||
vv, err := castValue(t.Elem(), v.Index(i))
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
ret.Index(i).Set(vv)
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func castMap(t reflect.Type, v reflect.Value) (reflect.Value, error) {
|
||||
ret := reflect.MakeMap(t)
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Map:
|
||||
iter := v.MapRange()
|
||||
for iter.Next() {
|
||||
key, err := castValue(t.Key(), iter.Key())
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
value, err := castValue(t.Elem(), iter.Value())
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
ret.SetMapIndex(key, value)
|
||||
}
|
||||
return ret, nil
|
||||
case reflect.Interface:
|
||||
return castMap(t, reflect.ValueOf(v.Interface()))
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castMap(t, v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to map from empty slice")
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to map from %s", v.Type().Kind())
|
||||
}
|
||||
|
||||
func castStruct(t reflect.Type, v reflect.Value) (reflect.Value, error) {
|
||||
ret := reflect.New(t).Elem()
|
||||
switch v.Type().Kind() {
|
||||
case reflect.Map:
|
||||
iter := v.MapRange()
|
||||
for iter.Next() {
|
||||
key := iter.Key()
|
||||
k, err := castString(key)
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
fieldName := k.String()
|
||||
field, ok := t.FieldByName(fieldName)
|
||||
if ok {
|
||||
value, err := castValue(field.Type, iter.Value())
|
||||
if err != nil {
|
||||
return nilValue, err
|
||||
}
|
||||
ret.FieldByName(fieldName).Set(value)
|
||||
}
|
||||
}
|
||||
return ret, nil
|
||||
case reflect.Struct:
|
||||
for i := 0; i < v.Type().NumField(); i++ {
|
||||
name := v.Type().Field(i).Name
|
||||
ret.FieldByName(name).Set(v.FieldByName(name))
|
||||
}
|
||||
return ret, nil
|
||||
case reflect.Interface:
|
||||
return castStruct(t, reflect.ValueOf(v.Interface()))
|
||||
case reflect.Slice:
|
||||
if v.Len() > 0 {
|
||||
return castStruct(t, v.Index(0))
|
||||
}
|
||||
return nilValue, fmt.Errorf("failed to cast to struct from empty slice")
|
||||
default:
|
||||
return nilValue, fmt.Errorf("failed to cast to struct from %s", v.Type().Kind())
|
||||
}
|
||||
}
|
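The cast helpers in assign.go coerce whatever reflect.Value they are handed into the target kind: numeric strings are parsed, bools map to 0/1, and the first element of an array or slice stands in for the whole value. A rough standalone sketch of that idea, written here for illustration with the standard library only (the name looseInt64 is made up and is not part of the vendored file):

package main

import (
    "fmt"
    "reflect"
    "strconv"
)

// looseInt64 mirrors the spirit of castInt above: accept ints, uints,
// floats, bools, numeric strings and the first element of a sequence.
func looseInt64(v reflect.Value) (int64, error) {
    switch v.Kind() {
    case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
        return v.Int(), nil
    case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
        return int64(v.Uint()), nil
    case reflect.Float32, reflect.Float64:
        return int64(v.Float()), nil
    case reflect.Bool:
        if v.Bool() {
            return 1, nil
        }
        return 0, nil
    case reflect.String:
        return strconv.ParseInt(v.String(), 10, 64)
    case reflect.Slice, reflect.Array:
        if v.Len() > 0 {
            return looseInt64(v.Index(0))
        }
        return 0, fmt.Errorf("empty sequence")
    case reflect.Interface, reflect.Ptr:
        return looseInt64(v.Elem())
    }
    return 0, fmt.Errorf("cannot cast %s to int64", v.Kind())
}

func main() {
    for _, in := range []interface{}{"42", 3.9, true, []int{7, 8}} {
        n, err := looseInt64(reflect.ValueOf(in))
        fmt.Println(n, err) // 42, 3, 1, 7 (all with err == nil)
    }
}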
83 vendor/github.com/goccy/go-json/internal/decoder/bool.go generated vendored Normal file
@@ -0,0 +1,83 @@
package decoder

import (
    "fmt"
    "unsafe"

    "github.com/goccy/go-json/internal/errors"
)

type boolDecoder struct {
    structName string
    fieldName  string
}

func newBoolDecoder(structName, fieldName string) *boolDecoder {
    return &boolDecoder{structName: structName, fieldName: fieldName}
}

func (d *boolDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
    c := s.skipWhiteSpace()
    for {
        switch c {
        case 't':
            if err := trueBytes(s); err != nil {
                return err
            }
            **(**bool)(unsafe.Pointer(&p)) = true
            return nil
        case 'f':
            if err := falseBytes(s); err != nil {
                return err
            }
            **(**bool)(unsafe.Pointer(&p)) = false
            return nil
        case 'n':
            if err := nullBytes(s); err != nil {
                return err
            }
            return nil
        case nul:
            if s.read() {
                c = s.char()
                continue
            }
            goto ERROR
        }
        break
    }
ERROR:
    return errors.ErrUnexpectedEndOfJSON("bool", s.totalOffset())
}

func (d *boolDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
    buf := ctx.Buf
    cursor = skipWhiteSpace(buf, cursor)
    switch buf[cursor] {
    case 't':
        if err := validateTrue(buf, cursor); err != nil {
            return 0, err
        }
        cursor += 4
        **(**bool)(unsafe.Pointer(&p)) = true
        return cursor, nil
    case 'f':
        if err := validateFalse(buf, cursor); err != nil {
            return 0, err
        }
        cursor += 5
        **(**bool)(unsafe.Pointer(&p)) = false
        return cursor, nil
    case 'n':
        if err := validateNull(buf, cursor); err != nil {
            return 0, err
        }
        cursor += 4
        return cursor, nil
    }
    return 0, errors.ErrUnexpectedEndOfJSON("bool", cursor)
}

func (d *boolDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
    return nil, 0, fmt.Errorf("json: bool decoder does not support decode path")
}
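Each decoder above implements both a buffered Decode and a streaming DecodeStream; through the public drop-in API this split roughly corresponds to json.Unmarshal versus json.NewDecoder. A small sketch for orientation (public github.com/goccy/go-json package, not the internal types above):

package main

import (
    "fmt"
    "strings"

    json "github.com/goccy/go-json"
)

func main() {
    // Buffered path: the whole input is already in memory (boolDecoder.Decode).
    var b bool
    _ = json.Unmarshal([]byte("true"), &b)

    // Streaming path: the input is read incrementally (boolDecoder.DecodeStream).
    var c bool
    _ = json.NewDecoder(strings.NewReader("false")).Decode(&c)

    fmt.Println(b, c) // true false
}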
118 vendor/github.com/goccy/go-json/internal/decoder/bytes.go generated vendored Normal file
@@ -0,0 +1,118 @@
package decoder

import (
    "encoding/base64"
    "fmt"
    "unsafe"

    "github.com/goccy/go-json/internal/errors"
    "github.com/goccy/go-json/internal/runtime"
)

type bytesDecoder struct {
    typ           *runtime.Type
    sliceDecoder  Decoder
    stringDecoder *stringDecoder
    structName    string
    fieldName     string
}

func byteUnmarshalerSliceDecoder(typ *runtime.Type, structName string, fieldName string) Decoder {
    var unmarshalDecoder Decoder
    switch {
    case runtime.PtrTo(typ).Implements(unmarshalJSONType):
        unmarshalDecoder = newUnmarshalJSONDecoder(runtime.PtrTo(typ), structName, fieldName)
    case runtime.PtrTo(typ).Implements(unmarshalTextType):
        unmarshalDecoder = newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName)
    default:
        unmarshalDecoder, _ = compileUint8(typ, structName, fieldName)
    }
    return newSliceDecoder(unmarshalDecoder, typ, 1, structName, fieldName)
}

func newBytesDecoder(typ *runtime.Type, structName string, fieldName string) *bytesDecoder {
    return &bytesDecoder{
        typ:           typ,
        sliceDecoder:  byteUnmarshalerSliceDecoder(typ, structName, fieldName),
        stringDecoder: newStringDecoder(structName, fieldName),
        structName:    structName,
        fieldName:     fieldName,
    }
}

func (d *bytesDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
    bytes, err := d.decodeStreamBinary(s, depth, p)
    if err != nil {
        return err
    }
    if bytes == nil {
        s.reset()
        return nil
    }
    decodedLen := base64.StdEncoding.DecodedLen(len(bytes))
    buf := make([]byte, decodedLen)
    n, err := base64.StdEncoding.Decode(buf, bytes)
    if err != nil {
        return err
    }
    *(*[]byte)(p) = buf[:n]
    s.reset()
    return nil
}

func (d *bytesDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
    bytes, c, err := d.decodeBinary(ctx, cursor, depth, p)
    if err != nil {
        return 0, err
    }
    if bytes == nil {
        return c, nil
    }
    cursor = c
    decodedLen := base64.StdEncoding.DecodedLen(len(bytes))
    b := make([]byte, decodedLen)
    n, err := base64.StdEncoding.Decode(b, bytes)
    if err != nil {
        return 0, err
    }
    *(*[]byte)(p) = b[:n]
    return cursor, nil
}

func (d *bytesDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
    return nil, 0, fmt.Errorf("json: []byte decoder does not support decode path")
}

func (d *bytesDecoder) decodeStreamBinary(s *Stream, depth int64, p unsafe.Pointer) ([]byte, error) {
    c := s.skipWhiteSpace()
    if c == '[' {
        if d.sliceDecoder == nil {
            return nil, &errors.UnmarshalTypeError{
                Type:   runtime.RType2Type(d.typ),
                Offset: s.totalOffset(),
            }
        }
        err := d.sliceDecoder.DecodeStream(s, depth, p)
        return nil, err
    }
    return d.stringDecoder.decodeStreamByte(s)
}

func (d *bytesDecoder) decodeBinary(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) ([]byte, int64, error) {
    buf := ctx.Buf
    cursor = skipWhiteSpace(buf, cursor)
    if buf[cursor] == '[' {
        if d.sliceDecoder == nil {
            return nil, 0, &errors.UnmarshalTypeError{
                Type:   runtime.RType2Type(d.typ),
                Offset: cursor,
            }
        }
        c, err := d.sliceDecoder.Decode(ctx, cursor, depth, p)
        if err != nil {
            return nil, 0, err
        }
        return nil, c, nil
    }
    return d.stringDecoder.decodeByte(buf, cursor)
}
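As bytesDecoder shows, a []byte target accepts either a base64-encoded JSON string (the stringDecoder branch) or a plain JSON array of numbers (the slice-decoder fallback). A quick sketch with the public API:

package main

import (
    "fmt"

    json "github.com/goccy/go-json"
)

func main() {
    var fromString, fromArray []byte

    // String input is base64-decoded, as in the decode*Binary helpers above.
    _ = json.Unmarshal([]byte(`"aGVsbG8="`), &fromString)

    // Array input falls back to the element-wise slice decoder.
    _ = json.Unmarshal([]byte(`[104, 105]`), &fromArray)

    fmt.Println(string(fromString), string(fromArray)) // hello hi
}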
487 vendor/github.com/goccy/go-json/internal/decoder/compile.go generated vendored Normal file
@@ -0,0 +1,487 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
"sync/atomic"
|
||||
"unicode"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
var (
|
||||
jsonNumberType = reflect.TypeOf(json.Number(""))
|
||||
typeAddr *runtime.TypeAddr
|
||||
cachedDecoderMap unsafe.Pointer // map[uintptr]decoder
|
||||
cachedDecoder []Decoder
|
||||
)
|
||||
|
||||
func init() {
|
||||
typeAddr = runtime.AnalyzeTypeAddr()
|
||||
if typeAddr == nil {
|
||||
typeAddr = &runtime.TypeAddr{}
|
||||
}
|
||||
cachedDecoder = make([]Decoder, typeAddr.AddrRange>>typeAddr.AddrShift+1)
|
||||
}
|
||||
|
||||
func loadDecoderMap() map[uintptr]Decoder {
|
||||
p := atomic.LoadPointer(&cachedDecoderMap)
|
||||
return *(*map[uintptr]Decoder)(unsafe.Pointer(&p))
|
||||
}
|
||||
|
||||
func storeDecoder(typ uintptr, dec Decoder, m map[uintptr]Decoder) {
|
||||
newDecoderMap := make(map[uintptr]Decoder, len(m)+1)
|
||||
newDecoderMap[typ] = dec
|
||||
|
||||
for k, v := range m {
|
||||
newDecoderMap[k] = v
|
||||
}
|
||||
|
||||
atomic.StorePointer(&cachedDecoderMap, *(*unsafe.Pointer)(unsafe.Pointer(&newDecoderMap)))
|
||||
}
|
||||
|
||||
func compileToGetDecoderSlowPath(typeptr uintptr, typ *runtime.Type) (Decoder, error) {
|
||||
decoderMap := loadDecoderMap()
|
||||
if dec, exists := decoderMap[typeptr]; exists {
|
||||
return dec, nil
|
||||
}
|
||||
|
||||
dec, err := compileHead(typ, map[uintptr]Decoder{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
storeDecoder(typeptr, dec, decoderMap)
|
||||
return dec, nil
|
||||
}
|
||||
|
||||
func compileHead(typ *runtime.Type, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
switch {
|
||||
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||
return newUnmarshalJSONDecoder(runtime.PtrTo(typ), "", ""), nil
|
||||
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||
return newUnmarshalTextDecoder(runtime.PtrTo(typ), "", ""), nil
|
||||
}
|
||||
return compile(typ.Elem(), "", "", structTypeToDecoder)
|
||||
}
|
||||
|
||||
func compile(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
switch {
|
||||
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||
return newUnmarshalJSONDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||
return newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||
}
|
||||
|
||||
switch typ.Kind() {
|
||||
case reflect.Ptr:
|
||||
return compilePtr(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Struct:
|
||||
return compileStruct(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Slice:
|
||||
elem := typ.Elem()
|
||||
if elem.Kind() == reflect.Uint8 {
|
||||
return compileBytes(elem, structName, fieldName)
|
||||
}
|
||||
return compileSlice(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Array:
|
||||
return compileArray(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Map:
|
||||
return compileMap(typ, structName, fieldName, structTypeToDecoder)
|
||||
case reflect.Interface:
|
||||
return compileInterface(typ, structName, fieldName)
|
||||
case reflect.Uintptr:
|
||||
return compileUint(typ, structName, fieldName)
|
||||
case reflect.Int:
|
||||
return compileInt(typ, structName, fieldName)
|
||||
case reflect.Int8:
|
||||
return compileInt8(typ, structName, fieldName)
|
||||
case reflect.Int16:
|
||||
return compileInt16(typ, structName, fieldName)
|
||||
case reflect.Int32:
|
||||
return compileInt32(typ, structName, fieldName)
|
||||
case reflect.Int64:
|
||||
return compileInt64(typ, structName, fieldName)
|
||||
case reflect.Uint:
|
||||
return compileUint(typ, structName, fieldName)
|
||||
case reflect.Uint8:
|
||||
return compileUint8(typ, structName, fieldName)
|
||||
case reflect.Uint16:
|
||||
return compileUint16(typ, structName, fieldName)
|
||||
case reflect.Uint32:
|
||||
return compileUint32(typ, structName, fieldName)
|
||||
case reflect.Uint64:
|
||||
return compileUint64(typ, structName, fieldName)
|
||||
case reflect.String:
|
||||
return compileString(typ, structName, fieldName)
|
||||
case reflect.Bool:
|
||||
return compileBool(structName, fieldName)
|
||||
case reflect.Float32:
|
||||
return compileFloat32(structName, fieldName)
|
||||
case reflect.Float64:
|
||||
return compileFloat64(structName, fieldName)
|
||||
case reflect.Func:
|
||||
return compileFunc(typ, structName, fieldName)
|
||||
}
|
||||
return newInvalidDecoder(typ, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func isStringTagSupportedType(typ *runtime.Type) bool {
|
||||
switch {
|
||||
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||
return false
|
||||
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||
return false
|
||||
}
|
||||
switch typ.Kind() {
|
||||
case reflect.Map:
|
||||
return false
|
||||
case reflect.Slice:
|
||||
return false
|
||||
case reflect.Array:
|
||||
return false
|
||||
case reflect.Struct:
|
||||
return false
|
||||
case reflect.Interface:
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func compileMapKey(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
if runtime.PtrTo(typ).Implements(unmarshalTextType) {
|
||||
return newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||
}
|
||||
if typ.Kind() == reflect.String {
|
||||
return newStringDecoder(structName, fieldName), nil
|
||||
}
|
||||
dec, err := compile(typ, structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for {
|
||||
switch t := dec.(type) {
|
||||
case *stringDecoder, *interfaceDecoder:
|
||||
return dec, nil
|
||||
case *boolDecoder, *intDecoder, *uintDecoder, *numberDecoder:
|
||||
return newWrappedStringDecoder(typ, dec, structName, fieldName), nil
|
||||
case *ptrDecoder:
|
||||
dec = t.dec
|
||||
default:
|
||||
return newInvalidDecoder(typ, structName, fieldName), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func compilePtr(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
dec, err := compile(typ.Elem(), structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newPtrDecoder(dec, typ.Elem(), structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileInt(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int)(p) = int(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt8(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int8)(p) = int8(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt16(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int16)(p) = int16(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt32(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int32)(p) = int32(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileInt64(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||
*(*int64)(p) = v
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint)(p) = uint(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint8(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint8)(p) = uint8(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint16(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint16)(p) = uint16(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint32(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint32)(p) = uint32(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileUint64(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||
*(*uint64)(p) = v
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileFloat32(structName, fieldName string) (Decoder, error) {
|
||||
return newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||
*(*float32)(p) = float32(v)
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileFloat64(structName, fieldName string) (Decoder, error) {
|
||||
return newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||
*(*float64)(p) = v
|
||||
}), nil
|
||||
}
|
||||
|
||||
func compileString(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
if typ == runtime.Type2RType(jsonNumberType) {
|
||||
return newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
|
||||
*(*json.Number)(p) = v
|
||||
}), nil
|
||||
}
|
||||
return newStringDecoder(structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileBool(structName, fieldName string) (Decoder, error) {
|
||||
return newBoolDecoder(structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileBytes(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newBytesDecoder(typ, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileSlice(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
elem := typ.Elem()
|
||||
decoder, err := compile(elem, structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newSliceDecoder(decoder, elem, elem.Size(), structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileArray(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
elem := typ.Elem()
|
||||
decoder, err := compile(elem, structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newArrayDecoder(decoder, elem, typ.Len(), structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileMap(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
keyDec, err := compileMapKey(typ.Key(), structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
valueDec, err := compile(typ.Elem(), structName, fieldName, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return newMapDecoder(typ, typ.Key(), keyDec, typ.Elem(), valueDec, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileInterface(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||
return newInterfaceDecoder(typ, structName, fieldName), nil
|
||||
}
|
||||
|
||||
func compileFunc(typ *runtime.Type, strutName, fieldName string) (Decoder, error) {
|
||||
return newFuncDecoder(typ, strutName, fieldName), nil
|
||||
}
|
||||
|
||||
func typeToStructTags(typ *runtime.Type) runtime.StructTags {
|
||||
tags := runtime.StructTags{}
|
||||
fieldNum := typ.NumField()
|
||||
for i := 0; i < fieldNum; i++ {
|
||||
field := typ.Field(i)
|
||||
if runtime.IsIgnoredStructField(field) {
|
||||
continue
|
||||
}
|
||||
tags = append(tags, runtime.StructTagFromField(field))
|
||||
}
|
||||
return tags
|
||||
}
|
||||
|
||||
func compileStruct(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||
fieldNum := typ.NumField()
|
||||
fieldMap := map[string]*structFieldSet{}
|
||||
typeptr := uintptr(unsafe.Pointer(typ))
|
||||
if dec, exists := structTypeToDecoder[typeptr]; exists {
|
||||
return dec, nil
|
||||
}
|
||||
structDec := newStructDecoder(structName, fieldName, fieldMap)
|
||||
structTypeToDecoder[typeptr] = structDec
|
||||
structName = typ.Name()
|
||||
tags := typeToStructTags(typ)
|
||||
allFields := []*structFieldSet{}
|
||||
for i := 0; i < fieldNum; i++ {
|
||||
field := typ.Field(i)
|
||||
if runtime.IsIgnoredStructField(field) {
|
||||
continue
|
||||
}
|
||||
isUnexportedField := unicode.IsLower([]rune(field.Name)[0])
|
||||
tag := runtime.StructTagFromField(field)
|
||||
dec, err := compile(runtime.Type2RType(field.Type), structName, field.Name, structTypeToDecoder)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if field.Anonymous && !tag.IsTaggedKey {
|
||||
if stDec, ok := dec.(*structDecoder); ok {
|
||||
if runtime.Type2RType(field.Type) == typ {
|
||||
// recursive definition
|
||||
continue
|
||||
}
|
||||
for k, v := range stDec.fieldMap {
|
||||
if tags.ExistsKey(k) {
|
||||
continue
|
||||
}
|
||||
fieldSet := &structFieldSet{
|
||||
dec: v.dec,
|
||||
offset: field.Offset + v.offset,
|
||||
isTaggedKey: v.isTaggedKey,
|
||||
key: k,
|
||||
keyLen: int64(len(k)),
|
||||
}
|
||||
allFields = append(allFields, fieldSet)
|
||||
}
|
||||
} else if pdec, ok := dec.(*ptrDecoder); ok {
|
||||
contentDec := pdec.contentDecoder()
|
||||
if pdec.typ == typ {
|
||||
// recursive definition
|
||||
continue
|
||||
}
|
||||
var fieldSetErr error
|
||||
if isUnexportedField {
|
||||
fieldSetErr = fmt.Errorf(
|
||||
"json: cannot set embedded pointer to unexported struct: %v",
|
||||
field.Type.Elem(),
|
||||
)
|
||||
}
|
||||
if dec, ok := contentDec.(*structDecoder); ok {
|
||||
for k, v := range dec.fieldMap {
|
||||
if tags.ExistsKey(k) {
|
||||
continue
|
||||
}
|
||||
fieldSet := &structFieldSet{
|
||||
dec: newAnonymousFieldDecoder(pdec.typ, v.offset, v.dec),
|
||||
offset: field.Offset,
|
||||
isTaggedKey: v.isTaggedKey,
|
||||
key: k,
|
||||
keyLen: int64(len(k)),
|
||||
err: fieldSetErr,
|
||||
}
|
||||
allFields = append(allFields, fieldSet)
|
||||
}
|
||||
} else {
|
||||
fieldSet := &structFieldSet{
|
||||
dec: pdec,
|
||||
offset: field.Offset,
|
||||
isTaggedKey: tag.IsTaggedKey,
|
||||
key: field.Name,
|
||||
keyLen: int64(len(field.Name)),
|
||||
}
|
||||
allFields = append(allFields, fieldSet)
|
||||
}
|
||||
} else {
|
||||
fieldSet := &structFieldSet{
|
||||
dec: dec,
|
||||
offset: field.Offset,
|
||||
isTaggedKey: tag.IsTaggedKey,
|
||||
key: field.Name,
|
||||
keyLen: int64(len(field.Name)),
|
||||
}
|
||||
allFields = append(allFields, fieldSet)
|
||||
}
|
||||
} else {
|
||||
if tag.IsString && isStringTagSupportedType(runtime.Type2RType(field.Type)) {
|
||||
dec = newWrappedStringDecoder(runtime.Type2RType(field.Type), dec, structName, field.Name)
|
||||
}
|
||||
var key string
|
||||
if tag.Key != "" {
|
||||
key = tag.Key
|
||||
} else {
|
||||
key = field.Name
|
||||
}
|
||||
fieldSet := &structFieldSet{
|
||||
dec: dec,
|
||||
offset: field.Offset,
|
||||
isTaggedKey: tag.IsTaggedKey,
|
||||
key: key,
|
||||
keyLen: int64(len(key)),
|
||||
}
|
||||
allFields = append(allFields, fieldSet)
|
||||
}
|
||||
}
|
||||
for _, set := range filterDuplicatedFields(allFields) {
|
||||
fieldMap[set.key] = set
|
||||
lower := strings.ToLower(set.key)
|
||||
if _, exists := fieldMap[lower]; !exists {
|
||||
// first win
|
||||
fieldMap[lower] = set
|
||||
}
|
||||
}
|
||||
delete(structTypeToDecoder, typeptr)
|
||||
structDec.tryOptimize()
|
||||
return structDec, nil
|
||||
}
|
||||
|
||||
func filterDuplicatedFields(allFields []*structFieldSet) []*structFieldSet {
|
||||
fieldMap := map[string][]*structFieldSet{}
|
||||
for _, field := range allFields {
|
||||
fieldMap[field.key] = append(fieldMap[field.key], field)
|
||||
}
|
||||
duplicatedFieldMap := map[string]struct{}{}
|
||||
for k, sets := range fieldMap {
|
||||
sets = filterFieldSets(sets)
|
||||
if len(sets) != 1 {
|
||||
duplicatedFieldMap[k] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
filtered := make([]*structFieldSet, 0, len(allFields))
|
||||
for _, field := range allFields {
|
||||
if _, exists := duplicatedFieldMap[field.key]; exists {
|
||||
continue
|
||||
}
|
||||
filtered = append(filtered, field)
|
||||
}
|
||||
return filtered
|
||||
}
|
||||
|
||||
func filterFieldSets(sets []*structFieldSet) []*structFieldSet {
|
||||
if len(sets) == 1 {
|
||||
return sets
|
||||
}
|
||||
filtered := make([]*structFieldSet, 0, len(sets))
|
||||
for _, set := range sets {
|
||||
if set.isTaggedKey {
|
||||
filtered = append(filtered, set)
|
||||
}
|
||||
}
|
||||
return filtered
|
||||
}
|
||||
|
||||
func implementsUnmarshalJSONType(typ *runtime.Type) bool {
|
||||
return typ.Implements(unmarshalJSONType) || typ.Implements(unmarshalJSONContextType)
|
||||
}
|
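compileStruct above routes fields tagged with the ,string option through newWrappedStringDecoder, so scalar values may arrive quoted. A sketch with the public API (the Item type here is just an example):

package main

import (
    "fmt"

    json "github.com/goccy/go-json"
)

type Item struct {
    // The ",string" option lets the decoder accept the number as a JSON
    // string, which compileStruct handles via the wrapped string decoder.
    Count int `json:"count,string"`
}

func main() {
    var it Item
    if err := json.Unmarshal([]byte(`{"count": "42"}`), &it); err != nil {
        panic(err)
    }
    fmt.Println(it.Count) // 42
}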
29 vendor/github.com/goccy/go-json/internal/decoder/compile_norace.go generated vendored Normal file
@@ -0,0 +1,29 @@
//go:build !race
// +build !race

package decoder

import (
    "unsafe"

    "github.com/goccy/go-json/internal/runtime"
)

func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) {
    typeptr := uintptr(unsafe.Pointer(typ))
    if typeptr > typeAddr.MaxTypeAddr {
        return compileToGetDecoderSlowPath(typeptr, typ)
    }

    index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
    if dec := cachedDecoder[index]; dec != nil {
        return dec, nil
    }

    dec, err := compileHead(typ, map[uintptr]Decoder{})
    if err != nil {
        return nil, err
    }
    cachedDecoder[index] = dec
    return dec, nil
}
37 vendor/github.com/goccy/go-json/internal/decoder/compile_race.go generated vendored Normal file
@@ -0,0 +1,37 @@
//go:build race
// +build race

package decoder

import (
    "sync"
    "unsafe"

    "github.com/goccy/go-json/internal/runtime"
)

var decMu sync.RWMutex

func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) {
    typeptr := uintptr(unsafe.Pointer(typ))
    if typeptr > typeAddr.MaxTypeAddr {
        return compileToGetDecoderSlowPath(typeptr, typ)
    }

    index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
    decMu.RLock()
    if dec := cachedDecoder[index]; dec != nil {
        decMu.RUnlock()
        return dec, nil
    }
    decMu.RUnlock()

    dec, err := compileHead(typ, map[uintptr]Decoder{})
    if err != nil {
        return nil, err
    }
    decMu.Lock()
    cachedDecoder[index] = dec
    decMu.Unlock()
    return dec, nil
}
254 vendor/github.com/goccy/go-json/internal/decoder/context.go generated vendored Normal file
@@ -0,0 +1,254 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type RuntimeContext struct {
|
||||
Buf []byte
|
||||
Option *Option
|
||||
}
|
||||
|
||||
var (
|
||||
runtimeContextPool = sync.Pool{
|
||||
New: func() interface{} {
|
||||
return &RuntimeContext{
|
||||
Option: &Option{},
|
||||
}
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
func TakeRuntimeContext() *RuntimeContext {
|
||||
return runtimeContextPool.Get().(*RuntimeContext)
|
||||
}
|
||||
|
||||
func ReleaseRuntimeContext(ctx *RuntimeContext) {
|
||||
runtimeContextPool.Put(ctx)
|
||||
}
|
||||
|
||||
var (
|
||||
isWhiteSpace = [256]bool{}
|
||||
)
|
||||
|
||||
func init() {
|
||||
isWhiteSpace[' '] = true
|
||||
isWhiteSpace['\n'] = true
|
||||
isWhiteSpace['\t'] = true
|
||||
isWhiteSpace['\r'] = true
|
||||
}
|
||||
|
||||
func char(ptr unsafe.Pointer, offset int64) byte {
|
||||
return *(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(offset)))
|
||||
}
|
||||
|
||||
func skipWhiteSpace(buf []byte, cursor int64) int64 {
|
||||
for isWhiteSpace[buf[cursor]] {
|
||||
cursor++
|
||||
}
|
||||
return cursor
|
||||
}
|
||||
|
||||
func skipObject(buf []byte, cursor, depth int64) (int64, error) {
|
||||
braceCount := 1
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case '{':
|
||||
braceCount++
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case '}':
|
||||
depth--
|
||||
braceCount--
|
||||
if braceCount == 0 {
|
||||
return cursor + 1, nil
|
||||
}
|
||||
case '[':
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case ']':
|
||||
depth--
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch buf[cursor] {
|
||||
case '\\':
|
||||
cursor++
|
||||
if buf[cursor] == nul {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
goto SWITCH_OUT
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("object of object", cursor)
|
||||
}
|
||||
SWITCH_OUT:
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func skipArray(buf []byte, cursor, depth int64) (int64, error) {
|
||||
bracketCount := 1
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case '[':
|
||||
bracketCount++
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case ']':
|
||||
bracketCount--
|
||||
depth--
|
||||
if bracketCount == 0 {
|
||||
return cursor + 1, nil
|
||||
}
|
||||
case '{':
|
||||
depth++
|
||||
if depth > maxDecodeNestingDepth {
|
||||
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||
}
|
||||
case '}':
|
||||
depth--
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch buf[cursor] {
|
||||
case '\\':
|
||||
cursor++
|
||||
if buf[cursor] == nul {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
goto SWITCH_OUT
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("array of object", cursor)
|
||||
}
|
||||
SWITCH_OUT:
|
||||
cursor++
|
||||
}
|
||||
}
|
||||
|
||||
func skipValue(buf []byte, cursor, depth int64) (int64, error) {
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\t', '\n', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '{':
|
||||
return skipObject(buf, cursor+1, depth+1)
|
||||
case '[':
|
||||
return skipArray(buf, cursor+1, depth+1)
|
||||
case '"':
|
||||
for {
|
||||
cursor++
|
||||
switch buf[cursor] {
|
||||
case '\\':
|
||||
cursor++
|
||||
if buf[cursor] == nul {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
case '"':
|
||||
return cursor + 1, nil
|
||||
case nul:
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||
}
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
for {
|
||||
cursor++
|
||||
if floatTable[buf[cursor]] {
|
||||
continue
|
||||
}
|
||||
break
|
||||
}
|
||||
return cursor, nil
|
||||
case 't':
|
||||
if err := validateTrue(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
case 'f':
|
||||
if err := validateFalse(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 5
|
||||
return cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return cursor, nil
|
||||
default:
|
||||
return cursor, errors.ErrUnexpectedEndOfJSON("null", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func validateTrue(buf []byte, cursor int64) error {
|
||||
if cursor+3 >= int64(len(buf)) {
|
||||
return errors.ErrUnexpectedEndOfJSON("true", cursor)
|
||||
}
|
||||
if buf[cursor+1] != 'r' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+1], "true", cursor)
|
||||
}
|
||||
if buf[cursor+2] != 'u' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+2], "true", cursor)
|
||||
}
|
||||
if buf[cursor+3] != 'e' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+3], "true", cursor)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateFalse(buf []byte, cursor int64) error {
|
||||
if cursor+4 >= int64(len(buf)) {
|
||||
return errors.ErrUnexpectedEndOfJSON("false", cursor)
|
||||
}
|
||||
if buf[cursor+1] != 'a' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+1], "false", cursor)
|
||||
}
|
||||
if buf[cursor+2] != 'l' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+2], "false", cursor)
|
||||
}
|
||||
if buf[cursor+3] != 's' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+3], "false", cursor)
|
||||
}
|
||||
if buf[cursor+4] != 'e' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+4], "false", cursor)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func validateNull(buf []byte, cursor int64) error {
|
||||
if cursor+3 >= int64(len(buf)) {
|
||||
return errors.ErrUnexpectedEndOfJSON("null", cursor)
|
||||
}
|
||||
if buf[cursor+1] != 'u' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+1], "null", cursor)
|
||||
}
|
||||
if buf[cursor+2] != 'l' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+2], "null", cursor)
|
||||
}
|
||||
if buf[cursor+3] != 'l' {
|
||||
return errors.ErrInvalidCharacter(buf[cursor+3], "null", cursor)
|
||||
}
|
||||
return nil
|
||||
}
|
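skipObject, skipArray and skipValue above all enforce maxDecodeNestingDepth, so pathologically nested input fails instead of recursing without bound. A sketch of the observable effect, assuming the default limit is well below the 20000 levels used here (illustrative only):

package main

import (
    "fmt"
    "strings"

    json "github.com/goccy/go-json"
)

func main() {
    // Build an absurdly nested array; the decoder rejects it once the
    // depth guard (maxDecodeNestingDepth) is exceeded.
    deep := strings.Repeat("[", 20000) + strings.Repeat("]", 20000)
    var v interface{}
    err := json.Unmarshal([]byte(deep), &v)
    fmt.Println(err != nil) // true, assuming the default depth limit applies
}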
170 vendor/github.com/goccy/go-json/internal/decoder/float.go generated vendored Normal file
@@ -0,0 +1,170 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
)
|
||||
|
||||
type floatDecoder struct {
|
||||
op func(unsafe.Pointer, float64)
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newFloatDecoder(structName, fieldName string, op func(unsafe.Pointer, float64)) *floatDecoder {
|
||||
return &floatDecoder{op: op, structName: structName, fieldName: fieldName}
|
||||
}
|
||||
|
||||
var (
|
||||
floatTable = [256]bool{
|
||||
'0': true,
|
||||
'1': true,
|
||||
'2': true,
|
||||
'3': true,
|
||||
'4': true,
|
||||
'5': true,
|
||||
'6': true,
|
||||
'7': true,
|
||||
'8': true,
|
||||
'9': true,
|
||||
'.': true,
|
||||
'e': true,
|
||||
'E': true,
|
||||
'+': true,
|
||||
'-': true,
|
||||
}
|
||||
|
||||
validEndNumberChar = [256]bool{
|
||||
nul: true,
|
||||
' ': true,
|
||||
'\t': true,
|
||||
'\r': true,
|
||||
'\n': true,
|
||||
',': true,
|
||||
':': true,
|
||||
'}': true,
|
||||
']': true,
|
||||
}
|
||||
)
|
||||
|
||||
func floatBytes(s *Stream) []byte {
|
||||
start := s.cursor
|
||||
for {
|
||||
s.cursor++
|
||||
if floatTable[s.char()] {
|
||||
continue
|
||||
} else if s.char() == nul {
|
||||
if s.read() {
|
||||
s.cursor-- // for retry current character
|
||||
continue
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
return s.buf[start:s.cursor]
|
||||
}
|
||||
|
||||
func (d *floatDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||
for {
|
||||
switch s.char() {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
s.cursor++
|
||||
continue
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return floatBytes(s), nil
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return nil, nil
|
||||
case nul:
|
||||
if s.read() {
|
||||
continue
|
||||
}
|
||||
goto ERROR
|
||||
default:
|
||||
goto ERROR
|
||||
}
|
||||
}
|
||||
ERROR:
|
||||
return nil, errors.ErrUnexpectedEndOfJSON("float", s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *floatDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||
for {
|
||||
switch buf[cursor] {
|
||||
case ' ', '\n', '\t', '\r':
|
||||
cursor++
|
||||
continue
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
start := cursor
|
||||
cursor++
|
||||
for floatTable[buf[cursor]] {
|
||||
cursor++
|
||||
}
|
||||
num := buf[start:cursor]
|
||||
return num, cursor, nil
|
||||
case 'n':
|
||||
if err := validateNull(buf, cursor); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
cursor += 4
|
||||
return nil, cursor, nil
|
||||
default:
|
||||
return nil, 0, errors.ErrUnexpectedEndOfJSON("float", cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (d *floatDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
bytes, err := d.decodeStreamByte(s)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if bytes == nil {
|
||||
return nil
|
||||
}
|
||||
str := *(*string)(unsafe.Pointer(&bytes))
|
||||
f64, err := strconv.ParseFloat(str, 64)
|
||||
if err != nil {
|
||||
return errors.ErrSyntax(err.Error(), s.totalOffset())
|
||||
}
|
||||
d.op(p, f64)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *floatDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
bytes, c, err := d.decodeByte(buf, cursor)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return c, nil
|
||||
}
|
||||
cursor = c
|
||||
if !validEndNumberChar[buf[cursor]] {
|
||||
return 0, errors.ErrUnexpectedEndOfJSON("float", cursor)
|
||||
}
|
||||
s := *(*string)(unsafe.Pointer(&bytes))
|
||||
f64, err := strconv.ParseFloat(s, 64)
|
||||
if err != nil {
|
||||
return 0, errors.ErrSyntax(err.Error(), cursor)
|
||||
}
|
||||
d.op(p, f64)
|
||||
return cursor, nil
|
||||
}
|
||||
|
||||
func (d *floatDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
buf := ctx.Buf
|
||||
bytes, c, err := d.decodeByte(buf, cursor)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
if bytes == nil {
|
||||
return [][]byte{nullbytes}, c, nil
|
||||
}
|
||||
return [][]byte{bytes}, c, nil
|
||||
}
|
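The 'n' branch of floatDecoder returns without calling d.op, so a JSON null leaves the target untouched, matching encoding/json semantics. A sketch:

package main

import (
    "fmt"

    json "github.com/goccy/go-json"
)

func main() {
    f := 1.5
    // null produces no error and does not overwrite the existing value.
    _ = json.Unmarshal([]byte("null"), &f)
    fmt.Println(f) // 1.5
}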
146 vendor/github.com/goccy/go-json/internal/decoder/func.go generated vendored Normal file
@@ -0,0 +1,146 @@
|
||||
package decoder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"unsafe"
|
||||
|
||||
"github.com/goccy/go-json/internal/errors"
|
||||
"github.com/goccy/go-json/internal/runtime"
|
||||
)
|
||||
|
||||
type funcDecoder struct {
|
||||
typ *runtime.Type
|
||||
structName string
|
||||
fieldName string
|
||||
}
|
||||
|
||||
func newFuncDecoder(typ *runtime.Type, structName, fieldName string) *funcDecoder {
|
||||
fnDecoder := &funcDecoder{typ, structName, fieldName}
|
||||
return fnDecoder
|
||||
}
|
||||
|
||||
func (d *funcDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||
s.skipWhiteSpace()
|
||||
start := s.cursor
|
||||
if err := s.skipValue(depth); err != nil {
|
||||
return err
|
||||
}
|
||||
src := s.buf[start:s.cursor]
|
||||
if len(src) > 0 {
|
||||
switch src[0] {
|
||||
case '"':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "string",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case '[':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "array",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case '{':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "number",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
case 'n':
|
||||
if err := nullBytes(s); err != nil {
|
||||
return err
|
||||
}
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return nil
|
||||
case 't':
|
||||
if err := trueBytes(s); err == nil {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
}
|
||||
case 'f':
|
||||
if err := falseBytes(s); err == nil {
|
||||
return &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: s.totalOffset(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return errors.ErrInvalidBeginningOfValue(s.buf[s.cursor], s.totalOffset())
|
||||
}
|
||||
|
||||
func (d *funcDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||
buf := ctx.Buf
|
||||
cursor = skipWhiteSpace(buf, cursor)
|
||||
start := cursor
|
||||
end, err := skipValue(buf, cursor, depth)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
src := buf[start:end]
|
||||
if len(src) > 0 {
|
||||
switch src[0] {
|
||||
case '"':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "string",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case '[':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "array",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case '{':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "object",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "number",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
case 'n':
|
||||
if bytes.Equal(src, nullbytes) {
|
||||
*(*unsafe.Pointer)(p) = nil
|
||||
return end, nil
|
||||
}
|
||||
case 't':
|
||||
if err := validateTrue(buf, start); err == nil {
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
}
|
||||
case 'f':
|
||||
if err := validateFalse(buf, start); err == nil {
|
||||
return 0, &errors.UnmarshalTypeError{
|
||||
Value: "boolean",
|
||||
Type: runtime.RType2Type(d.typ),
|
||||
Offset: start,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return cursor, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor)
|
||||
}
|
||||
|
||||
func (d *funcDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
|
||||
return nil, 0, fmt.Errorf("json: func decoder does not support decode path")
|
||||
}
|
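funcDecoder only ever accepts null, which resets the field to nil; every other JSON value is rejected with an UnmarshalTypeError. A sketch with the public API (the Hooks type is just an example):

package main

import (
    "fmt"

    json "github.com/goccy/go-json"
)

type Hooks struct {
    OnEvent func() `json:"on_event"`
}

func main() {
    var h Hooks

    // null is the only value a func field accepts; it sets the field to nil.
    fmt.Println(json.Unmarshal([]byte(`{"on_event": null}`), &h)) // <nil>

    // Any other value is rejected, as in funcDecoder above.
    fmt.Println(json.Unmarshal([]byte(`{"on_event": "x"}`), &h) != nil) // true
}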
246 vendor/github.com/goccy/go-json/internal/decoder/int.go generated vendored Normal file
@@ -0,0 +1,246 @@
package decoder

import (
	"fmt"
	"reflect"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
	"github.com/goccy/go-json/internal/runtime"
)

type intDecoder struct {
	typ        *runtime.Type
	kind       reflect.Kind
	op         func(unsafe.Pointer, int64)
	structName string
	fieldName  string
}

func newIntDecoder(typ *runtime.Type, structName, fieldName string, op func(unsafe.Pointer, int64)) *intDecoder {
	return &intDecoder{
		typ:        typ,
		kind:       typ.Kind(),
		op:         op,
		structName: structName,
		fieldName:  fieldName,
	}
}

func (d *intDecoder) typeError(buf []byte, offset int64) *errors.UnmarshalTypeError {
	return &errors.UnmarshalTypeError{
		Value:  fmt.Sprintf("number %s", string(buf)),
		Type:   runtime.RType2Type(d.typ),
		Struct: d.structName,
		Field:  d.fieldName,
		Offset: offset,
	}
}

var (
	pow10i64 = [...]int64{
		1e00, 1e01, 1e02, 1e03, 1e04, 1e05, 1e06, 1e07, 1e08, 1e09,
		1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18,
	}
	pow10i64Len = len(pow10i64)
)

func (d *intDecoder) parseInt(b []byte) (int64, error) {
	isNegative := false
	if b[0] == '-' {
		b = b[1:]
		isNegative = true
	}
	maxDigit := len(b)
	if maxDigit > pow10i64Len {
		return 0, fmt.Errorf("invalid length of number")
	}
	sum := int64(0)
	for i := 0; i < maxDigit; i++ {
		c := int64(b[i]) - 48
		digitValue := pow10i64[maxDigit-i-1]
		sum += c * digitValue
	}
	if isNegative {
		return -1 * sum, nil
	}
	return sum, nil
}

var (
	numTable = [256]bool{
		'0': true,
		'1': true,
		'2': true,
		'3': true,
		'4': true,
		'5': true,
		'6': true,
		'7': true,
		'8': true,
		'9': true,
	}
)

var (
	numZeroBuf = []byte{'0'}
)

func (d *intDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
	for {
		switch s.char() {
		case ' ', '\n', '\t', '\r':
			s.cursor++
			continue
		case '-':
			start := s.cursor
			for {
				s.cursor++
				if numTable[s.char()] {
					continue
				} else if s.char() == nul {
					if s.read() {
						s.cursor-- // for retry current character
						continue
					}
				}
				break
			}
			num := s.buf[start:s.cursor]
			if len(num) < 2 {
				goto ERROR
			}
			return num, nil
		case '0':
			s.cursor++
			return numZeroBuf, nil
		case '1', '2', '3', '4', '5', '6', '7', '8', '9':
			start := s.cursor
			for {
				s.cursor++
				if numTable[s.char()] {
					continue
				} else if s.char() == nul {
					if s.read() {
						s.cursor-- // for retry current character
						continue
					}
				}
				break
			}
			num := s.buf[start:s.cursor]
			return num, nil
		case 'n':
			if err := nullBytes(s); err != nil {
				return nil, err
			}
			return nil, nil
		case nul:
			if s.read() {
				continue
			}
			goto ERROR
		default:
			return nil, d.typeError([]byte{s.char()}, s.totalOffset())
		}
	}
ERROR:
	return nil, errors.ErrUnexpectedEndOfJSON("number(integer)", s.totalOffset())
}

func (d *intDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
	b := (*sliceHeader)(unsafe.Pointer(&buf)).data
	for {
		switch char(b, cursor) {
		case ' ', '\n', '\t', '\r':
			cursor++
			continue
		case '0':
			cursor++
			return numZeroBuf, cursor, nil
		case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			start := cursor
			cursor++
			for numTable[char(b, cursor)] {
				cursor++
			}
			num := buf[start:cursor]
			return num, cursor, nil
		case 'n':
			if err := validateNull(buf, cursor); err != nil {
				return nil, 0, err
			}
			cursor += 4
			return nil, cursor, nil
		default:
			return nil, 0, d.typeError([]byte{char(b, cursor)}, cursor)
		}
	}
}

func (d *intDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	bytes, err := d.decodeStreamByte(s)
	if err != nil {
		return err
	}
	if bytes == nil {
		return nil
	}
	i64, err := d.parseInt(bytes)
	if err != nil {
		return d.typeError(bytes, s.totalOffset())
	}
	switch d.kind {
	case reflect.Int8:
		if i64 < -1*(1<<7) || (1<<7) <= i64 {
			return d.typeError(bytes, s.totalOffset())
		}
	case reflect.Int16:
		if i64 < -1*(1<<15) || (1<<15) <= i64 {
			return d.typeError(bytes, s.totalOffset())
		}
	case reflect.Int32:
		if i64 < -1*(1<<31) || (1<<31) <= i64 {
			return d.typeError(bytes, s.totalOffset())
		}
	}
	d.op(p, i64)
	s.reset()
	return nil
}

func (d *intDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	bytes, c, err := d.decodeByte(ctx.Buf, cursor)
	if err != nil {
		return 0, err
	}
	if bytes == nil {
		return c, nil
	}
	cursor = c

	i64, err := d.parseInt(bytes)
	if err != nil {
		return 0, d.typeError(bytes, cursor)
	}
	switch d.kind {
	case reflect.Int8:
		if i64 < -1*(1<<7) || (1<<7) <= i64 {
			return 0, d.typeError(bytes, cursor)
		}
	case reflect.Int16:
		if i64 < -1*(1<<15) || (1<<15) <= i64 {
			return 0, d.typeError(bytes, cursor)
		}
	case reflect.Int32:
		if i64 < -1*(1<<31) || (1<<31) <= i64 {
			return 0, d.typeError(bytes, cursor)
		}
	}
	d.op(p, i64)
	return cursor, nil
}

func (d *intDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
	return nil, 0, fmt.Errorf("json: int decoder does not support decode path")
}
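For context: intDecoder.parseInt above converts a JSON integer literal without strconv by scaling each ASCII digit with a power-of-ten lookup table; the table length also bounds the accepted digit count, and overflow beyond that is not checked. A minimal standalone sketch of the same technique, assuming a pre-validated digit string (parseIntSketch and pow10 are illustrative names, not part of the vendored package):

package main

import "fmt"

// pow10 mirrors the decoder's pow10i64 table: pow10[k] == 10^k for k up to 18.
var pow10 = [...]int64{
	1e00, 1e01, 1e02, 1e03, 1e04, 1e05, 1e06, 1e07, 1e08, 1e09,
	1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18,
}

// parseIntSketch assumes b is an already-validated JSON integer literal
// (an optional '-' followed by ASCII digits), at most len(pow10) digits long.
func parseIntSketch(b []byte) (int64, error) {
	neg := false
	if len(b) > 0 && b[0] == '-' {
		b = b[1:]
		neg = true
	}
	if len(b) == 0 || len(b) > len(pow10) {
		return 0, fmt.Errorf("invalid length of number")
	}
	var sum int64
	for i, c := range b {
		// Each digit contributes its value scaled by the power of ten of its position.
		sum += int64(c-'0') * pow10[len(b)-i-1]
	}
	if neg {
		return -sum, nil
	}
	return sum, nil
}

func main() {
	n, _ := parseIntSketch([]byte("-12345"))
	fmt.Println(n) // -12345
}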
528 vendor/github.com/goccy/go-json/internal/decoder/interface.go generated vendored Normal file
@@ -0,0 +1,528 @@
package decoder

import (
	"bytes"
	"encoding"
	"encoding/json"
	"reflect"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
	"github.com/goccy/go-json/internal/runtime"
)

type interfaceDecoder struct {
	typ           *runtime.Type
	structName    string
	fieldName     string
	sliceDecoder  *sliceDecoder
	mapDecoder    *mapDecoder
	floatDecoder  *floatDecoder
	numberDecoder *numberDecoder
	stringDecoder *stringDecoder
}

func newEmptyInterfaceDecoder(structName, fieldName string) *interfaceDecoder {
	ifaceDecoder := &interfaceDecoder{
		typ:        emptyInterfaceType,
		structName: structName,
		fieldName:  fieldName,
		floatDecoder: newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
			*(*interface{})(p) = v
		}),
		numberDecoder: newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
			*(*interface{})(p) = v
		}),
		stringDecoder: newStringDecoder(structName, fieldName),
	}
	ifaceDecoder.sliceDecoder = newSliceDecoder(
		ifaceDecoder,
		emptyInterfaceType,
		emptyInterfaceType.Size(),
		structName, fieldName,
	)
	ifaceDecoder.mapDecoder = newMapDecoder(
		interfaceMapType,
		stringType,
		ifaceDecoder.stringDecoder,
		interfaceMapType.Elem(),
		ifaceDecoder,
		structName,
		fieldName,
	)
	return ifaceDecoder
}

func newInterfaceDecoder(typ *runtime.Type, structName, fieldName string) *interfaceDecoder {
	emptyIfaceDecoder := newEmptyInterfaceDecoder(structName, fieldName)
	stringDecoder := newStringDecoder(structName, fieldName)
	return &interfaceDecoder{
		typ:        typ,
		structName: structName,
		fieldName:  fieldName,
		sliceDecoder: newSliceDecoder(
			emptyIfaceDecoder,
			emptyInterfaceType,
			emptyInterfaceType.Size(),
			structName, fieldName,
		),
		mapDecoder: newMapDecoder(
			interfaceMapType,
			stringType,
			stringDecoder,
			interfaceMapType.Elem(),
			emptyIfaceDecoder,
			structName,
			fieldName,
		),
		floatDecoder: newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
			*(*interface{})(p) = v
		}),
		numberDecoder: newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
			*(*interface{})(p) = v
		}),
		stringDecoder: stringDecoder,
	}
}

func (d *interfaceDecoder) numDecoder(s *Stream) Decoder {
	if s.UseNumber {
		return d.numberDecoder
	}
	return d.floatDecoder
}

var (
	emptyInterfaceType = runtime.Type2RType(reflect.TypeOf((*interface{})(nil)).Elem())
	EmptyInterfaceType = emptyInterfaceType
	interfaceMapType   = runtime.Type2RType(
		reflect.TypeOf((*map[string]interface{})(nil)).Elem(),
	)
	stringType = runtime.Type2RType(
		reflect.TypeOf(""),
	)
)

func decodeStreamUnmarshaler(s *Stream, depth int64, unmarshaler json.Unmarshaler) error {
	start := s.cursor
	if err := s.skipValue(depth); err != nil {
		return err
	}
	src := s.buf[start:s.cursor]
	dst := make([]byte, len(src))
	copy(dst, src)

	if err := unmarshaler.UnmarshalJSON(dst); err != nil {
		return err
	}
	return nil
}

func decodeStreamUnmarshalerContext(s *Stream, depth int64, unmarshaler unmarshalerContext) error {
	start := s.cursor
	if err := s.skipValue(depth); err != nil {
		return err
	}
	src := s.buf[start:s.cursor]
	dst := make([]byte, len(src))
	copy(dst, src)

	if err := unmarshaler.UnmarshalJSON(s.Option.Context, dst); err != nil {
		return err
	}
	return nil
}

func decodeUnmarshaler(buf []byte, cursor, depth int64, unmarshaler json.Unmarshaler) (int64, error) {
	cursor = skipWhiteSpace(buf, cursor)
	start := cursor
	end, err := skipValue(buf, cursor, depth)
	if err != nil {
		return 0, err
	}
	src := buf[start:end]
	dst := make([]byte, len(src))
	copy(dst, src)

	if err := unmarshaler.UnmarshalJSON(dst); err != nil {
		return 0, err
	}
	return end, nil
}

func decodeUnmarshalerContext(ctx *RuntimeContext, buf []byte, cursor, depth int64, unmarshaler unmarshalerContext) (int64, error) {
	cursor = skipWhiteSpace(buf, cursor)
	start := cursor
	end, err := skipValue(buf, cursor, depth)
	if err != nil {
		return 0, err
	}
	src := buf[start:end]
	dst := make([]byte, len(src))
	copy(dst, src)

	if err := unmarshaler.UnmarshalJSON(ctx.Option.Context, dst); err != nil {
		return 0, err
	}
	return end, nil
}

func decodeStreamTextUnmarshaler(s *Stream, depth int64, unmarshaler encoding.TextUnmarshaler, p unsafe.Pointer) error {
	start := s.cursor
	if err := s.skipValue(depth); err != nil {
		return err
	}
	src := s.buf[start:s.cursor]
	if bytes.Equal(src, nullbytes) {
		*(*unsafe.Pointer)(p) = nil
		return nil
	}

	dst := make([]byte, len(src))
	copy(dst, src)

	if err := unmarshaler.UnmarshalText(dst); err != nil {
		return err
	}
	return nil
}

func decodeTextUnmarshaler(buf []byte, cursor, depth int64, unmarshaler encoding.TextUnmarshaler, p unsafe.Pointer) (int64, error) {
	cursor = skipWhiteSpace(buf, cursor)
	start := cursor
	end, err := skipValue(buf, cursor, depth)
	if err != nil {
		return 0, err
	}
	src := buf[start:end]
	if bytes.Equal(src, nullbytes) {
		*(*unsafe.Pointer)(p) = nil
		return end, nil
	}
	if s, ok := unquoteBytes(src); ok {
		src = s
	}
	if err := unmarshaler.UnmarshalText(src); err != nil {
		return 0, err
	}
	return end, nil
}

func (d *interfaceDecoder) decodeStreamEmptyInterface(s *Stream, depth int64, p unsafe.Pointer) error {
	c := s.skipWhiteSpace()
	for {
		switch c {
		case '{':
			var v map[string]interface{}
			ptr := unsafe.Pointer(&v)
			if err := d.mapDecoder.DecodeStream(s, depth, ptr); err != nil {
				return err
			}
			*(*interface{})(p) = v
			return nil
		case '[':
			var v []interface{}
			ptr := unsafe.Pointer(&v)
			if err := d.sliceDecoder.DecodeStream(s, depth, ptr); err != nil {
				return err
			}
			*(*interface{})(p) = v
			return nil
		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			return d.numDecoder(s).DecodeStream(s, depth, p)
		case '"':
			s.cursor++
			start := s.cursor
			for {
				switch s.char() {
				case '\\':
					if _, err := decodeEscapeString(s, nil); err != nil {
						return err
					}
				case '"':
					literal := s.buf[start:s.cursor]
					s.cursor++
					*(*interface{})(p) = string(literal)
					return nil
				case nul:
					if s.read() {
						continue
					}
					return errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
				}
				s.cursor++
			}
		case 't':
			if err := trueBytes(s); err != nil {
				return err
			}
			**(**interface{})(unsafe.Pointer(&p)) = true
			return nil
		case 'f':
			if err := falseBytes(s); err != nil {
				return err
			}
			**(**interface{})(unsafe.Pointer(&p)) = false
			return nil
		case 'n':
			if err := nullBytes(s); err != nil {
				return err
			}
			*(*interface{})(p) = nil
			return nil
		case nul:
			if s.read() {
				c = s.char()
				continue
			}
		}
		break
	}
	return errors.ErrInvalidBeginningOfValue(c, s.totalOffset())
}

type emptyInterface struct {
	typ *runtime.Type
	ptr unsafe.Pointer
}

func (d *interfaceDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	runtimeInterfaceValue := *(*interface{})(unsafe.Pointer(&emptyInterface{
		typ: d.typ,
		ptr: p,
	}))
	rv := reflect.ValueOf(runtimeInterfaceValue)
	if rv.NumMethod() > 0 && rv.CanInterface() {
		if u, ok := rv.Interface().(unmarshalerContext); ok {
			return decodeStreamUnmarshalerContext(s, depth, u)
		}
		if u, ok := rv.Interface().(json.Unmarshaler); ok {
			return decodeStreamUnmarshaler(s, depth, u)
		}
		if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
			return decodeStreamTextUnmarshaler(s, depth, u, p)
		}
		if s.skipWhiteSpace() == 'n' {
			if err := nullBytes(s); err != nil {
				return err
			}
			*(*interface{})(p) = nil
			return nil
		}
		return d.errUnmarshalType(rv.Type(), s.totalOffset())
	}
	iface := rv.Interface()
	ifaceHeader := (*emptyInterface)(unsafe.Pointer(&iface))
	typ := ifaceHeader.typ
	if ifaceHeader.ptr == nil || d.typ == typ || typ == nil {
		// concrete type is empty interface
		return d.decodeStreamEmptyInterface(s, depth, p)
	}
	if typ.Kind() == reflect.Ptr && typ.Elem() == d.typ || typ.Kind() != reflect.Ptr {
		return d.decodeStreamEmptyInterface(s, depth, p)
	}
	if s.skipWhiteSpace() == 'n' {
		if err := nullBytes(s); err != nil {
			return err
		}
		*(*interface{})(p) = nil
		return nil
	}
	decoder, err := CompileToGetDecoder(typ)
	if err != nil {
		return err
	}
	return decoder.DecodeStream(s, depth, ifaceHeader.ptr)
}

func (d *interfaceDecoder) errUnmarshalType(typ reflect.Type, offset int64) *errors.UnmarshalTypeError {
	return &errors.UnmarshalTypeError{
		Value:  typ.String(),
		Type:   typ,
		Offset: offset,
		Struct: d.structName,
		Field:  d.fieldName,
	}
}

func (d *interfaceDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	runtimeInterfaceValue := *(*interface{})(unsafe.Pointer(&emptyInterface{
		typ: d.typ,
		ptr: p,
	}))
	rv := reflect.ValueOf(runtimeInterfaceValue)
	if rv.NumMethod() > 0 && rv.CanInterface() {
		if u, ok := rv.Interface().(unmarshalerContext); ok {
			return decodeUnmarshalerContext(ctx, buf, cursor, depth, u)
		}
		if u, ok := rv.Interface().(json.Unmarshaler); ok {
			return decodeUnmarshaler(buf, cursor, depth, u)
		}
		if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
			return decodeTextUnmarshaler(buf, cursor, depth, u, p)
		}
		cursor = skipWhiteSpace(buf, cursor)
		if buf[cursor] == 'n' {
			if err := validateNull(buf, cursor); err != nil {
				return 0, err
			}
			cursor += 4
			**(**interface{})(unsafe.Pointer(&p)) = nil
			return cursor, nil
		}
		return 0, d.errUnmarshalType(rv.Type(), cursor)
	}

	iface := rv.Interface()
	ifaceHeader := (*emptyInterface)(unsafe.Pointer(&iface))
	typ := ifaceHeader.typ
	if ifaceHeader.ptr == nil || d.typ == typ || typ == nil {
		// concrete type is empty interface
		return d.decodeEmptyInterface(ctx, cursor, depth, p)
	}
	if typ.Kind() == reflect.Ptr && typ.Elem() == d.typ || typ.Kind() != reflect.Ptr {
		return d.decodeEmptyInterface(ctx, cursor, depth, p)
	}
	cursor = skipWhiteSpace(buf, cursor)
	if buf[cursor] == 'n' {
		if err := validateNull(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 4
		**(**interface{})(unsafe.Pointer(&p)) = nil
		return cursor, nil
	}
	decoder, err := CompileToGetDecoder(typ)
	if err != nil {
		return 0, err
	}
	return decoder.Decode(ctx, cursor, depth, ifaceHeader.ptr)
}

func (d *interfaceDecoder) decodeEmptyInterface(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	cursor = skipWhiteSpace(buf, cursor)
	switch buf[cursor] {
	case '{':
		var v map[string]interface{}
		ptr := unsafe.Pointer(&v)
		cursor, err := d.mapDecoder.Decode(ctx, cursor, depth, ptr)
		if err != nil {
			return 0, err
		}
		**(**interface{})(unsafe.Pointer(&p)) = v
		return cursor, nil
	case '[':
		var v []interface{}
		ptr := unsafe.Pointer(&v)
		cursor, err := d.sliceDecoder.Decode(ctx, cursor, depth, ptr)
		if err != nil {
			return 0, err
		}
		**(**interface{})(unsafe.Pointer(&p)) = v
		return cursor, nil
	case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
		return d.floatDecoder.Decode(ctx, cursor, depth, p)
	case '"':
		var v string
		ptr := unsafe.Pointer(&v)
		cursor, err := d.stringDecoder.Decode(ctx, cursor, depth, ptr)
		if err != nil {
			return 0, err
		}
		**(**interface{})(unsafe.Pointer(&p)) = v
		return cursor, nil
	case 't':
		if err := validateTrue(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 4
		**(**interface{})(unsafe.Pointer(&p)) = true
		return cursor, nil
	case 'f':
		if err := validateFalse(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 5
		**(**interface{})(unsafe.Pointer(&p)) = false
		return cursor, nil
	case 'n':
		if err := validateNull(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 4
		**(**interface{})(unsafe.Pointer(&p)) = nil
		return cursor, nil
	}
	return cursor, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor)
}

func NewPathDecoder() Decoder {
	ifaceDecoder := &interfaceDecoder{
		typ:        emptyInterfaceType,
		structName: "",
		fieldName:  "",
		floatDecoder: newFloatDecoder("", "", func(p unsafe.Pointer, v float64) {
			*(*interface{})(p) = v
		}),
		numberDecoder: newNumberDecoder("", "", func(p unsafe.Pointer, v json.Number) {
			*(*interface{})(p) = v
		}),
		stringDecoder: newStringDecoder("", ""),
	}
	ifaceDecoder.sliceDecoder = newSliceDecoder(
		ifaceDecoder,
		emptyInterfaceType,
		emptyInterfaceType.Size(),
		"", "",
	)
	ifaceDecoder.mapDecoder = newMapDecoder(
		interfaceMapType,
		stringType,
		ifaceDecoder.stringDecoder,
		interfaceMapType.Elem(),
		ifaceDecoder,
		"", "",
	)
	return ifaceDecoder
}

var (
	truebytes  = []byte("true")
	falsebytes = []byte("false")
)

func (d *interfaceDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int64) ([][]byte, int64, error) {
	buf := ctx.Buf
	cursor = skipWhiteSpace(buf, cursor)
	switch buf[cursor] {
	case '{':
		return d.mapDecoder.DecodePath(ctx, cursor, depth)
	case '[':
		return d.sliceDecoder.DecodePath(ctx, cursor, depth)
	case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
		return d.floatDecoder.DecodePath(ctx, cursor, depth)
	case '"':
		return d.stringDecoder.DecodePath(ctx, cursor, depth)
	case 't':
		if err := validateTrue(buf, cursor); err != nil {
			return nil, 0, err
		}
		cursor += 4
		return [][]byte{truebytes}, cursor, nil
	case 'f':
		if err := validateFalse(buf, cursor); err != nil {
			return nil, 0, err
		}
		cursor += 5
		return [][]byte{falsebytes}, cursor, nil
	case 'n':
		if err := validateNull(buf, cursor); err != nil {
			return nil, 0, err
		}
		cursor += 4
		return [][]byte{nullbytes}, cursor, nil
	}
	return nil, cursor, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor)
}
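For context: decodeEmptyInterface and decodeStreamEmptyInterface above implement the usual interface{} mapping — JSON objects become map[string]interface{}, arrays []interface{}, numbers float64 (or json.Number when the stream's UseNumber option is set), strings string, booleans bool, and null nil. A small sketch of that mapping from the caller's side, assuming the top-level github.com/goccy/go-json package whose internals are vendored above (the example is illustrative and not part of this diff):

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	var v interface{}
	data := []byte(`{"id": 1, "tags": ["a", "b"], "live": true, "note": null}`)
	if err := json.Unmarshal(data, &v); err != nil {
		panic(err)
	}
	obj := v.(map[string]interface{})   // '{' is handled by the map decoder
	tags := obj["tags"].([]interface{}) // '[' is handled by the slice decoder
	fmt.Printf("%T %T %T %v\n",
		obj["id"],   // float64: digits go through the float decoder by default
		tags[0],     // string
		obj["live"], // bool
		obj["note"]) // <nil>
}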
Some files were not shown because too many files have changed in this diff.