Mirror of https://github.com/superseriousbusiness/gotosocial, synced 2025-06-05 21:59:39 +02:00
[chore]: Bump github.com/gin-contrib/gzip from 1.0.1 to 1.1.0 (#3639)
Bumps [github.com/gin-contrib/gzip](https://github.com/gin-contrib/gzip) from 1.0.1 to 1.1.0.
- [Release notes](https://github.com/gin-contrib/gzip/releases)
- [Changelog](https://github.com/gin-contrib/gzip/blob/master/.goreleaser.yaml)
- [Commits](https://github.com/gin-contrib/gzip/compare/v1.0.1...v1.1.0)

---
updated-dependencies:
- dependency-name: github.com/gin-contrib/gzip
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
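For context, a minimal sketch (not taken from this repository) of how the bumped middleware is typically wired into a Gin router; the route, handler, and port are illustrative:

```go
package main

import (
	"net/http"

	"github.com/gin-contrib/gzip"
	"github.com/gin-gonic/gin"
)

func main() {
	r := gin.Default()

	// gzip.Gzip returns a middleware that compresses response bodies;
	// DefaultCompression is the usual speed/ratio trade-off.
	r.Use(gzip.Gzip(gzip.DefaultCompression))

	r.GET("/ping", func(c *gin.Context) {
		c.String(http.StatusOK, "pong")
	})

	_ = r.Run(":8080") // illustrative port
}
```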
@@ -14,51 +14,52 @@
 * limitations under the License.
 */

package decoder
package api

import (
    `unsafe`
    `encoding/json`
    `reflect`
    `runtime`

    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/decoder/consts`
    `github.com/bytedance/sonic/internal/decoder/errors`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/bytedance/sonic/option`
    `github.com/bytedance/sonic/utf8`
)

const (
    _F_use_int64 = 0
    _F_disable_urc = 2
    _F_disable_unknown = 3
    _F_copy_string = 4
    _F_allow_control = consts.F_allow_control
    _F_copy_string = consts.F_copy_string
    _F_disable_unknown = consts.F_disable_unknown
    _F_disable_urc = consts.F_disable_urc
    _F_use_int64 = consts.F_use_int64
    _F_use_number = consts.F_use_number
    _F_validate_string = consts.F_validate_string

    _F_use_number = types.B_USE_NUMBER
    _F_validate_string = types.B_VALIDATE_STRING
    _F_allow_control = types.B_ALLOW_CONTROL
    _MaxStack = consts.MaxStack

    OptionUseInt64 = consts.OptionUseInt64
    OptionUseNumber = consts.OptionUseNumber
    OptionUseUnicodeErrors = consts.OptionUseUnicodeErrors
    OptionDisableUnknown = consts.OptionDisableUnknown
    OptionCopyString = consts.OptionCopyString
    OptionValidateString = consts.OptionValidateString
    OptionNoValidateJSON = consts.OptionNoValidateJSON
)

type Options uint64

const (
    OptionUseInt64 Options = 1 << _F_use_int64
    OptionUseNumber Options = 1 << _F_use_number
    OptionUseUnicodeErrors Options = 1 << _F_disable_urc
    OptionDisableUnknown Options = 1 << _F_disable_unknown
    OptionCopyString Options = 1 << _F_copy_string
    OptionValidateString Options = 1 << _F_validate_string
type (
    Options = consts.Options
    MismatchTypeError = errors.MismatchTypeError
    SyntaxError = errors.SyntaxError
)

func (self *Decoder) SetOptions(opts Options) {
    if (opts & OptionUseNumber != 0) && (opts & OptionUseInt64 != 0) {
    if (opts & consts.OptionUseNumber != 0) && (opts & consts.OptionUseInt64 != 0) {
        panic("can't set OptionUseInt64 and OptionUseNumber both!")
    }
    self.f = uint64(opts)
}


// Decoder is the decoder context object
type Decoder struct {
    i int
@@ -109,44 +110,7 @@ func (self *Decoder) CheckTrailings() error {
// Decode parses the JSON-encoded data from current position and stores the result
// in the value pointed to by val.
func (self *Decoder) Decode(val interface{}) error {
    /* validate json if needed */
    if (self.f & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(self.s){
        dbuf := utf8.CorrectWith(nil, rt.Str2Mem(self.s), "\ufffd")
        self.s = rt.Mem2Str(dbuf)
    }

    vv := rt.UnpackEface(val)
    vp := vv.Value

    /* check for nil type */
    if vv.Type == nil {
        return &json.InvalidUnmarshalError{}
    }

    /* must be a non-nil pointer */
    if vp == nil || vv.Type.Kind() != reflect.Ptr {
        return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
    }

    etp := rt.PtrElem(vv.Type)

    /* check the defined pointer type for issue 379 */
    if vv.Type.IsNamed() {
        newp := vp
        etp = vv.Type
        vp = unsafe.Pointer(&newp)
    }

    /* create a new stack, and call the decoder */
    sb := newStack()
    nb, err := decodeTypedPointer(self.s, self.i, etp, vp, sb, self.f)
    /* return the stack back */
    self.i = nb
    freeStack(sb)

    /* avoid GC ahead */
    runtime.KeepAlive(vv)
    return err
    return decodeImpl(&self.s, &self.i, self.f, val)
}

// UseInt64 indicates the Decoder to unmarshal an integer into an interface{} as an
@@ -194,53 +158,7 @@ func (self *Decoder) ValidateString() {
// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
// a compile option to set the depth of recursive compile for the nested struct type.
func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
    cfg := option.DefaultCompileOptions()
    for _, opt := range opts {
        opt(&cfg)
    }
    return pretouchRec(map[reflect.Type]bool{vt:true}, cfg)
}

func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) {
    /* compile function */
    compiler := newCompiler().apply(opts)
    decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
        if pp, err := compiler.compile(_vt); err != nil {
            return nil, err
        } else {
            as := newAssembler(pp)
            as.name = _vt.String()
            return as.Load(), nil
        }
    }

    /* find or compile */
    vt := rt.UnpackType(_vt)
    if val := programCache.Get(vt); val != nil {
        return nil, nil
    } else if _, err := programCache.Compute(vt, decoder); err == nil {
        return compiler.rec, nil
    } else {
        return nil, err
    }
}

func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error {
    if opts.RecursiveDepth < 0 || len(vtm) == 0 {
        return nil
    }
    next := make(map[reflect.Type]bool)
    for vt := range(vtm) {
        sub, err := pretouchType(vt, opts)
        if err != nil {
            return err
        }
        for svt := range(sub) {
            next[svt] = true
        }
    }
    opts.RecursiveDepth -= 1
    return pretouchRec(next, opts)
    return pretouchImpl(vt, opts...)
}

// Skip skips only one json value, and returns first non-blank character position and its ending position if it is valid.
vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go | 38 (generated, vendored, new file)
@@ -0,0 +1,38 @@
//go:build go1.17 && !go1.24
// +build go1.17,!go1.24

/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package api

import (
    "github.com/bytedance/sonic/internal/envs"
    "github.com/bytedance/sonic/internal/decoder/jitdec"
    "github.com/bytedance/sonic/internal/decoder/optdec"
)

var (
    pretouchImpl = jitdec.Pretouch
    decodeImpl = jitdec.Decode
)

func init() {
    if envs.UseOptDec {
        pretouchImpl = optdec.Pretouch
        decodeImpl = optdec.Decode
    }
}
vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_arm64.go | 38 (generated, vendored, new file)
@@ -0,0 +1,38 @@
// +build go1.17,!go1.24

/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package api

import (
    `github.com/bytedance/sonic/internal/decoder/optdec`
    `github.com/bytedance/sonic/internal/envs`
)

var (
    pretouchImpl = optdec.Pretouch
    decodeImpl = optdec.Decode
)


func init() {
    // whe in aarch64. we enable all optimize
    envs.EnableOptDec()
    envs.EnableFastMap()
}
@@ -14,7 +14,7 @@
 * limitations under the License.
 */

package decoder
package api

import (
    `bytes`
@@ -47,6 +47,12 @@ var bufPool = sync.Pool{
    },
}

func freeBytes(buf []byte) {
    if rt.CanSizeResue(cap(buf)) {
        bufPool.Put(buf[:0])
    }
}

// NewStreamDecoder adapts to encoding/json.NewDecoder API.
//
// NewStreamDecoder returns a new decoder that reads from r.
@@ -61,25 +67,16 @@ func NewStreamDecoder(r io.Reader) *StreamDecoder {
func (self *StreamDecoder) Decode(val interface{}) (err error) {
    // read more data into buf
    if self.More() {
        // println(string(self.buf))
        var s = self.scanp
    try_skip:
        var e = len(self.buf)
        // println("s:", s, "e:", e, "scanned:",self.scanned, "scanp:",self.scanp, self.buf)
        var src = rt.Mem2Str(self.buf[s:e])
        // if len(src) > 5 {
        // println(src[:5], src[len(src)-5:])
        // } else {
        // println(src)
        // }
        // try skip
        var x = 0;
        if y := native.SkipOneFast(&src, &x); y < 0 {
        if self.readMore() {
            // println("more")
            goto try_skip
        } else {
            // println("no more")
            err = SyntaxError{e, self.s, types.ParsingError(-s), ""}
            self.setErr(err)
            return
@@ -89,7 +86,6 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) {
            e = x + s
        }

        // println("decode: ", s, e)
        // must copy string here for safety
        self.Decoder.Reset(string(self.buf[s:e]))
        err = self.Decoder.Decode(val)
@@ -101,13 +97,11 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) {
        self.scanp = e
        _, empty := self.scan()
        if empty {
            // println("recycle")
            // no remain valid bytes, thus we just recycle buffer
            mem := self.buf
            self.buf = nil
            bufPool.Put(mem[:0])
            freeBytes(mem)
        } else {
            // println("keep")
            // remain undecoded bytes, move them onto head
            n := copy(self.buf, self.buf[self.scanp:])
            self.buf = self.buf[:n]
@@ -123,7 +117,6 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) {
// InputOffset returns the input stream byte offset of the current decoder position.
// The offset gives the location of the end of the most recently returned token and the beginning of the next token.
func (self *StreamDecoder) InputOffset() int64 {
    // println("input offset",self.scanned, self.scanp)
    return self.scanned + int64(self.scanp)
}

@@ -178,7 +171,7 @@ func (self *StreamDecoder) setErr(err error) {
    self.err = err
    mem := self.buf[:0]
    self.buf = nil
    bufPool.Put(mem)
    freeBytes(mem)
}

func (self *StreamDecoder) peek() (byte, error) {
@@ -237,12 +230,10 @@ func realloc(buf *[]byte) bool {
    l := uint(len(*buf))
    c := uint(cap(*buf))
    if c == 0 {
        // println("use pool!")
        *buf = bufPool.Get().([]byte)
        return true
    }
    if c - l <= c >> minLeftBufferShift {
        // println("realloc!")
        e := l+(l>>minLeftBufferShift)
        if e <= c {
            e = c*2
vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go | 130 (generated, vendored)
@@ -1,130 +0,0 @@
|
||||
// +build go1.16,!go1.17
|
||||
|
||||
// Copyright 2023 CloudWeGo Authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
`strconv`
|
||||
_ `unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/jit`
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
`github.com/twitchyliquid64/golang-asm/obj`
|
||||
`github.com/twitchyliquid64/golang-asm/obj/x86`
|
||||
)
|
||||
|
||||
var _runtime_writeBarrier uintptr = rt.GcwbAddr()
|
||||
|
||||
//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
|
||||
func gcWriteBarrierAX()
|
||||
|
||||
var (
|
||||
_V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier))
|
||||
|
||||
_F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
|
||||
)
|
||||
|
||||
func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
|
||||
self.Emit("MOVQ", _V_writeBarrier, _R10)
|
||||
self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
if saveDI {
|
||||
self.save(_DI)
|
||||
}
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _R10)
|
||||
if saveDI {
|
||||
self.load(_DI)
|
||||
}
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", _AX, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
||||
|
||||
func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
|
||||
if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
|
||||
panic("rec contains AX!")
|
||||
}
|
||||
self.Emit("MOVQ", _V_writeBarrier, _R10)
|
||||
self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
if saveAX {
|
||||
self.Emit("XCHGQ", ptr, _AX)
|
||||
} else {
|
||||
self.Emit("MOVQ", ptr, _AX)
|
||||
}
|
||||
if saveDI {
|
||||
self.save(_DI)
|
||||
}
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _R10)
|
||||
if saveDI {
|
||||
self.load(_DI)
|
||||
}
|
||||
if saveAX {
|
||||
self.Emit("XCHGQ", ptr, _AX)
|
||||
}
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
||||
|
||||
|
||||
func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
|
||||
self.Emit("MOVQ", _V_writeBarrier, _R10)
|
||||
self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
if saveDI {
|
||||
self.save(_DI)
|
||||
}
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _R10)
|
||||
if saveDI {
|
||||
self.load(_DI)
|
||||
}
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", _AX, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
|
||||
if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
|
||||
panic("rec contains AX!")
|
||||
}
|
||||
self.Emit("MOVQ", _V_writeBarrier, _R10)
|
||||
self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, _AX)
|
||||
if saveDI {
|
||||
self.save(_DI)
|
||||
}
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _R10)
|
||||
if saveDI {
|
||||
self.load(_DI)
|
||||
}
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
vendor/github.com/bytedance/sonic/internal/decoder/assembler_stkabi_amd64.go | 1950 (generated, vendored)
File diff suppressed because it is too large.
vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go | 36 (generated, vendored, new file)
@@ -0,0 +1,36 @@

package consts

import (
    `github.com/bytedance/sonic/internal/native/types`
)


const (
    F_use_int64 = 0
    F_disable_urc = 2
    F_disable_unknown = 3
    F_copy_string = 4


    F_use_number = types.B_USE_NUMBER
    F_validate_string = types.B_VALIDATE_STRING
    F_allow_control = types.B_ALLOW_CONTROL
    F_no_validate_json = types.B_NO_VALIDATE_JSON
)

type Options uint64

const (
    OptionUseInt64 Options = 1 << F_use_int64
    OptionUseNumber Options = 1 << F_use_number
    OptionUseUnicodeErrors Options = 1 << F_disable_urc
    OptionDisableUnknown Options = 1 << F_disable_unknown
    OptionCopyString Options = 1 << F_copy_string
    OptionValidateString Options = 1 << F_validate_string
    OptionNoValidateJSON Options = 1 << F_no_validate_json
)

const (
    MaxStack = 4096
)
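As a side note (not part of the diff), a self-contained sketch of how bit-flag options like these are combined and tested. The bit positions are the ones defined above; the names are local stand-ins, since the real consts package is internal to sonic and cannot be imported from outside it:

```go
package main

import "fmt"

// Options mirrors the pattern above: each option occupies one bit of a uint64.
type Options uint64

// Bit positions taken from the constants in consts/option.go above.
const (
	fUseInt64       = 0
	fDisableURC     = 2
	fDisableUnknown = 3
	fCopyString     = 4
)

const (
	OptionUseInt64         Options = 1 << fUseInt64
	OptionUseUnicodeErrors Options = 1 << fDisableURC
	OptionDisableUnknown   Options = 1 << fDisableUnknown
	OptionCopyString       Options = 1 << fCopyString
)

func main() {
	// Independent options are combined by OR-ing them together.
	opts := OptionCopyString | OptionDisableUnknown

	// A single option is tested by masking, as SetOptions does in the api diff above.
	if opts&OptionDisableUnknown != 0 {
		fmt.Println("unknown fields will be rejected")
	}
	fmt.Printf("flag word: %b\n", uint64(opts))
}
```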
@@ -14,7 +14,7 @@
 * limitations under the License.
 */

package decoder
package errors

import (
    `encoding/json`
@@ -46,7 +46,7 @@ func (self SyntaxError) Description() string {
func (self SyntaxError) description() string {
    /* check for empty source */
    if self.Src == "" {
        return fmt.Sprintf("no sources available: %#v", self)
        return fmt.Sprintf("no sources available, the input json is empty: %#v", self)
    }

    p, x, q, y := calcBounds(len(self.Src), self.Pos)
@@ -112,12 +112,12 @@ func clamp_zero(v int) int {

/** JIT Error Helpers **/

var stackOverflow = &json.UnsupportedValueError {
var StackOverflow = &json.UnsupportedValueError {
    Str : "Value nesting too deep",
    Value : reflect.ValueOf("..."),
}

func error_wrap(src string, pos int, code types.ParsingError) error {
func ErrorWrap(src string, pos int, code types.ParsingError) error {
    return *error_wrap_heap(src, pos, code)
}

@@ -130,7 +130,7 @@ func error_wrap_heap(src string, pos int, code types.ParsingError) *SyntaxError
    }
}

func error_type(vt *rt.GoType) error {
func ErrorType(vt *rt.GoType) error {
    return &json.UnmarshalTypeError{Type: vt.Pack()}
}

@@ -171,7 +171,7 @@ func (self MismatchTypeError) Description() string {
    return fmt.Sprintf("Mismatch type %s with value %s %s", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description())
}

func error_mismatch(src string, pos int, vt *rt.GoType) error {
func ErrorMismatch(src string, pos int, vt *rt.GoType) error {
    return &MismatchTypeError {
        Pos : pos,
        Src : src,
@@ -179,11 +179,11 @@ func error_mismatch(src string, pos int, vt *rt.GoType) error {
    }
}

func error_field(name string) error {
func ErrorField(name string) error {
    return errors.New("json: unknown field " + strconv.Quote(name))
}

func error_value(value string, vtype reflect.Type) error {
func ErrorValue(value string, vtype reflect.Type) error {
    return &json.UnmarshalTypeError {
        Type : vtype,
        Value : value,
vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64.go | 733 (generated, vendored)
@@ -1,733 +0,0 @@
|
||||
// +build go1.16,!go1.17
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
`encoding/json`
|
||||
`fmt`
|
||||
`reflect`
|
||||
|
||||
`github.com/bytedance/sonic/internal/jit`
|
||||
`github.com/bytedance/sonic/internal/native`
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/twitchyliquid64/golang-asm/obj`
|
||||
)
|
||||
|
||||
/** Crucial Registers:
|
||||
*
|
||||
* ST(BX) : ro, decoder stack
|
||||
* DF(R10) : ro, decoder flags
|
||||
* EP(R11) : wo, error pointer
|
||||
* IP(R12) : ro, input pointer
|
||||
* IL(R13) : ro, input length
|
||||
* IC(R14) : rw, input cursor
|
||||
* VP(R15) : ro, value pointer (to an interface{})
|
||||
*/
|
||||
|
||||
const (
|
||||
_VD_args = 8 // 8 bytes for passing arguments to this functions
|
||||
_VD_fargs = 64 // 64 bytes for passing arguments to other Go functions
|
||||
_VD_saves = 40 // 40 bytes for saving the registers before CALL instructions
|
||||
_VD_locals = 88 // 88 bytes for local variables
|
||||
)
|
||||
|
||||
const (
|
||||
_VD_offs = _VD_fargs + _VD_saves + _VD_locals
|
||||
_VD_size = _VD_offs + 8 // 8 bytes for the parent frame pointer
|
||||
)
|
||||
|
||||
var (
|
||||
_VAR_ss = _VAR_ss_Vt
|
||||
_VAR_df = jit.Ptr(_SP, _VD_fargs + _VD_saves)
|
||||
)
|
||||
|
||||
var (
|
||||
_VAR_ss_Vt = jit.Ptr(_SP, _VD_fargs + _VD_saves + 8)
|
||||
_VAR_ss_Dv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 16)
|
||||
_VAR_ss_Iv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 24)
|
||||
_VAR_ss_Ep = jit.Ptr(_SP, _VD_fargs + _VD_saves + 32)
|
||||
_VAR_ss_Db = jit.Ptr(_SP, _VD_fargs + _VD_saves + 40)
|
||||
_VAR_ss_Dc = jit.Ptr(_SP, _VD_fargs + _VD_saves + 48)
|
||||
)
|
||||
|
||||
var (
|
||||
_VAR_cs_LR = jit.Ptr(_SP, _VD_fargs + _VD_saves + 56)
|
||||
_VAR_cs_p = jit.Ptr(_SP, _VD_fargs + _VD_saves + 64)
|
||||
_VAR_cs_n = jit.Ptr(_SP, _VD_fargs + _VD_saves + 72)
|
||||
_VAR_cs_d = jit.Ptr(_SP, _VD_fargs + _VD_saves + 80)
|
||||
)
|
||||
|
||||
type _ValueDecoder struct {
|
||||
jit.BaseAssembler
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) build() uintptr {
|
||||
self.Init(self.compile)
|
||||
return *(*uintptr)(self.Load("decode_value", _VD_size, _VD_args, argPtrs_generic, localPtrs_generic))
|
||||
}
|
||||
|
||||
/** Function Calling Helpers **/
|
||||
|
||||
func (self *_ValueDecoder) save(r ...obj.Addr) {
|
||||
for i, v := range r {
|
||||
if i > _VD_saves / 8 - 1 {
|
||||
panic("too many registers to save")
|
||||
} else {
|
||||
self.Emit("MOVQ", v, jit.Ptr(_SP, _VD_fargs + int64(i) * 8))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) load(r ...obj.Addr) {
|
||||
for i, v := range r {
|
||||
if i > _VD_saves / 8 - 1 {
|
||||
panic("too many registers to load")
|
||||
} else {
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, _VD_fargs + int64(i) * 8), v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) call(fn obj.Addr) {
|
||||
self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _AX) // CALL AX
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) call_go(fn obj.Addr) {
|
||||
self.save(_REG_go...) // SAVE $REG_go
|
||||
self.call(fn) // CALL ${fn}
|
||||
self.load(_REG_go...) // LOAD $REG_go
|
||||
}
|
||||
|
||||
/** Decoder Assembler **/
|
||||
|
||||
const (
|
||||
_S_val = iota + 1
|
||||
_S_arr
|
||||
_S_arr_0
|
||||
_S_obj
|
||||
_S_obj_0
|
||||
_S_obj_delim
|
||||
_S_obj_sep
|
||||
)
|
||||
|
||||
const (
|
||||
_S_omask_key = (1 << _S_obj_0) | (1 << _S_obj_sep)
|
||||
_S_omask_end = (1 << _S_obj_0) | (1 << _S_obj)
|
||||
_S_vmask = (1 << _S_val) | (1 << _S_arr_0)
|
||||
)
|
||||
|
||||
const (
|
||||
_A_init_len = 1
|
||||
_A_init_cap = 16
|
||||
)
|
||||
|
||||
const (
|
||||
_ST_Sp = 0
|
||||
_ST_Vt = _PtrBytes
|
||||
_ST_Vp = _PtrBytes * (types.MAX_RECURSE + 1)
|
||||
)
|
||||
|
||||
var (
|
||||
_V_true = jit.Imm(int64(pbool(true)))
|
||||
_V_false = jit.Imm(int64(pbool(false)))
|
||||
_F_value = jit.Imm(int64(native.S_value))
|
||||
)
|
||||
|
||||
var (
|
||||
_V_max = jit.Imm(int64(types.V_MAX))
|
||||
_E_eof = jit.Imm(int64(types.ERR_EOF))
|
||||
_E_invalid = jit.Imm(int64(types.ERR_INVALID_CHAR))
|
||||
_E_recurse = jit.Imm(int64(types.ERR_RECURSE_EXCEED_MAX))
|
||||
)
|
||||
|
||||
var (
|
||||
_F_convTslice = jit.Func(convTslice)
|
||||
_F_convTstring = jit.Func(convTstring)
|
||||
_F_invalid_vtype = jit.Func(invalid_vtype)
|
||||
)
|
||||
|
||||
var (
|
||||
_T_map = jit.Type(reflect.TypeOf((map[string]interface{})(nil)))
|
||||
_T_bool = jit.Type(reflect.TypeOf(false))
|
||||
_T_int64 = jit.Type(reflect.TypeOf(int64(0)))
|
||||
_T_eface = jit.Type(reflect.TypeOf((*interface{})(nil)).Elem())
|
||||
_T_slice = jit.Type(reflect.TypeOf(([]interface{})(nil)))
|
||||
_T_string = jit.Type(reflect.TypeOf(""))
|
||||
_T_number = jit.Type(reflect.TypeOf(json.Number("")))
|
||||
_T_float64 = jit.Type(reflect.TypeOf(float64(0)))
|
||||
)
|
||||
|
||||
var _R_tab = map[int]string {
|
||||
'[': "_decode_V_ARRAY",
|
||||
'{': "_decode_V_OBJECT",
|
||||
':': "_decode_V_KEY_SEP",
|
||||
',': "_decode_V_ELEM_SEP",
|
||||
']': "_decode_V_ARRAY_END",
|
||||
'}': "_decode_V_OBJECT_END",
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) compile() {
|
||||
self.Emit("SUBQ", jit.Imm(_VD_size), _SP) // SUBQ $_VD_size, SP
|
||||
self.Emit("MOVQ", _BP, jit.Ptr(_SP, _VD_offs)) // MOVQ BP, _VD_offs(SP)
|
||||
self.Emit("LEAQ", jit.Ptr(_SP, _VD_offs), _BP) // LEAQ _VD_offs(SP), BP
|
||||
|
||||
/* initialize the state machine */
|
||||
self.Emit("XORL", _CX, _CX) // XORL CX, CX
|
||||
self.Emit("MOVQ", _DF, _VAR_df) // MOVQ DF, df
|
||||
/* initialize digital buffer first */
|
||||
self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_ss_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
|
||||
self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX
|
||||
self.Emit("MOVQ", _AX, _VAR_ss_Db) // MOVQ AX, ss.Dbuf
|
||||
/* add ST offset */
|
||||
self.Emit("ADDQ", jit.Imm(_FsmOffset), _ST) // ADDQ _FsmOffset, _ST
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.WriteRecNotAX(0, _VP, jit.Ptr(_ST, _ST_Vp), false) // MOVQ VP, ST.Vp[0]
|
||||
self.Emit("MOVQ", jit.Imm(_S_val), jit.Ptr(_ST, _ST_Vt)) // MOVQ _S_val, ST.Vt[0]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* set the value from previous round */
|
||||
self.Link("_set_value") // _set_value:
|
||||
self.Emit("MOVL" , jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
|
||||
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
|
||||
self.Sjmp("JNC" , "_vtype_error") // JNC _vtype_error
|
||||
self.Emit("XORL" , _SI, _SI) // XORL SI, SI
|
||||
self.Emit("SUBQ" , jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
|
||||
self.Emit("XCHGQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // XCHGQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ" , _R8, jit.Ptr(_SI, 0)) // MOVQ R8, (SI)
|
||||
self.WriteRecNotAX(1, _R9, jit.Ptr(_SI, 8), false) // MOVQ R9, 8(SI)
|
||||
|
||||
/* check for value stack */
|
||||
self.Link("_next") // _next:
|
||||
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _AX) // MOVQ ST.Sp, AX
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , "_return") // JS _return
|
||||
|
||||
/* fast path: test up to 4 characters manually */
|
||||
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
|
||||
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
|
||||
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
|
||||
self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
|
||||
self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
|
||||
self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
|
||||
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
|
||||
|
||||
/* at least 1 to 3 spaces */
|
||||
for i := 0; i < 3; i++ {
|
||||
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
|
||||
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
|
||||
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
|
||||
self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
|
||||
self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
|
||||
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
|
||||
}
|
||||
|
||||
/* at least 4 spaces */
|
||||
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
|
||||
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
|
||||
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
|
||||
|
||||
/* fast path: use lookup table to select decoder */
|
||||
self.Link("_decode_fast") // _decode_fast:
|
||||
self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
|
||||
self.Sref("_decode_tab", 4) // .... &_decode_tab
|
||||
self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX
|
||||
self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JZ" , "_decode_native") // JZ _decode_native
|
||||
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
|
||||
self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
|
||||
self.Rjmp("JMP" , _AX) // JMP AX
|
||||
|
||||
/* decode with native decoder */
|
||||
self.Link("_decode_native") // _decode_native:
|
||||
self.Emit("MOVQ", _IP, _DI) // MOVQ IP, DI
|
||||
self.Emit("MOVQ", _IL, _SI) // MOVQ IL, SI
|
||||
self.Emit("MOVQ", _IC, _DX) // MOVQ IC, DX
|
||||
self.Emit("LEAQ", _VAR_ss, _CX) // LEAQ ss, CX
|
||||
self.Emit("MOVQ", _VAR_df, _R8) // MOVQ $df, R8
|
||||
self.Emit("BTSQ", jit.Imm(_F_allow_control), _R8) // ANDQ $1<<_F_allow_control, R8
|
||||
self.call(_F_value) // CALL value
|
||||
self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
|
||||
|
||||
/* check for errors */
|
||||
self.Emit("MOVQ" , _VAR_ss_Vt, _AX) // MOVQ ss.Vt, AX
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , "_parsing_error")
|
||||
self.Sjmp("JZ" , "_invalid_vtype") // JZ _invalid_vtype
|
||||
self.Emit("CMPQ" , _AX, _V_max) // CMPQ AX, _V_max
|
||||
self.Sjmp("JA" , "_invalid_vtype") // JA _invalid_vtype
|
||||
|
||||
/* jump table selector */
|
||||
self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
|
||||
self.Sref("_switch_table", 4) // .... &_switch_table
|
||||
self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, -4), _AX) // MOVLQSX -4(DI)(AX*4), AX
|
||||
self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
|
||||
self.Rjmp("JMP" , _AX) // JMP AX
|
||||
|
||||
/** V_EOF **/
|
||||
self.Link("_decode_V_EOF") // _decode_V_EOF:
|
||||
self.Emit("MOVL", _E_eof, _EP) // MOVL _E_eof, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
|
||||
/** V_NULL **/
|
||||
self.Link("_decode_V_NULL") // _decode_V_NULL:
|
||||
self.Emit("XORL", _R8, _R8) // XORL R8, R8
|
||||
self.Emit("XORL", _R9, _R9) // XORL R9, R9
|
||||
self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_TRUE **/
|
||||
self.Link("_decode_V_TRUE") // _decode_V_TRUE:
|
||||
self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
|
||||
// TODO: maybe modified by users?
|
||||
self.Emit("MOVQ", _V_true, _R9) // MOVQ _V_true, R9
|
||||
self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_FALSE **/
|
||||
self.Link("_decode_V_FALSE") // _decode_V_FALSE:
|
||||
self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
|
||||
self.Emit("MOVQ", _V_false, _R9) // MOVQ _V_false, R9
|
||||
self.Emit("LEAQ", jit.Ptr(_IC, -5), _DI) // LEAQ -5(IC), DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_ARRAY **/
|
||||
self.Link("_decode_V_ARRAY") // _decode_V_ARRAY
|
||||
self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
|
||||
self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
|
||||
|
||||
/* create a new array */
|
||||
self.Emit("MOVQ", _T_eface, _AX) // MOVQ _T_eface, AX
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP)
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP)
|
||||
self.call_go(_F_makeslice) // CALL_GO runtime.makeslice
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 24), _DX) // MOVQ 24(SP), DX
|
||||
|
||||
/* pack into an interface */
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP)
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP)
|
||||
self.call_go(_F_convTslice) // CALL_GO runtime.convTslice
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8) // MOVQ 24(SP), R8
|
||||
|
||||
/* replace current state with an array */
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Imm(_S_arr), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr, ST.Vt[CX]
|
||||
self.Emit("MOVQ", _T_slice, _AX) // MOVQ _T_slice, AX
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SI, 0)) // MOVQ AX, (SI)
|
||||
self.WriteRecNotAX(2, _R8, jit.Ptr(_SI, 8), false) // MOVQ R8, 8(SI)
|
||||
|
||||
/* add a new slot for the first element */
|
||||
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
|
||||
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
|
||||
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
|
||||
self.Emit("MOVQ", jit.Ptr(_R8, 0), _AX) // MOVQ (R8), AX
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.WritePtrAX(3, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
|
||||
self.Emit("MOVQ", jit.Imm(_S_arr_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr_0, ST.Vt[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_OBJECT **/
|
||||
self.Link("_decode_V_OBJECT") // _decode_V_OBJECT:
|
||||
self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
|
||||
self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
|
||||
self.call_go(_F_makemap_small) // CALL_GO runtime.makemap_small
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 0), _AX) // MOVQ (SP), AX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Imm(_S_obj_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj, ST.Vt[CX]
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 0)) // MOVQ DX, (SI)
|
||||
self.WritePtrAX(4, jit.Ptr(_SI, 8), false) // MOVQ AX, 8(SI)
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_STRING **/
|
||||
self.Link("_decode_V_STRING") // _decode_V_STRING:
|
||||
self.Emit("MOVQ", _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
|
||||
self.Emit("MOVQ", _IC, _AX) // MOVQ IC, AX
|
||||
self.Emit("SUBQ", _CX, _AX) // SUBQ CX, AX
|
||||
|
||||
/* check for escapes */
|
||||
self.Emit("CMPQ", _VAR_ss_Ep, jit.Imm(-1)) // CMPQ ss.Ep, $-1
|
||||
self.Sjmp("JNE" , "_unquote") // JNE _unquote
|
||||
self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX
|
||||
self.Emit("LEAQ", jit.Sib(_IP, _CX, 1, 0), _R8) // LEAQ (IP)(CX), R8
|
||||
self.Byte(0x48, 0x8d, 0x3d) // LEAQ (PC), DI
|
||||
self.Sref("_copy_string_end", 4)
|
||||
self.Emit("BTQ", jit.Imm(_F_copy_string), _VAR_df)
|
||||
self.Sjmp("JC", "copy_string")
|
||||
self.Link("_copy_string_end")
|
||||
self.Emit("XORL", _DX, _DX) // XORL DX, DX
|
||||
/* strings with no escape sequences */
|
||||
self.Link("_noescape") // _noescape:
|
||||
self.Emit("MOVL", jit.Imm(_S_omask_key), _DI) // MOVL _S_omask, DI
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _SI) // MOVQ ST.Vt[CX], SI
|
||||
self.Emit("BTQ" , _SI, _DI) // BTQ SI, DI
|
||||
self.Sjmp("JC" , "_object_key") // JC _object_key
|
||||
|
||||
/* check for pre-packed strings, avoid 1 allocation */
|
||||
self.Emit("TESTQ", _DX, _DX) // TESTQ DX, DX
|
||||
self.Sjmp("JNZ" , "_packed_str") // JNZ _packed_str
|
||||
self.Emit("MOVQ" , _R8, jit.Ptr(_SP, 0)) // MOVQ R8, (SP)
|
||||
self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
|
||||
self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
|
||||
self.Emit("MOVQ" , jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9
|
||||
|
||||
/* packed string already in R9 */
|
||||
self.Link("_packed_str") // _packed_str:
|
||||
self.Emit("MOVQ", _T_string, _R8) // MOVQ _T_string, R8
|
||||
self.Emit("MOVQ", _VAR_ss_Iv, _DI) // MOVQ ss.Iv, DI
|
||||
self.Emit("SUBQ", jit.Imm(1), _DI) // SUBQ $1, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/* the string is an object key, get the map */
|
||||
self.Link("_object_key")
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
|
||||
/* add a new delimiter */
|
||||
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
|
||||
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
|
||||
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.Emit("MOVQ", jit.Imm(_S_obj_delim), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj_delim, ST.Vt[CX]
|
||||
|
||||
/* add a new slot int the map */
|
||||
self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
|
||||
self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8)) // MOVQ SI, 8(SP)
|
||||
self.Emit("MOVQ", _R8, jit.Ptr(_SP, 16)) // MOVQ R9, 16(SP)
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 24)) // MOVQ AX, 24(SP)
|
||||
self.call_go(_F_mapassign_faststr) // CALL_GO runtime.mapassign_faststr
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 32), _AX) // MOVQ 32(SP), AX
|
||||
|
||||
/* add to the pointer stack */
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.WritePtrAX(6, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* allocate memory to store the string header and unquoted result */
|
||||
self.Link("_unquote") // _unquote:
|
||||
self.Emit("ADDQ", jit.Imm(15), _AX) // ADDQ $15, AX
|
||||
self.Emit("MOVQ", _T_byte, _CX) // MOVQ _T_byte, CX
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
|
||||
self.Emit("MOVB", jit.Imm(0), jit.Ptr(_SP, 16)) // MOVB $0, 16(SP)
|
||||
self.call_go(_F_mallocgc) // CALL_GO runtime.mallocgc
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 24), _R9) // MOVQ 24(SP), R9
|
||||
|
||||
/* prepare the unquoting parameters */
|
||||
self.Emit("MOVQ" , _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
|
||||
self.Emit("LEAQ" , jit.Sib(_IP, _CX, 1, 0), _DI) // LEAQ (IP)(CX), DI
|
||||
self.Emit("NEGQ" , _CX) // NEGQ CX
|
||||
self.Emit("LEAQ" , jit.Sib(_IC, _CX, 1, -1), _SI) // LEAQ -1(IC)(CX), SI
|
||||
self.Emit("LEAQ" , jit.Ptr(_R9, 16), _DX) // LEAQ 16(R8), DX
|
||||
self.Emit("LEAQ" , _VAR_ss_Ep, _CX) // LEAQ ss.Ep, CX
|
||||
self.Emit("XORL" , _R8, _R8) // XORL R8, R8
|
||||
self.Emit("BTQ" , jit.Imm(_F_disable_urc), _VAR_df) // BTQ ${_F_disable_urc}, fv
|
||||
self.Emit("SETCC", _R8) // SETCC R8
|
||||
self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8
|
||||
|
||||
/* unquote the string, with R9 been preserved */
|
||||
self.save(_R9) // SAVE R9
|
||||
self.call(_F_unquote) // CALL unquote
|
||||
self.load(_R9) // LOAD R9
|
||||
|
||||
/* check for errors */
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , "_unquote_error") // JS _unquote_error
|
||||
self.Emit("MOVL" , jit.Imm(1), _DX) // MOVL $1, DX
|
||||
self.Emit("LEAQ" , jit.Ptr(_R9, 16), _R8) // ADDQ $16, R8
|
||||
self.Emit("MOVQ" , _R8, jit.Ptr(_R9, 0)) // MOVQ R8, (R9)
|
||||
self.Emit("MOVQ" , _AX, jit.Ptr(_R9, 8)) // MOVQ AX, 8(R9)
|
||||
self.Sjmp("JMP" , "_noescape") // JMP _noescape
|
||||
|
||||
/** V_DOUBLE **/
|
||||
self.Link("_decode_V_DOUBLE") // _decode_V_DOUBLE:
|
||||
self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
|
||||
self.Sjmp("JC" , "_use_number") // JC _use_number
|
||||
self.Emit("MOVSD", _VAR_ss_Dv, _X0) // MOVSD ss.Dv, X0
|
||||
self.Sjmp("JMP" , "_use_float64") // JMP _use_float64
|
||||
|
||||
/** V_INTEGER **/
|
||||
self.Link("_decode_V_INTEGER") // _decode_V_INTEGER:
|
||||
self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
|
||||
self.Sjmp("JC" , "_use_number") // JC _use_number
|
||||
self.Emit("BTQ" , jit.Imm(_F_use_int64), _VAR_df) // BTQ _F_use_int64, df
|
||||
self.Sjmp("JC" , "_use_int64") // JC _use_int64
|
||||
self.Emit("MOVQ" , _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
|
||||
self.Emit("CVTSQ2SD", _AX, _X0) // CVTSQ2SD AX, X0
|
||||
|
||||
/* represent numbers as `float64` */
|
||||
self.Link("_use_float64") // _use_float64:
|
||||
self.Emit("MOVSD", _X0, jit.Ptr(_SP, 0)) // MOVSD X0, (SP)
|
||||
self.call_go(_F_convT64) // CALL_GO runtime.convT64
|
||||
self.Emit("MOVQ" , _T_float64, _R8) // MOVQ _T_float64, R8
|
||||
self.Emit("MOVQ" , jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9
|
||||
self.Emit("MOVQ" , _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/* represent numbers as `json.Number` */
|
||||
self.Link("_use_number") // _use_number
|
||||
self.Emit("MOVQ", _VAR_ss_Ep, _AX) // MOVQ ss.Ep, AX
|
||||
self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _SI) // LEAQ (IP)(AX), SI
|
||||
self.Emit("MOVQ", _IC, _CX) // MOVQ IC, CX
|
||||
self.Emit("SUBQ", _AX, _CX) // SUBQ AX, CX
|
||||
self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0)) // MOVQ SI, (SP)
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
|
||||
self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
|
||||
self.Emit("MOVQ", _T_number, _R8) // MOVQ _T_number, R8
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9
|
||||
self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/* represent numbers as `int64` */
|
||||
self.Link("_use_int64") // _use_int64:
|
||||
self.Emit("MOVQ", _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
|
||||
self.call_go(_F_convT64) // CALL_GO runtime.convT64
|
||||
self.Emit("MOVQ", _T_int64, _R8) // MOVQ _T_int64, R8
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9
|
||||
self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_KEY_SEP **/
|
||||
self.Link("_decode_V_KEY_SEP") // _decode_V_KEY_SEP:
|
||||
// self.Byte(0xcc)
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("CMPQ", _AX, jit.Imm(_S_obj_delim)) // CMPQ AX, _S_obj_delim
|
||||
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX]
|
||||
self.Emit("MOVQ", jit.Imm(_S_obj), jit.Sib(_ST, _CX, 8, _ST_Vt - 8)) // MOVQ _S_obj, ST.Vt[CX - 1]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_ELEM_SEP **/
|
||||
self.Link("_decode_V_ELEM_SEP") // _decode_V_ELEM_SEP:
|
||||
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(_S_arr)) // CMPQ _AX, _S_arr
|
||||
self.Sjmp("JE" , "_array_sep") // JZ _next
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(_S_obj)) // CMPQ _AX, _S_arr
|
||||
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("MOVQ" , jit.Imm(_S_obj_sep), jit.Sib(_ST, _CX, 8, _ST_Vt))
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* arrays */
|
||||
self.Link("_array_sep")
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _DX) // MOVQ 8(SI), DX
|
||||
self.Emit("CMPQ", _DX, jit.Ptr(_SI, 16)) // CMPQ DX, 16(SI)
|
||||
self.Sjmp("JAE" , "_array_more") // JAE _array_more
|
||||
|
||||
/* add a slot for the new element */
|
||||
self.Link("_array_append") // _array_append:
|
||||
self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_SI, 8)) // ADDQ $1, 8(SI)
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 0), _SI) // MOVQ (SI), SI
|
||||
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
|
||||
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
|
||||
self.Sjmp("JAE" , "_stack_overflow")
|
||||
self.Emit("SHLQ", jit.Imm(1), _DX) // SHLQ $1, DX
|
||||
self.Emit("LEAQ", jit.Sib(_SI, _DX, 8, 0), _SI) // LEAQ (SI)(DX*8), SI
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.WriteRecNotAX(7 , _SI, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ SI, ST.Vp[CX]
|
||||
self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX}
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_ARRAY_END **/
|
||||
self.Link("_decode_V_ARRAY_END") // _decode_V_ARRAY_END:
|
||||
self.Emit("XORL", _DX, _DX) // XORL DX, DX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("CMPQ", _AX, jit.Imm(_S_arr_0)) // CMPQ AX, _S_arr_0
|
||||
self.Sjmp("JE" , "_first_item") // JE _first_item
|
||||
self.Emit("CMPQ", _AX, jit.Imm(_S_arr)) // CMPQ AX, _S_arr
|
||||
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
|
||||
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* first element of an array */
|
||||
self.Link("_first_item") // _first_item:
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("SUBQ", jit.Imm(2), jit.Ptr(_ST, _ST_Sp)) // SUBQ $2, ST.Sp
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp - 8), _SI) // MOVQ ST.Vp[CX - 1], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp - 8)) // MOVQ DX, ST.Vp[CX - 1]
|
||||
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_OBJECT_END **/
|
||||
self.Link("_decode_V_OBJECT_END") // _decode_V_OBJECT_END:
|
||||
self.Emit("MOVL", jit.Imm(_S_omask_end), _DX) // MOVL _S_omask, DI
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DX)
|
||||
self.Sjmp("JNC" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("XORL", _AX, _AX) // XORL AX, AX
|
||||
self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
|
||||
self.Emit("MOVQ", _AX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ AX, ST.Vp[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* return from decoder */
|
||||
self.Link("_return") // _return:
|
||||
self.Emit("XORL", _EP, _EP) // XORL EP, EP
|
||||
self.Emit("MOVQ", _EP, jit.Ptr(_ST, _ST_Vp)) // MOVQ EP, ST.Vp[0]
|
||||
self.Link("_epilogue") // _epilogue:
|
||||
self.Emit("SUBQ", jit.Imm(_FsmOffset), _ST) // SUBQ _FsmOffset, _ST
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, _VD_offs), _BP) // MOVQ _VD_offs(SP), BP
|
||||
self.Emit("ADDQ", jit.Imm(_VD_size), _SP) // ADDQ $_VD_size, SP
|
||||
self.Emit("RET") // RET
|
||||
|
||||
/* array expand */
|
||||
self.Link("_array_more") // _array_more:
|
||||
self.Emit("MOVQ" , _T_eface, _AX) // MOVQ _T_eface, AX
|
||||
self.Emit("MOVOU", jit.Ptr(_SI, 0), _X0) // MOVOU (SI), X0
|
||||
self.Emit("MOVQ" , jit.Ptr(_SI, 16), _DX) // MOVQ 16(SI), DX
|
||||
self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
|
||||
self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
|
||||
self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 24)) // MOVQ DX, 24(SP)
|
||||
self.Emit("SHLQ" , jit.Imm(1), _DX) // SHLQ $1, DX
|
||||
self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 32)) // MOVQ DX, 32(SP)
|
||||
self.call_go(_F_growslice) // CALL_GO runtime.growslice
|
||||
self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI) // MOVOU 40(SP), DI
|
||||
self.Emit("MOVQ" , jit.Ptr(_SP, 48), _DX) // MOVOU 48(SP), DX
|
||||
self.Emit("MOVQ" , jit.Ptr(_SP, 56), _AX) // MOVQ 56(SP), AX
|
||||
|
||||
/* update the slice */
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SI, 16)) // MOVQ AX, 16(AX)
|
||||
self.WriteRecNotAX(8 , _DI, jit.Ptr(_SI, 0), false) // MOVQ R10, (SI)
|
||||
self.Sjmp("JMP" , "_array_append") // JMP _array_append
|
||||
|
||||
/* copy string */
|
||||
self.Link("copy_string") // pointer: R8, length: AX, return addr: DI
|
||||
// self.Byte(0xcc)
|
||||
self.Emit("MOVQ", _R8, _VAR_cs_p)
|
||||
self.Emit("MOVQ", _AX, _VAR_cs_n)
|
||||
self.Emit("MOVQ", _DI, _VAR_cs_LR)
|
||||
self.Emit("MOVQ", _T_byte, _R8)
|
||||
self.Emit("MOVQ", _R8, jit.Ptr(_SP, 0))
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
|
||||
self.call_go(_F_makeslice)
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8)
|
||||
self.Emit("MOVQ", _R8, _VAR_cs_d)
|
||||
self.Emit("MOVQ", _R8, jit.Ptr(_SP, 0))
|
||||
self.Emit("MOVQ", _VAR_cs_p, _R8)
|
||||
self.Emit("MOVQ", _R8, jit.Ptr(_SP, 8))
|
||||
self.Emit("MOVQ", _VAR_cs_n, _AX)
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
|
||||
self.call_go(_F_memmove)
|
||||
self.Emit("MOVQ", _VAR_cs_d, _R8)
|
||||
self.Emit("MOVQ", _VAR_cs_n, _AX)
|
||||
self.Emit("MOVQ", _VAR_cs_LR, _DI)
|
||||
// self.Byte(0xcc)
|
||||
self.Rjmp("JMP", _DI)
|
||||
|
||||
/* error handlers */
|
||||
self.Link("_stack_overflow")
|
||||
self.Emit("MOVL" , _E_recurse, _EP) // MOVQ _E_recurse, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
self.Link("_vtype_error") // _vtype_error:
|
||||
self.Emit("MOVQ" , _DI, _IC) // MOVQ DI, IC
|
||||
self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
self.Link("_invalid_char") // _invalid_char:
|
||||
self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
|
||||
self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
self.Link("_unquote_error") // _unquote_error:
|
||||
self.Emit("MOVQ" , _VAR_ss_Iv, _IC) // MOVQ ss.Iv, IC
|
||||
self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
|
||||
self.Link("_parsing_error") // _parsing_error:
|
||||
self.Emit("NEGQ" , _AX) // NEGQ AX
|
||||
self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP
|
||||
self.Link("_error") // _error:
|
||||
self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
|
||||
self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
|
||||
self.Sjmp("JMP" , "_epilogue") // JMP _epilogue
|
||||
|
||||
/* invalid value type, never returns */
|
||||
self.Link("_invalid_vtype")
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
|
||||
self.call(_F_invalid_vtype) // CALL invalid_type
|
||||
self.Emit("UD2") // UD2
|
||||
|
||||
/* switch jump table */
|
||||
self.Link("_switch_table") // _switch_table:
|
||||
self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
|
||||
self.Sref("_decode_V_NULL", -4) // SREF &_decode_V_NULL, $-4
|
||||
self.Sref("_decode_V_TRUE", -8) // SREF &_decode_V_TRUE, $-8
|
||||
self.Sref("_decode_V_FALSE", -12) // SREF &_decode_V_FALSE, $-12
|
||||
self.Sref("_decode_V_ARRAY", -16) // SREF &_decode_V_ARRAY, $-16
|
||||
self.Sref("_decode_V_OBJECT", -20) // SREF &_decode_V_OBJECT, $-20
|
||||
self.Sref("_decode_V_STRING", -24) // SREF &_decode_V_STRING, $-24
|
||||
self.Sref("_decode_V_DOUBLE", -28) // SREF &_decode_V_DOUBLE, $-28
|
||||
self.Sref("_decode_V_INTEGER", -32) // SREF &_decode_V_INTEGER, $-32
|
||||
self.Sref("_decode_V_KEY_SEP", -36) // SREF &_decode_V_KEY_SEP, $-36
|
||||
self.Sref("_decode_V_ELEM_SEP", -40) // SREF &_decode_V_ELEM_SEP, $-40
|
||||
self.Sref("_decode_V_ARRAY_END", -44) // SREF &_decode_V_ARRAY_END, $-44
|
||||
self.Sref("_decode_V_OBJECT_END", -48) // SREF &_decode_V_OBJECT_END, $-48
|
||||
|
||||
/* fast character lookup table */
|
||||
self.Link("_decode_tab") // _decode_tab:
|
||||
self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
|
||||
|
||||
/* generate rest of the tabs */
|
||||
for i := 1; i < 256; i++ {
|
||||
if to, ok := _R_tab[i]; ok {
|
||||
self.Sref(to, -int64(i) * 4)
|
||||
} else {
|
||||
self.Byte(0x00, 0x00, 0x00, 0x00)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Generic Decoder **/
|
||||
|
||||
var (
|
||||
_subr_decode_value = new(_ValueDecoder).build()
|
||||
)
|
||||
|
||||
//go:nosplit
|
||||
func invalid_vtype(vt types.ValueType) {
|
||||
throw(fmt.Sprintf("invalid value type: %d", vt))
|
||||
}
|
vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64_test.s | 37 (generated, vendored)
@ -1,37 +0,0 @@
|
||||
// +build go1.16,!go1.17
|
||||
|
||||
//
|
||||
// Copyright 2021 ByteDance Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
#include "go_asm.h"
|
||||
#include "funcdata.h"
|
||||
#include "textflag.h"
|
||||
|
||||
TEXT ·decodeValueStub(SB), NOSPLIT, $0 - 72
|
||||
NO_LOCAL_POINTERS
|
||||
PXOR X0, X0
|
||||
MOVOU X0, rv+48(FP)
|
||||
MOVQ st+0(FP), BX
|
||||
MOVQ sp+8(FP), R12
|
||||
MOVQ sn+16(FP), R13
|
||||
MOVQ ic+24(FP), R14
|
||||
MOVQ vp+32(FP), R15
|
||||
MOVQ df+40(FP), R10
|
||||
MOVQ ·_subr_decode_value(SB), AX
|
||||
CALL AX
|
||||
MOVQ R14, rp+48(FP)
|
||||
MOVQ R11, ex+56(FP)
|
||||
RET
|
@ -14,7 +14,7 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`strconv`
|
@ -1,4 +1,4 @@
|
||||
// +build go1.21,!go1.23
|
||||
// +build go1.21,!go1.24
|
||||
|
||||
// Copyright 2023 CloudWeGo Authors
|
||||
//
|
||||
@ -14,7 +14,7 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`strconv`
|
@ -1,4 +1,5 @@
|
||||
// +build go1.17,!go1.23
|
||||
//go:build go1.17 && !go1.24
|
||||
// +build go1.17,!go1.24
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
@ -16,21 +17,22 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`encoding/json`
|
||||
`fmt`
|
||||
`math`
|
||||
`reflect`
|
||||
`unsafe`
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"reflect"
|
||||
"strings"
|
||||
"unsafe"
|
||||
|
||||
`github.com/bytedance/sonic/internal/caching`
|
||||
`github.com/bytedance/sonic/internal/jit`
|
||||
`github.com/bytedance/sonic/internal/native`
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
`github.com/twitchyliquid64/golang-asm/obj`
|
||||
"github.com/bytedance/sonic/internal/caching"
|
||||
"github.com/bytedance/sonic/internal/jit"
|
||||
"github.com/bytedance/sonic/internal/native"
|
||||
"github.com/bytedance/sonic/internal/native/types"
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
"github.com/twitchyliquid64/golang-asm/obj"
|
||||
)
|
||||
|
||||
/** Register Allocations
|
||||
@ -292,7 +294,6 @@ var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
|
||||
_OP_array_clear_p : (*_Assembler)._asm_OP_array_clear_p,
|
||||
_OP_slice_init : (*_Assembler)._asm_OP_slice_init,
|
||||
_OP_slice_append : (*_Assembler)._asm_OP_slice_append,
|
||||
_OP_object_skip : (*_Assembler)._asm_OP_object_skip,
|
||||
_OP_object_next : (*_Assembler)._asm_OP_object_next,
|
||||
_OP_struct_field : (*_Assembler)._asm_OP_struct_field,
|
||||
_OP_unmarshal : (*_Assembler)._asm_OP_unmarshal,
|
||||
@ -312,6 +313,7 @@ var _OpFuncTab = [256]func(*_Assembler, *_Instr) {
|
||||
_OP_check_char_0 : (*_Assembler)._asm_OP_check_char_0,
|
||||
_OP_dismatch_err : (*_Assembler)._asm_OP_dismatch_err,
|
||||
_OP_go_skip : (*_Assembler)._asm_OP_go_skip,
|
||||
_OP_skip_emtpy : (*_Assembler)._asm_OP_skip_empty,
|
||||
_OP_add : (*_Assembler)._asm_OP_add,
|
||||
_OP_check_empty : (*_Assembler)._asm_OP_check_empty,
|
||||
_OP_debug : (*_Assembler)._asm_OP_debug,
|
||||
@ -385,7 +387,7 @@ func (self *_Assembler) prologue() {
|
||||
|
||||
var (
|
||||
_REG_go = []obj.Addr { _ST, _VP, _IP, _IL, _IC }
|
||||
_REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC, _IL }
|
||||
_REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC }
|
||||
)
|
||||
|
||||
func (self *_Assembler) save(r ...obj.Addr) {
|
||||
@ -481,6 +483,7 @@ var (
|
||||
_V_stackOverflow = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow))))
|
||||
_I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError)))
|
||||
_I_json_MismatchTypeError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError)))
|
||||
_I_json_MismatchQuotedError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchQuotedError)))
|
||||
)
|
||||
|
||||
func (self *_Assembler) type_error() {
|
||||
@ -492,9 +495,9 @@ func (self *_Assembler) type_error() {
|
||||
func (self *_Assembler) mismatch_error() {
|
||||
self.Link(_LB_mismatch_error) // _type_error:
|
||||
self.Emit("MOVQ", _VAR_et, _ET) // MOVQ _VAR_et, ET
|
||||
self.Emit("MOVQ", _VAR_ic, _EP) // MOVQ _VAR_ic, EP
|
||||
self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchType, CX
|
||||
self.Emit("CMPQ", _ET, _CX) // CMPQ ET, CX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _EpOffset), _EP) // MOVQ stack.Ep, EP
|
||||
self.Sjmp("JE" , _LB_error) // JE _LB_error
|
||||
self.Emit("MOVQ", _ARG_sp, _AX)
|
||||
self.Emit("MOVQ", _ARG_sl, _BX)
|
||||
@ -600,6 +603,28 @@ func (self *_Assembler) _asm_OP_go_skip(p *_Instr) {
|
||||
self.Sjmp("JMP" , _LB_skip_one) // JMP _skip_one
|
||||
}
|
||||
|
||||
var _F_IndexByte = jit.Func(strings.IndexByte)
|
||||
|
||||
func (self *_Assembler) _asm_OP_skip_empty(p *_Instr) {
|
||||
// self.Byte(0xcc)
|
||||
self.call_sf(_F_skip_one) // CALL_SF skip_one
|
||||
// self.Byte(0xcc)
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v
|
||||
self.Emit("BTQ", jit.Imm(_F_disable_unknown), _ARG_fv)
|
||||
self.Xjmp("JNC", p.vi())
|
||||
self.Emit("LEAQ", jit.Sib(_IC, _AX, 1, 0), _BX)
|
||||
self.Emit("MOVQ", _BX, _ARG_sv_n)
|
||||
self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _AX)
|
||||
self.Emit("MOVQ", _AX, _ARG_sv_p)
|
||||
self.Emit("MOVQ", jit.Imm(':'), _CX)
|
||||
self.call_go(_F_IndexByte)
|
||||
// self.Byte(0xcc)
|
||||
self.Emit("TESTQ", _AX, _AX)
|
||||
// disallow unknown field
|
||||
self.Sjmp("JNS", _LB_field_error)
|
||||
}
|
||||
|
||||
func (self *_Assembler) skip_one() {
|
||||
self.Link(_LB_skip_one) // _skip:
|
||||
self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC
|
||||
@ -972,11 +997,13 @@ var (
|
||||
|
||||
var (
|
||||
_F_decodeJsonUnmarshaler obj.Addr
|
||||
_F_decodeJsonUnmarshalerQuoted obj.Addr
|
||||
_F_decodeTextUnmarshaler obj.Addr
|
||||
)
|
||||
|
||||
func init() {
|
||||
_F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler)
|
||||
_F_decodeJsonUnmarshalerQuoted = jit.Func(decodeJsonUnmarshalerQuoted)
|
||||
_F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler)
|
||||
}
|
||||
|
||||
@ -1057,18 +1084,18 @@ func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) {
|
||||
var (
|
||||
_F_skip_one = jit.Imm(int64(native.S_skip_one))
|
||||
_F_skip_array = jit.Imm(int64(native.S_skip_array))
|
||||
_F_skip_object = jit.Imm(int64(native.S_skip_object))
|
||||
_F_skip_number = jit.Imm(int64(native.S_skip_number))
|
||||
)
|
||||
|
||||
func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) {
|
||||
func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool, f obj.Addr) {
|
||||
self.call_sf(_F_skip_one) // CALL_SF skip_one
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v
|
||||
self.Emit("MOVQ", _IC, _VAR_ic) // store for mismatche error skip
|
||||
self.slice_from_r(_AX, 0) // SLICE_R AX, $0
|
||||
self.Emit("MOVQ" , _DI, _ARG_sv_p) // MOVQ DI, sv.p
|
||||
self.Emit("MOVQ" , _SI, _ARG_sv_n) // MOVQ SI, sv.n
|
||||
self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref) // UNMARSHAL json, ${t}, ${deref}
|
||||
self.unmarshal_func(t, f, deref) // UNMARSHAL json, ${t}, ${deref}
|
||||
}
|
||||
|
||||
func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) {
|
||||
@ -1103,7 +1130,19 @@ func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool)
|
||||
self.Emit("MOVQ" , _ARG_sv_n, _DI) // MOVQ sv.n, DI
|
||||
self.call_go(fn) // CALL_GO ${fn}
|
||||
self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET
|
||||
self.Sjmp("JNZ" , _LB_error) // JNZ _error
|
||||
if fn == _F_decodeJsonUnmarshalerQuoted {
|
||||
self.Sjmp("JZ" , "_unmarshal_func_end_{n}") // JZ _unmarshal_func_end_{n}
|
||||
self.Emit("MOVQ", _I_json_MismatchQuotedError, _CX) // MOVQ _I_json_MismatchQuotedError, CX
|
||||
self.Emit("CMPQ", _ET, _CX) // check if MismatchQuotedError
|
||||
self.Sjmp("JNE" , _LB_error) // JNE _error
|
||||
self.Emit("MOVQ", jit.Type(t), _CX) // store current type
|
||||
self.Emit("MOVQ", _CX, _VAR_et) // store current type as mismatched type
|
||||
self.Emit("MOVQ", _VAR_ic, _IC) // recover the pos at mismatched, continue to parse
|
||||
self.Emit("XORL", _ET, _ET) // clear ET
|
||||
self.Link("_unmarshal_func_end_{n}")
|
||||
} else {
|
||||
self.Sjmp("JNE" , _LB_error) // JNE _error
|
||||
}
|
||||
}
|
||||
|
||||
/** Dynamic Decoding Routine **/
|
||||
@ -1136,8 +1175,8 @@ func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
|
||||
self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchTypeError, CX
|
||||
self.Emit("CMPQ", _ET, _CX) // CMPQ ET, CX
|
||||
self.Sjmp("JNE", _LB_error) // JNE LB_error
|
||||
self.Emit("MOVQ", _EP, _VAR_ic) // MOVQ EP, VAR_ic
|
||||
self.Emit("MOVQ", _ET, _VAR_et) // MOVQ ET, VAR_et
|
||||
self.WriteRecNotAX(14, _EP, jit.Ptr(_ST, _EpOffset), false, false) // MOVQ EP, stack.Ep
|
||||
self.Link("_decode_dynamic_end_{n}")
|
||||
}
|
||||
|
||||
@ -1146,7 +1185,7 @@ func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) {
|
||||
var (
|
||||
_F_memequal = jit.Func(memequal)
|
||||
_F_memmove = jit.Func(memmove)
|
||||
_F_growslice = jit.Func(growslice)
|
||||
_F_growslice = jit.Func(rt.GrowSlice)
|
||||
_F_makeslice = jit.Func(makeslice)
|
||||
_F_makemap_small = jit.Func(makemap_small)
|
||||
_F_mapassign_fast64 = jit.Func(mapassign_fast64)
|
||||
@ -1698,12 +1737,6 @@ func (self *_Assembler) _asm_OP_slice_append(p *_Instr) {
|
||||
self.Link("_append_slice_end_{n}")
|
||||
}
|
||||
|
||||
func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) {
|
||||
self.call_sf(_F_skip_object) // CALL_SF skip_object
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v
|
||||
}
|
||||
|
||||
func (self *_Assembler) _asm_OP_object_next(_ *_Instr) {
|
||||
self.call_sf(_F_skip_one) // CALL_SF skip_one
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
@ -1774,11 +1807,19 @@ func (self *_Assembler) _asm_OP_struct_field(p *_Instr) {
|
||||
}
|
||||
|
||||
func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) {
|
||||
self.unmarshal_json(p.vt(), true)
|
||||
if iv := p.i64(); iv != 0 {
|
||||
self.unmarshal_json(p.vt(), true, _F_decodeJsonUnmarshalerQuoted)
|
||||
} else {
|
||||
self.unmarshal_json(p.vt(), true, _F_decodeJsonUnmarshaler)
|
||||
}
|
||||
}
|
||||
|
||||
func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) {
|
||||
self.unmarshal_json(p.vt(), false)
|
||||
if iv := p.i64(); iv != 0 {
|
||||
self.unmarshal_json(p.vt(), false, _F_decodeJsonUnmarshalerQuoted)
|
||||
} else {
|
||||
self.unmarshal_json(p.vt(), false, _F_decodeJsonUnmarshaler)
|
||||
}
|
||||
}
|
||||
|
||||
func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) {
|
@ -14,7 +14,7 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`encoding/json`
|
||||
@ -77,7 +77,6 @@ const (
|
||||
_OP_array_clear_p
|
||||
_OP_slice_init
|
||||
_OP_slice_append
|
||||
_OP_object_skip
|
||||
_OP_object_next
|
||||
_OP_struct_field
|
||||
_OP_unmarshal
|
||||
@ -97,6 +96,7 @@ const (
|
||||
_OP_check_char_0
|
||||
_OP_dismatch_err
|
||||
_OP_go_skip
|
||||
_OP_skip_emtpy
|
||||
_OP_add
|
||||
_OP_check_empty
|
||||
_OP_debug
|
||||
@ -155,7 +155,6 @@ var _OpNames = [256]string {
|
||||
_OP_array_skip : "array_skip",
|
||||
_OP_slice_init : "slice_init",
|
||||
_OP_slice_append : "slice_append",
|
||||
_OP_object_skip : "object_skip",
|
||||
_OP_object_next : "object_next",
|
||||
_OP_struct_field : "struct_field",
|
||||
_OP_unmarshal : "unmarshal",
|
||||
@ -271,6 +270,13 @@ func newInsVt(op _Op, vt reflect.Type) _Instr {
|
||||
}
|
||||
}
|
||||
|
||||
func newInsVtI(op _Op, vt reflect.Type, iv int) _Instr {
|
||||
return _Instr {
|
||||
u: packOp(op) | rt.PackInt(iv),
|
||||
p: unsafe.Pointer(rt.UnpackType(vt)),
|
||||
}
|
||||
}
|
||||
|
||||
func newInsVf(op _Op, vf *caching.FieldMap) _Instr {
|
||||
return _Instr {
|
||||
u: packOp(op),
|
||||
@ -452,6 +458,10 @@ func (self *_Program) rtt(op _Op, vt reflect.Type) {
|
||||
*self = append(*self, newInsVt(op, vt))
|
||||
}
|
||||
|
||||
func (self *_Program) rtti(op _Op, vt reflect.Type, iv int) {
|
||||
*self = append(*self, newInsVtI(op, vt, iv))
|
||||
}
|
||||
|
||||
func (self *_Program) fmv(op _Op, vf *caching.FieldMap) {
|
||||
*self = append(*self, newInsVf(op, vf))
|
||||
}
|
||||
@ -527,35 +537,54 @@ func (self *_Compiler) compile(vt reflect.Type) (ret _Program, err error) {
|
||||
return
|
||||
}
|
||||
|
||||
func (self *_Compiler) checkMarshaler(p *_Program, vt reflect.Type) bool {
|
||||
const (
|
||||
checkMarshalerFlags_quoted = 1
|
||||
)
|
||||
|
||||
func (self *_Compiler) checkMarshaler(p *_Program, vt reflect.Type, flags int, exec bool) bool {
|
||||
pt := reflect.PtrTo(vt)
|
||||
|
||||
/* check for `json.Unmarshaler` with pointer receiver */
|
||||
if pt.Implements(jsonUnmarshalerType) {
|
||||
p.rtt(_OP_unmarshal_p, pt)
|
||||
if exec {
|
||||
p.add(_OP_lspace)
|
||||
p.rtti(_OP_unmarshal_p, pt, flags)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/* check for `json.Unmarshaler` */
|
||||
if vt.Implements(jsonUnmarshalerType) {
|
||||
p.add(_OP_lspace)
|
||||
self.compileUnmarshalJson(p, vt)
|
||||
if exec {
|
||||
p.add(_OP_lspace)
|
||||
self.compileUnmarshalJson(p, vt, flags)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
if flags == checkMarshalerFlags_quoted {
|
||||
// text marshaler shouldn't be supported for quoted string
|
||||
return false
|
||||
}
|
||||
|
||||
/* check for `encoding.TextMarshaler` with pointer receiver */
|
||||
if pt.Implements(encodingTextUnmarshalerType) {
|
||||
p.add(_OP_lspace)
|
||||
self.compileUnmarshalTextPtr(p, pt)
|
||||
if exec {
|
||||
p.add(_OP_lspace)
|
||||
self.compileUnmarshalTextPtr(p, pt, flags)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/* check for `encoding.TextUnmarshaler` */
|
||||
if vt.Implements(encodingTextUnmarshalerType) {
|
||||
p.add(_OP_lspace)
|
||||
self.compileUnmarshalText(p, vt)
|
||||
if exec {
|
||||
p.add(_OP_lspace)
|
||||
self.compileUnmarshalText(p, vt, flags)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
@ -567,7 +596,7 @@ func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type) {
|
||||
return
|
||||
}
|
||||
|
||||
if self.checkMarshaler(p, vt) {
|
||||
if self.checkMarshaler(p, vt, 0, true) {
|
||||
return
|
||||
}
|
||||
|
||||
@ -690,7 +719,7 @@ func (self *_Compiler) compilePtr(p *_Program, sp int, et reflect.Type) {
|
||||
|
||||
/* dereference all the way down */
|
||||
for et.Kind() == reflect.Ptr {
|
||||
if self.checkMarshaler(p, et) {
|
||||
if self.checkMarshaler(p, et, 0, true) {
|
||||
return
|
||||
}
|
||||
et = et.Elem()
|
||||
@ -872,7 +901,24 @@ func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
|
||||
n := p.pc()
|
||||
p.add(_OP_is_null)
|
||||
|
||||
skip := self.checkIfSkip(p, vt, '{')
|
||||
j := p.pc()
|
||||
p.chr(_OP_check_char_0, '{')
|
||||
p.rtt(_OP_dismatch_err, vt)
|
||||
|
||||
/* special case for empty object */
|
||||
if len(fv) == 0 {
|
||||
p.pin(j)
|
||||
s := p.pc()
|
||||
p.add(_OP_skip_emtpy)
|
||||
p.pin(s)
|
||||
p.pin(n)
|
||||
return
|
||||
}
|
||||
|
||||
skip := p.pc()
|
||||
p.add(_OP_go_skip)
|
||||
p.pin(j)
|
||||
p.int(_OP_add, 1)
|
||||
|
||||
p.add(_OP_save)
|
||||
p.add(_OP_lspace)
|
||||
@ -890,11 +936,6 @@ func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
|
||||
p.chr(_OP_check_char, '}')
|
||||
p.chr(_OP_match_char, ',')
|
||||
|
||||
/* special case of an empty struct */
|
||||
if len(fv) == 0 {
|
||||
p.add(_OP_object_skip)
|
||||
goto end_of_object
|
||||
}
|
||||
|
||||
/* match the remaining fields */
|
||||
p.add(_OP_lspace)
|
||||
@ -930,7 +971,6 @@ func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
|
||||
p.int(_OP_goto, y0)
|
||||
}
|
||||
|
||||
end_of_object:
|
||||
p.pin(x)
|
||||
p.pin(y1)
|
||||
p.add(_OP_drop)
|
||||
@ -938,7 +978,22 @@ end_of_object:
|
||||
p.pin(skip)
|
||||
}
|
||||
|
||||
func (self *_Compiler) compileStructFieldStrUnmarshal(p *_Program, vt reflect.Type) {
|
||||
p.add(_OP_lspace)
|
||||
n0 := p.pc()
|
||||
p.add(_OP_is_null)
|
||||
self.checkMarshaler(p, vt, checkMarshalerFlags_quoted, true)
|
||||
p.pin(n0)
|
||||
}
|
||||
|
||||
func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
|
||||
// according to std, json.Unmarshaler should be called before stringize
|
||||
// see https://github.com/bytedance/sonic/issues/670
|
||||
if self.checkMarshaler(p, vt, checkMarshalerFlags_quoted, false) {
|
||||
self.compileStructFieldStrUnmarshal(p, vt)
|
||||
return
|
||||
}
|
||||
|
||||
n1 := -1
|
||||
ft := vt
|
||||
sv := false
|
||||
@ -1106,7 +1161,7 @@ func (self *_Compiler) compileUnmarshalEnd(p *_Program, vt reflect.Type, i int)
|
||||
p.pin(j)
|
||||
}
|
||||
|
||||
func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) {
|
||||
func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type, flags int) {
|
||||
i := p.pc()
|
||||
v := _OP_unmarshal
|
||||
p.add(_OP_is_null)
|
||||
@ -1117,11 +1172,11 @@ func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) {
|
||||
}
|
||||
|
||||
/* call the unmarshaler */
|
||||
p.rtt(v, vt)
|
||||
p.rtti(v, vt, flags)
|
||||
self.compileUnmarshalEnd(p, vt, i)
|
||||
}
|
||||
|
||||
func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) {
|
||||
func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type, iv int) {
|
||||
i := p.pc()
|
||||
v := _OP_unmarshal_text
|
||||
p.add(_OP_is_null)
|
||||
@ -1134,15 +1189,15 @@ func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) {
|
||||
}
|
||||
|
||||
/* call the unmarshaler */
|
||||
p.rtt(v, vt)
|
||||
p.rtti(v, vt, iv)
|
||||
self.compileUnmarshalEnd(p, vt, i)
|
||||
}
|
||||
|
||||
func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type) {
|
||||
func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type, iv int) {
|
||||
i := p.pc()
|
||||
p.add(_OP_is_null)
|
||||
p.chr(_OP_match_char, '"')
|
||||
p.rtt(_OP_unmarshal_text_p, vt)
|
||||
p.rtti(_OP_unmarshal_text_p, vt, iv)
|
||||
p.pin(i)
|
||||
}
|
||||
|
@ -14,7 +14,7 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`os`
|
140 vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go (generated, vendored, Normal file)
@ -0,0 +1,140 @@
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`unsafe`
|
||||
`encoding/json`
|
||||
`reflect`
|
||||
`runtime`
|
||||
|
||||
`github.com/bytedance/sonic/internal/decoder/consts`
|
||||
`github.com/bytedance/sonic/internal/decoder/errors`
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
`github.com/bytedance/sonic/utf8`
|
||||
`github.com/bytedance/sonic/option`
|
||||
)
|
||||
|
||||
type (
|
||||
MismatchTypeError = errors.MismatchTypeError
|
||||
SyntaxError = errors.SyntaxError
|
||||
)
|
||||
|
||||
const (
|
||||
_F_allow_control = consts.F_allow_control
|
||||
_F_copy_string = consts.F_copy_string
|
||||
_F_disable_unknown = consts.F_disable_unknown
|
||||
_F_disable_urc = consts.F_disable_urc
|
||||
_F_use_int64 = consts.F_use_int64
|
||||
_F_use_number = consts.F_use_number
|
||||
_F_no_validate_json = consts.F_no_validate_json
|
||||
_F_validate_string = consts.F_validate_string
|
||||
)
|
||||
|
||||
var (
|
||||
error_wrap = errors.ErrorWrap
|
||||
error_type = errors.ErrorType
|
||||
error_field = errors.ErrorField
|
||||
error_value = errors.ErrorValue
|
||||
error_mismatch = errors.ErrorMismatch
|
||||
stackOverflow = errors.StackOverflow
|
||||
)
|
||||
|
||||
|
||||
// Decode parses the JSON-encoded data from current position and stores the result
|
||||
// in the value pointed to by val.
|
||||
func Decode(s *string, i *int, f uint64, val interface{}) error {
|
||||
/* validate json if needed */
|
||||
if (f & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(*s){
|
||||
dbuf := utf8.CorrectWith(nil, rt.Str2Mem(*s), "\ufffd")
|
||||
*s = rt.Mem2Str(dbuf)
|
||||
}
|
||||
|
||||
vv := rt.UnpackEface(val)
|
||||
vp := vv.Value
|
||||
|
||||
/* check for nil type */
|
||||
if vv.Type == nil {
|
||||
return &json.InvalidUnmarshalError{}
|
||||
}
|
||||
|
||||
/* must be a non-nil pointer */
|
||||
if vp == nil || vv.Type.Kind() != reflect.Ptr {
|
||||
return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
|
||||
}
|
||||
|
||||
etp := rt.PtrElem(vv.Type)
|
||||
|
||||
/* check the defined pointer type for issue 379 */
|
||||
if vv.Type.IsNamed() {
|
||||
newp := vp
|
||||
etp = vv.Type
|
||||
vp = unsafe.Pointer(&newp)
|
||||
}
|
||||
|
||||
/* create a new stack, and call the decoder */
|
||||
sb := newStack()
|
||||
nb, err := decodeTypedPointer(*s, *i, etp, vp, sb, f)
|
||||
/* return the stack back */
|
||||
*i = nb
|
||||
freeStack(sb)
|
||||
|
||||
/* avoid GC ahead */
|
||||
runtime.KeepAlive(vv)
|
||||
return err
|
||||
}
|
||||
|
||||
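For orientation, a hedged sketch of how this entry point is driven. It is written as if it lived inside the jitdec package (the internal import path is not reachable from application code, which goes through the public sonic API instead), and the flag combination shown is only an example:

package jitdec

import (
    "fmt"

    "github.com/bytedance/sonic/internal/decoder/consts"
)

// exampleDecode is an illustrative caller, not part of the vendored file:
// s/i are the JSON source and cursor, f is the bit set built from
// consts.Options, and val must be a non-nil pointer.
func exampleDecode() {
    src := `{"name":"sonic","count":3}`
    pos := 0
    var out map[string]interface{}

    flags := uint64(consts.OptionCopyString | consts.OptionValidateString)
    if err := Decode(&src, &pos, flags, &out); err != nil {
        fmt.Println("decode failed:", err)
        return
    }
    fmt.Println(out, "consumed bytes:", pos)
}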
|
||||
// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
|
||||
// order to reduce the first-hit latency.
|
||||
//
|
||||
// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
|
||||
// a compile option to set the depth of recursive compile for the nested struct type.
|
||||
func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
|
||||
cfg := option.DefaultCompileOptions()
|
||||
for _, opt := range opts {
|
||||
opt(&cfg)
|
||||
}
|
||||
return pretouchRec(map[reflect.Type]bool{vt:true}, cfg)
|
||||
}
|
||||
|
||||
func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) {
|
||||
/* compile function */
|
||||
compiler := newCompiler().apply(opts)
|
||||
decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
|
||||
if pp, err := compiler.compile(_vt); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
as := newAssembler(pp)
|
||||
as.name = _vt.String()
|
||||
return as.Load(), nil
|
||||
}
|
||||
}
|
||||
|
||||
/* find or compile */
|
||||
vt := rt.UnpackType(_vt)
|
||||
if val := programCache.Get(vt); val != nil {
|
||||
return nil, nil
|
||||
} else if _, err := programCache.Compute(vt, decoder); err == nil {
|
||||
return compiler.rec, nil
|
||||
} else {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error {
|
||||
if opts.RecursiveDepth < 0 || len(vtm) == 0 {
|
||||
return nil
|
||||
}
|
||||
next := make(map[reflect.Type]bool)
|
||||
for vt := range(vtm) {
|
||||
sub, err := pretouchType(vt, opts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for svt := range(sub) {
|
||||
next[svt] = true
|
||||
}
|
||||
}
|
||||
opts.RecursiveDepth -= 1
|
||||
return pretouchRec(next, opts)
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
// +build go1.17,!go1.23
|
||||
// +build go1.17,!go1.24
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
@ -16,7 +16,7 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`encoding/json`
|
@ -1,4 +1,4 @@
|
||||
// +build go1.17,!go1.23
|
||||
// +build go1.17,!go1.24
|
||||
|
||||
//
|
||||
// Copyright 2021 ByteDance Inc.
|
@ -14,7 +14,7 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`sync`
|
||||
@ -36,6 +36,7 @@ const (
|
||||
_PtrBytes = _PTR_SIZE / 8
|
||||
_FsmOffset = (_MaxStack + 1) * _PtrBytes
|
||||
_DbufOffset = _FsmOffset + int64(unsafe.Sizeof(types.StateMachine{})) + types.MAX_RECURSE * _PtrBytes
|
||||
_EpOffset = _DbufOffset + _MaxDigitNums
|
||||
_StackSize = unsafe.Sizeof(_Stack{})
|
||||
)
|
||||
|
||||
@ -53,6 +54,7 @@ type _Stack struct {
|
||||
mm types.StateMachine
|
||||
vp [types.MAX_RECURSE]unsafe.Pointer
|
||||
dp [_MaxDigitNums]byte
|
||||
ep unsafe.Pointer
|
||||
}
|
||||
|
||||
type _Decoder func(
|
@ -14,7 +14,7 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`encoding`
|
||||
@ -39,6 +39,20 @@ func decodeJsonUnmarshaler(vv interface{}, s string) error {
|
||||
return vv.(json.Unmarshaler).UnmarshalJSON(rt.Str2Mem(s))
|
||||
}
|
||||
|
||||
// used to distinguish MismatchQuotedError from other mismatched-type errors, see issues #670 and #716
|
||||
type MismatchQuotedError struct {}
|
||||
|
||||
func (*MismatchQuotedError) Error() string {
|
||||
return "mismatch quoted"
|
||||
}
|
||||
|
||||
func decodeJsonUnmarshalerQuoted(vv interface{}, s string) error {
|
||||
if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' {
|
||||
return &MismatchQuotedError{}
|
||||
}
|
||||
return vv.(json.Unmarshaler).UnmarshalJSON(rt.Str2Mem(s[1:len(s)-1]))
|
||||
}
|
||||
|
||||
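decodeJsonUnmarshalerQuoted strips the outer quotes of a ",string"-encoded value before handing the payload to the field's UnmarshalJSON, and reports MismatchQuotedError when the value is not actually quoted. A minimal standalone sketch of that behavior (Celsius and unmarshalQuoted are illustrative names, not part of this package):

package main

import (
    "encoding/json"
    "errors"
    "fmt"
    "strconv"
)

// Celsius is a made-up json.Unmarshaler used only for this sketch.
type Celsius float64

func (c *Celsius) UnmarshalJSON(b []byte) error {
    f, err := strconv.ParseFloat(string(b), 64)
    if err != nil {
        return err
    }
    *c = Celsius(f)
    return nil
}

// unmarshalQuoted mirrors the helper above: the outer quotes of a ",string"
// encoded value are stripped before UnmarshalJSON sees the payload.
func unmarshalQuoted(v json.Unmarshaler, s string) error {
    if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' {
        return errors.New("mismatch quoted")
    }
    return v.UnmarshalJSON([]byte(s[1 : len(s)-1]))
}

func main() {
    var c Celsius
    fmt.Println(unmarshalQuoted(&c, `"21.5"`), c) // <nil> 21.5
    fmt.Println(unmarshalQuoted(&c, `21.5`))      // mismatch quoted
}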
func decodeTextUnmarshaler(vv interface{}, s string) error {
|
||||
return vv.(encoding.TextUnmarshaler).UnmarshalText(rt.Str2Mem(s))
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
// +build go1.16,!go1.20
|
||||
// +build go1.17,!go1.20
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
@ -16,7 +16,7 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`unsafe`
|
||||
@ -72,11 +72,6 @@ func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer
|
||||
|
||||
//go:noescape
|
||||
//go:linkname growslice runtime.growslice
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
|
||||
|
||||
//go:linkname makemap_small runtime.makemap_small
|
||||
func makemap_small() unsafe.Pointer
|
||||
|
@ -16,7 +16,7 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`unsafe`
|
||||
@ -72,11 +72,6 @@ func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer
|
||||
|
||||
//go:noescape
|
||||
//go:linkname growslice reflect.growslice
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
|
||||
|
||||
//go:linkname makemap_small runtime.makemap_small
|
||||
func makemap_small() unsafe.Pointer
|
||||
|
@ -14,7 +14,7 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`encoding`
|
@ -14,7 +14,7 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
package jitdec
|
||||
|
||||
import (
|
||||
`unsafe`
|
174 vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go (generated, vendored, Normal file)
@ -0,0 +1,174 @@
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
|
||||
caching "github.com/bytedance/sonic/internal/optcaching"
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
"github.com/bytedance/sonic/internal/resolver"
|
||||
)
|
||||
|
||||
const (
|
||||
_MAX_FIELDS = 50 // cutoff: structs with 50 or more fields are not inlined
|
||||
)
|
||||
|
||||
func (c *compiler) compileIntStringOption(vt reflect.Type) decFunc {
|
||||
switch vt.Size() {
|
||||
case 4:
|
||||
switch vt.Kind() {
|
||||
case reflect.Uint:
|
||||
fallthrough
|
||||
case reflect.Uintptr:
|
||||
return &u32StringDecoder{}
|
||||
case reflect.Int:
|
||||
return &i32StringDecoder{}
|
||||
}
|
||||
case 8:
|
||||
switch vt.Kind() {
|
||||
case reflect.Uint:
|
||||
fallthrough
|
||||
case reflect.Uintptr:
|
||||
return &u64StringDecoder{}
|
||||
case reflect.Int:
|
||||
return &i64StringDecoder{}
|
||||
}
|
||||
default:
|
||||
panic("not supported pointer size: " + fmt.Sprint(vt.Size()))
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func isInteger(vt reflect.Type) bool {
|
||||
switch vt.Kind() {
|
||||
case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint, reflect.Uintptr, reflect.Int: return true
|
||||
default: return false
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compiler) assertStringOptTypes(vt reflect.Type) {
|
||||
if c.depth > _CompileMaxDepth {
|
||||
panic(*stackOverflow)
|
||||
}
|
||||
|
||||
c.depth += 1
|
||||
defer func () {
|
||||
c.depth -= 1
|
||||
}()
|
||||
|
||||
if isInteger(vt) {
|
||||
return
|
||||
}
|
||||
|
||||
switch vt.Kind() {
|
||||
case reflect.String, reflect.Bool, reflect.Float32, reflect.Float64:
|
||||
return
|
||||
case reflect.Ptr: c.assertStringOptTypes(vt.Elem())
|
||||
default:
|
||||
panicForInvalidStrType(vt)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compiler) compileFieldStringOption(vt reflect.Type) decFunc {
|
||||
c.assertStringOptTypes(vt)
|
||||
unmDec := c.tryCompilePtrUnmarshaler(vt, true)
|
||||
if unmDec != nil {
|
||||
return unmDec
|
||||
}
|
||||
|
||||
switch vt.Kind() {
|
||||
case reflect.String:
|
||||
if vt == jsonNumberType {
|
||||
return &numberStringDecoder{}
|
||||
}
|
||||
return &strStringDecoder{}
|
||||
case reflect.Bool:
|
||||
return &boolStringDecoder{}
|
||||
case reflect.Int8:
|
||||
return &i8StringDecoder{}
|
||||
case reflect.Int16:
|
||||
return &i16StringDecoder{}
|
||||
case reflect.Int32:
|
||||
return &i32StringDecoder{}
|
||||
case reflect.Int64:
|
||||
return &i64StringDecoder{}
|
||||
case reflect.Uint8:
|
||||
return &u8StringDecoder{}
|
||||
case reflect.Uint16:
|
||||
return &u16StringDecoder{}
|
||||
case reflect.Uint32:
|
||||
return &u32StringDecoder{}
|
||||
case reflect.Uint64:
|
||||
return &u64StringDecoder{}
|
||||
case reflect.Float32:
|
||||
return &f32StringDecoder{}
|
||||
case reflect.Float64:
|
||||
return &f64StringDecoder{}
|
||||
case reflect.Uint:
|
||||
fallthrough
|
||||
case reflect.Uintptr:
|
||||
fallthrough
|
||||
case reflect.Int:
|
||||
return c.compileIntStringOption(vt)
|
||||
case reflect.Ptr:
|
||||
return &ptrStrDecoder{
|
||||
typ: rt.UnpackType(vt.Elem()),
|
||||
deref: c.compileFieldStringOption(vt.Elem()),
|
||||
}
|
||||
default:
|
||||
panicForInvalidStrType(vt)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compiler) compileStruct(vt reflect.Type) decFunc {
|
||||
c.enter(vt)
|
||||
defer c.exit(vt)
|
||||
if c.namedPtr {
|
||||
c.namedPtr = false
|
||||
return c.compileStructBody(vt)
|
||||
}
|
||||
|
||||
if c.depth >= c.opts.MaxInlineDepth + 1 || (c.counts > 0 && vt.NumField() >= _MAX_FIELDS) {
|
||||
return &recuriveDecoder{
|
||||
typ: rt.UnpackType(vt),
|
||||
}
|
||||
} else {
|
||||
return c.compileStructBody(vt)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compiler) compileStructBody(vt reflect.Type) decFunc {
|
||||
fv := resolver.ResolveStruct(vt)
|
||||
entries := make([]fieldEntry, 0, len(fv))
|
||||
|
||||
for _, f := range fv {
|
||||
var dec decFunc
|
||||
/* deal with field tag options */
|
||||
if f.Opts&resolver.F_stringize != 0 {
|
||||
dec = c.compileFieldStringOption(f.Type)
|
||||
} else {
|
||||
dec = c.compile(f.Type)
|
||||
}
|
||||
|
||||
/* deal with embedded pointer fields */
|
||||
if f.Path[0].Kind == resolver.F_deref {
|
||||
dec = &embeddedFieldPtrDecoder{
|
||||
field: f,
|
||||
fieldDec: dec,
|
||||
fieldName: f.Name,
|
||||
}
|
||||
}
|
||||
|
||||
entries = append(entries, fieldEntry{
|
||||
FieldMeta: f,
|
||||
fieldDec: dec,
|
||||
})
|
||||
}
|
||||
return &structDecoder{
|
||||
fieldMap: caching.NewFieldLookup(fv),
|
||||
fields: entries,
|
||||
structName: vt.Name(),
|
||||
typ: vt,
|
||||
}
|
||||
}
|
449 vendor/github.com/bytedance/sonic/internal/decoder/optdec/compiler.go (generated, vendored, Normal file)
@ -0,0 +1,449 @@
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"reflect"
|
||||
|
||||
"github.com/bytedance/sonic/option"
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
"github.com/bytedance/sonic/internal/caching"
|
||||
)
|
||||
|
||||
var (
|
||||
programCache = caching.CreateProgramCache()
|
||||
)
|
||||
|
||||
func findOrCompile(vt *rt.GoType) (decFunc, error) {
|
||||
makeDecoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
|
||||
ret, err := newCompiler().compileType(vt.Pack())
|
||||
return ret, err
|
||||
}
|
||||
if val := programCache.Get(vt); val != nil {
|
||||
return val.(decFunc), nil
|
||||
} else if ret, err := programCache.Compute(vt, makeDecoder); err == nil {
|
||||
return ret.(decFunc), nil
|
||||
} else {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
type compiler struct {
|
||||
visited map[reflect.Type]bool
|
||||
depth int
|
||||
counts int
|
||||
opts option.CompileOptions
|
||||
namedPtr bool
|
||||
}
|
||||
|
||||
func newCompiler() *compiler {
|
||||
return &compiler{
|
||||
visited: make(map[reflect.Type]bool),
|
||||
opts: option.DefaultCompileOptions(),
|
||||
}
|
||||
}
|
||||
|
||||
func (self *compiler) apply(opts option.CompileOptions) *compiler {
|
||||
self.opts = opts
|
||||
return self
|
||||
}
|
||||
|
||||
const _CompileMaxDepth = 4096
|
||||
|
||||
func (c *compiler) enter(vt reflect.Type) {
|
||||
c.visited[vt] = true
|
||||
c.depth += 1
|
||||
|
||||
if c.depth > _CompileMaxDepth {
|
||||
panic(*stackOverflow)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compiler) exit(vt reflect.Type) {
|
||||
c.visited[vt] = false
|
||||
c.depth -= 1
|
||||
}
|
||||
|
||||
func (c *compiler) compileInt(vt reflect.Type) decFunc {
|
||||
switch vt.Size() {
|
||||
case 4:
|
||||
switch vt.Kind() {
|
||||
case reflect.Uint:
|
||||
fallthrough
|
||||
case reflect.Uintptr:
|
||||
return &u32Decoder{}
|
||||
case reflect.Int:
|
||||
return &i32Decoder{}
|
||||
}
|
||||
case 8:
|
||||
switch vt.Kind() {
|
||||
case reflect.Uint:
|
||||
fallthrough
|
||||
case reflect.Uintptr:
|
||||
return &u64Decoder{}
|
||||
case reflect.Int:
|
||||
return &i64Decoder{}
|
||||
}
|
||||
default:
|
||||
panic("not supported pointer size: " + fmt.Sprint(vt.Size()))
|
||||
}
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func (c *compiler) rescue(ep *error) {
|
||||
if val := recover(); val != nil {
|
||||
if err, ok := val.(error); ok {
|
||||
*ep = err
|
||||
} else {
|
||||
panic(val)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compiler) compileType(vt reflect.Type) (rt decFunc, err error) {
|
||||
defer c.rescue(&err)
|
||||
rt = c.compile(vt)
|
||||
return rt, err
|
||||
}
|
||||
|
||||
func (c *compiler) compile(vt reflect.Type) decFunc {
|
||||
if c.visited[vt] {
|
||||
return &recuriveDecoder{
|
||||
typ: rt.UnpackType(vt),
|
||||
}
|
||||
}
|
||||
|
||||
dec := c.tryCompilePtrUnmarshaler(vt, false)
|
||||
if dec != nil {
|
||||
return dec
|
||||
}
|
||||
|
||||
return c.compileBasic(vt)
|
||||
}
|
||||
|
||||
func (c *compiler) compileBasic(vt reflect.Type) decFunc {
|
||||
defer func() {
|
||||
c.counts += 1
|
||||
}()
|
||||
switch vt.Kind() {
|
||||
case reflect.Bool:
|
||||
return &boolDecoder{}
|
||||
case reflect.Int8:
|
||||
return &i8Decoder{}
|
||||
case reflect.Int16:
|
||||
return &i16Decoder{}
|
||||
case reflect.Int32:
|
||||
return &i32Decoder{}
|
||||
case reflect.Int64:
|
||||
return &i64Decoder{}
|
||||
case reflect.Uint8:
|
||||
return &u8Decoder{}
|
||||
case reflect.Uint16:
|
||||
return &u16Decoder{}
|
||||
case reflect.Uint32:
|
||||
return &u32Decoder{}
|
||||
case reflect.Uint64:
|
||||
return &u64Decoder{}
|
||||
case reflect.Float32:
|
||||
return &f32Decoder{}
|
||||
case reflect.Float64:
|
||||
return &f64Decoder{}
|
||||
case reflect.Uint:
|
||||
fallthrough
|
||||
case reflect.Uintptr:
|
||||
fallthrough
|
||||
case reflect.Int:
|
||||
return c.compileInt(vt)
|
||||
case reflect.String:
|
||||
return c.compileString(vt)
|
||||
case reflect.Array:
|
||||
return c.compileArray(vt)
|
||||
case reflect.Interface:
|
||||
return c.compileInterface(vt)
|
||||
case reflect.Map:
|
||||
return c.compileMap(vt)
|
||||
case reflect.Ptr:
|
||||
return c.compilePtr(vt)
|
||||
case reflect.Slice:
|
||||
return c.compileSlice(vt)
|
||||
case reflect.Struct:
|
||||
return c.compileStruct(vt)
|
||||
default:
|
||||
panic(&json.UnmarshalTypeError{Type: vt})
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compiler) compilePtr(vt reflect.Type) decFunc {
|
||||
c.enter(vt)
|
||||
defer c.exit(vt)
|
||||
|
||||
// special logic for named pointer types, see issue 379
|
||||
if reflect.PtrTo(vt.Elem()) != vt {
|
||||
c.namedPtr = true
|
||||
return &ptrDecoder{
|
||||
typ: rt.UnpackType(vt.Elem()),
|
||||
deref: c.compileBasic(vt.Elem()),
|
||||
}
|
||||
}
|
||||
|
||||
return &ptrDecoder{
|
||||
typ: rt.UnpackType(vt.Elem()),
|
||||
deref: c.compile(vt.Elem()),
|
||||
}
|
||||
}
|
||||
|
||||
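The named-pointer branch above fires for defined pointer types such as `type P *Inner`: for those, reflect.PtrTo(vt.Elem()) is the ordinary *Inner and differs from P itself, which is the condition compilePtr keys on for issue 379. A small hedged illustration (Inner and P are made-up types):

package main

import (
    "fmt"
    "reflect"
)

type Inner struct{ A int }

// P is a defined (named) pointer type: reflect.PtrTo(Inner) != P,
// which is exactly what the compilePtr check above detects.
type P *Inner

func main() {
    named := reflect.TypeOf(P(nil))
    plain := reflect.TypeOf((*Inner)(nil))

    fmt.Println(reflect.PtrTo(named.Elem()) == named) // false: named pointer type
    fmt.Println(reflect.PtrTo(plain.Elem()) == plain) // true: ordinary *Inner
}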
func (c *compiler) compileArray(vt reflect.Type) decFunc {
|
||||
c.enter(vt)
|
||||
defer c.exit(vt)
|
||||
return &arrayDecoder{
|
||||
len: vt.Len(),
|
||||
elemType: rt.UnpackType(vt.Elem()),
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
typ: vt,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compiler) compileString(vt reflect.Type) decFunc {
|
||||
if vt == jsonNumberType {
|
||||
return &numberDecoder{}
|
||||
}
|
||||
return &stringDecoder{}
|
||||
|
||||
}
|
||||
|
||||
func (c *compiler) tryCompileSliceUnmarshaler(vt reflect.Type) decFunc {
|
||||
pt := reflect.PtrTo(vt.Elem())
|
||||
if pt.Implements(jsonUnmarshalerType) {
|
||||
return &sliceDecoder{
|
||||
elemType: rt.UnpackType(vt.Elem()),
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
typ: vt,
|
||||
}
|
||||
}
|
||||
|
||||
if pt.Implements(encodingTextUnmarshalerType) {
|
||||
return &sliceDecoder{
|
||||
elemType: rt.UnpackType(vt.Elem()),
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
typ: vt,
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *compiler) compileSlice(vt reflect.Type) decFunc {
|
||||
c.enter(vt)
|
||||
defer c.exit(vt)
|
||||
|
||||
// Common slice types use a specialized decoder to avoid extra function calls
|
||||
et := rt.UnpackType(vt.Elem())
|
||||
|
||||
/* first checking `[]byte` */
|
||||
if et.Kind() == reflect.Uint8 /* []byte */ {
|
||||
return c.compileSliceBytes(vt)
|
||||
}
|
||||
|
||||
dec := c.tryCompileSliceUnmarshaler(vt)
|
||||
if dec != nil {
|
||||
return dec
|
||||
}
|
||||
|
||||
if vt == reflect.TypeOf([]interface{}{}) {
|
||||
return &sliceEfaceDecoder{}
|
||||
}
|
||||
if et.IsInt32() {
|
||||
return &sliceI32Decoder{}
|
||||
}
|
||||
if et.IsInt64() {
|
||||
return &sliceI64Decoder{}
|
||||
}
|
||||
if et.IsUint32() {
|
||||
return &sliceU32Decoder{}
|
||||
}
|
||||
if et.IsUint64() {
|
||||
return &sliceU64Decoder{}
|
||||
}
|
||||
if et.Kind() == reflect.String {
|
||||
return &sliceStringDecoder{}
|
||||
}
|
||||
|
||||
return &sliceDecoder{
|
||||
elemType: rt.UnpackType(vt.Elem()),
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
typ: vt,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compiler) compileSliceBytes(vt reflect.Type) decFunc {
|
||||
ep := reflect.PtrTo(vt.Elem())
|
||||
|
||||
if ep.Implements(jsonUnmarshalerType) {
|
||||
return &sliceBytesUnmarshalerDecoder{
|
||||
elemType: rt.UnpackType(vt.Elem()),
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
typ: vt,
|
||||
}
|
||||
}
|
||||
|
||||
if ep.Implements(encodingTextUnmarshalerType) {
|
||||
return &sliceBytesUnmarshalerDecoder{
|
||||
elemType: rt.UnpackType(vt.Elem()),
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
typ: vt,
|
||||
}
|
||||
}
|
||||
|
||||
return &sliceBytesDecoder{}
|
||||
}
|
||||
|
||||
func (c *compiler) compileInterface(vt reflect.Type) decFunc {
|
||||
c.enter(vt)
|
||||
defer c.exit(vt)
|
||||
if vt.NumMethod() == 0 {
|
||||
return &efaceDecoder{}
|
||||
}
|
||||
|
||||
if vt.Implements(jsonUnmarshalerType) {
|
||||
return &unmarshalJSONDecoder{
|
||||
typ: rt.UnpackType(vt),
|
||||
}
|
||||
}
|
||||
|
||||
if vt.Implements(encodingTextUnmarshalerType) {
|
||||
return &unmarshalTextDecoder{
|
||||
typ: rt.UnpackType(vt),
|
||||
}
|
||||
}
|
||||
|
||||
return &ifaceDecoder{
|
||||
typ: rt.UnpackType(vt),
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compiler) compileMap(vt reflect.Type) decFunc {
|
||||
c.enter(vt)
|
||||
defer c.exit(vt)
|
||||
// check for a key unmarshaler first
|
||||
decKey := tryCompileKeyUnmarshaler(vt)
|
||||
if decKey != nil {
|
||||
return &mapDecoder{
|
||||
mapType: rt.MapType(rt.UnpackType(vt)),
|
||||
keyDec: decKey,
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
}
|
||||
}
|
||||
|
||||
// The most common map types use a specialized decoder to avoid extra function calls
|
||||
if vt == reflect.TypeOf(map[string]interface{}{}) {
|
||||
return &mapEfaceDecoder{}
|
||||
} else if vt == reflect.TypeOf(map[string]string{}) {
|
||||
return &mapStringDecoder{}
|
||||
}
|
||||
|
||||
// Some common integer map later
|
||||
mt := rt.MapType(rt.UnpackType(vt))
|
||||
|
||||
if mt.Key.Kind() == reflect.String {
|
||||
return &mapStrKeyDecoder{
|
||||
mapType: mt,
|
||||
assign: rt.GetMapStrAssign(vt),
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
}
|
||||
}
|
||||
|
||||
if mt.Key.IsInt64() {
|
||||
return &mapI64KeyDecoder{
|
||||
mapType: mt,
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
assign: rt.GetMap64Assign(vt),
|
||||
}
|
||||
}
|
||||
|
||||
if mt.Key.IsInt32() {
|
||||
return &mapI32KeyDecoder{
|
||||
mapType: mt,
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
assign: rt.GetMap32Assign(vt),
|
||||
}
|
||||
}
|
||||
|
||||
if mt.Key.IsUint64() {
|
||||
return &mapU64KeyDecoder{
|
||||
mapType: mt,
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
assign: rt.GetMap64Assign(vt),
|
||||
}
|
||||
}
|
||||
|
||||
if mt.Key.IsUint32() {
|
||||
return &mapU32KeyDecoder{
|
||||
mapType: mt,
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
assign: rt.GetMap32Assign(vt),
|
||||
}
|
||||
}
|
||||
|
||||
// Generic map
|
||||
return &mapDecoder{
|
||||
mapType: mt,
|
||||
keyDec: c.compileMapKey(vt),
|
||||
elemDec: c.compile(vt.Elem()),
|
||||
}
|
||||
}
|
||||
|
||||
func tryCompileKeyUnmarshaler(vt reflect.Type) decKey {
|
||||
pt := reflect.PtrTo(vt.Key())
|
||||
|
||||
/* check for `encoding.TextUnmarshaler` with pointer receiver */
|
||||
if pt.Implements(encodingTextUnmarshalerType) {
|
||||
return decodeKeyTextUnmarshaler
|
||||
}
|
||||
|
||||
/* map keys implementing `json.Unmarshaler` are not supported */
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *compiler) compileMapKey(vt reflect.Type) decKey {
|
||||
switch vt.Key().Kind() {
|
||||
case reflect.Int8:
|
||||
return decodeKeyI8
|
||||
case reflect.Int16:
|
||||
return decodeKeyI16
|
||||
case reflect.Uint8:
|
||||
return decodeKeyU8
|
||||
case reflect.Uint16:
|
||||
return decodeKeyU16
|
||||
default:
|
||||
panic(&json.UnmarshalTypeError{Type: vt})
|
||||
}
|
||||
}
|
||||
|
||||
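tryCompileKeyUnmarshaler accepts map keys whose pointer type implements encoding.TextUnmarshaler, the same convention encoding/json uses, while json.Unmarshaler keys are rejected. A hedged sketch of a key type that would take this path (Key is made up; encoding/json is used only to keep the example self-contained):

package main

import (
    "encoding/json"
    "fmt"
    "strings"
)

// Key implements encoding.TextUnmarshaler on its pointer receiver, so it can
// be used as a map key; this mirrors the decodeKeyTextUnmarshaler path above.
type Key struct{ Region, Name string }

func (k *Key) UnmarshalText(b []byte) error {
    parts := strings.SplitN(string(b), "/", 2)
    if len(parts) != 2 {
        return fmt.Errorf("bad key %q", b)
    }
    k.Region, k.Name = parts[0], parts[1]
    return nil
}

func main() {
    var m map[Key]int
    if err := json.Unmarshal([]byte(`{"eu/alpha":1,"us/beta":2}`), &m); err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println(m[Key{"eu", "alpha"}]) // 1
}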
// maybe vt is a named type, and not a pointer receiver, see issue 379
|
||||
func (c *compiler) tryCompilePtrUnmarshaler(vt reflect.Type, strOpt bool) decFunc {
|
||||
pt := reflect.PtrTo(vt)
|
||||
|
||||
/* check for `json.Unmarshaler` with pointer receiver */
|
||||
if pt.Implements(jsonUnmarshalerType) {
|
||||
return &unmarshalJSONDecoder{
|
||||
typ: rt.UnpackType(pt),
|
||||
strOpt: strOpt,
|
||||
}
|
||||
}
|
||||
|
||||
/* check for `encoding.TextMarshaler` with pointer receiver */
|
||||
if pt.Implements(encodingTextUnmarshalerType) {
|
||||
/* encoding.TextUnmarshaler does not support the ",string" tag */
|
||||
if strOpt {
|
||||
panicForInvalidStrType(vt)
|
||||
}
|
||||
return &unmarshalTextDecoder{
|
||||
typ: rt.UnpackType(pt),
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func panicForInvalidStrType(vt reflect.Type) {
|
||||
panic(error_type(rt.UnpackType(vt)))
|
||||
}
|
60 vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go (generated, vendored, Normal file)
@ -0,0 +1,60 @@
|
||||
package optdec
|
||||
|
||||
import "math"
|
||||
|
||||
/*
|
||||
Copied from sonic-rs
|
||||
// JSON Value Type
|
||||
const NULL: u64 = 0;
|
||||
const BOOL: u64 = 2;
|
||||
const FALSE: u64 = BOOL;
|
||||
const TRUE: u64 = (1 << 3) | BOOL;
|
||||
const NUMBER: u64 = 3;
|
||||
const UINT: u64 = NUMBER;
|
||||
const SINT: u64 = (1 << 3) | NUMBER;
|
||||
const REAL: u64 = (2 << 3) | NUMBER;
|
||||
const RAWNUMBER: u64 = (3 << 3) | NUMBER;
|
||||
const STRING: u64 = 4;
|
||||
const STRING_COMMON: u64 = STRING;
|
||||
const STRING_HASESCAPED: u64 = (1 << 3) | STRING;
|
||||
const OBJECT: u64 = 6;
|
||||
const ARRAY: u64 = 7;
|
||||
|
||||
/// JSON Type Mask
|
||||
const POS_MASK: u64 = (!0) << 32;
|
||||
const POS_BITS: u64 = 32;
|
||||
const TYPE_MASK: u64 = 0xFF;
|
||||
const TYPE_BITS: u64 = 8;
|
||||
|
||||
*/
|
||||
|
||||
const (
|
||||
// BasicType: 3 bits
|
||||
KNull = 0 // xxxxx000
|
||||
KBool = 2 // xxxxx010
|
||||
KNumber = 3 // xxxxx011
|
||||
KString = 4 // xxxxx100
|
||||
KRaw = 5 // xxxxx101
|
||||
KObject = 6 // xxxxx110
|
||||
KArray = 7 // xxxxx111
|
||||
|
||||
// SubType: 2 bits
|
||||
KFalse = (0 << 3) | KBool // xxx00_010, 2
|
||||
KTrue = (1 << 3) | KBool // xxx01_010, 10
|
||||
KUint = (0 << 3) | KNumber // xxx00_011, 3
|
||||
KSint = (1 << 3) | KNumber // xxx01_011, 11
|
||||
KReal = (2 << 3) | KNumber // xxx10_011, 19
|
||||
KRawNumber = (3 << 3) | KNumber // xxx11_011, 27
|
||||
KStringCommon = KString // xxx00_100, 4
|
||||
KStringEscaped = (1 << 3) | KString // xxx01_100, 12
|
||||
)
|
||||
|
||||
const (
|
||||
PosMask = math.MaxUint64 << 32
|
||||
PosBits = 32
|
||||
TypeMask = 0xFF
|
||||
TypeBits = 8
|
||||
|
||||
ConLenMask = uint64(math.MaxUint32)
|
||||
ConLenBits = 32
|
||||
)
|
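Assuming the packing quoted from sonic-rs above (low 8 bits hold the type tag, high 32 bits hold the position), a packed node word would be split as in the sketch below; this is illustrative only, and the real optdec node accessors may differ in detail:

package main

import "fmt"

const (
    KTrue    = 10   // (1 << 3) | KBool, from the table above
    TypeMask = 0xFF // low 8 bits: type tag
    PosBits  = 32   // high 32 bits: position
)

// unpack splits a packed node word into its type tag and position,
// assuming the layout described in the comment above.
func unpack(v uint64) (typ uint64, pos uint64) {
    return v & TypeMask, v >> PosBits
}

func main() {
    packed := uint64(42)<<PosBits | KTrue // hypothetical node at offset 42 holding `true`
    typ, pos := unpack(packed)
    fmt.Println(typ == KTrue, pos) // true 42
}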
3 vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go (generated, vendored, Normal file)
@ -0,0 +1,3 @@
|
||||
package optdec
|
||||
|
||||
type context = Context
|
160 vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go (generated, vendored, Normal file)
@ -0,0 +1,160 @@
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"encoding/json"
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
"github.com/bytedance/sonic/option"
|
||||
"github.com/bytedance/sonic/internal/decoder/errors"
|
||||
"github.com/bytedance/sonic/internal/decoder/consts"
|
||||
)
|
||||
|
||||
|
||||
type (
|
||||
MismatchTypeError = errors.MismatchTypeError
|
||||
SyntaxError = errors.SyntaxError
|
||||
)
|
||||
|
||||
const (
|
||||
_F_allow_control = consts.F_allow_control
|
||||
_F_copy_string = consts.F_copy_string
|
||||
_F_disable_unknown = consts.F_disable_unknown
|
||||
_F_disable_urc = consts.F_disable_urc
|
||||
_F_use_int64 = consts.F_use_int64
|
||||
_F_use_number = consts.F_use_number
|
||||
_F_validate_string = consts.F_validate_string
|
||||
)
|
||||
|
||||
type Options = consts.Options
|
||||
|
||||
const (
|
||||
OptionUseInt64 = consts.OptionUseInt64
|
||||
OptionUseNumber = consts.OptionUseNumber
|
||||
OptionUseUnicodeErrors = consts.OptionUseUnicodeErrors
|
||||
OptionDisableUnknown = consts.OptionDisableUnknown
|
||||
OptionCopyString = consts.OptionCopyString
|
||||
OptionValidateString = consts.OptionValidateString
|
||||
)
|
||||
|
||||
|
||||
func Decode(s *string, i *int, f uint64, val interface{}) error {
|
||||
vv := rt.UnpackEface(val)
|
||||
vp := vv.Value
|
||||
|
||||
/* check for nil type */
|
||||
if vv.Type == nil {
|
||||
return &json.InvalidUnmarshalError{}
|
||||
}
|
||||
|
||||
/* must be a non-nil pointer */
|
||||
if vp == nil || vv.Type.Kind() != reflect.Ptr {
|
||||
return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
|
||||
}
|
||||
|
||||
etp := rt.PtrElem(vv.Type)
|
||||
|
||||
/* check the defined pointer type for issue 379 */
|
||||
if vv.Type.IsNamed() {
|
||||
newp := vp
|
||||
etp = vv.Type
|
||||
vp = unsafe.Pointer(&newp)
|
||||
}
|
||||
|
||||
dec, err := findOrCompile(etp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
/* parse into document */
|
||||
ctx, err := NewContext(*s, *i, uint64(f), etp)
|
||||
defer ctx.Delete()
|
||||
if ctx.Parser.Utf8Inv {
|
||||
*s = ctx.Parser.Json
|
||||
}
|
||||
if err != nil {
|
||||
goto fix_error;
|
||||
}
|
||||
err = dec.FromDom(vp, ctx.Root(), &ctx)
|
||||
|
||||
fix_error:
|
||||
err = fix_error(*s, *i, err)
|
||||
|
||||
// update position at last
|
||||
*i += ctx.Parser.Pos()
|
||||
return err
|
||||
}
|
||||
|
||||
func fix_error(json string, pos int, err error) error {
|
||||
if e, ok := err.(SyntaxError); ok {
|
||||
return SyntaxError{
|
||||
Pos: int(e.Pos) + pos,
|
||||
Src: json,
|
||||
Msg: e.Msg,
|
||||
}
|
||||
}
|
||||
|
||||
if e, ok := err.(MismatchTypeError); ok {
|
||||
return &MismatchTypeError {
|
||||
Pos: int(e.Pos) + pos,
|
||||
Src: json,
|
||||
Type: e.Type,
|
||||
}
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
|
||||
// order to reduce the first-hit latency.
|
||||
//
|
||||
// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
|
||||
// a compile option to set the depth of recursive compile for the nested struct type.
|
||||
func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
|
||||
cfg := option.DefaultCompileOptions()
|
||||
for _, opt := range opts {
|
||||
opt(&cfg)
|
||||
}
|
||||
return pretouchRec(map[reflect.Type]bool{vt:true}, cfg)
|
||||
}
|
||||
|
||||
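In application code this internal Pretouch is normally reached through the public sonic.Pretouch wrapper; a hedged usage sketch (MyStruct is made up):

package main

import (
    "fmt"
    "reflect"

    "github.com/bytedance/sonic"
    "github.com/bytedance/sonic/option"
)

type MyStruct struct {
    Name  string     `json:"name"`
    Inner []MyStruct `json:"inner"`
}

func main() {
    // Warm up the compiled (de)coder for MyStruct ahead of time so the first
    // Unmarshal does not pay the compile cost; recursion depth 2 also covers
    // the nested []MyStruct element type.
    err := sonic.Pretouch(reflect.TypeOf(MyStruct{}), option.WithCompileRecursiveDepth(2))
    fmt.Println(err)
}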
func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) {
|
||||
/* compile function */
|
||||
compiler := newCompiler().apply(opts)
|
||||
decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
|
||||
if f, err := compiler.compileType(_vt); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return f, nil
|
||||
}
|
||||
}
|
||||
|
||||
/* find or compile */
|
||||
vt := rt.UnpackType(_vt)
|
||||
if val := programCache.Get(vt); val != nil {
|
||||
return nil, nil
|
||||
} else if _, err := programCache.Compute(vt, decoder); err == nil {
|
||||
return compiler.visited, nil
|
||||
} else {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error {
|
||||
if opts.RecursiveDepth < 0 || len(vtm) == 0 {
|
||||
return nil
|
||||
}
|
||||
next := make(map[reflect.Type]bool)
|
||||
for vt := range(vtm) {
|
||||
sub, err := pretouchType(vt, opts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for svt := range(sub) {
|
||||
next[svt] = true
|
||||
}
|
||||
}
|
||||
opts.RecursiveDepth -= 1
|
||||
return pretouchRec(next, opts)
|
||||
}
|
73 vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go (generated, vendored, Normal file)
@ -0,0 +1,73 @@
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"reflect"
|
||||
"strconv"
|
||||
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
)
|
||||
|
||||
/** JIT Error Helpers **/
|
||||
|
||||
var stackOverflow = &json.UnsupportedValueError{
|
||||
Str: "Value nesting too deep",
|
||||
Value: reflect.ValueOf("..."),
|
||||
}
|
||||
|
||||
func error_type(vt *rt.GoType) error {
|
||||
return &json.UnmarshalTypeError{Type: vt.Pack()}
|
||||
}
|
||||
|
||||
func error_mismatch(node Node, ctx *context, typ reflect.Type) error {
|
||||
return MismatchTypeError{
|
||||
Pos: node.Position(),
|
||||
Src: ctx.Parser.Json,
|
||||
Type: typ,
|
||||
}
|
||||
}
|
||||
|
||||
func newUnmatched(pos int, vt *rt.GoType) error {
|
||||
return MismatchTypeError{
|
||||
Pos: pos,
|
||||
Src: "",
|
||||
Type: vt.Pack(),
|
||||
}
|
||||
}
|
||||
|
||||
func error_field(name string) error {
|
||||
return errors.New("json: unknown field " + strconv.Quote(name))
|
||||
}
|
||||
|
||||
func error_value(value string, vtype reflect.Type) error {
|
||||
return &json.UnmarshalTypeError{
|
||||
Type: vtype,
|
||||
Value: value,
|
||||
}
|
||||
}
|
||||
|
||||
func error_syntax(pos int, src string, msg string) error {
|
||||
return SyntaxError{
|
||||
Pos: pos,
|
||||
Src: src,
|
||||
Msg: msg,
|
||||
}
|
||||
}
|
||||
|
281 vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go (generated, vendored, Normal file)
@ -0,0 +1,281 @@
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"math"
|
||||
"unsafe"
|
||||
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
"github.com/bytedance/sonic/internal/resolver"
|
||||
)
|
||||
|
||||
type decFunc interface {
|
||||
FromDom(vp unsafe.Pointer, node Node, ctx *context) error
|
||||
}
|
||||
|
||||
type ptrDecoder struct {
|
||||
typ *rt.GoType
|
||||
deref decFunc
|
||||
}
|
||||
|
||||
// Pointer Value is allocated in the Caller
|
||||
func (d *ptrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*unsafe.Pointer)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
if *(*unsafe.Pointer)(vp) == nil {
|
||||
*(*unsafe.Pointer)(vp) = rt.Mallocgc(d.typ.Size, d.typ, true)
|
||||
}
|
||||
|
||||
return d.deref.FromDom(*(*unsafe.Pointer)(vp), node, ctx)
|
||||
}
|
||||
|
||||
type embeddedFieldPtrDecoder struct {
|
||||
field resolver.FieldMeta
|
||||
fieldDec decFunc
|
||||
fieldName string
|
||||
}
|
||||
|
||||
// Pointer Value is allocated in the Caller
|
||||
func (d *embeddedFieldPtrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
// seek into the pointer
|
||||
vp = unsafe.Pointer(uintptr(vp) - uintptr(d.field.Path[0].Size))
|
||||
for _, f := range d.field.Path {
|
||||
deref := rt.UnpackType(f.Type)
|
||||
vp = unsafe.Pointer(uintptr(vp) + f.Size)
|
||||
if f.Kind == resolver.F_deref {
|
||||
if *(*unsafe.Pointer)(vp) == nil {
|
||||
*(*unsafe.Pointer)(vp) = rt.Mallocgc(deref.Size, deref, true)
|
||||
}
|
||||
vp = *(*unsafe.Pointer)(vp)
|
||||
}
|
||||
}
|
||||
return d.fieldDec.FromDom(vp, node, ctx)
|
||||
}
|
||||
|
||||
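embeddedFieldPtrDecoder walks the resolved field path and allocates any nil embedded pointers before decoding into the promoted field, matching encoding/json's behavior for fields promoted through embedded pointers. A hedged sketch of the situation it handles (Base and Wrapper are made up; encoding/json keeps the example self-contained):

package main

import (
    "encoding/json"
    "fmt"
)

type Base struct {
    Name string `json:"name"`
}

// Wrapper promotes Base's fields through an embedded pointer; when "name" is
// decoded, the nil *Base has to be allocated first, which is what
// embeddedFieldPtrDecoder does on the optdec path.
type Wrapper struct {
    *Base
    Count int `json:"count"`
}

func main() {
    var w Wrapper
    if err := json.Unmarshal([]byte(`{"name":"sonic","count":3}`), &w); err != nil {
        fmt.Println(err)
        return
    }
    fmt.Println(w.Base != nil, w.Name, w.Count) // true sonic 3
}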
type i8Decoder struct{}
|
||||
|
||||
func (d *i8Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsI64(ctx)
|
||||
if !ok || ret > math.MaxInt8 || ret < math.MinInt8 {
|
||||
return error_mismatch(node, ctx, int8Type)
|
||||
}
|
||||
|
||||
*(*int8)(vp) = int8(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type i16Decoder struct{}
|
||||
|
||||
func (d *i16Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsI64(ctx)
|
||||
if !ok || ret > math.MaxInt16 || ret < math.MinInt16 {
|
||||
return error_mismatch(node, ctx, int16Type)
|
||||
}
|
||||
|
||||
*(*int16)(vp) = int16(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type i32Decoder struct{}
|
||||
|
||||
func (d *i32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsI64(ctx)
|
||||
if !ok || ret > math.MaxInt32 || ret < math.MinInt32 {
|
||||
return error_mismatch(node, ctx, int32Type)
|
||||
}
|
||||
|
||||
*(*int32)(vp) = int32(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type i64Decoder struct{}
|
||||
|
||||
func (d *i64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsI64(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, int64Type)
|
||||
}
|
||||
|
||||
*(*int64)(vp) = int64(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type u8Decoder struct{}
|
||||
|
||||
func (d *u8Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsU64(ctx)
|
||||
if !ok || ret > math.MaxUint8 {
|
||||
err := error_mismatch(node, ctx, uint8Type)
|
||||
return err
|
||||
}
|
||||
|
||||
*(*uint8)(vp) = uint8(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type u16Decoder struct{}
|
||||
|
||||
func (d *u16Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsU64(ctx)
|
||||
if !ok || ret > math.MaxUint16 {
|
||||
return error_mismatch(node, ctx, uint16Type)
|
||||
}
|
||||
*(*uint16)(vp) = uint16(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type u32Decoder struct{}
|
||||
|
||||
func (d *u32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsU64(ctx)
|
||||
if !ok || ret > math.MaxUint32 {
|
||||
return error_mismatch(node, ctx, uint32Type)
|
||||
}
|
||||
|
||||
*(*uint32)(vp) = uint32(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type u64Decoder struct{}
|
||||
|
||||
func (d *u64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsU64(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, uint64Type)
|
||||
}
|
||||
|
||||
*(*uint64)(vp) = uint64(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type f32Decoder struct{}
|
||||
|
||||
func (d *f32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsF64(ctx)
|
||||
if !ok || ret > math.MaxFloat32 || ret < -math.MaxFloat32 {
|
||||
return error_mismatch(node, ctx, float32Type)
|
||||
}
|
||||
|
||||
*(*float32)(vp) = float32(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type f64Decoder struct{}
|
||||
|
||||
func (d *f64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsF64(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, float64Type)
|
||||
}
|
||||
|
||||
*(*float64)(vp) = float64(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type boolDecoder struct {
|
||||
}
|
||||
|
||||
func (d *boolDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsBool()
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, boolType)
|
||||
}
|
||||
|
||||
*(*bool)(vp) = bool(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type stringDecoder struct {
|
||||
}
|
||||
|
||||
func (d *stringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, ok := node.AsStr(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, stringType)
|
||||
}
|
||||
*(*string)(vp) = ret
|
||||
return nil
|
||||
}
|
||||
|
||||
type numberDecoder struct {
|
||||
}
|
||||
|
||||
func (d *numberDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
num, ok := node.AsNumber(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, jsonNumberType)
|
||||
}
|
||||
*(*json.Number)(vp) = num
|
||||
return nil
|
||||
}
|
||||
|
||||
type recuriveDecoder struct {
|
||||
typ *rt.GoType
|
||||
}
|
||||
|
||||
func (d *recuriveDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
dec, err := findOrCompile(d.typ)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return dec.FromDom(vp, node, ctx)
|
||||
}
|
110 vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go generated vendored Normal file
@@ -0,0 +1,110 @@
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strconv"
|
||||
|
||||
"github.com/bytedance/sonic/internal/native"
|
||||
"github.com/bytedance/sonic/internal/utils"
|
||||
"github.com/bytedance/sonic/internal/native/types"
|
||||
)
|
||||
|
||||
|
||||
func SkipNumberFast(json string, start int) (int, bool) {
|
||||
// find the end of the number; it was already parsed by the native code, so it is always valid
|
||||
pos := start
|
||||
for pos < len(json) && json[pos] != ']' && json[pos] != '}' && json[pos] != ',' {
|
||||
if json[pos] >= '0' && json[pos] <= '9' || json[pos] == '.' || json[pos] == '-' || json[pos] == '+' || json[pos] == 'e' || json[pos] == 'E' {
|
||||
pos += 1
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// if no number was found, return false
|
||||
if pos == start {
|
||||
return pos, false
|
||||
}
|
||||
return pos, true
|
||||
}
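An illustrative example test (a hypothetical file placed in the same package) of what SkipNumberFast returns: the index just past the number, stopping at the first delimiter, and false when no number characters were consumed.

package optdec

import "fmt"

func ExampleSkipNumberFast() {
	end, ok := SkipNumberFast(`123.45, "next"`, 0)
	fmt.Println(end, ok) // index of the ',' delimiter
	// Output: 6 true
}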
|
||||
|
||||
|
||||
func isSpace(c byte) bool {
|
||||
return c == ' ' || c == '\t' || c == '\n' || c == '\r'
|
||||
}
|
||||
|
||||
// ValidNumberFast reports whether raw contains exactly one valid JSON number
|
||||
func ValidNumberFast(raw string) bool {
|
||||
ret := utils.SkipNumber(raw, 0)
|
||||
if ret < 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
// check trailing chars
|
||||
if ret < len(raw) {
return false
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func SkipOneFast2(json string, pos *int) (int, error) {
|
||||
// skip one JSON value; it was already parsed in sonic-cpp, so it is always valid
|
||||
start := native.SkipOneFast(&json, pos)
|
||||
if start < 0 {
|
||||
return -1, error_syntax(*pos, json, types.ParsingError(-start).Error())
|
||||
}
|
||||
return start, nil
|
||||
}
|
||||
|
||||
func SkipOneFast(json string, pos int) (string, error) {
|
||||
// skip one JSON value; it was already parsed in sonic-cpp, so it is always valid
|
||||
start := native.SkipOneFast(&json, &pos)
|
||||
if start < 0 {
|
||||
// TODO: more detailed error code
|
||||
return "", error_syntax(pos, json, types.ParsingError(-start).Error())
|
||||
}
|
||||
return json[start:pos], nil
|
||||
}
|
||||
|
||||
func ParseI64(raw string) (int64, error) {
|
||||
i64, err := strconv.ParseInt(raw, 10, 64)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return i64, nil
|
||||
}
|
||||
|
||||
func ParseBool(raw string) (bool, error) {
|
||||
var b bool
|
||||
err := json.Unmarshal([]byte(raw), &b)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return b, nil
|
||||
}
|
||||
|
||||
func ParseU64(raw string) (uint64, error) {
|
||||
u64, err := strconv.ParseUint(raw, 10, 64)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return u64, nil
|
||||
}
|
||||
|
||||
func ParseF64(raw string) (float64, error) {
|
||||
f64, err := strconv.ParseFloat(raw, 64)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return f64, nil
|
||||
}
|
||||
|
||||
func Unquote(raw string) (string, error) {
|
||||
var u string
|
||||
err := json.Unmarshal([]byte(raw), &u)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return u, nil
|
||||
}
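The parse helpers above are thin wrappers over encoding/json and strconv; a small illustrative example test (hypothetical, placed in the same package) of Unquote and ParseF64:

package optdec

import "fmt"

func ExampleUnquote() {
	s, _ := Unquote(`"a\u00e9b"`) // decodes the JSON escape
	f, _ := ParseF64("1e3")
	fmt.Println(s, f)
	// Output: aéb 1000
}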
|
169 vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go generated vendored Normal file
@@ -0,0 +1,169 @@
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"encoding/json"
|
||||
"unsafe"
|
||||
"reflect"
|
||||
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
)
|
||||
|
||||
type efaceDecoder struct {
|
||||
}
|
||||
|
||||
func (d *efaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*interface{})(vp) = interface{}(nil)
|
||||
return nil
|
||||
}
|
||||
|
||||
eface := *(*rt.GoEface)(vp)
|
||||
|
||||
// not pointer type, or nil pointer, or *interface{}
|
||||
if eface.Value == nil || eface.Type.Kind() != reflect.Ptr || rt.PtrElem(eface.Type) == anyType {
|
||||
ret, err := node.AsEface(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
*(*interface{})(vp) = ret
|
||||
return nil
|
||||
}
|
||||
|
||||
etp := rt.PtrElem(eface.Type)
|
||||
vp = eface.Value
|
||||
|
||||
/* check the defined pointer type for issue 379 */
|
||||
if eface.Type.IsNamed() {
|
||||
newp := vp
|
||||
etp = eface.Type
|
||||
vp = unsafe.Pointer(&newp)
|
||||
}
|
||||
|
||||
dec, err := findOrCompile(etp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return dec.FromDom(vp, node, ctx)
|
||||
}
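The branch above that keeps decoding through an already-populated pointer mirrors, as far as I read it, the behaviour of encoding/json for interfaces holding a non-nil pointer; a standalone sketch of that reference behaviour:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	n := 1
	var v interface{} = &n
	// The interface already holds a non-nil *int, so decoding writes
	// through the existing pointer instead of replacing the value.
	_ = json.Unmarshal([]byte("42"), &v)
	fmt.Println(n) // prints 42
}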
|
||||
|
||||
type ifaceDecoder struct {
|
||||
typ *rt.GoType
|
||||
}
|
||||
|
||||
func (d *ifaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*unsafe.Pointer)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
iface := *(*rt.GoIface)(vp)
|
||||
if iface.Itab == nil {
|
||||
return error_type(d.typ)
|
||||
}
|
||||
|
||||
vt := iface.Itab.Vt
|
||||
|
||||
// not pointer type, or nil pointer, or *interface{}
|
||||
if vp == nil || vt.Kind() != reflect.Ptr || rt.PtrElem(vt) == anyType {
|
||||
ret, err := node.AsEface(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
*(*interface{})(vp) = ret
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
etp := rt.PtrElem(vt)
|
||||
vp = iface.Value
|
||||
|
||||
/* check the defined pointer type for issue 379 */
|
||||
if vt.IsNamed() {
|
||||
newp := vp
|
||||
etp = vt
|
||||
vp = unsafe.Pointer(&newp)
|
||||
}
|
||||
|
||||
dec, err := findOrCompile(etp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return dec.FromDom(vp, node, ctx)
|
||||
}
|
||||
|
||||
type unmarshalTextDecoder struct {
|
||||
typ *rt.GoType
|
||||
}
|
||||
|
||||
func (d *unmarshalTextDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*unsafe.Pointer)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
txt, ok := node.AsStringText(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.typ.Pack())
|
||||
}
|
||||
|
||||
v := *(*interface{})(unsafe.Pointer(&rt.GoEface{
|
||||
Type: d.typ,
|
||||
Value: vp,
|
||||
}))
|
||||
|
||||
// fast path
|
||||
if u, ok := v.(encoding.TextUnmarshaler); ok {
|
||||
return u.UnmarshalText(txt)
|
||||
}
|
||||
|
||||
// slow path
|
||||
rv := reflect.ValueOf(v)
|
||||
if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
|
||||
return u.UnmarshalText(txt)
|
||||
}
|
||||
|
||||
return error_type(d.typ)
|
||||
}
|
||||
|
||||
type unmarshalJSONDecoder struct {
|
||||
typ *rt.GoType
|
||||
strOpt bool
|
||||
}
|
||||
|
||||
func (d *unmarshalJSONDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
v := *(*interface{})(unsafe.Pointer(&rt.GoEface{
|
||||
Type: d.typ,
|
||||
Value: vp,
|
||||
}))
|
||||
|
||||
var input []byte
|
||||
if d.strOpt && node.IsNull() {
|
||||
input = []byte("null")
|
||||
} else if d.strOpt {
|
||||
s, ok := node.AsStringText(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.typ.Pack())
|
||||
}
|
||||
input = s
|
||||
} else {
|
||||
input = []byte(node.AsRaw(ctx))
|
||||
}
|
||||
|
||||
// fast path
|
||||
if u, ok := v.(json.Unmarshaler); ok {
|
||||
return u.UnmarshalJSON((input))
|
||||
}
|
||||
|
||||
// slow path
|
||||
rv := reflect.ValueOf(v)
|
||||
if u, ok := rv.Interface().(json.Unmarshaler); ok {
|
||||
return u.UnmarshalJSON(input)
|
||||
}
|
||||
|
||||
return error_type(d.typ)
|
||||
}
|
430 vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go generated vendored Normal file
@@ -0,0 +1,430 @@
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"encoding/json"
|
||||
"math"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
)
|
||||
|
||||
/** Decoder for most common map types: map[string]interface{}, map[string]string **/
|
||||
|
||||
type mapEfaceDecoder struct {
|
||||
}
|
||||
|
||||
func (d *mapEfaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*map[string]interface{})(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
return node.AsMapEface(ctx, vp)
|
||||
}
|
||||
|
||||
type mapStringDecoder struct {
|
||||
}
|
||||
|
||||
func (d *mapStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*map[string]string)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
return node.AsMapString(ctx, vp)
|
||||
}
|
||||
|
||||
/** Decoder for map with string key **/
|
||||
|
||||
type mapStrKeyDecoder struct {
|
||||
mapType *rt.GoMapType
|
||||
elemDec decFunc
|
||||
assign rt.MapStrAssign
|
||||
typ reflect.Type
|
||||
}
|
||||
|
||||
func (d *mapStrKeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*unsafe.Pointer)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
obj, ok := node.AsObj()
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.mapType.Pack())
|
||||
}
|
||||
|
||||
// allocate map
|
||||
m := *(*unsafe.Pointer)(vp)
|
||||
if m == nil {
|
||||
m = rt.Makemap(&d.mapType.GoType, obj.Len())
|
||||
}
|
||||
|
||||
var gerr error
|
||||
next := obj.Children()
|
||||
for i := 0; i < obj.Len(); i++ {
|
||||
keyn := NewNode(next)
|
||||
key, _ := keyn.AsStr(ctx)
|
||||
|
||||
valn := NewNode(PtrOffset(next, 1))
|
||||
valp := d.assign(d.mapType, m, key)
|
||||
err := d.elemDec.FromDom(valp, valn, ctx)
|
||||
if gerr == nil && err != nil {
|
||||
gerr = err
|
||||
}
|
||||
next = valn.Next()
|
||||
}
|
||||
|
||||
*(*unsafe.Pointer)(vp) = m
|
||||
return gerr
|
||||
}
|
||||
|
||||
/** Decoder for map with int32 or int64 key **/
|
||||
|
||||
type mapI32KeyDecoder struct {
|
||||
mapType *rt.GoMapType
|
||||
elemDec decFunc
|
||||
assign rt.Map32Assign
|
||||
}
|
||||
|
||||
func (d *mapI32KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*unsafe.Pointer)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
obj, ok := node.AsObj()
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.mapType.Pack())
|
||||
}
|
||||
|
||||
// allocate map
|
||||
m := *(*unsafe.Pointer)(vp)
|
||||
if m == nil {
|
||||
m = rt.Makemap(&d.mapType.GoType, obj.Len())
|
||||
}
|
||||
|
||||
next := obj.Children()
|
||||
var gerr error
|
||||
for i := 0; i < obj.Len(); i++ {
|
||||
keyn := NewNode(next)
|
||||
k, ok := keyn.ParseI64(ctx)
|
||||
if !ok || k > math.MaxInt32 || k < math.MinInt32 {
|
||||
if gerr == nil {
|
||||
gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
|
||||
}
|
||||
valn := NewNode(PtrOffset(next, 1))
|
||||
next = valn.Next()
|
||||
continue
|
||||
}
|
||||
|
||||
key := int32(k)
|
||||
ku32 := *(*uint32)(unsafe.Pointer(&key))
|
||||
valn := NewNode(PtrOffset(next, 1))
|
||||
valp := d.assign(d.mapType, m, ku32)
|
||||
err := d.elemDec.FromDom(valp, valn, ctx)
|
||||
if gerr == nil && err != nil {
|
||||
gerr = err
|
||||
}
|
||||
|
||||
next = valn.Next()
|
||||
}
|
||||
|
||||
*(*unsafe.Pointer)(vp) = m
|
||||
return gerr
|
||||
}
|
||||
|
||||
type mapI64KeyDecoder struct {
|
||||
mapType *rt.GoMapType
|
||||
elemDec decFunc
|
||||
assign rt.Map64Assign
|
||||
}
|
||||
|
||||
func (d *mapI64KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*unsafe.Pointer)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
obj, ok := node.AsObj()
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.mapType.Pack())
|
||||
}
|
||||
|
||||
// allocate map
|
||||
m := *(*unsafe.Pointer)(vp)
|
||||
if m == nil {
|
||||
m = rt.Makemap(&d.mapType.GoType, obj.Len())
|
||||
}
|
||||
|
||||
var gerr error
|
||||
next := obj.Children()
|
||||
for i := 0; i < obj.Len(); i++ {
|
||||
keyn := NewNode(next)
|
||||
key, ok := keyn.ParseI64(ctx)
|
||||
|
||||
if !ok {
|
||||
if gerr == nil {
|
||||
gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
|
||||
}
|
||||
valn := NewNode(PtrOffset(next, 1))
|
||||
next = valn.Next()
|
||||
continue
|
||||
}
|
||||
|
||||
ku64 := *(*uint64)(unsafe.Pointer(&key))
|
||||
valn := NewNode(PtrOffset(next, 1))
|
||||
valp := d.assign(d.mapType, m, ku64)
|
||||
err := d.elemDec.FromDom(valp, valn, ctx)
|
||||
if gerr == nil && err != nil {
|
||||
gerr = err
|
||||
}
|
||||
next = valn.Next()
|
||||
}
|
||||
|
||||
*(*unsafe.Pointer)(vp) = m
|
||||
return gerr
|
||||
}
|
||||
|
||||
/** Decoder for map with uint32 or uint64 key **/
|
||||
|
||||
type mapU32KeyDecoder struct {
|
||||
mapType *rt.GoMapType
|
||||
elemDec decFunc
|
||||
assign rt.Map32Assign
|
||||
}
|
||||
|
||||
func (d *mapU32KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*unsafe.Pointer)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
obj, ok := node.AsObj()
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.mapType.Pack())
|
||||
}
|
||||
|
||||
// allocate map
|
||||
m := *(*unsafe.Pointer)(vp)
|
||||
if m == nil {
|
||||
m = rt.Makemap(&d.mapType.GoType, obj.Len())
|
||||
}
|
||||
|
||||
var gerr error
|
||||
next := obj.Children()
|
||||
for i := 0; i < obj.Len(); i++ {
|
||||
keyn := NewNode(next)
|
||||
k, ok := keyn.ParseU64(ctx)
|
||||
if !ok || k > math.MaxUint32 {
|
||||
if gerr == nil {
|
||||
gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
|
||||
}
|
||||
valn := NewNode(PtrOffset(next, 1))
|
||||
next = valn.Next()
|
||||
continue
|
||||
}
|
||||
|
||||
key := uint32(k)
|
||||
valn := NewNode(PtrOffset(next, 1))
|
||||
valp := d.assign(d.mapType, m, key)
|
||||
err := d.elemDec.FromDom(valp, valn, ctx)
|
||||
if gerr == nil && err != nil {
|
||||
gerr = err
|
||||
}
|
||||
next = valn.Next()
|
||||
}
|
||||
|
||||
*(*unsafe.Pointer)(vp) = m
|
||||
return gerr
|
||||
}
|
||||
|
||||
type mapU64KeyDecoder struct {
|
||||
mapType *rt.GoMapType
|
||||
elemDec decFunc
|
||||
assign rt.Map64Assign
|
||||
}
|
||||
|
||||
func (d *mapU64KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*unsafe.Pointer)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
obj, ok := node.AsObj()
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.mapType.Pack())
|
||||
}
|
||||
// allocate map
|
||||
m := *(*unsafe.Pointer)(vp)
|
||||
if m == nil {
|
||||
m = rt.Makemap(&d.mapType.GoType, obj.Len())
|
||||
}
|
||||
|
||||
var gerr error
|
||||
next := obj.Children()
|
||||
for i := 0; i < obj.Len(); i++ {
|
||||
keyn := NewNode(next)
|
||||
key, ok := keyn.ParseU64(ctx)
|
||||
if !ok {
|
||||
if gerr == nil {
|
||||
gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
|
||||
}
|
||||
valn := NewNode(PtrOffset(next, 1))
|
||||
next = valn.Next()
|
||||
continue
|
||||
}
|
||||
|
||||
valn := NewNode(PtrOffset(next, 1))
|
||||
valp := d.assign(d.mapType, m, key)
|
||||
err := d.elemDec.FromDom(valp, valn, ctx)
|
||||
if gerr == nil && err != nil {
|
||||
gerr = err
|
||||
}
|
||||
next = valn.Next()
|
||||
}
|
||||
|
||||
*(*unsafe.Pointer)(vp) = m
|
||||
return gerr
|
||||
}
|
||||
|
||||
/** Decoder for generic cases */
|
||||
|
||||
type decKey func(dec *mapDecoder, raw string, ctx *context) (interface{}, error)
|
||||
|
||||
func decodeKeyU8(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
|
||||
key, err := Unquote(raw)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret, err := ParseU64(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret > math.MaxUint8 {
|
||||
return nil, error_value(key, dec.mapType.Key.Pack())
|
||||
}
|
||||
return uint8(ret), nil
|
||||
}
|
||||
|
||||
func decodeKeyU16(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
|
||||
key, err := Unquote(raw)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret, err := ParseU64(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret > math.MaxUint16 {
|
||||
return nil, error_value(key, dec.mapType.Key.Pack())
|
||||
}
|
||||
return uint16(ret), nil
|
||||
}
|
||||
|
||||
func decodeKeyI8(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
|
||||
key, err := Unquote(raw)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret, err := ParseI64(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret > math.MaxInt8 || ret < math.MinInt8 {
|
||||
return nil, error_value(key, dec.mapType.Key.Pack())
|
||||
}
|
||||
return int8(ret), nil
|
||||
}
|
||||
|
||||
func decodeKeyI16(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
|
||||
key, err := Unquote(raw)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret, err := ParseI64(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ret > math.MaxInt16 || ret < math.MinInt16 {
|
||||
return nil, error_value(key, dec.mapType.Key.Pack())
|
||||
}
|
||||
return int16(ret), nil
|
||||
}
|
||||
|
||||
func decodeKeyJSONUnmarshaler(dec *mapDecoder, raw string, _ *context) (interface{}, error) {
|
||||
ret := reflect.New(dec.mapType.Key.Pack()).Interface()
|
||||
err := ret.(json.Unmarshaler).UnmarshalJSON([]byte(raw))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func decodeKeyTextUnmarshaler(dec *mapDecoder, raw string, ctx *context) (interface{}, error) {
|
||||
key, err := Unquote(raw)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret := reflect.New(dec.mapType.Key.Pack()).Interface()
|
||||
err = ret.(encoding.TextUnmarshaler).UnmarshalText([]byte(key))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
type mapDecoder struct {
|
||||
mapType *rt.GoMapType
|
||||
keyDec decKey
|
||||
elemDec decFunc
|
||||
}
|
||||
|
||||
func (d *mapDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*unsafe.Pointer)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
obj, ok := node.AsObj()
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.mapType.Pack())
|
||||
}
|
||||
|
||||
// allocate map
|
||||
m := *(*unsafe.Pointer)(vp)
|
||||
if m == nil {
|
||||
m = rt.Makemap(&d.mapType.GoType, obj.Len())
|
||||
}
|
||||
|
||||
next := obj.Children()
|
||||
var gerr error
|
||||
for i := 0; i < obj.Len(); i++ {
|
||||
keyn := NewNode(next)
|
||||
raw := keyn.AsRaw(ctx)
|
||||
key, err := d.keyDec(d, raw, ctx)
|
||||
if err != nil {
|
||||
if gerr == nil {
|
||||
gerr = error_mismatch(keyn, ctx, d.mapType.Pack())
|
||||
}
|
||||
valn := NewNode(PtrOffset(next, 1))
|
||||
next = valn.Next()
|
||||
continue
|
||||
}
|
||||
|
||||
valn := NewNode(PtrOffset(next, 1))
|
||||
keyp := rt.UnpackEface(key).Value
|
||||
valp := rt.Mapassign(d.mapType, m, keyp)
|
||||
err = d.elemDec.FromDom(valp, valn, ctx)
|
||||
if gerr == nil && err != nil {
|
||||
gerr = err
|
||||
}
|
||||
|
||||
next = valn.Next()
|
||||
}
|
||||
|
||||
*(*unsafe.Pointer)(vp) = m
|
||||
return gerr
|
||||
}
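A standalone sketch of the key-decoding pattern used by the decKey functions above (decodeInt8Key is a hypothetical name; encoding/json and strconv stand in for Unquote/ParseI64): unquote the raw key, parse it, then range-check it for the target key type.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

func decodeInt8Key(raw string) (int8, error) {
	var key string
	if err := json.Unmarshal([]byte(raw), &key); err != nil { // unquote
		return 0, err
	}
	v, err := strconv.ParseInt(key, 10, 8) // parse with int8 range check
	if err != nil {
		return 0, err
	}
	return int8(v), nil
}

func main() {
	k, err := decodeInt8Key(`"127"`)
	fmt.Println(k, err) // 127 <nil>
	_, err = decodeInt8Key(`"128"`)
	fmt.Println(err != nil) // true: out of int8 range
}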
|
269 vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go generated vendored Normal file
@@ -0,0 +1,269 @@
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"sync"
|
||||
|
||||
"github.com/bytedance/sonic/internal/native"
|
||||
"github.com/bytedance/sonic/internal/native/types"
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
"github.com/bytedance/sonic/utf8"
|
||||
)
|
||||
|
||||
|
||||
type ErrorCode int
|
||||
|
||||
const (
|
||||
SONIC_OK = 0;
|
||||
SONIC_CONTROL_CHAR = 1;
|
||||
SONIC_INVALID_ESCAPED = 2;
|
||||
SONIC_INVALID_NUM = 3;
|
||||
SONIC_FLOAT_INF = 4;
|
||||
SONIC_EOF = 5;
|
||||
SONIC_INVALID_CHAR = 6;
|
||||
SONIC_EXPECT_KEY = 7;
|
||||
SONIC_EXPECT_COLON = 8;
|
||||
SONIC_EXPECT_OBJ_COMMA_OR_END = 9;
|
||||
SONIC_EXPECT_ARR_COMMA_OR_END = 10;
|
||||
SONIC_VISIT_FAILED = 11;
|
||||
SONIC_INVALID_ESCAPED_UTF = 12;
|
||||
SONIC_INVALID_LITERAL = 13;
|
||||
SONIC_STACK_OVERFLOW = 14;
|
||||
)
|
||||
|
||||
var ParsingErrors = []string{
|
||||
SONIC_OK : "ok",
|
||||
SONIC_CONTROL_CHAR : "control chars in string",
|
||||
SONIC_INVALID_ESCAPED : "invalid escaped chars in string",
|
||||
SONIC_INVALID_NUM : "invalid number",
|
||||
SONIC_FLOAT_INF : "float infinity",
|
||||
SONIC_EOF : "eof",
|
||||
SONIC_INVALID_CHAR : "invalid chars",
|
||||
SONIC_EXPECT_KEY : "expect a json key",
|
||||
SONIC_EXPECT_COLON : "expect a `:`",
|
||||
SONIC_EXPECT_OBJ_COMMA_OR_END : "expect a `,` or `}`",
|
||||
SONIC_EXPECT_ARR_COMMA_OR_END : "expect a `,` or `]`",
|
||||
SONIC_VISIT_FAILED : "failed in json visitor",
|
||||
SONIC_INVALID_ESCAPED_UTF : "invalid escaped unicodes",
|
||||
SONIC_INVALID_LITERAL : "invalid literal(true/false/null)",
|
||||
SONIC_STACK_OVERFLOW : "json exceeds max depth 4096, causing stack overflow",
|
||||
}
|
||||
|
||||
func (code ErrorCode) Error() string {
|
||||
return ParsingErrors[code]
|
||||
}
|
||||
|
||||
type node struct {
|
||||
typ uint64
|
||||
val uint64
|
||||
}
|
||||
|
||||
// should be consistent with native/parser.c
|
||||
type _nospaceBlock struct {
|
||||
_ [8]byte
|
||||
_ [8]byte
|
||||
}
|
||||
|
||||
// should be consistent with native/parser.c
|
||||
type nodeBuf struct {
|
||||
ncur uintptr
|
||||
parent int64
|
||||
depth uint64
|
||||
nstart uintptr
|
||||
nend uintptr
|
||||
stat jsonStat
|
||||
}
|
||||
|
||||
func (self *nodeBuf) init(nodes []node) {
|
||||
self.ncur = uintptr(unsafe.Pointer(&nodes[0]))
|
||||
self.nstart = self.ncur
|
||||
self.nend = self.ncur + uintptr(cap(nodes)) * unsafe.Sizeof(node{})
|
||||
self.parent = -1
|
||||
}
|
||||
|
||||
// should be consistent with native/parser.c
|
||||
type Parser struct {
|
||||
Json string
|
||||
padded []byte
|
||||
nodes []node
|
||||
dbuf []byte
|
||||
backup []node
|
||||
|
||||
options uint64
|
||||
// JSON cursor
|
||||
start uintptr
|
||||
cur uintptr
|
||||
end uintptr
|
||||
_nbk _nospaceBlock
|
||||
|
||||
// node buffer cursor
|
||||
nbuf nodeBuf
|
||||
Utf8Inv bool
|
||||
isEface bool
|
||||
}
|
||||
|
||||
// only needed when parsing non-empty objects/arrays.
|
||||
type jsonStat struct {
|
||||
object uint32
|
||||
array uint32
|
||||
str uint32
|
||||
number uint32
|
||||
array_elems uint32
|
||||
object_keys uint32
|
||||
max_depth uint32
|
||||
}
|
||||
|
||||
|
||||
var (
|
||||
defaultJsonPaddedCap uintptr = 1 << 20 // 1 Mb
|
||||
defaultNodesCap uintptr = (1 << 20) / unsafe.Sizeof(node{}) // 1 Mb
|
||||
)
|
||||
|
||||
var parsePool sync.Pool = sync.Pool {
|
||||
New: func () interface{} {
|
||||
return &Parser{
|
||||
options: 0,
|
||||
padded: make([]byte, 0, defaultJsonPaddedCap),
|
||||
nodes: make([]node, defaultNodesCap, defaultNodesCap),
|
||||
dbuf: make([]byte, types.MaxDigitNums, types.MaxDigitNums),
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
var padding string = "x\"x\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
|
||||
|
||||
func newParser(data string, pos int, opt uint64) *Parser {
|
||||
p := parsePool.Get().(*Parser)
|
||||
|
||||
/* validate json if needed */
|
||||
if (opt & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(data){
|
||||
dbuf := utf8.CorrectWith(nil, rt.Str2Mem(data[pos:]), "\ufffd")
|
||||
dbuf = append(dbuf, padding...)
|
||||
p.Json = rt.Mem2Str(dbuf[:len(dbuf) - len(padding)])
|
||||
p.Utf8Inv = true
|
||||
p.start = uintptr((*rt.GoString)(unsafe.Pointer(&p.Json)).Ptr)
|
||||
} else {
|
||||
p.Json = data
|
||||
// TODO: prevent too large JSON
|
||||
p.padded = append(p.padded, data[pos:]...)
|
||||
p.padded = append(p.padded, padding...)
|
||||
p.start = uintptr((*rt.GoSlice)(unsafe.Pointer(&p.padded)).Ptr)
|
||||
}
|
||||
|
||||
p.cur = p.start
|
||||
p.end = p.cur + uintptr(len(p.Json))
|
||||
p.options = opt
|
||||
p.nbuf.init(p.nodes)
|
||||
return p
|
||||
}
|
||||
|
||||
|
||||
func (p *Parser) Pos() int {
|
||||
return int(p.cur - p.start)
|
||||
}
|
||||
|
||||
func (p *Parser) JsonBytes() []byte {
|
||||
if p.Utf8Inv {
|
||||
return (rt.Str2Mem(p.Json))
|
||||
} else {
|
||||
return p.padded
|
||||
}
|
||||
}
|
||||
|
||||
var nodeType = rt.UnpackType(reflect.TypeOf(node{}))
|
||||
|
||||
//go:inline
|
||||
func calMaxNodeCap(jsonSize int) int {
|
||||
return jsonSize / 2 + 2
|
||||
}
|
||||
|
||||
func (p *Parser) parse() ErrorCode {
|
||||
// when decoding into a struct, decode numbers as concrete types where possible
|
||||
old := p.options
|
||||
if !p.isEface {
|
||||
p.options &^= 1 << _F_use_number
|
||||
}
|
||||
|
||||
// fast path with limited node buffer
|
||||
err := ErrorCode(native.ParseWithPadding(unsafe.Pointer(p)))
|
||||
if err != SONIC_VISIT_FAILED {
|
||||
p.options = old
|
||||
return err
|
||||
}
|
||||
|
||||
// check OoB here
|
||||
offset := p.nbuf.ncur - p.nbuf.nstart
|
||||
curLen := offset / unsafe.Sizeof(node{})
|
||||
if curLen != uintptr(len(p.nodes)) {
|
||||
panic(fmt.Sprintf("current len: %d, real len: %d cap: %d", curLen, len(p.nodes), cap(p.nodes)))
|
||||
}
|
||||
|
||||
// node buffer is not enough, continue parsing
// maxCap is always enough to hold any valid JSON
|
||||
maxCap := calMaxNodeCap(len(p.Json))
|
||||
slice := rt.GoSlice{
|
||||
Ptr: rt.Mallocgc(uintptr(maxCap) * nodeType.Size, nodeType, false),
|
||||
Len: maxCap,
|
||||
Cap: maxCap,
|
||||
}
|
||||
rt.Memmove(unsafe.Pointer(slice.Ptr), unsafe.Pointer(&p.nodes[0]), offset)
|
||||
p.backup = p.nodes
|
||||
p.nodes = *(*[]node)(unsafe.Pointer(&slice))
|
||||
|
||||
// update node cursor
|
||||
p.nbuf.nstart = uintptr(unsafe.Pointer(&p.nodes[0]))
|
||||
p.nbuf.nend = p.nbuf.nstart + uintptr(cap(p.nodes)) * unsafe.Sizeof(node{})
|
||||
p.nbuf.ncur = p.nbuf.nstart + offset
|
||||
|
||||
// continue parse json
|
||||
err = ErrorCode(native.ParseWithPadding(unsafe.Pointer(p)))
|
||||
p.options = old
|
||||
return err
|
||||
}
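A minimal standalone sketch of the grow-and-continue strategy in parse(): when the fast-path node buffer fills up, copy the nodes produced so far into a larger buffer and resume from the same offset (plain int slices here instead of the node type).

package main

import "fmt"

func main() {
	nodes := make([]int, 4) // small fast-path buffer
	used := len(nodes)      // buffer exhausted mid-parse
	bigger := make([]int, 16)
	copy(bigger, nodes[:used]) // keep everything already parsed
	nodes = bigger
	fmt.Println(len(nodes), used) // 16 4
}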
|
||||
|
||||
func (p *Parser) reset() {
|
||||
p.options = 0
|
||||
p.padded = p.padded[:0]
|
||||
// the grown nodes buffer is too large to keep; drop it and restore the smaller backup buffer
|
||||
if p.backup != nil {
|
||||
p.nodes = p.backup
|
||||
p.backup = nil
|
||||
}
|
||||
p.start = 0
|
||||
p.cur = 0
|
||||
p.end = 0
|
||||
p.Json = ""
|
||||
p.nbuf = nodeBuf{}
|
||||
p._nbk = _nospaceBlock{}
|
||||
p.Utf8Inv = false
|
||||
p.isEface = false
|
||||
}
|
||||
|
||||
func (p *Parser) free() {
|
||||
p.reset()
|
||||
parsePool.Put(p)
|
||||
}
|
||||
|
||||
//go:noinline
|
||||
func (p *Parser) fixError(code ErrorCode) error {
|
||||
if code == SONIC_OK {
|
||||
return nil
|
||||
}
|
||||
|
||||
if p.Pos() == 0 {
|
||||
code = SONIC_EOF;
|
||||
}
|
||||
|
||||
pos := p.Pos() - 1
|
||||
return error_syntax(pos, p.Json, ParsingErrors[code])
|
||||
}
|
||||
|
||||
func Parse(data string, opt uint64) error {
|
||||
p := newParser(data, 0, opt)
|
||||
err := p.parse()
|
||||
p.free()
|
||||
return err
|
||||
}
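A minimal standalone sketch of the sync.Pool reuse pattern behind parsePool and free(): get a parser, reset it after use, and return it so its large buffers are reused across calls (parser and parse here are hypothetical stand-ins).

package main

import (
	"fmt"
	"sync"
)

type parser struct{ buf []byte }

var pool = sync.Pool{
	New: func() interface{} { return &parser{buf: make([]byte, 0, 1<<20)} },
}

func parse(data string) int {
	p := pool.Get().(*parser)
	defer func() { p.buf = p.buf[:0]; pool.Put(p) }() // reset, then recycle
	p.buf = append(p.buf, data...)
	return len(p.buf)
}

func main() {
	fmt.Println(parse(`{"a":1}`)) // 7
}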
|
1278 vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go generated vendored Normal file
File diff suppressed because it is too large
224 vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go generated vendored Normal file
@@ -0,0 +1,224 @@
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
)
|
||||
|
||||
type sliceDecoder struct {
|
||||
elemType *rt.GoType
|
||||
elemDec decFunc
|
||||
typ reflect.Type
|
||||
}
|
||||
|
||||
var (
|
||||
emptyPtr = &struct{}{}
|
||||
)
|
||||
|
||||
func (d *sliceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*rt.GoSlice)(vp) = rt.GoSlice{}
|
||||
return nil
|
||||
}
|
||||
|
||||
arr, ok := node.AsArr()
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.typ)
|
||||
}
|
||||
|
||||
slice := rt.MakeSlice(vp, d.elemType, arr.Len())
|
||||
elems := slice.Ptr
|
||||
next := arr.Children()
|
||||
|
||||
var gerr error
|
||||
for i := 0; i < arr.Len(); i++ {
|
||||
val := NewNode(next)
|
||||
elem := unsafe.Pointer(uintptr(elems) + uintptr(i)*d.elemType.Size)
|
||||
err := d.elemDec.FromDom(elem, val, ctx)
|
||||
if gerr == nil && err != nil {
|
||||
gerr = err
|
||||
}
|
||||
next = val.Next()
|
||||
}
|
||||
|
||||
*(*rt.GoSlice)(vp) = *slice
|
||||
return gerr
|
||||
}
|
||||
|
||||
type arrayDecoder struct {
|
||||
len int
|
||||
elemType *rt.GoType
|
||||
elemDec decFunc
|
||||
typ reflect.Type
|
||||
}
|
||||
|
||||
//go:nocheckptr
|
||||
func (d *arrayDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
arr, ok := node.AsArr()
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.typ)
|
||||
}
|
||||
|
||||
next := arr.Children()
|
||||
i := 0
|
||||
|
||||
var gerr error
|
||||
for ; i < d.len && i < arr.Len(); i++ {
|
||||
elem := unsafe.Pointer(uintptr(vp) + uintptr(i)*d.elemType.Size)
|
||||
val := NewNode(next)
|
||||
err := d.elemDec.FromDom(elem, val, ctx)
|
||||
if gerr == nil && err != nil {
|
||||
gerr = err
|
||||
}
|
||||
next = val.Next()
|
||||
}
|
||||
|
||||
/* zero rest of array */
|
||||
ptr := unsafe.Pointer(uintptr(vp) + uintptr(i)*d.elemType.Size)
|
||||
n := uintptr(d.len-i) * d.elemType.Size
|
||||
rt.ClearMemory(d.elemType, ptr, n)
|
||||
return gerr
|
||||
}
|
||||
|
||||
type sliceEfaceDecoder struct {
|
||||
}
|
||||
|
||||
func (d *sliceEfaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*rt.GoSlice)(vp) = rt.GoSlice{}
|
||||
return nil
|
||||
}
|
||||
|
||||
return node.AsSliceEface(ctx, vp)
|
||||
}
|
||||
|
||||
type sliceI32Decoder struct {
|
||||
}
|
||||
|
||||
func (d *sliceI32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*rt.GoSlice)(vp) = rt.GoSlice{}
|
||||
return nil
|
||||
}
|
||||
|
||||
return node.AsSliceI32(ctx, vp)
|
||||
}
|
||||
|
||||
type sliceI64Decoder struct {
|
||||
}
|
||||
|
||||
func (d *sliceI64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*rt.GoSlice)(vp) = rt.GoSlice{}
|
||||
return nil
|
||||
}
|
||||
|
||||
return node.AsSliceI64(ctx, vp)
|
||||
}
|
||||
|
||||
type sliceU32Decoder struct {
|
||||
}
|
||||
|
||||
func (d *sliceU32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*rt.GoSlice)(vp) = rt.GoSlice{}
|
||||
return nil
|
||||
}
|
||||
|
||||
return node.AsSliceU32(ctx, vp)
|
||||
}
|
||||
|
||||
type sliceU64Decoder struct {
|
||||
}
|
||||
|
||||
func (d *sliceU64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*rt.GoSlice)(vp) = rt.GoSlice{}
|
||||
return nil
|
||||
}
|
||||
|
||||
return node.AsSliceU64(ctx, vp)
|
||||
}
|
||||
|
||||
type sliceStringDecoder struct {
|
||||
}
|
||||
|
||||
func (d *sliceStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*rt.GoSlice)(vp) = rt.GoSlice{}
|
||||
return nil
|
||||
}
|
||||
|
||||
return node.AsSliceString(ctx, vp)
|
||||
}
|
||||
|
||||
type sliceBytesDecoder struct {
|
||||
}
|
||||
|
||||
func (d *sliceBytesDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*rt.GoSlice)(vp) = rt.GoSlice{}
|
||||
return nil
|
||||
}
|
||||
|
||||
s, err := node.AsSliceBytes(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
*(*[]byte)(vp) = s
|
||||
return nil
|
||||
}
|
||||
|
||||
type sliceBytesUnmarshalerDecoder struct {
|
||||
elemType *rt.GoType
|
||||
elemDec decFunc
|
||||
typ reflect.Type
|
||||
}
|
||||
|
||||
func (d *sliceBytesUnmarshalerDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*rt.GoSlice)(vp) = rt.GoSlice{}
|
||||
return nil
|
||||
}
|
||||
|
||||
/* parse JSON string into `[]byte` */
|
||||
if node.IsStr() {
|
||||
slice, err := node.AsSliceBytes(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*(*[]byte)(vp) = slice
|
||||
return nil
|
||||
}
|
||||
|
||||
/* parse JSON array into `[]byte` */
|
||||
arr, ok := node.AsArr()
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.typ)
|
||||
}
|
||||
|
||||
slice := rt.MakeSlice(vp, d.elemType, arr.Len())
|
||||
elems := slice.Ptr
|
||||
|
||||
var gerr error
|
||||
next := arr.Children()
|
||||
for i := 0; i < arr.Len(); i++ {
|
||||
child := NewNode(next)
|
||||
elem := unsafe.Pointer(uintptr(elems) + uintptr(i)*d.elemType.Size)
|
||||
err := d.elemDec.FromDom(elem, child, ctx)
|
||||
if gerr == nil && err != nil {
|
||||
gerr = err
|
||||
}
|
||||
next = child.Next()
|
||||
}
|
||||
|
||||
*(*rt.GoSlice)(vp) = *slice
|
||||
return gerr
|
||||
}
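The decoder above accepts []byte either as a base64-encoded JSON string or as a JSON array of numbers; a standalone sketch of the same two forms using encoding/json for comparison:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var a, b []byte
	_ = json.Unmarshal([]byte(`"aGk="`), &a)    // base64 for "hi"
	_ = json.Unmarshal([]byte(`[104,105]`), &b) // element-wise bytes
	fmt.Println(string(a), string(b)) // hi hi
}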
|
360 vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go generated vendored Normal file
@@ -0,0 +1,360 @@
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"math"
|
||||
"unsafe"
|
||||
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
)
|
||||
|
||||
type ptrStrDecoder struct {
|
||||
typ *rt.GoType
|
||||
deref decFunc
|
||||
}
|
||||
|
||||
// Pointer Value is allocated in the Caller
|
||||
func (d *ptrStrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
*(*unsafe.Pointer)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
s, ok := node.AsStrRef(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, stringType)
|
||||
}
|
||||
|
||||
if s == "null" {
|
||||
*(*unsafe.Pointer)(vp) = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
if *(*unsafe.Pointer)(vp) == nil {
|
||||
*(*unsafe.Pointer)(vp) = rt.Mallocgc(d.typ.Size, d.typ, true)
|
||||
}
|
||||
|
||||
return d.deref.FromDom(*(*unsafe.Pointer)(vp), node, ctx)
|
||||
}
|
||||
|
||||
type boolStringDecoder struct {
|
||||
}
|
||||
|
||||
func (d *boolStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
s, ok := node.AsStrRef(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, stringType)
|
||||
}
|
||||
|
||||
if s == "null" {
|
||||
return nil
|
||||
}
|
||||
|
||||
b, err := ParseBool(s)
|
||||
if err != nil {
|
||||
return error_mismatch(node, ctx, boolType)
|
||||
}
|
||||
|
||||
*(*bool)(vp) = b
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseI64(node Node, ctx *context) (int64, error, bool) {
|
||||
if node.IsNull() {
|
||||
return 0, nil, true
|
||||
}
|
||||
|
||||
s, ok := node.AsStrRef(ctx)
|
||||
if !ok {
|
||||
return 0, error_mismatch(node, ctx, stringType), false
|
||||
}
|
||||
|
||||
if s == "null" {
|
||||
return 0, nil, true
|
||||
}
|
||||
|
||||
ret, err := ParseI64(s)
|
||||
return ret, err, false
|
||||
}
|
||||
|
||||
type i8StringDecoder struct{}
|
||||
|
||||
func (d *i8StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
ret, err, null := parseI64(node, ctx)
|
||||
if null {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if ret > math.MaxInt8 || ret < math.MinInt8 {
|
||||
return error_mismatch(node, ctx, int8Type)
|
||||
}
|
||||
|
||||
*(*int8)(vp) = int8(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type i16StringDecoder struct{}
|
||||
|
||||
func (d *i16StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
ret, err, null := parseI64(node, ctx)
|
||||
if null {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if ret > math.MaxInt16 || ret < math.MinInt16 {
|
||||
return error_mismatch(node, ctx, int16Type)
|
||||
}
|
||||
|
||||
*(*int16)(vp) = int16(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type i32StringDecoder struct{}
|
||||
|
||||
func (d *i32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
ret, err, null := parseI64(node, ctx)
|
||||
if null {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if ret > math.MaxInt32 || ret < math.MinInt32 {
|
||||
return error_mismatch(node, ctx, int32Type)
|
||||
}
|
||||
|
||||
*(*int32)(vp) = int32(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type i64StringDecoder struct{}
|
||||
|
||||
func (d *i64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
ret, err, null := parseI64(node, ctx)
|
||||
if null {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
*(*int64)(vp) = int64(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseU64(node Node, ctx *context) (uint64, error, bool) {
|
||||
if node.IsNull() {
|
||||
return 0, nil, true
|
||||
}
|
||||
|
||||
s, ok := node.AsStrRef(ctx)
|
||||
if !ok {
|
||||
return 0, error_mismatch(node, ctx, stringType), false
|
||||
}
|
||||
|
||||
if s == "null" {
|
||||
return 0, nil, true
|
||||
}
|
||||
|
||||
ret, err := ParseU64(s)
|
||||
return ret, err, false
|
||||
}
|
||||
|
||||
type u8StringDecoder struct{}
|
||||
|
||||
func (d *u8StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
ret, err, null := parseU64(node, ctx)
|
||||
if null {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if ret > math.MaxUint8 {
|
||||
return error_mismatch(node, ctx, uint8Type)
|
||||
}
|
||||
|
||||
*(*uint8)(vp) = uint8(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type u16StringDecoder struct{}
|
||||
|
||||
func (d *u16StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
ret, err, null := parseU64(node, ctx)
|
||||
if null {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if ret > math.MaxUint16 {
|
||||
return error_mismatch(node, ctx, uint16Type)
|
||||
}
|
||||
|
||||
*(*uint16)(vp) = uint16(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type u32StringDecoder struct{}
|
||||
|
||||
func (d *u32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
ret, err, null := parseU64(node, ctx)
|
||||
if null {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if ret > math.MaxUint32 {
|
||||
return error_mismatch(node, ctx, uint32Type)
|
||||
}
|
||||
|
||||
*(*uint32)(vp) = uint32(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
type u64StringDecoder struct{}
|
||||
|
||||
func (d *u64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
ret, err, null := parseU64(node, ctx)
|
||||
if null {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
*(*uint64)(vp) = uint64(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type f32StringDecoder struct{}
|
||||
|
||||
func (d *f32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
s, ok := node.AsStrRef(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, stringType)
|
||||
}
|
||||
|
||||
if s == "null" {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, err := ParseF64(s)
|
||||
if err != nil || ret > math.MaxFloat32 || ret < -math.MaxFloat32 {
|
||||
return error_mismatch(node, ctx, float32Type)
|
||||
}
|
||||
|
||||
*(*float32)(vp) = float32(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
type f64StringDecoder struct{}
|
||||
|
||||
func (d *f64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
s, ok := node.AsStrRef(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, stringType)
|
||||
}
|
||||
|
||||
if s == "null" {
|
||||
return nil
|
||||
}
|
||||
|
||||
ret, err := ParseF64(s)
|
||||
if err != nil {
|
||||
return error_mismatch(node, ctx, float64Type)
|
||||
}
|
||||
|
||||
*(*float64)(vp) = float64(ret)
|
||||
return nil
|
||||
}
|
||||
|
||||
/* parse string field with string options */
|
||||
type strStringDecoder struct{}
|
||||
|
||||
func (d *strStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
s, ok := node.AsStrRef(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, stringType)
|
||||
}
|
||||
|
||||
if s == "null" {
|
||||
return nil
|
||||
}
|
||||
|
||||
s, err := Unquote(s)
|
||||
if err != nil {
|
||||
return error_mismatch(node, ctx, stringType)
|
||||
}
|
||||
|
||||
*(*string)(vp) = s
|
||||
return nil
|
||||
}
|
||||
|
||||
type numberStringDecoder struct{}
|
||||
|
||||
func (d *numberStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
s, ok := node.AsStrRef(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, stringType)
|
||||
}
|
||||
|
||||
if s == "null" {
|
||||
return nil
|
||||
}
|
||||
|
||||
num, ok := node.ParseNumber(ctx)
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, jsonNumberType)
|
||||
}
|
||||
|
||||
end, ok := SkipNumberFast(s, 0)
|
||||
// has error or trailing chars
|
||||
if !ok || end != len(s) {
|
||||
return error_mismatch(node, ctx, jsonNumberType)
|
||||
}
|
||||
|
||||
*(*json.Number)(vp) = json.Number(num)
|
||||
return nil
|
||||
}
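These decoders implement the `,string` struct-tag option, where scalar values arrive wrapped in JSON strings; a standalone sketch of the option using encoding/json for comparison:

package main

import (
	"encoding/json"
	"fmt"
)

type T struct {
	N int64 `json:"n,string"`
}

func main() {
	var t T
	_ = json.Unmarshal([]byte(`{"n":"42"}`), &t)
	fmt.Println(t.N) // 42
}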
|
61 vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go generated vendored Normal file
@@ -0,0 +1,61 @@
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
caching "github.com/bytedance/sonic/internal/optcaching"
|
||||
"github.com/bytedance/sonic/internal/resolver"
|
||||
)
|
||||
|
||||
type fieldEntry struct {
|
||||
resolver.FieldMeta
|
||||
fieldDec decFunc
|
||||
}
|
||||
|
||||
type structDecoder struct {
|
||||
fieldMap caching.FieldLookup
|
||||
fields []fieldEntry
|
||||
structName string
|
||||
typ reflect.Type
|
||||
}
|
||||
|
||||
func (d *structDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
|
||||
if node.IsNull() {
|
||||
return nil
|
||||
}
|
||||
|
||||
var gerr error
|
||||
obj, ok := node.AsObj()
|
||||
if !ok {
|
||||
return error_mismatch(node, ctx, d.typ)
|
||||
}
|
||||
|
||||
next := obj.Children()
|
||||
for i := 0; i < obj.Len(); i++ {
|
||||
key, _ := NewNode(next).AsStrRef(ctx)
|
||||
val := NewNode(PtrOffset(next, 1))
|
||||
next = val.Next()
|
||||
|
||||
// find field idx
|
||||
idx := d.fieldMap.Get(key)
|
||||
if idx == -1 {
|
||||
if Options(ctx.Options())&OptionDisableUnknown != 0 {
|
||||
return error_field(key)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
offset := d.fields[idx].Path[0].Size
|
||||
elem := unsafe.Pointer(uintptr(vp) + offset)
|
||||
err := d.fields[idx].fieldDec.FromDom(elem, val, ctx)
|
||||
|
||||
// deal with mismatch type errors
|
||||
if gerr == nil && err != nil {
|
||||
// TODO: better error info
|
||||
gerr = err
|
||||
}
|
||||
}
|
||||
return gerr
|
||||
}
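The unknown-field branch above (OptionDisableUnknown) parallels encoding/json's DisallowUnknownFields; a standalone sketch of the reference behaviour:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	dec := json.NewDecoder(bytes.NewReader([]byte(`{"x":1,"y":2}`)))
	dec.DisallowUnknownFields()
	var v struct{ X int }
	err := dec.Decode(&v)
	fmt.Println(err != nil) // true: "y" is not a field of v
}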
|
||||
|
60 vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go generated vendored Normal file
@@ -0,0 +1,60 @@
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package optdec
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"reflect"
|
||||
"unsafe"
|
||||
|
||||
"github.com/bytedance/sonic/internal/rt"
|
||||
)
|
||||
|
||||
var (
|
||||
boolType = reflect.TypeOf(bool(false))
|
||||
byteType = reflect.TypeOf(byte(0))
|
||||
intType = reflect.TypeOf(int(0))
|
||||
int8Type = reflect.TypeOf(int8(0))
|
||||
int16Type = reflect.TypeOf(int16(0))
|
||||
int32Type = reflect.TypeOf(int32(0))
|
||||
int64Type = reflect.TypeOf(int64(0))
|
||||
uintType = reflect.TypeOf(uint(0))
|
||||
uint8Type = reflect.TypeOf(uint8(0))
|
||||
uint16Type = reflect.TypeOf(uint16(0))
|
||||
uint32Type = reflect.TypeOf(uint32(0))
|
||||
uint64Type = reflect.TypeOf(uint64(0))
|
||||
float32Type = reflect.TypeOf(float32(0))
|
||||
float64Type = reflect.TypeOf(float64(0))
|
||||
stringType = reflect.TypeOf("")
|
||||
bytesType = reflect.TypeOf([]byte(nil))
|
||||
jsonNumberType = reflect.TypeOf(json.Number(""))
|
||||
base64CorruptInputError = reflect.TypeOf(base64.CorruptInputError(0))
|
||||
anyType = rt.UnpackType(reflect.TypeOf((*interface{})(nil)).Elem())
|
||||
)
|
||||
|
||||
var (
|
||||
errorType = reflect.TypeOf((*error)(nil)).Elem()
|
||||
jsonUnmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
|
||||
encodingTextUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
|
||||
)
|
||||
|
||||
func rtype(t reflect.Type) (*rt.GoItab, *rt.GoType) {
|
||||
p := (*rt.GoIface)(unsafe.Pointer(&t))
|
||||
return p.Itab, (*rt.GoType)(p.Value)
|
||||
}