Mirror of https://github.com/superseriousbusiness/gotosocial, synced 2025-06-05 21:59:39 +02:00
[chore]: Bump github.com/gin-gonic/gin from 1.9.0 to 1.9.1 (#1855)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Changed vendored files:

    vendor/github.com/bytedance/sonic/internal/decoder/asm.s (generated, vendored): 0 lines
    vendor/github.com/bytedance/sonic/internal/decoder/assembler_amd64_go116.go (generated, vendored): 2013 lines (diff suppressed because it is too large)
    vendor/github.com/bytedance/sonic/internal/decoder/assembler_amd64_go117.go (generated, vendored): 1992 lines (diff suppressed because it is too large)
    vendor/github.com/bytedance/sonic/internal/decoder/compiler.go (generated, vendored): 1155 lines (diff suppressed because it is too large)
vendor/github.com/bytedance/sonic/internal/decoder/debug.go (generated, vendored): 70 lines
@@ -0,0 +1,70 @@
/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package decoder

import (
    `os`
    `runtime`
    `runtime/debug`
    `strings`

    `github.com/bytedance/sonic/internal/jit`
)

var (
    debugSyncGC  = os.Getenv("SONIC_SYNC_GC") != ""
    debugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == ""
)

var (
    _Instr_End _Instr = newInsOp(_OP_nil_1)

    _F_gc       = jit.Func(runtime.GC)
    _F_force_gc = jit.Func(debug.FreeOSMemory)
    _F_println  = jit.Func(println_wrapper)
    _F_print    = jit.Func(print)
)

func println_wrapper(i int, op1 int, op2 int){
    println(i, " Intrs ", op1, _OpNames[op1], "next: ", op2, _OpNames[op2])
}

func print(i int){
    println(i)
}

func (self *_Assembler) force_gc() {
    self.call_go(_F_gc)
    self.call_go(_F_force_gc)
}

func (self *_Assembler) debug_instr(i int, v *_Instr) {
    if debugSyncGC {
        if (i+1 == len(self.p)) {
            self.print_gc(i, v, &_Instr_End)
        } else {
            next := &(self.p[i+1])
            self.print_gc(i, v, next)
            name := _OpNames[next.op()]
            if strings.Contains(name, "save") {
                return
            }
        }
        self.force_gc()
    }
}
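For context (not part of the commit): the debug hooks above are switched purely by environment variables read at package init, and the second one is inverted, so a quick way to confirm what a given environment enables is a sketch like the following, which evaluates exactly the same expressions as the vendored file.

// Hedged illustration only: reports which sonic decoder debug modes the
// current environment would enable, mirroring debug.go's init-time checks.
package main

import (
    "fmt"
    "os"
)

func main() {
    // SONIC_SYNC_GC non-empty -> debugSyncGC is true (GC forced around instructions).
    fmt.Println("sync GC debugging:", os.Getenv("SONIC_SYNC_GC") != "")
    // SONIC_NO_ASYNC_GC empty -> debugAsyncGC is true, i.e. async GC debugging
    // is on by default and is turned off by setting the variable.
    fmt.Println("async GC debugging:", os.Getenv("SONIC_NO_ASYNC_GC") == "")
}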
vendor/github.com/bytedance/sonic/internal/decoder/decoder.go (generated, vendored): 255 lines
@@ -0,0 +1,255 @@
/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package decoder

import (
    `unsafe`
    `encoding/json`
    `reflect`
    `runtime`

    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/bytedance/sonic/option`
    `github.com/bytedance/sonic/utf8`
)

const (
    _F_use_int64 = iota
    _F_use_number
    _F_disable_urc
    _F_disable_unknown
    _F_copy_string
    _F_validate_string

    _F_allow_control = 31
)

type Options uint64

const (
    OptionUseInt64         Options = 1 << _F_use_int64
    OptionUseNumber        Options = 1 << _F_use_number
    OptionUseUnicodeErrors Options = 1 << _F_disable_urc
    OptionDisableUnknown   Options = 1 << _F_disable_unknown
    OptionCopyString       Options = 1 << _F_copy_string
    OptionValidateString   Options = 1 << _F_validate_string
)

func (self *Decoder) SetOptions(opts Options) {
    if (opts & OptionUseNumber != 0) && (opts & OptionUseInt64 != 0) {
        panic("can't set OptionUseInt64 and OptionUseNumber both!")
    }
    self.f = uint64(opts)
}


// Decoder is the decoder context object
type Decoder struct {
    i int
    f uint64
    s string
}

// NewDecoder creates a new decoder instance.
func NewDecoder(s string) *Decoder {
    return &Decoder{s: s}
}

// Pos returns the current decoding position.
func (self *Decoder) Pos() int {
    return self.i
}

func (self *Decoder) Reset(s string) {
    self.s = s
    self.i = 0
    // self.f = 0
}

func (self *Decoder) CheckTrailings() error {
    pos := self.i
    buf := self.s
    /* skip all the trailing spaces */
    if pos != len(buf) {
        for pos < len(buf) && (types.SPACE_MASK & (1 << buf[pos])) != 0 {
            pos++
        }
    }

    /* then it must be at EOF */
    if pos == len(buf) {
        return nil
    }

    /* junk after JSON value */
    return SyntaxError {
        Src  : buf,
        Pos  : pos,
        Code : types.ERR_INVALID_CHAR,
    }
}


// Decode parses the JSON-encoded data from current position and stores the result
// in the value pointed to by val.
func (self *Decoder) Decode(val interface{}) error {
    /* validate json if needed */
    if (self.f & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(self.s){
        dbuf := utf8.CorrectWith(nil, rt.Str2Mem(self.s), "\ufffd")
        self.s = rt.Mem2Str(dbuf)
    }

    vv := rt.UnpackEface(val)
    vp := vv.Value

    /* check for nil type */
    if vv.Type == nil {
        return &json.InvalidUnmarshalError{}
    }

    /* must be a non-nil pointer */
    if vp == nil || vv.Type.Kind() != reflect.Ptr {
        return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
    }

    etp := rt.PtrElem(vv.Type)

    /* check the defined pointer type for issue 379 */
    if vv.Type.IsNamed() {
        newp := vp
        etp = vv.Type
        vp = unsafe.Pointer(&newp)
    }

    /* create a new stack, and call the decoder */
    sb := newStack()
    nb, err := decodeTypedPointer(self.s, self.i, etp, vp, sb, self.f)
    /* return the stack back */
    self.i = nb
    freeStack(sb)

    /* avoid GC ahead */
    runtime.KeepAlive(vv)
    return err
}

// UseInt64 indicates the Decoder to unmarshal an integer into an interface{} as an
// int64 instead of as a float64.
func (self *Decoder) UseInt64() {
    self.f  |= 1 << _F_use_int64
    self.f &^= 1 << _F_use_number
}

// UseNumber indicates the Decoder to unmarshal a number into an interface{} as a
// json.Number instead of as a float64.
func (self *Decoder) UseNumber() {
    self.f &^= 1 << _F_use_int64
    self.f  |= 1 << _F_use_number
}

// UseUnicodeErrors indicates the Decoder to return an error when encounter invalid
// UTF-8 escape sequences.
func (self *Decoder) UseUnicodeErrors() {
    self.f |= 1 << _F_disable_urc
}

// DisallowUnknownFields indicates the Decoder to return an error when the destination
// is a struct and the input contains object keys which do not match any
// non-ignored, exported fields in the destination.
func (self *Decoder) DisallowUnknownFields() {
    self.f |= 1 << _F_disable_unknown
}

// CopyString indicates the Decoder to decode string values by copying instead of referring.
func (self *Decoder) CopyString() {
    self.f |= 1 << _F_copy_string
}

// ValidateString causes the Decoder to validate string values when decoding string value
// in JSON. Validation is that, returning error when unescaped control chars(0x00-0x1f) or
// invalid UTF-8 chars in the string value of JSON.
func (self *Decoder) ValidateString() {
    self.f |= 1 << _F_validate_string
}

// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
// order to reduce the first-hit latency.
//
// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
// a compile option to set the depth of recursive compile for the nested struct type.
func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
    cfg := option.DefaultCompileOptions()
    for _, opt := range opts {
        opt(&cfg)
    }
    return pretouchRec(map[reflect.Type]bool{vt:true}, cfg)
}

func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) {
    /* compile function */
    compiler := newCompiler().apply(opts)
    decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
        if pp, err := compiler.compile(_vt); err != nil {
            return nil, err
        } else {
            as := newAssembler(pp)
            as.name = _vt.String()
            return as.Load(), nil
        }
    }

    /* find or compile */
    vt := rt.UnpackType(_vt)
    if val := programCache.Get(vt); val != nil {
        return nil, nil
    } else if _, err := programCache.Compute(vt, decoder); err == nil {
        return compiler.rec, nil
    } else {
        return nil, err
    }
}

func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error {
    if opts.RecursiveDepth < 0 || len(vtm) == 0 {
        return nil
    }
    next := make(map[reflect.Type]bool)
    for vt := range(vtm) {
        sub, err := pretouchType(vt, opts)
        if err != nil {
            return err
        }
        for svt := range(sub) {
            next[svt] = true
        }
    }
    opts.RecursiveDepth -= 1
    return pretouchRec(next, opts)
}

// Skip skips only one json value, and returns first non-blank character position and its ending position if it is valid.
// Otherwise, returns negative error code using start and invalid character position using end
func Skip(data []byte) (start int, end int) {
    s := rt.Mem2Str(data)
    p := 0
    m := types.NewStateMachine()
    ret := native.SkipOne(&s, &p, m, uint64(0))
    types.FreeStateMachine(m)
    return ret, p
}
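To make the shape of this API concrete, here is a small usage sketch (not part of the commit): it only calls NewDecoder, SetOptions, Decode, CheckTrailings and Pos exactly as declared above. One assumption is hedged in the comments: this is sonic's internal package as vendored here, so outside the sonic module the same behaviour is reached through sonic's public entry points rather than this import path.

// Hedged usage sketch of the Decoder declared in decoder.go above.
// The internal import path is only reachable from inside sonic itself.
package main

import (
    "fmt"

    "github.com/bytedance/sonic/internal/decoder"
)

func main() {
    src := `{"id": 9007199254740993, "name": "gts"} `

    d := decoder.NewDecoder(src)
    // OptionUseInt64 keeps large integers exact instead of rounding through float64;
    // OptionCopyString detaches decoded strings from the input buffer.
    d.SetOptions(decoder.OptionUseInt64 | decoder.OptionCopyString)

    var out map[string]interface{}
    if err := d.Decode(&out); err != nil {
        fmt.Println("decode failed:", err)
        return
    }

    // CheckTrailings reports junk after the first JSON value (trailing spaces are fine).
    if err := d.CheckTrailings(); err != nil {
        fmt.Println("trailing garbage:", err)
        return
    }

    fmt.Printf("decoded up to position %d: %v\n", d.Pos(), out)
}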
vendor/github.com/bytedance/sonic/internal/decoder/errors.go (generated, vendored): 181 lines
@@ -0,0 +1,181 @@
/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package decoder

import (
    `encoding/json`
    `errors`
    `fmt`
    `reflect`
    `strconv`
    `strings`

    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
)

type SyntaxError struct {
    Pos  int
    Src  string
    Code types.ParsingError
    Msg  string
}

func (self SyntaxError) Error() string {
    return fmt.Sprintf("%q", self.Description())
}

func (self SyntaxError) Description() string {
    return "Syntax error " + self.description()
}

func (self SyntaxError) description() string {
    i := 16
    p := self.Pos - i
    q := self.Pos + i

    /* check for empty source */
    if self.Src == "" {
        return fmt.Sprintf("no sources available: %#v", self)
    }

    /* prevent slicing before the beginning */
    if p < 0 {
        p, q, i = 0, q - p, i + p
    }

    /* prevent slicing beyond the end */
    if n := len(self.Src); q > n {
        n = q - n
        q = len(self.Src)

        /* move the left bound if possible */
        if p > n {
            i += n
            p -= n
        }
    }

    /* left and right length */
    x := clamp_zero(i)
    y := clamp_zero(q - p - i - 1)

    /* compose the error description */
    return fmt.Sprintf(
        "at index %d: %s\n\n\t%s\n\t%s^%s\n",
        self.Pos,
        self.Message(),
        self.Src[p:q],
        strings.Repeat(".", x),
        strings.Repeat(".", y),
    )
}

func (self SyntaxError) Message() string {
    if self.Msg == "" {
        return self.Code.Message()
    }
    return self.Msg
}

func clamp_zero(v int) int {
    if v < 0 {
        return 0
    } else {
        return v
    }
}

/** JIT Error Helpers **/

var stackOverflow = &json.UnsupportedValueError {
    Str   : "Value nesting too deep",
    Value : reflect.ValueOf("..."),
}

//go:nosplit
func error_wrap(src string, pos int, code types.ParsingError) error {
    return SyntaxError {
        Pos  : pos,
        Src  : src,
        Code : code,
    }
}

//go:nosplit
func error_type(vt *rt.GoType) error {
    return &json.UnmarshalTypeError{Type: vt.Pack()}
}

type MismatchTypeError struct {
    Pos  int
    Src  string
    Type reflect.Type
}

func swithchJSONType (src string, pos int) string {
    var val string
    switch src[pos] {
        case 'f': fallthrough
        case 't': val = "bool"
        case '"': val = "string"
        case '{': val = "object"
        case '[': val = "array"
        case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': val = "number"
    }
    return val
}

func (self MismatchTypeError) Error() string {
    se := SyntaxError {
        Pos  : self.Pos,
        Src  : self.Src,
        Code : types.ERR_MISMATCH,
    }
    return fmt.Sprintf("Mismatch type %s with value %s %q", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description())
}

func (self MismatchTypeError) Description() string {
    se := SyntaxError {
        Pos  : self.Pos,
        Src  : self.Src,
        Code : types.ERR_MISMATCH,
    }
    return fmt.Sprintf("Mismatch type %s with value %s %s", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description())
}

//go:nosplit
func error_mismatch(src string, pos int, vt *rt.GoType) error {
    return &MismatchTypeError {
        Pos  : pos,
        Src  : src,
        Type : vt.Pack(),
    }
}

//go:nosplit
func error_field(name string) error {
    return errors.New("json: unknown field " + strconv.Quote(name))
}

//go:nosplit
func error_value(value string, vtype reflect.Type) error {
    return &json.UnmarshalTypeError {
        Type  : vtype,
        Value : value,
    }
}
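Since SyntaxError and MismatchTypeError above are plain exported types (constructed by value and by pointer respectively in this file), callers can branch on them with a type switch. The sketch below is illustrative only: it reuses the internal package path as before and does not assert which concrete error type a particular malformed input actually produces.

// Hedged sketch: distinguishing the error types declared in errors.go above.
package main

import (
    "fmt"

    "github.com/bytedance/sonic/internal/decoder"
)

func main() {
    var dst struct{ N int }
    d := decoder.NewDecoder(`{"N": "not a number"`)

    switch e := d.Decode(&dst).(type) {
    case nil:
        fmt.Println("ok:", dst)
    case *decoder.MismatchTypeError:
        // JSON value type does not match the destination field type.
        fmt.Println("mismatch at", e.Pos, "for", e.Type)
    case decoder.SyntaxError:
        // Description() renders the offending position with a caret marker.
        fmt.Println(e.Description())
    default:
        fmt.Println("other error:", e)
    }
}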
vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go116.go (generated, vendored): 776 lines
@@ -0,0 +1,776 @@
|
||||
// +build go1.15,!go1.17
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
`encoding/json`
|
||||
`fmt`
|
||||
`reflect`
|
||||
`strconv`
|
||||
|
||||
`github.com/bytedance/sonic/internal/jit`
|
||||
`github.com/bytedance/sonic/internal/native`
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/twitchyliquid64/golang-asm/obj`
|
||||
`github.com/twitchyliquid64/golang-asm/obj/x86`
|
||||
)
|
||||
|
||||
/** Crucial Registers:
|
||||
*
|
||||
* ST(BX) : ro, decoder stack
|
||||
* DF(R10) : ro, decoder flags
|
||||
* EP(R11) : wo, error pointer
|
||||
* IP(R12) : ro, input pointer
|
||||
* IL(R13) : ro, input length
|
||||
* IC(R14) : rw, input cursor
|
||||
* VP(R15) : ro, value pointer (to an interface{})
|
||||
*/
|
||||
|
||||
const (
|
||||
_VD_args = 8 // 8 bytes for passing arguments to this functions
|
||||
_VD_fargs = 64 // 64 bytes for passing arguments to other Go functions
|
||||
_VD_saves = 40 // 40 bytes for saving the registers before CALL instructions
|
||||
_VD_locals = 88 // 88 bytes for local variables
|
||||
)
|
||||
|
||||
const (
|
||||
_VD_offs = _VD_fargs + _VD_saves + _VD_locals
|
||||
_VD_size = _VD_offs + 8 // 8 bytes for the parent frame pointer
|
||||
)
|
||||
|
||||
var (
|
||||
_VAR_ss = _VAR_ss_Vt
|
||||
_VAR_df = jit.Ptr(_SP, _VD_fargs + _VD_saves)
|
||||
)
|
||||
|
||||
var (
|
||||
_VAR_ss_Vt = jit.Ptr(_SP, _VD_fargs + _VD_saves + 8)
|
||||
_VAR_ss_Dv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 16)
|
||||
_VAR_ss_Iv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 24)
|
||||
_VAR_ss_Ep = jit.Ptr(_SP, _VD_fargs + _VD_saves + 32)
|
||||
_VAR_ss_Db = jit.Ptr(_SP, _VD_fargs + _VD_saves + 40)
|
||||
_VAR_ss_Dc = jit.Ptr(_SP, _VD_fargs + _VD_saves + 48)
|
||||
)
|
||||
|
||||
var (
|
||||
_VAR_cs_LR = jit.Ptr(_SP, _VD_fargs + _VD_saves + 56)
|
||||
_VAR_cs_p = jit.Ptr(_SP, _VD_fargs + _VD_saves + 64)
|
||||
_VAR_cs_n = jit.Ptr(_SP, _VD_fargs + _VD_saves + 72)
|
||||
_VAR_cs_d = jit.Ptr(_SP, _VD_fargs + _VD_saves + 80)
|
||||
)
|
||||
|
||||
type _ValueDecoder struct {
|
||||
jit.BaseAssembler
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) build() uintptr {
|
||||
self.Init(self.compile)
|
||||
return *(*uintptr)(self.Load("decode_value", _VD_size, _VD_args, argPtrs_generic, localPtrs_generic))
|
||||
}
|
||||
|
||||
/** Function Calling Helpers **/
|
||||
|
||||
func (self *_ValueDecoder) save(r ...obj.Addr) {
|
||||
for i, v := range r {
|
||||
if i > _VD_saves / 8 - 1 {
|
||||
panic("too many registers to save")
|
||||
} else {
|
||||
self.Emit("MOVQ", v, jit.Ptr(_SP, _VD_fargs + int64(i) * 8))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) load(r ...obj.Addr) {
|
||||
for i, v := range r {
|
||||
if i > _VD_saves / 8 - 1 {
|
||||
panic("too many registers to load")
|
||||
} else {
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, _VD_fargs + int64(i) * 8), v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) call(fn obj.Addr) {
|
||||
self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _AX) // CALL AX
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) call_go(fn obj.Addr) {
|
||||
self.save(_REG_go...) // SAVE $REG_go
|
||||
self.call(fn) // CALL ${fn}
|
||||
self.load(_REG_go...) // LOAD $REG_go
|
||||
}
|
||||
|
||||
/** Decoder Assembler **/
|
||||
|
||||
const (
|
||||
_S_val = iota + 1
|
||||
_S_arr
|
||||
_S_arr_0
|
||||
_S_obj
|
||||
_S_obj_0
|
||||
_S_obj_delim
|
||||
_S_obj_sep
|
||||
)
|
||||
|
||||
const (
|
||||
_S_omask_key = (1 << _S_obj_0) | (1 << _S_obj_sep)
|
||||
_S_omask_end = (1 << _S_obj_0) | (1 << _S_obj)
|
||||
_S_vmask = (1 << _S_val) | (1 << _S_arr_0)
|
||||
)
|
||||
|
||||
const (
|
||||
_A_init_len = 1
|
||||
_A_init_cap = 16
|
||||
)
|
||||
|
||||
const (
|
||||
_ST_Sp = 0
|
||||
_ST_Vt = _PtrBytes
|
||||
_ST_Vp = _PtrBytes * (types.MAX_RECURSE + 1)
|
||||
)
|
||||
|
||||
var (
|
||||
_V_true = jit.Imm(int64(pbool(true)))
|
||||
_V_false = jit.Imm(int64(pbool(false)))
|
||||
_F_value = jit.Imm(int64(native.S_value))
|
||||
)
|
||||
|
||||
var (
|
||||
_V_max = jit.Imm(int64(types.V_MAX))
|
||||
_E_eof = jit.Imm(int64(types.ERR_EOF))
|
||||
_E_invalid = jit.Imm(int64(types.ERR_INVALID_CHAR))
|
||||
_E_recurse = jit.Imm(int64(types.ERR_RECURSE_EXCEED_MAX))
|
||||
)
|
||||
|
||||
var (
|
||||
_F_convTslice = jit.Func(convTslice)
|
||||
_F_convTstring = jit.Func(convTstring)
|
||||
_F_invalid_vtype = jit.Func(invalid_vtype)
|
||||
)
|
||||
|
||||
var (
|
||||
_T_map = jit.Type(reflect.TypeOf((map[string]interface{})(nil)))
|
||||
_T_bool = jit.Type(reflect.TypeOf(false))
|
||||
_T_int64 = jit.Type(reflect.TypeOf(int64(0)))
|
||||
_T_eface = jit.Type(reflect.TypeOf((*interface{})(nil)).Elem())
|
||||
_T_slice = jit.Type(reflect.TypeOf(([]interface{})(nil)))
|
||||
_T_string = jit.Type(reflect.TypeOf(""))
|
||||
_T_number = jit.Type(reflect.TypeOf(json.Number("")))
|
||||
_T_float64 = jit.Type(reflect.TypeOf(float64(0)))
|
||||
)
|
||||
|
||||
var _R_tab = map[int]string {
|
||||
'[': "_decode_V_ARRAY",
|
||||
'{': "_decode_V_OBJECT",
|
||||
':': "_decode_V_KEY_SEP",
|
||||
',': "_decode_V_ELEM_SEP",
|
||||
']': "_decode_V_ARRAY_END",
|
||||
'}': "_decode_V_OBJECT_END",
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) compile() {
|
||||
self.Emit("SUBQ", jit.Imm(_VD_size), _SP) // SUBQ $_VD_size, SP
|
||||
self.Emit("MOVQ", _BP, jit.Ptr(_SP, _VD_offs)) // MOVQ BP, _VD_offs(SP)
|
||||
self.Emit("LEAQ", jit.Ptr(_SP, _VD_offs), _BP) // LEAQ _VD_offs(SP), BP
|
||||
|
||||
/* initialize the state machine */
|
||||
self.Emit("XORL", _CX, _CX) // XORL CX, CX
|
||||
self.Emit("MOVQ", _DF, _VAR_df) // MOVQ DF, df
|
||||
/* initialize digital buffer first */
|
||||
self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_ss_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
|
||||
self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX
|
||||
self.Emit("MOVQ", _AX, _VAR_ss_Db) // MOVQ AX, ss.Dbuf
|
||||
/* add ST offset */
|
||||
self.Emit("ADDQ", jit.Imm(_FsmOffset), _ST) // ADDQ _FsmOffset, _ST
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.WriteRecNotAX(0, _VP, jit.Ptr(_ST, _ST_Vp), false) // MOVQ VP, ST.Vp[0]
|
||||
self.Emit("MOVQ", jit.Imm(_S_val), jit.Ptr(_ST, _ST_Vt)) // MOVQ _S_val, ST.Vt[0]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* set the value from previous round */
|
||||
self.Link("_set_value") // _set_value:
|
||||
self.Emit("MOVL" , jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
|
||||
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
|
||||
self.Sjmp("JNC" , "_vtype_error") // JNC _vtype_error
|
||||
self.Emit("XORL" , _SI, _SI) // XORL SI, SI
|
||||
self.Emit("SUBQ" , jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
|
||||
self.Emit("XCHGQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // XCHGQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ" , _R8, jit.Ptr(_SI, 0)) // MOVQ R8, (SI)
|
||||
self.WriteRecNotAX(1, _R9, jit.Ptr(_SI, 8), false) // MOVQ R9, 8(SI)
|
||||
|
||||
/* check for value stack */
|
||||
self.Link("_next") // _next:
|
||||
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _AX) // MOVQ ST.Sp, AX
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , "_return") // JS _return
|
||||
|
||||
/* fast path: test up to 4 characters manually */
|
||||
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
|
||||
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
|
||||
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
|
||||
self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
|
||||
self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
|
||||
self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
|
||||
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
|
||||
|
||||
/* at least 1 to 3 spaces */
|
||||
for i := 0; i < 3; i++ {
|
||||
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
|
||||
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
|
||||
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
|
||||
self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
|
||||
self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
|
||||
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
|
||||
}
|
||||
|
||||
/* at least 4 spaces */
|
||||
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
|
||||
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
|
||||
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
|
||||
|
||||
/* fast path: use lookup table to select decoder */
|
||||
self.Link("_decode_fast") // _decode_fast:
|
||||
self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
|
||||
self.Sref("_decode_tab", 4) // .... &_decode_tab
|
||||
self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX
|
||||
self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JZ" , "_decode_native") // JZ _decode_native
|
||||
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
|
||||
self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
|
||||
self.Rjmp("JMP" , _AX) // JMP AX
|
||||
|
||||
/* decode with native decoder */
|
||||
self.Link("_decode_native") // _decode_native:
|
||||
self.Emit("MOVQ", _IP, _DI) // MOVQ IP, DI
|
||||
self.Emit("MOVQ", _IL, _SI) // MOVQ IL, SI
|
||||
self.Emit("MOVQ", _IC, _DX) // MOVQ IC, DX
|
||||
self.Emit("LEAQ", _VAR_ss, _CX) // LEAQ ss, CX
|
||||
self.Emit("MOVQ", _VAR_df, _R8) // MOVQ $df, R8
|
||||
self.Emit("BTSQ", jit.Imm(_F_allow_control), _R8) // ANDQ $1<<_F_allow_control, R8
|
||||
self.call(_F_value) // CALL value
|
||||
self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
|
||||
|
||||
/* check for errors */
|
||||
self.Emit("MOVQ" , _VAR_ss_Vt, _AX) // MOVQ ss.Vt, AX
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , "_parsing_error")
|
||||
self.Sjmp("JZ" , "_invalid_vtype") // JZ _invalid_vtype
|
||||
self.Emit("CMPQ" , _AX, _V_max) // CMPQ AX, _V_max
|
||||
self.Sjmp("JA" , "_invalid_vtype") // JA _invalid_vtype
|
||||
|
||||
/* jump table selector */
|
||||
self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
|
||||
self.Sref("_switch_table", 4) // .... &_switch_table
|
||||
self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, -4), _AX) // MOVLQSX -4(DI)(AX*4), AX
|
||||
self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
|
||||
self.Rjmp("JMP" , _AX) // JMP AX
|
||||
|
||||
/** V_EOF **/
|
||||
self.Link("_decode_V_EOF") // _decode_V_EOF:
|
||||
self.Emit("MOVL", _E_eof, _EP) // MOVL _E_eof, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
|
||||
/** V_NULL **/
|
||||
self.Link("_decode_V_NULL") // _decode_V_NULL:
|
||||
self.Emit("XORL", _R8, _R8) // XORL R8, R8
|
||||
self.Emit("XORL", _R9, _R9) // XORL R9, R9
|
||||
self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_TRUE **/
|
||||
self.Link("_decode_V_TRUE") // _decode_V_TRUE:
|
||||
self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
|
||||
// TODO: maybe modified by users?
|
||||
self.Emit("MOVQ", _V_true, _R9) // MOVQ _V_true, R9
|
||||
self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_FALSE **/
|
||||
self.Link("_decode_V_FALSE") // _decode_V_FALSE:
|
||||
self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
|
||||
self.Emit("MOVQ", _V_false, _R9) // MOVQ _V_false, R9
|
||||
self.Emit("LEAQ", jit.Ptr(_IC, -5), _DI) // LEAQ -5(IC), DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_ARRAY **/
|
||||
self.Link("_decode_V_ARRAY") // _decode_V_ARRAY
|
||||
self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
|
||||
self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
|
||||
|
||||
/* create a new array */
|
||||
self.Emit("MOVQ", _T_eface, _AX) // MOVQ _T_eface, AX
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP)
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP)
|
||||
self.call_go(_F_makeslice) // CALL_GO runtime.makeslice
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 24), _DX) // MOVQ 24(SP), DX
|
||||
|
||||
/* pack into an interface */
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP)
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP)
|
||||
self.call_go(_F_convTslice) // CALL_GO runtime.convTslice
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8) // MOVQ 24(SP), R8
|
||||
|
||||
/* replace current state with an array */
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Imm(_S_arr), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr, ST.Vt[CX]
|
||||
self.Emit("MOVQ", _T_slice, _AX) // MOVQ _T_slice, AX
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SI, 0)) // MOVQ AX, (SI)
|
||||
self.WriteRecNotAX(2, _R8, jit.Ptr(_SI, 8), false) // MOVQ R8, 8(SI)
|
||||
|
||||
/* add a new slot for the first element */
|
||||
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
|
||||
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
|
||||
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
|
||||
self.Emit("MOVQ", jit.Ptr(_R8, 0), _AX) // MOVQ (R8), AX
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.WritePtrAX(3, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
|
||||
self.Emit("MOVQ", jit.Imm(_S_arr_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr_0, ST.Vt[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_OBJECT **/
|
||||
self.Link("_decode_V_OBJECT") // _decode_V_OBJECT:
|
||||
self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
|
||||
self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
|
||||
self.call_go(_F_makemap_small) // CALL_GO runtime.makemap_small
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 0), _AX) // MOVQ (SP), AX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Imm(_S_obj_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj, ST.Vt[CX]
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 0)) // MOVQ DX, (SI)
|
||||
self.WritePtrAX(4, jit.Ptr(_SI, 8), false) // MOVQ AX, 8(SI)
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_STRING **/
|
||||
self.Link("_decode_V_STRING") // _decode_V_STRING:
|
||||
self.Emit("MOVQ", _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
|
||||
self.Emit("MOVQ", _IC, _AX) // MOVQ IC, AX
|
||||
self.Emit("SUBQ", _CX, _AX) // SUBQ CX, AX
|
||||
|
||||
/* check for escapes */
|
||||
self.Emit("CMPQ", _VAR_ss_Ep, jit.Imm(-1)) // CMPQ ss.Ep, $-1
|
||||
self.Sjmp("JNE" , "_unquote") // JNE _unquote
|
||||
self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX
|
||||
self.Emit("LEAQ", jit.Sib(_IP, _CX, 1, 0), _R8) // LEAQ (IP)(CX), R8
|
||||
self.Byte(0x48, 0x8d, 0x3d) // LEAQ (PC), DI
|
||||
self.Sref("_copy_string_end", 4)
|
||||
self.Emit("BTQ", jit.Imm(_F_copy_string), _VAR_df)
|
||||
self.Sjmp("JC", "copy_string")
|
||||
self.Link("_copy_string_end")
|
||||
self.Emit("XORL", _DX, _DX) // XORL DX, DX
|
||||
/* strings with no escape sequences */
|
||||
self.Link("_noescape") // _noescape:
|
||||
self.Emit("MOVL", jit.Imm(_S_omask_key), _DI) // MOVL _S_omask, DI
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _SI) // MOVQ ST.Vt[CX], SI
|
||||
self.Emit("BTQ" , _SI, _DI) // BTQ SI, DI
|
||||
self.Sjmp("JC" , "_object_key") // JC _object_key
|
||||
|
||||
/* check for pre-packed strings, avoid 1 allocation */
|
||||
self.Emit("TESTQ", _DX, _DX) // TESTQ DX, DX
|
||||
self.Sjmp("JNZ" , "_packed_str") // JNZ _packed_str
|
||||
self.Emit("MOVQ" , _R8, jit.Ptr(_SP, 0)) // MOVQ R8, (SP)
|
||||
self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
|
||||
self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
|
||||
self.Emit("MOVQ" , jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9
|
||||
|
||||
/* packed string already in R9 */
|
||||
self.Link("_packed_str") // _packed_str:
|
||||
self.Emit("MOVQ", _T_string, _R8) // MOVQ _T_string, R8
|
||||
self.Emit("MOVQ", _VAR_ss_Iv, _DI) // MOVQ ss.Iv, DI
|
||||
self.Emit("SUBQ", jit.Imm(1), _DI) // SUBQ $1, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/* the string is an object key, get the map */
|
||||
self.Link("_object_key")
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
|
||||
/* add a new delimiter */
|
||||
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
|
||||
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
|
||||
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.Emit("MOVQ", jit.Imm(_S_obj_delim), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj_delim, ST.Vt[CX]
|
||||
|
||||
/* add a new slot int the map */
|
||||
self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
|
||||
self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8)) // MOVQ SI, 8(SP)
|
||||
self.Emit("MOVQ", _R8, jit.Ptr(_SP, 16)) // MOVQ R9, 16(SP)
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 24)) // MOVQ AX, 24(SP)
|
||||
self.call_go(_F_mapassign_faststr) // CALL_GO runtime.mapassign_faststr
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 32), _AX) // MOVQ 32(SP), AX
|
||||
|
||||
/* add to the pointer stack */
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.WritePtrAX(6, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* allocate memory to store the string header and unquoted result */
|
||||
self.Link("_unquote") // _unquote:
|
||||
self.Emit("ADDQ", jit.Imm(15), _AX) // ADDQ $15, AX
|
||||
self.Emit("MOVQ", _T_byte, _CX) // MOVQ _T_byte, CX
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
|
||||
self.Emit("MOVB", jit.Imm(0), jit.Ptr(_SP, 16)) // MOVB $0, 16(SP)
|
||||
self.call_go(_F_mallocgc) // CALL_GO runtime.mallocgc
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 24), _R9) // MOVQ 24(SP), R9
|
||||
|
||||
/* prepare the unquoting parameters */
|
||||
self.Emit("MOVQ" , _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
|
||||
self.Emit("LEAQ" , jit.Sib(_IP, _CX, 1, 0), _DI) // LEAQ (IP)(CX), DI
|
||||
self.Emit("NEGQ" , _CX) // NEGQ CX
|
||||
self.Emit("LEAQ" , jit.Sib(_IC, _CX, 1, -1), _SI) // LEAQ -1(IC)(CX), SI
|
||||
self.Emit("LEAQ" , jit.Ptr(_R9, 16), _DX) // LEAQ 16(R8), DX
|
||||
self.Emit("LEAQ" , _VAR_ss_Ep, _CX) // LEAQ ss.Ep, CX
|
||||
self.Emit("XORL" , _R8, _R8) // XORL R8, R8
|
||||
self.Emit("BTQ" , jit.Imm(_F_disable_urc), _VAR_df) // BTQ ${_F_disable_urc}, fv
|
||||
self.Emit("SETCC", _R8) // SETCC R8
|
||||
self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8
|
||||
|
||||
/* unquote the string, with R9 been preserved */
|
||||
self.save(_R9) // SAVE R9
|
||||
self.call(_F_unquote) // CALL unquote
|
||||
self.load(_R9) // LOAD R9
|
||||
|
||||
/* check for errors */
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , "_unquote_error") // JS _unquote_error
|
||||
self.Emit("MOVL" , jit.Imm(1), _DX) // MOVL $1, DX
|
||||
self.Emit("LEAQ" , jit.Ptr(_R9, 16), _R8) // ADDQ $16, R8
|
||||
self.Emit("MOVQ" , _R8, jit.Ptr(_R9, 0)) // MOVQ R8, (R9)
|
||||
self.Emit("MOVQ" , _AX, jit.Ptr(_R9, 8)) // MOVQ AX, 8(R9)
|
||||
self.Sjmp("JMP" , "_noescape") // JMP _noescape
|
||||
|
||||
/** V_DOUBLE **/
|
||||
self.Link("_decode_V_DOUBLE") // _decode_V_DOUBLE:
|
||||
self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
|
||||
self.Sjmp("JC" , "_use_number") // JC _use_number
|
||||
self.Emit("MOVSD", _VAR_ss_Dv, _X0) // MOVSD ss.Dv, X0
|
||||
self.Sjmp("JMP" , "_use_float64") // JMP _use_float64
|
||||
|
||||
/** V_INTEGER **/
|
||||
self.Link("_decode_V_INTEGER") // _decode_V_INTEGER:
|
||||
self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
|
||||
self.Sjmp("JC" , "_use_number") // JC _use_number
|
||||
self.Emit("BTQ" , jit.Imm(_F_use_int64), _VAR_df) // BTQ _F_use_int64, df
|
||||
self.Sjmp("JC" , "_use_int64") // JC _use_int64
|
||||
self.Emit("MOVQ" , _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
|
||||
self.Emit("CVTSQ2SD", _AX, _X0) // CVTSQ2SD AX, X0
|
||||
|
||||
/* represent numbers as `float64` */
|
||||
self.Link("_use_float64") // _use_float64:
|
||||
self.Emit("MOVSD", _X0, jit.Ptr(_SP, 0)) // MOVSD X0, (SP)
|
||||
self.call_go(_F_convT64) // CALL_GO runtime.convT64
|
||||
self.Emit("MOVQ" , _T_float64, _R8) // MOVQ _T_float64, R8
|
||||
self.Emit("MOVQ" , jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9
|
||||
self.Emit("MOVQ" , _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/* represent numbers as `json.Number` */
|
||||
self.Link("_use_number") // _use_number
|
||||
self.Emit("MOVQ", _VAR_ss_Ep, _AX) // MOVQ ss.Ep, AX
|
||||
self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _SI) // LEAQ (IP)(AX), SI
|
||||
self.Emit("MOVQ", _IC, _CX) // MOVQ IC, CX
|
||||
self.Emit("SUBQ", _AX, _CX) // SUBQ AX, CX
|
||||
self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0)) // MOVQ SI, (SP)
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
|
||||
self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
|
||||
self.Emit("MOVQ", _T_number, _R8) // MOVQ _T_number, R8
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9
|
||||
self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/* represent numbers as `int64` */
|
||||
self.Link("_use_int64") // _use_int64:
|
||||
self.Emit("MOVQ", _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
|
||||
self.call_go(_F_convT64) // CALL_GO runtime.convT64
|
||||
self.Emit("MOVQ", _T_int64, _R8) // MOVQ _T_int64, R8
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9
|
||||
self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_KEY_SEP **/
|
||||
self.Link("_decode_V_KEY_SEP") // _decode_V_KEY_SEP:
|
||||
// self.Byte(0xcc)
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("CMPQ", _AX, jit.Imm(_S_obj_delim)) // CMPQ AX, _S_obj_delim
|
||||
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX]
|
||||
self.Emit("MOVQ", jit.Imm(_S_obj), jit.Sib(_ST, _CX, 8, _ST_Vt - 8)) // MOVQ _S_obj, ST.Vt[CX - 1]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_ELEM_SEP **/
|
||||
self.Link("_decode_V_ELEM_SEP") // _decode_V_ELEM_SEP:
|
||||
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(_S_arr)) // CMPQ _AX, _S_arr
|
||||
self.Sjmp("JE" , "_array_sep") // JZ _next
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(_S_obj)) // CMPQ _AX, _S_arr
|
||||
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("MOVQ" , jit.Imm(_S_obj_sep), jit.Sib(_ST, _CX, 8, _ST_Vt))
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* arrays */
|
||||
self.Link("_array_sep")
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _DX) // MOVQ 8(SI), DX
|
||||
self.Emit("CMPQ", _DX, jit.Ptr(_SI, 16)) // CMPQ DX, 16(SI)
|
||||
self.Sjmp("JAE" , "_array_more") // JAE _array_more
|
||||
|
||||
/* add a slot for the new element */
|
||||
self.Link("_array_append") // _array_append:
|
||||
self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_SI, 8)) // ADDQ $1, 8(SI)
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 0), _SI) // MOVQ (SI), SI
|
||||
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
|
||||
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
|
||||
self.Sjmp("JAE" , "_stack_overflow")
|
||||
self.Emit("SHLQ", jit.Imm(1), _DX) // SHLQ $1, DX
|
||||
self.Emit("LEAQ", jit.Sib(_SI, _DX, 8, 0), _SI) // LEAQ (SI)(DX*8), SI
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.WriteRecNotAX(7 , _SI, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ SI, ST.Vp[CX]
|
||||
self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX}
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_ARRAY_END **/
|
||||
self.Link("_decode_V_ARRAY_END") // _decode_V_ARRAY_END:
|
||||
self.Emit("XORL", _DX, _DX) // XORL DX, DX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("CMPQ", _AX, jit.Imm(_S_arr_0)) // CMPQ AX, _S_arr_0
|
||||
self.Sjmp("JE" , "_first_item") // JE _first_item
|
||||
self.Emit("CMPQ", _AX, jit.Imm(_S_arr)) // CMPQ AX, _S_arr
|
||||
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
|
||||
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* first element of an array */
|
||||
self.Link("_first_item") // _first_item:
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("SUBQ", jit.Imm(2), jit.Ptr(_ST, _ST_Sp)) // SUBQ $2, ST.Sp
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp - 8), _SI) // MOVQ ST.Vp[CX - 1], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp - 8)) // MOVQ DX, ST.Vp[CX - 1]
|
||||
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_OBJECT_END **/
|
||||
self.Link("_decode_V_OBJECT_END") // _decode_V_OBJECT_END:
|
||||
self.Emit("MOVL", jit.Imm(_S_omask_end), _DX) // MOVL _S_omask, DI
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DX)
|
||||
self.Sjmp("JNC" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("XORL", _AX, _AX) // XORL AX, AX
|
||||
self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
|
||||
self.Emit("MOVQ", _AX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ AX, ST.Vp[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* return from decoder */
|
||||
self.Link("_return") // _return:
|
||||
self.Emit("XORL", _EP, _EP) // XORL EP, EP
|
||||
self.Emit("MOVQ", _EP, jit.Ptr(_ST, _ST_Vp)) // MOVQ EP, ST.Vp[0]
|
||||
self.Link("_epilogue") // _epilogue:
|
||||
self.Emit("SUBQ", jit.Imm(_FsmOffset), _ST) // SUBQ _FsmOffset, _ST
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, _VD_offs), _BP) // MOVQ _VD_offs(SP), BP
|
||||
self.Emit("ADDQ", jit.Imm(_VD_size), _SP) // ADDQ $_VD_size, SP
|
||||
self.Emit("RET") // RET
|
||||
|
||||
/* array expand */
|
||||
self.Link("_array_more") // _array_more:
|
||||
self.Emit("MOVQ" , _T_eface, _AX) // MOVQ _T_eface, AX
|
||||
self.Emit("MOVOU", jit.Ptr(_SI, 0), _X0) // MOVOU (SI), X0
|
||||
self.Emit("MOVQ" , jit.Ptr(_SI, 16), _DX) // MOVQ 16(SI), DX
|
||||
self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
|
||||
self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
|
||||
self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 24)) // MOVQ DX, 24(SP)
|
||||
self.Emit("SHLQ" , jit.Imm(1), _DX) // SHLQ $1, DX
|
||||
self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 32)) // MOVQ DX, 32(SP)
|
||||
self.call_go(_F_growslice) // CALL_GO runtime.growslice
|
||||
self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI) // MOVOU 40(SP), DI
|
||||
self.Emit("MOVQ" , jit.Ptr(_SP, 48), _DX) // MOVOU 48(SP), DX
|
||||
self.Emit("MOVQ" , jit.Ptr(_SP, 56), _AX) // MOVQ 56(SP), AX
|
||||
|
||||
/* update the slice */
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SI, 16)) // MOVQ AX, 16(AX)
|
||||
self.WriteRecNotAX(8 , _DI, jit.Ptr(_SI, 0), false) // MOVQ R10, (SI)
|
||||
self.Sjmp("JMP" , "_array_append") // JMP _array_append
|
||||
|
||||
/* copy string */
|
||||
self.Link("copy_string") // pointer: R8, length: AX, return addr: DI
|
||||
// self.Byte(0xcc)
|
||||
self.Emit("MOVQ", _R8, _VAR_cs_p)
|
||||
self.Emit("MOVQ", _AX, _VAR_cs_n)
|
||||
self.Emit("MOVQ", _DI, _VAR_cs_LR)
|
||||
self.Emit("MOVQ", _T_byte, jit.Ptr(_SP, 0))
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
|
||||
self.call_go(_F_makeslice)
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8)
|
||||
self.Emit("MOVQ", _R8, _VAR_cs_d)
|
||||
self.Emit("MOVQ", _R8, jit.Ptr(_SP, 0))
|
||||
self.Emit("MOVQ", _VAR_cs_p, _R8)
|
||||
self.Emit("MOVQ", _R8, jit.Ptr(_SP, 8))
|
||||
self.Emit("MOVQ", _VAR_cs_n, _AX)
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
|
||||
self.call_go(_F_memmove)
|
||||
self.Emit("MOVQ", _VAR_cs_d, _R8)
|
||||
self.Emit("MOVQ", _VAR_cs_n, _AX)
|
||||
self.Emit("MOVQ", _VAR_cs_LR, _DI)
|
||||
// self.Byte(0xcc)
|
||||
self.Rjmp("JMP", _DI)
|
||||
|
||||
/* error handlers */
|
||||
self.Link("_stack_overflow")
|
||||
self.Emit("MOVL" , _E_recurse, _EP) // MOVQ _E_recurse, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
self.Link("_vtype_error") // _vtype_error:
|
||||
self.Emit("MOVQ" , _DI, _IC) // MOVQ DI, IC
|
||||
self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
self.Link("_invalid_char") // _invalid_char:
|
||||
self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
|
||||
self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
self.Link("_unquote_error") // _unquote_error:
|
||||
self.Emit("MOVQ" , _VAR_ss_Iv, _IC) // MOVQ ss.Iv, IC
|
||||
self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
|
||||
self.Link("_parsing_error") // _parsing_error:
|
||||
self.Emit("NEGQ" , _AX) // NEGQ AX
|
||||
self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP
|
||||
self.Link("_error") // _error:
|
||||
self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
|
||||
self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
|
||||
self.Sjmp("JMP" , "_epilogue") // JMP _epilogue
|
||||
|
||||
/* invalid value type, never returns */
|
||||
self.Link("_invalid_vtype")
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
|
||||
self.call(_F_invalid_vtype) // CALL invalid_type
|
||||
self.Emit("UD2") // UD2
|
||||
|
||||
/* switch jump table */
|
||||
self.Link("_switch_table") // _switch_table:
|
||||
self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
|
||||
self.Sref("_decode_V_NULL", -4) // SREF &_decode_V_NULL, $-4
|
||||
self.Sref("_decode_V_TRUE", -8) // SREF &_decode_V_TRUE, $-8
|
||||
self.Sref("_decode_V_FALSE", -12) // SREF &_decode_V_FALSE, $-12
|
||||
self.Sref("_decode_V_ARRAY", -16) // SREF &_decode_V_ARRAY, $-16
|
||||
self.Sref("_decode_V_OBJECT", -20) // SREF &_decode_V_OBJECT, $-20
|
||||
self.Sref("_decode_V_STRING", -24) // SREF &_decode_V_STRING, $-24
|
||||
self.Sref("_decode_V_DOUBLE", -28) // SREF &_decode_V_DOUBLE, $-28
|
||||
self.Sref("_decode_V_INTEGER", -32) // SREF &_decode_V_INTEGER, $-32
|
||||
self.Sref("_decode_V_KEY_SEP", -36) // SREF &_decode_V_KEY_SEP, $-36
|
||||
self.Sref("_decode_V_ELEM_SEP", -40) // SREF &_decode_V_ELEM_SEP, $-40
|
||||
self.Sref("_decode_V_ARRAY_END", -44) // SREF &_decode_V_ARRAY_END, $-44
|
||||
self.Sref("_decode_V_OBJECT_END", -48) // SREF &_decode_V_OBJECT_END, $-48
|
||||
|
||||
/* fast character lookup table */
|
||||
self.Link("_decode_tab") // _decode_tab:
|
||||
self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
|
||||
|
||||
/* generate rest of the tabs */
|
||||
for i := 1; i < 256; i++ {
|
||||
if to, ok := _R_tab[i]; ok {
|
||||
self.Sref(to, -int64(i) * 4)
|
||||
} else {
|
||||
self.Byte(0x00, 0x00, 0x00, 0x00)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
|
||||
self.Emit("MOVQ", _V_writeBarrier, _R10)
|
||||
self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
if saveDI {
|
||||
self.save(_DI)
|
||||
}
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _R10)
|
||||
if saveDI {
|
||||
self.load(_DI)
|
||||
}
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", _AX, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
|
||||
if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
|
||||
panic("rec contains AX!")
|
||||
}
|
||||
self.Emit("MOVQ", _V_writeBarrier, _R10)
|
||||
self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, _AX)
|
||||
if saveDI {
|
||||
self.save(_DI)
|
||||
}
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _R10)
|
||||
if saveDI {
|
||||
self.load(_DI)
|
||||
}
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
||||
|
||||
/** Generic Decoder **/
|
||||
|
||||
var (
|
||||
_subr_decode_value = new(_ValueDecoder).build()
|
||||
)
|
||||
|
||||
//go:nosplit
|
||||
func invalid_vtype(vt types.ValueType) {
|
||||
throw(fmt.Sprintf("invalid value type: %d", vt))
|
||||
}
|
772
vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go117.go
generated
vendored
Normal file
772
vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go117.go
generated
vendored
Normal file
@ -0,0 +1,772 @@
|
||||
//go:build go1.17 && !go1.21
|
||||
// +build go1.17,!go1.21
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
`encoding/json`
|
||||
`fmt`
|
||||
`reflect`
|
||||
`strconv`
|
||||
|
||||
`github.com/bytedance/sonic/internal/jit`
|
||||
`github.com/bytedance/sonic/internal/native`
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/twitchyliquid64/golang-asm/obj`
|
||||
`github.com/twitchyliquid64/golang-asm/obj/x86`
|
||||
)
|
||||
|
||||
/** Crucial Registers:
|
||||
*
|
||||
* ST(R13) && 0(SP) : ro, decoder stack
|
||||
* DF(AX) : ro, decoder flags
|
||||
* EP(BX) : wo, error pointer
|
||||
* IP(R10) : ro, input pointer
|
||||
* IL(R12) : ro, input length
|
||||
* IC(R11) : rw, input cursor
|
||||
* VP(R15) : ro, value pointer (to an interface{})
|
||||
*/
|
||||
|
||||
const (
|
||||
_VD_args = 8 // 8 bytes for passing arguments to this functions
|
||||
_VD_fargs = 64 // 64 bytes for passing arguments to other Go functions
|
||||
_VD_saves = 48 // 48 bytes for saving the registers before CALL instructions
|
||||
_VD_locals = 96 // 96 bytes for local variables
|
||||
)
|
||||
|
||||
const (
|
||||
_VD_offs = _VD_fargs + _VD_saves + _VD_locals
|
||||
_VD_size = _VD_offs + 8 // 8 bytes for the parent frame pointer
|
||||
)
|
||||
|
||||
var (
|
||||
_VAR_ss = _VAR_ss_Vt
|
||||
_VAR_df = jit.Ptr(_SP, _VD_fargs + _VD_saves)
|
||||
)
|
||||
|
||||
var (
|
||||
_VAR_ss_Vt = jit.Ptr(_SP, _VD_fargs + _VD_saves + 8)
|
||||
_VAR_ss_Dv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 16)
|
||||
_VAR_ss_Iv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 24)
|
||||
_VAR_ss_Ep = jit.Ptr(_SP, _VD_fargs + _VD_saves + 32)
|
||||
_VAR_ss_Db = jit.Ptr(_SP, _VD_fargs + _VD_saves + 40)
|
||||
_VAR_ss_Dc = jit.Ptr(_SP, _VD_fargs + _VD_saves + 48)
|
||||
)
|
||||
|
||||
var (
|
||||
_VAR_R9 = jit.Ptr(_SP, _VD_fargs + _VD_saves + 56)
|
||||
)
|
||||
type _ValueDecoder struct {
|
||||
jit.BaseAssembler
|
||||
}
|
||||
|
||||
var (
|
||||
_VAR_cs_LR = jit.Ptr(_SP, _VD_fargs + _VD_saves + 64)
|
||||
_VAR_cs_p = jit.Ptr(_SP, _VD_fargs + _VD_saves + 72)
|
||||
_VAR_cs_n = jit.Ptr(_SP, _VD_fargs + _VD_saves + 80)
|
||||
_VAR_cs_d = jit.Ptr(_SP, _VD_fargs + _VD_saves + 88)
|
||||
)
|
||||
|
||||
func (self *_ValueDecoder) build() uintptr {
|
||||
self.Init(self.compile)
|
||||
return *(*uintptr)(self.Load("decode_value", _VD_size, _VD_args, argPtrs_generic, localPtrs_generic))
|
||||
}
|
||||
|
||||
/** Function Calling Helpers **/
|
||||
|
||||
func (self *_ValueDecoder) save(r ...obj.Addr) {
|
||||
for i, v := range r {
|
||||
if i > _VD_saves / 8 - 1 {
|
||||
panic("too many registers to save")
|
||||
} else {
|
||||
self.Emit("MOVQ", v, jit.Ptr(_SP, _VD_fargs + int64(i) * 8))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) load(r ...obj.Addr) {
|
||||
for i, v := range r {
|
||||
if i > _VD_saves / 8 - 1 {
|
||||
panic("too many registers to load")
|
||||
} else {
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, _VD_fargs + int64(i) * 8), v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) call(fn obj.Addr) {
|
||||
self.Emit("MOVQ", fn, _R9) // MOVQ ${fn}, AX
|
||||
self.Rjmp("CALL", _R9) // CALL AX
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) call_go(fn obj.Addr) {
|
||||
self.save(_REG_go...) // SAVE $REG_go
|
||||
self.call(fn) // CALL ${fn}
|
||||
self.load(_REG_go...) // LOAD $REG_go
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) callc(fn obj.Addr) {
|
||||
self.Emit("XCHGQ", _IP, _BP)
|
||||
self.call(fn)
|
||||
self.Emit("XCHGQ", _IP, _BP)
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) call_c(fn obj.Addr) {
|
||||
self.Emit("XCHGQ", _IC, _BX)
|
||||
self.callc(fn)
|
||||
self.Emit("XCHGQ", _IC, _BX)
|
||||
}
|
||||
|
||||
/** Decoder Assembler **/
|
||||
|
||||
const (
|
||||
_S_val = iota + 1
|
||||
_S_arr
|
||||
_S_arr_0
|
||||
_S_obj
|
||||
_S_obj_0
|
||||
_S_obj_delim
|
||||
_S_obj_sep
|
||||
)
|
||||
|
||||
const (
|
||||
_S_omask_key = (1 << _S_obj_0) | (1 << _S_obj_sep)
|
||||
_S_omask_end = (1 << _S_obj_0) | (1 << _S_obj)
|
||||
_S_vmask = (1 << _S_val) | (1 << _S_arr_0)
|
||||
)
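
// The BTQ-based checks below are bit tests against these masks. As a plain-Go sketch of
// the same idea (stateIn is an illustrative name, not used by the generated code): a state
// is accepted iff its bit is set in the mask, e.g. stateIn(_S_vmask, _S_val) is true while
// stateIn(_S_vmask, _S_obj) is false.
func stateIn(mask uint64, state uint) bool {
    return mask&(1<<state) != 0 // BTQ state, mask ; JNC => state not in set
}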
|
||||
|
||||
const (
|
||||
_A_init_len = 1
|
||||
_A_init_cap = 16
|
||||
)
|
||||
|
||||
const (
|
||||
_ST_Sp = 0
|
||||
_ST_Vt = _PtrBytes
|
||||
_ST_Vp = _PtrBytes * (types.MAX_RECURSE + 1)
|
||||
)
|
||||
|
||||
var (
|
||||
_V_true = jit.Imm(int64(pbool(true)))
|
||||
_V_false = jit.Imm(int64(pbool(false)))
|
||||
_F_value = jit.Imm(int64(native.S_value))
|
||||
)
|
||||
|
||||
var (
|
||||
_V_max = jit.Imm(int64(types.V_MAX))
|
||||
_E_eof = jit.Imm(int64(types.ERR_EOF))
|
||||
_E_invalid = jit.Imm(int64(types.ERR_INVALID_CHAR))
|
||||
_E_recurse = jit.Imm(int64(types.ERR_RECURSE_EXCEED_MAX))
|
||||
)
|
||||
|
||||
var (
|
||||
_F_convTslice = jit.Func(convTslice)
|
||||
_F_convTstring = jit.Func(convTstring)
|
||||
_F_invalid_vtype = jit.Func(invalid_vtype)
|
||||
)
|
||||
|
||||
var (
|
||||
_T_map = jit.Type(reflect.TypeOf((map[string]interface{})(nil)))
|
||||
_T_bool = jit.Type(reflect.TypeOf(false))
|
||||
_T_int64 = jit.Type(reflect.TypeOf(int64(0)))
|
||||
_T_eface = jit.Type(reflect.TypeOf((*interface{})(nil)).Elem())
|
||||
_T_slice = jit.Type(reflect.TypeOf(([]interface{})(nil)))
|
||||
_T_string = jit.Type(reflect.TypeOf(""))
|
||||
_T_number = jit.Type(reflect.TypeOf(json.Number("")))
|
||||
_T_float64 = jit.Type(reflect.TypeOf(float64(0)))
|
||||
)
|
||||
|
||||
var _R_tab = map[int]string {
|
||||
'[': "_decode_V_ARRAY",
|
||||
'{': "_decode_V_OBJECT",
|
||||
':': "_decode_V_KEY_SEP",
|
||||
',': "_decode_V_ELEM_SEP",
|
||||
']': "_decode_V_ARRAY_END",
|
||||
'}': "_decode_V_OBJECT_END",
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) compile() {
|
||||
self.Emit("SUBQ", jit.Imm(_VD_size), _SP) // SUBQ $_VD_size, SP
|
||||
self.Emit("MOVQ", _BP, jit.Ptr(_SP, _VD_offs)) // MOVQ BP, _VD_offs(SP)
|
||||
self.Emit("LEAQ", jit.Ptr(_SP, _VD_offs), _BP) // LEAQ _VD_offs(SP), BP
|
||||
|
||||
/* initialize the state machine */
|
||||
self.Emit("XORL", _CX, _CX) // XORL CX, CX
|
||||
self.Emit("MOVQ", _DF, _VAR_df) // MOVQ DF, df
|
||||
/* initialize the digit buffer first */
|
||||
self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_ss_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
|
||||
self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX
|
||||
self.Emit("MOVQ", _AX, _VAR_ss_Db) // MOVQ AX, ss.Dbuf
|
||||
/* add ST offset */
|
||||
self.Emit("ADDQ", jit.Imm(_FsmOffset), _ST) // ADDQ _FsmOffset, _ST
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.WriteRecNotAX(0, _VP, jit.Ptr(_ST, _ST_Vp), false) // MOVQ VP, ST.Vp[0]
|
||||
self.Emit("MOVQ", jit.Imm(_S_val), jit.Ptr(_ST, _ST_Vt)) // MOVQ _S_val, ST.Vt[0]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* set the value from previous round */
|
||||
self.Link("_set_value") // _set_value:
|
||||
self.Emit("MOVL" , jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
|
||||
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
|
||||
self.Sjmp("JNC" , "_vtype_error") // JNC _vtype_error
|
||||
self.Emit("XORL" , _SI, _SI) // XORL SI, SI
|
||||
self.Emit("SUBQ" , jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
|
||||
self.Emit("XCHGQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // XCHGQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ" , _R8, jit.Ptr(_SI, 0)) // MOVQ R8, (SI)
|
||||
self.WriteRecNotAX(1, _R9, jit.Ptr(_SI, 8), false) // MOVQ R9, 8(SI)
|
||||
|
||||
/* check for value stack */
|
||||
self.Link("_next") // _next:
|
||||
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _AX) // MOVQ ST.Sp, AX
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , "_return") // JS _return
|
||||
|
||||
/* fast path: test up to 4 characters manually */
|
||||
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
|
||||
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
|
||||
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
|
||||
self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
|
||||
self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
|
||||
self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
|
||||
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
|
||||
|
||||
/* at least 1 to 3 spaces */
|
||||
for i := 0; i < 3; i++ {
|
||||
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
|
||||
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
|
||||
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
|
||||
self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
|
||||
self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
|
||||
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
|
||||
}
|
||||
|
||||
/* at least 4 spaces */
|
||||
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
|
||||
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
|
||||
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
|
||||
|
||||
/* fast path: use lookup table to select decoder */
|
||||
self.Link("_decode_fast") // _decode_fast:
|
||||
self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
|
||||
self.Sref("_decode_tab", 4) // .... &_decode_tab
|
||||
self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX
|
||||
self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JZ" , "_decode_native") // JZ _decode_native
|
||||
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
|
||||
self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
|
||||
self.Rjmp("JMP" , _AX) // JMP AX
|
||||
|
||||
/* decode with native decoder */
|
||||
self.Link("_decode_native") // _decode_native:
|
||||
self.Emit("MOVQ", _IP, _DI) // MOVQ IP, DI
|
||||
self.Emit("MOVQ", _IL, _SI) // MOVQ IL, SI
|
||||
self.Emit("MOVQ", _IC, _DX) // MOVQ IC, DX
|
||||
self.Emit("LEAQ", _VAR_ss, _CX) // LEAQ ss, CX
|
||||
self.Emit("MOVQ", _VAR_df, _R8) // MOVQ $df, R8
|
||||
self.Emit("BTSQ", jit.Imm(_F_allow_control), _R8) // ANDQ $1<<_F_allow_control, R8
|
||||
self.callc(_F_value) // CALL value
|
||||
self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
|
||||
|
||||
/* check for errors */
|
||||
self.Emit("MOVQ" , _VAR_ss_Vt, _AX) // MOVQ ss.Vt, AX
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , "_parsing_error")
|
||||
self.Sjmp("JZ" , "_invalid_vtype") // JZ _invalid_vtype
|
||||
self.Emit("CMPQ" , _AX, _V_max) // CMPQ AX, _V_max
|
||||
self.Sjmp("JA" , "_invalid_vtype") // JA _invalid_vtype
|
||||
|
||||
/* jump table selector */
|
||||
self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
|
||||
self.Sref("_switch_table", 4) // .... &_switch_table
|
||||
self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, -4), _AX) // MOVLQSX -4(DI)(AX*4), AX
|
||||
self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
|
||||
self.Rjmp("JMP" , _AX) // JMP AX
|
||||
|
||||
/** V_EOF **/
|
||||
self.Link("_decode_V_EOF") // _decode_V_EOF:
|
||||
self.Emit("MOVL", _E_eof, _EP) // MOVL _E_eof, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
|
||||
/** V_NULL **/
|
||||
self.Link("_decode_V_NULL") // _decode_V_NULL:
|
||||
self.Emit("XORL", _R8, _R8) // XORL R8, R8
|
||||
self.Emit("XORL", _R9, _R9) // XORL R9, R9
|
||||
self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_TRUE **/
|
||||
self.Link("_decode_V_TRUE") // _decode_V_TRUE:
|
||||
self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
|
||||
// TODO: maybe modified by users?
|
||||
self.Emit("MOVQ", _V_true, _R9) // MOVQ _V_true, R9
|
||||
self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_FALSE **/
|
||||
self.Link("_decode_V_FALSE") // _decode_V_FALSE:
|
||||
self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
|
||||
self.Emit("MOVQ", _V_false, _R9) // MOVQ _V_false, R9
|
||||
self.Emit("LEAQ", jit.Ptr(_IC, -5), _DI) // LEAQ -5(IC), DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_ARRAY **/
|
||||
self.Link("_decode_V_ARRAY") // _decode_V_ARRAY
|
||||
self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
|
||||
self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
|
||||
|
||||
/* create a new array */
|
||||
self.Emit("MOVQ", _T_eface, _AX) // MOVQ _T_eface, AX
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_len), _BX) // MOVQ _A_init_len, BX
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_cap), _CX) // MOVQ _A_init_cap, CX
|
||||
self.call_go(_F_makeslice) // CALL_GO runtime.makeslice
|
||||
|
||||
/* pack into an interface */
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_len), _BX) // MOVQ _A_init_len, BX
|
||||
self.Emit("MOVQ", jit.Imm(_A_init_cap), _CX) // MOVQ _A_init_cap, CX
|
||||
self.call_go(_F_convTslice) // CALL_GO runtime.convTslice
|
||||
self.Emit("MOVQ", _AX, _R8) // MOVQ AX, R8
|
||||
|
||||
/* replace current state with an array */
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Imm(_S_arr), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr, ST.Vt[CX]
|
||||
self.Emit("MOVQ", _T_slice, _AX) // MOVQ _T_slice, AX
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SI, 0)) // MOVQ AX, (SI)
|
||||
self.WriteRecNotAX(2, _R8, jit.Ptr(_SI, 8), false) // MOVQ R8, 8(SI)
|
||||
|
||||
/* add a new slot for the first element */
|
||||
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
|
||||
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
|
||||
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
|
||||
self.Emit("MOVQ", jit.Ptr(_R8, 0), _AX) // MOVQ (R8), AX
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.WritePtrAX(3, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
|
||||
self.Emit("MOVQ", jit.Imm(_S_arr_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr_0, ST.Vt[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_OBJECT **/
|
||||
self.Link("_decode_V_OBJECT") // _decode_V_OBJECT:
|
||||
self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
|
||||
self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
|
||||
self.call_go(_F_makemap_small) // CALL_GO runtime.makemap_small
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Imm(_S_obj_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj_0, ST.Vt[CX]
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 0)) // MOVQ DX, (SI)
|
||||
self.WritePtrAX(4, jit.Ptr(_SI, 8), false) // MOVQ AX, 8(SI)
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_STRING **/
|
||||
self.Link("_decode_V_STRING") // _decode_V_STRING:
|
||||
self.Emit("MOVQ", _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
|
||||
self.Emit("MOVQ", _IC, _AX) // MOVQ IC, AX
|
||||
self.Emit("SUBQ", _CX, _AX) // SUBQ CX, AX
|
||||
|
||||
/* check for escapes */
|
||||
self.Emit("CMPQ", _VAR_ss_Ep, jit.Imm(-1)) // CMPQ ss.Ep, $-1
|
||||
self.Sjmp("JNE" , "_unquote") // JNE _unquote
|
||||
self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX
|
||||
self.Emit("LEAQ", jit.Sib(_IP, _CX, 1, 0), _R8) // LEAQ (IP)(CX), R8
|
||||
self.Byte(0x48, 0x8d, 0x3d) // LEAQ (PC), DI
|
||||
self.Sref("_copy_string_end", 4)
|
||||
self.Emit("BTQ", jit.Imm(_F_copy_string), _VAR_df)
|
||||
self.Sjmp("JC", "copy_string")
|
||||
self.Link("_copy_string_end")
|
||||
self.Emit("XORL", _DX, _DX)
|
||||
|
||||
/* strings with no escape sequences */
|
||||
self.Link("_noescape") // _noescape:
|
||||
self.Emit("MOVL", jit.Imm(_S_omask_key), _DI) // MOVL _S_omask, DI
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _SI) // MOVQ ST.Vt[CX], SI
|
||||
self.Emit("BTQ" , _SI, _DI) // BTQ SI, DI
|
||||
self.Sjmp("JC" , "_object_key") // JC _object_key
|
||||
|
||||
/* check for pre-packed strings, avoid 1 allocation */
|
||||
self.Emit("TESTQ", _DX, _DX) // TESTQ DX, DX
|
||||
self.Sjmp("JNZ" , "_packed_str") // JNZ _packed_str
|
||||
self.Emit("MOVQ" , _AX, _BX) // MOVQ AX, BX
|
||||
self.Emit("MOVQ" , _R8, _AX) // MOVQ R8, AX
|
||||
self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
|
||||
self.Emit("MOVQ" , _AX, _R9) // MOVQ AX, R9
|
||||
|
||||
/* packed string already in R9 */
|
||||
self.Link("_packed_str") // _packed_str:
|
||||
self.Emit("MOVQ", _T_string, _R8) // MOVQ _T_string, R8
|
||||
self.Emit("MOVQ", _VAR_ss_Iv, _DI) // MOVQ ss.Iv, DI
|
||||
self.Emit("SUBQ", jit.Imm(1), _DI) // SUBQ $1, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/* the string is an object key, get the map */
|
||||
self.Link("_object_key")
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
|
||||
/* add a new delimiter */
|
||||
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
|
||||
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
|
||||
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.Emit("MOVQ", jit.Imm(_S_obj_delim), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj_delim, ST.Vt[CX]
|
||||
|
||||
/* add a new slot in the map */
|
||||
self.Emit("MOVQ", _AX, _DI) // MOVQ AX, DI
|
||||
self.Emit("MOVQ", _T_map, _AX) // MOVQ _T_map, AX
|
||||
self.Emit("MOVQ", _SI, _BX) // MOVQ SI, BX
|
||||
self.Emit("MOVQ", _R8, _CX) // MOVQ R9, CX
|
||||
self.call_go(_F_mapassign_faststr) // CALL_GO runtime.mapassign_faststr
|
||||
|
||||
/* add to the pointer stack */
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.WritePtrAX(6, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* allocate memory to store the string header and unquoted result */
|
||||
self.Link("_unquote") // _unquote:
|
||||
self.Emit("ADDQ", jit.Imm(15), _AX) // ADDQ $15, AX
|
||||
self.Emit("MOVQ", _T_byte, _BX) // MOVQ _T_byte, BX
|
||||
self.Emit("MOVB", jit.Imm(0), _CX) // MOVB $0, CX
|
||||
self.call_go(_F_mallocgc) // CALL_GO runtime.mallocgc
|
||||
self.Emit("MOVQ", _AX, _R9) // MOVQ AX, R9
|
||||
|
||||
/* prepare the unquoting parameters */
|
||||
self.Emit("MOVQ" , _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
|
||||
self.Emit("LEAQ" , jit.Sib(_IP, _CX, 1, 0), _DI) // LEAQ (IP)(CX), DI
|
||||
self.Emit("NEGQ" , _CX) // NEGQ CX
|
||||
self.Emit("LEAQ" , jit.Sib(_IC, _CX, 1, -1), _SI) // LEAQ -1(IC)(CX), SI
|
||||
self.Emit("LEAQ" , jit.Ptr(_R9, 16), _DX) // LEAQ 16(R8), DX
|
||||
self.Emit("LEAQ" , _VAR_ss_Ep, _CX) // LEAQ ss.Ep, CX
|
||||
self.Emit("XORL" , _R8, _R8) // XORL R8, R8
|
||||
self.Emit("BTQ" , jit.Imm(_F_disable_urc), _VAR_df) // BTQ ${_F_disable_urc}, fv
|
||||
self.Emit("SETCC", _R8) // SETCC R8
|
||||
self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8
|
||||
|
||||
/* unquote the string, with R9 preserved */
|
||||
self.Emit("MOVQ", _R9, _VAR_R9) // SAVE R9
|
||||
self.call_c(_F_unquote) // CALL unquote
|
||||
self.Emit("MOVQ", _VAR_R9, _R9) // LOAD R9
|
||||
|
||||
/* check for errors */
|
||||
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
|
||||
self.Sjmp("JS" , "_unquote_error") // JS _unquote_error
|
||||
self.Emit("MOVL" , jit.Imm(1), _DX) // MOVL $1, DX
|
||||
self.Emit("LEAQ" , jit.Ptr(_R9, 16), _R8) // ADDQ $16, R8
|
||||
self.Emit("MOVQ" , _R8, jit.Ptr(_R9, 0)) // MOVQ R8, (R9)
|
||||
self.Emit("MOVQ" , _AX, jit.Ptr(_R9, 8)) // MOVQ AX, 8(R9)
|
||||
self.Sjmp("JMP" , "_noescape") // JMP _noescape
|
||||
|
||||
/** V_DOUBLE **/
|
||||
self.Link("_decode_V_DOUBLE") // _decode_V_DOUBLE:
|
||||
self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
|
||||
self.Sjmp("JC" , "_use_number") // JC _use_number
|
||||
self.Emit("MOVSD", _VAR_ss_Dv, _X0) // MOVSD ss.Dv, X0
|
||||
self.Sjmp("JMP" , "_use_float64") // JMP _use_float64
|
||||
|
||||
/** V_INTEGER **/
|
||||
self.Link("_decode_V_INTEGER") // _decode_V_INTEGER:
|
||||
self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
|
||||
self.Sjmp("JC" , "_use_number") // JC _use_number
|
||||
self.Emit("BTQ" , jit.Imm(_F_use_int64), _VAR_df) // BTQ _F_use_int64, df
|
||||
self.Sjmp("JC" , "_use_int64") // JC _use_int64
|
||||
//TODO: use ss.Dv directly
|
||||
self.Emit("MOVSD", _VAR_ss_Dv, _X0) // MOVSD ss.Dv, X0
|
||||
|
||||
/* represent numbers as `float64` */
|
||||
self.Link("_use_float64") // _use_float64:
|
||||
self.Emit("MOVQ" , _X0, _AX) // MOVQ X0, AX
|
||||
self.call_go(_F_convT64) // CALL_GO runtime.convT64
|
||||
self.Emit("MOVQ" , _T_float64, _R8) // MOVQ _T_float64, R8
|
||||
self.Emit("MOVQ" , _AX, _R9) // MOVQ AX, R9
|
||||
self.Emit("MOVQ" , _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/* represent numbers as `json.Number` */
|
||||
self.Link("_use_number") // _use_number
|
||||
self.Emit("MOVQ", _VAR_ss_Ep, _AX) // MOVQ ss.Ep, AX
|
||||
self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _SI) // LEAQ (IP)(AX), SI
|
||||
self.Emit("MOVQ", _IC, _CX) // MOVQ IC, CX
|
||||
self.Emit("SUBQ", _AX, _CX) // SUBQ AX, CX
|
||||
self.Emit("MOVQ", _SI, _AX) // MOVQ SI, AX
|
||||
self.Emit("MOVQ", _CX, _BX) // MOVQ CX, BX
|
||||
self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
|
||||
self.Emit("MOVQ", _T_number, _R8) // MOVQ _T_number, R8
|
||||
self.Emit("MOVQ", _AX, _R9) // MOVQ AX, R9
|
||||
self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/* represent numbers as `int64` */
|
||||
self.Link("_use_int64") // _use_int64:
|
||||
self.Emit("MOVQ", _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
|
||||
self.call_go(_F_convT64) // CALL_GO runtime.convT64
|
||||
self.Emit("MOVQ", _T_int64, _R8) // MOVQ _T_int64, R8
|
||||
self.Emit("MOVQ", _AX, _R9) // MOVQ AX, R9
|
||||
self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
|
||||
self.Sjmp("JMP" , "_set_value") // JMP _set_value
|
||||
|
||||
/** V_KEY_SEP **/
|
||||
self.Link("_decode_V_KEY_SEP") // _decode_V_KEY_SEP:
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("CMPQ", _AX, jit.Imm(_S_obj_delim)) // CMPQ AX, _S_obj_delim
|
||||
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX]
|
||||
self.Emit("MOVQ", jit.Imm(_S_obj), jit.Sib(_ST, _CX, 8, _ST_Vt - 8)) // MOVQ _S_obj, ST.Vt[CX - 1]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_ELEM_SEP **/
|
||||
self.Link("_decode_V_ELEM_SEP") // _decode_V_ELEM_SEP:
|
||||
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(_S_arr))
|
||||
self.Sjmp("JE" , "_array_sep") // JZ _next
|
||||
self.Emit("CMPQ" , _AX, jit.Imm(_S_obj)) // CMPQ _AX, _S_arr
|
||||
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("MOVQ" , jit.Imm(_S_obj_sep), jit.Sib(_ST, _CX, 8, _ST_Vt))
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* arrays */
|
||||
self.Link("_array_sep")
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _DX) // MOVQ 8(SI), DX
|
||||
self.Emit("CMPQ", _DX, jit.Ptr(_SI, 16)) // CMPQ DX, 16(SI)
|
||||
self.Sjmp("JAE" , "_array_more") // JAE _array_more
|
||||
|
||||
/* add a slot for the new element */
|
||||
self.Link("_array_append") // _array_append:
|
||||
self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_SI, 8)) // ADDQ $1, 8(SI)
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 0), _SI) // MOVQ (SI), SI
|
||||
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
|
||||
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
|
||||
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
|
||||
self.Emit("SHLQ", jit.Imm(1), _DX) // SHLQ $1, DX
|
||||
self.Emit("LEAQ", jit.Sib(_SI, _DX, 8, 0), _SI) // LEAQ (SI)(DX*8), SI
|
||||
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
|
||||
self.WriteRecNotAX(7 , _SI, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ SI, ST.Vp[CX]
|
||||
self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX}
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_ARRAY_END **/
|
||||
self.Link("_decode_V_ARRAY_END") // _decode_V_ARRAY_END:
|
||||
self.Emit("XORL", _DX, _DX) // XORL DX, DX
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("CMPQ", _AX, jit.Imm(_S_arr_0)) // CMPQ AX, _S_arr_0
|
||||
self.Sjmp("JE" , "_first_item") // JE _first_item
|
||||
self.Emit("CMPQ", _AX, jit.Imm(_S_arr)) // CMPQ AX, _S_arr
|
||||
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
|
||||
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* first element of an array */
|
||||
self.Link("_first_item") // _first_item:
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("SUBQ", jit.Imm(2), jit.Ptr(_ST, _ST_Sp)) // SUBQ $2, ST.Sp
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp - 8), _SI) // MOVQ ST.Vp[CX - 1], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp - 8)) // MOVQ DX, ST.Vp[CX - 1]
|
||||
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/** V_OBJECT_END **/
|
||||
self.Link("_decode_V_OBJECT_END") // _decode_V_OBJECT_END:
|
||||
self.Emit("MOVL", jit.Imm(_S_omask_end), _DI) // MOVL _S_omask, DI
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
|
||||
self.Emit("BTQ" , _AX, _DI)
|
||||
self.Sjmp("JNC" , "_invalid_char") // JNE _invalid_char
|
||||
self.Emit("XORL", _AX, _AX) // XORL AX, AX
|
||||
self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
|
||||
self.Emit("MOVQ", _AX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ AX, ST.Vp[CX]
|
||||
self.Sjmp("JMP" , "_next") // JMP _next
|
||||
|
||||
/* return from decoder */
|
||||
self.Link("_return") // _return:
|
||||
self.Emit("XORL", _EP, _EP) // XORL EP, EP
|
||||
self.Emit("MOVQ", _EP, jit.Ptr(_ST, _ST_Vp)) // MOVQ EP, ST.Vp[0]
|
||||
self.Link("_epilogue") // _epilogue:
|
||||
self.Emit("SUBQ", jit.Imm(_FsmOffset), _ST) // SUBQ _FsmOffset, _ST
|
||||
self.Emit("MOVQ", jit.Ptr(_SP, _VD_offs), _BP) // MOVQ _VD_offs(SP), BP
|
||||
self.Emit("ADDQ", jit.Imm(_VD_size), _SP) // ADDQ $_VD_size, SP
|
||||
self.Emit("RET") // RET
|
||||
|
||||
/* array expand */
|
||||
self.Link("_array_more") // _array_more:
|
||||
self.Emit("MOVQ" , _T_eface, _AX) // MOVQ _T_eface, AX
|
||||
self.Emit("MOVQ" , jit.Ptr(_SI, 0), _BX) // MOVQ (SI), BX
|
||||
self.Emit("MOVQ" , jit.Ptr(_SI, 8), _CX) // MOVQ 8(SI), CX
|
||||
self.Emit("MOVQ" , jit.Ptr(_SI, 16), _DI) // MOVQ 16(SI), DI
|
||||
self.Emit("MOVQ" , _DI, _SI) // MOVQ DI, 24(SP)
|
||||
self.Emit("SHLQ" , jit.Imm(1), _SI) // SHLQ $1, SI
|
||||
self.call_go(_F_growslice) // CALL_GO runtime.growslice
|
||||
self.Emit("MOVQ" , _AX, _DI) // MOVQ AX, DI
|
||||
self.Emit("MOVQ" , _BX, _DX) // MOVQ BX, DX
|
||||
self.Emit("MOVQ" , _CX, _AX) // MOVQ CX, AX
|
||||
|
||||
/* update the slice */
|
||||
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
|
||||
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
|
||||
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
|
||||
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
|
||||
self.Emit("MOVQ", _AX, jit.Ptr(_SI, 16)) // MOVQ AX, 16(AX)
|
||||
self.WriteRecNotAX(8 , _DI, jit.Ptr(_SI, 0), false) // MOVQ R10, (SI)
|
||||
self.Sjmp("JMP" , "_array_append") // JMP _array_append
|
||||
|
||||
/* copy string */
|
||||
self.Link("copy_string") // pointer: R8, length: AX, return addr: DI
|
||||
self.Emit("MOVQ", _R8, _VAR_cs_p)
|
||||
self.Emit("MOVQ", _AX, _VAR_cs_n)
|
||||
self.Emit("MOVQ", _DI, _VAR_cs_LR)
|
||||
self.Emit("MOVQ", _AX, _BX)
|
||||
self.Emit("MOVQ", _AX, _CX)
|
||||
self.Emit("MOVQ", _T_byte, _AX)
|
||||
self.call_go(_F_makeslice)
|
||||
self.Emit("MOVQ", _AX, _VAR_cs_d)
|
||||
self.Emit("MOVQ", _VAR_cs_p, _BX)
|
||||
self.Emit("MOVQ", _VAR_cs_n, _CX)
|
||||
self.call_go(_F_memmove)
|
||||
self.Emit("MOVQ", _VAR_cs_d, _R8)
|
||||
self.Emit("MOVQ", _VAR_cs_n, _AX)
|
||||
self.Emit("MOVQ", _VAR_cs_LR, _DI)
|
||||
self.Rjmp("JMP", _DI)
|
||||
|
||||
/* error handlers */
|
||||
self.Link("_stack_overflow")
|
||||
self.Emit("MOVL" , _E_recurse, _EP) // MOVQ _E_recurse, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
self.Link("_vtype_error") // _vtype_error:
|
||||
self.Emit("MOVQ" , _DI, _IC) // MOVQ DI, IC
|
||||
self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
self.Link("_invalid_char") // _invalid_char:
|
||||
self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
|
||||
self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
|
||||
self.Sjmp("JMP" , "_error") // JMP _error
|
||||
self.Link("_unquote_error") // _unquote_error:
|
||||
self.Emit("MOVQ" , _VAR_ss_Iv, _IC) // MOVQ ss.Iv, IC
|
||||
self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
|
||||
self.Link("_parsing_error") // _parsing_error:
|
||||
self.Emit("NEGQ" , _AX) // NEGQ AX
|
||||
self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP
|
||||
self.Link("_error") // _error:
|
||||
self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
|
||||
self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
|
||||
self.Sjmp("JMP" , "_epilogue") // JMP _epilogue
|
||||
|
||||
/* invalid value type, never returns */
|
||||
self.Link("_invalid_vtype")
|
||||
self.call_go(_F_invalid_vtype) // CALL invalid_type
|
||||
self.Emit("UD2") // UD2
|
||||
|
||||
/* switch jump table */
|
||||
self.Link("_switch_table") // _switch_table:
|
||||
self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
|
||||
self.Sref("_decode_V_NULL", -4) // SREF &_decode_V_NULL, $-4
|
||||
self.Sref("_decode_V_TRUE", -8) // SREF &_decode_V_TRUE, $-8
|
||||
self.Sref("_decode_V_FALSE", -12) // SREF &_decode_V_FALSE, $-12
|
||||
self.Sref("_decode_V_ARRAY", -16) // SREF &_decode_V_ARRAY, $-16
|
||||
self.Sref("_decode_V_OBJECT", -20) // SREF &_decode_V_OBJECT, $-20
|
||||
self.Sref("_decode_V_STRING", -24) // SREF &_decode_V_STRING, $-24
|
||||
self.Sref("_decode_V_DOUBLE", -28) // SREF &_decode_V_DOUBLE, $-28
|
||||
self.Sref("_decode_V_INTEGER", -32) // SREF &_decode_V_INTEGER, $-32
|
||||
self.Sref("_decode_V_KEY_SEP", -36) // SREF &_decode_V_KEY_SEP, $-36
|
||||
self.Sref("_decode_V_ELEM_SEP", -40) // SREF &_decode_V_ELEM_SEP, $-40
|
||||
self.Sref("_decode_V_ARRAY_END", -44) // SREF &_decode_V_ARRAY_END, $-44
|
||||
self.Sref("_decode_V_OBJECT_END", -48) // SREF &_decode_V_OBJECT_END, $-48
|
||||
|
||||
/* fast character lookup table */
|
||||
self.Link("_decode_tab") // _decode_tab:
|
||||
self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
|
||||
|
||||
/* generate rest of the tabs */
|
||||
for i := 1; i < 256; i++ {
|
||||
if to, ok := _R_tab[i]; ok {
|
||||
self.Sref(to, -int64(i) * 4)
|
||||
} else {
|
||||
self.Byte(0x00, 0x00, 0x00, 0x00)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
|
||||
self.Emit("MOVQ", _V_writeBarrier, _R9)
|
||||
self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
if saveDI {
|
||||
self.save(_DI)
|
||||
}
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.call(_F_gcWriteBarrierAX)
|
||||
if saveDI {
|
||||
self.load(_DI)
|
||||
}
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", _AX, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
||||
|
||||
func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
|
||||
if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
|
||||
panic("rec contains AX!")
|
||||
}
|
||||
self.Emit("MOVQ", _V_writeBarrier, _AX)
|
||||
self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
|
||||
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, _AX)
|
||||
if saveDI {
|
||||
self.save(_DI)
|
||||
}
|
||||
self.Emit("LEAQ", rec, _DI)
|
||||
self.call(_F_gcWriteBarrierAX)
|
||||
if saveDI {
|
||||
self.load(_DI)
|
||||
}
|
||||
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
self.Emit("MOVQ", ptr, rec)
|
||||
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
|
||||
}
|
||||
|
||||
/** Generic Decoder **/
|
||||
|
||||
var (
|
||||
_subr_decode_value = new(_ValueDecoder).build()
|
||||
)
|
||||
|
||||
//go:nosplit
|
||||
func invalid_vtype(vt types.ValueType) {
|
||||
throw(fmt.Sprintf("invalid value type: %d", vt))
|
||||
}
|
37
vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_go117_test.s
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
// +build go1.17,!go1.21
|
||||
|
||||
//
|
||||
// Copyright 2021 ByteDance Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
#include "go_asm.h"
|
||||
#include "funcdata.h"
|
||||
#include "textflag.h"
|
||||
|
||||
TEXT ·decodeValueStub(SB), NOSPLIT, $0 - 72
|
||||
NO_LOCAL_POINTERS
|
||||
PXOR X0, X0
|
||||
MOVOU X0, rv+48(FP)
|
||||
MOVQ st+0(FP) , R13
|
||||
MOVQ sp+8(FP) , R10
|
||||
MOVQ sn+16(FP), R12
|
||||
MOVQ ic+24(FP), R11
|
||||
MOVQ vp+32(FP), R15
|
||||
MOVQ df+40(FP), AX
|
||||
MOVQ ·_subr_decode_value(SB), BX
|
||||
CALL BX
|
||||
MOVQ R11, rp+48(FP)
|
||||
MOVQ BX, ex+56(FP)
|
||||
RET
|
37
vendor/github.com/bytedance/sonic/internal/decoder/generic_amd64_test.s
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
// +build go1.15,!go1.17
|
||||
|
||||
//
|
||||
// Copyright 2021 ByteDance Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
//
|
||||
|
||||
#include "go_asm.h"
|
||||
#include "funcdata.h"
|
||||
#include "textflag.h"
|
||||
|
||||
TEXT ·decodeValueStub(SB), NOSPLIT, $0 - 72
|
||||
NO_LOCAL_POINTERS
|
||||
PXOR X0, X0
|
||||
MOVOU X0, rv+48(FP)
|
||||
MOVQ st+0(FP), BX
|
||||
MOVQ sp+8(FP), R12
|
||||
MOVQ sn+16(FP), R13
|
||||
MOVQ ic+24(FP), R14
|
||||
MOVQ vp+32(FP), R15
|
||||
MOVQ df+40(FP), R10
|
||||
MOVQ ·_subr_decode_value(SB), AX
|
||||
CALL AX
|
||||
MOVQ R14, rp+48(FP)
|
||||
MOVQ R11, ex+56(FP)
|
||||
RET
|
143
vendor/github.com/bytedance/sonic/internal/decoder/pools.go
generated
vendored
Normal file
@ -0,0 +1,143 @@
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
`sync`
|
||||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/caching`
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
const (
|
||||
_MinSlice = 2
|
||||
_MaxStack = 4096 // 4k slots
|
||||
_MaxStackBytes = _MaxStack * _PtrBytes
|
||||
_MaxDigitNums = 800 // used in atof fallback algorithm
|
||||
)
|
||||
|
||||
const (
|
||||
_PtrBytes = _PTR_SIZE / 8
|
||||
_FsmOffset = (_MaxStack + 1) * _PtrBytes
|
||||
_DbufOffset = _FsmOffset + int64(unsafe.Sizeof(types.StateMachine{})) + types.MAX_RECURSE * _PtrBytes
|
||||
_StackSize = unsafe.Sizeof(_Stack{})
|
||||
)
|
||||
|
||||
var (
|
||||
stackPool = sync.Pool{}
|
||||
valueCache = []unsafe.Pointer(nil)
|
||||
fieldCache = []*caching.FieldMap(nil)
|
||||
fieldCacheMux = sync.Mutex{}
|
||||
programCache = caching.CreateProgramCache()
|
||||
)
|
||||
|
||||
type _Stack struct {
|
||||
sp uintptr
|
||||
sb [_MaxStack]unsafe.Pointer
|
||||
mm types.StateMachine
|
||||
vp [types.MAX_RECURSE]unsafe.Pointer
|
||||
dp [_MaxDigitNums]byte
|
||||
}
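
// Layout note (derived from the constants above): _FsmOffset is the byte offset of mm
// within _Stack (sp plus the _MaxStack pointers of sb), and _DbufOffset is the offset of
// the dp digit buffer (mm plus the vp slots). The generic value decoder depends on this
// layout when it emits "ADDQ _FsmOffset, ST" and "LEAQ _DbufOffset(ST), AX".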
|
||||
|
||||
type _Decoder func(
|
||||
s string,
|
||||
i int,
|
||||
vp unsafe.Pointer,
|
||||
sb *_Stack,
|
||||
fv uint64,
|
||||
sv string, // DO NOT pass a value in this argument; it is only used as the local _VAR_sv
vk unsafe.Pointer, // DO NOT pass a value in this argument; it is only used as the local _VAR_vk
|
||||
) (int, error)
|
||||
|
||||
var _KeepAlive struct {
|
||||
s string
|
||||
i int
|
||||
vp unsafe.Pointer
|
||||
sb *_Stack
|
||||
fv uint64
|
||||
sv string
|
||||
vk unsafe.Pointer
|
||||
|
||||
ret int
|
||||
err error
|
||||
|
||||
frame_decoder [_FP_offs]byte
|
||||
frame_generic [_VD_offs]byte
|
||||
}
|
||||
|
||||
var (
|
||||
argPtrs = []bool{true, false, false, true, true, false, true, false, true}
|
||||
localPtrs = []bool{}
|
||||
)
|
||||
|
||||
var (
|
||||
argPtrs_generic = []bool{true}
|
||||
localPtrs_generic = []bool{}
|
||||
)
|
||||
|
||||
func newStack() *_Stack {
|
||||
if ret := stackPool.Get(); ret == nil {
|
||||
return new(_Stack)
|
||||
} else {
|
||||
return ret.(*_Stack)
|
||||
}
|
||||
}
|
||||
|
||||
func resetStack(p *_Stack) {
|
||||
memclrNoHeapPointers(unsafe.Pointer(p), _StackSize)
|
||||
}
|
||||
|
||||
func freeStack(p *_Stack) {
|
||||
p.sp = 0
|
||||
stackPool.Put(p)
|
||||
}
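
// A minimal lifecycle sketch (illustrative only; the real call sites are the public
// Decoder entry points): acquire a stack, hand it to a compiled decoder, and recycle it.
// resetStack zeroes the whole block, which drops any stale pointers before pooling.
//
//	sb := newStack()
//	ret, err := decodeTypedPointer(s, 0, vt, vp, sb, 0) // s, vt, vp as in primitives.go
//	resetStack(sb)
//	freeStack(sb)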
|
||||
|
||||
func freezeValue(v unsafe.Pointer) uintptr {
|
||||
valueCache = append(valueCache, v)
|
||||
return uintptr(v)
|
||||
}
|
||||
|
||||
func freezeFields(v *caching.FieldMap) int64 {
|
||||
fieldCacheMux.Lock()
|
||||
fieldCache = append(fieldCache, v)
|
||||
fieldCacheMux.Unlock()
|
||||
return referenceFields(v)
|
||||
}
|
||||
|
||||
func referenceFields(v *caching.FieldMap) int64 {
|
||||
return int64(uintptr(unsafe.Pointer(v)))
|
||||
}
|
||||
|
||||
func makeDecoder(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
|
||||
if pp, err := newCompiler().compile(vt.Pack()); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return newAssembler(pp).Load(), nil
|
||||
}
|
||||
}
|
||||
|
||||
func findOrCompile(vt *rt.GoType) (_Decoder, error) {
|
||||
if val := programCache.Get(vt); val != nil {
|
||||
return val.(_Decoder), nil
|
||||
} else if ret, err := programCache.Compute(vt, makeDecoder); err == nil {
|
||||
return ret.(_Decoder), nil
|
||||
} else {
|
||||
return nil, err
|
||||
}
|
||||
}
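
// Usage sketch (mirrors decodeTypedPointer in primitives.go): resolve the decoder for a
// type's *rt.GoType once, then call it with the input, target pointer, stack and flags.
//
//	if fn, err := findOrCompile(vt); err == nil {
//	    ret, err := fn(s, i, vp, sb, fv, "", nil)
//	    ...
//	}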
|
46
vendor/github.com/bytedance/sonic/internal/decoder/primitives.go
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
`encoding`
|
||||
`encoding/json`
|
||||
`unsafe`
|
||||
|
||||
`github.com/bytedance/sonic/internal/native`
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
func decodeTypedPointer(s string, i int, vt *rt.GoType, vp unsafe.Pointer, sb *_Stack, fv uint64) (int, error) {
|
||||
if fn, err := findOrCompile(vt); err != nil {
|
||||
return 0, err
|
||||
} else {
|
||||
rt.MoreStack(_FP_size + _VD_size + native.MaxFrameSize)
|
||||
rt.StopProf()
|
||||
ret, err := fn(s, i, vp, sb, fv, "", nil)
|
||||
rt.StartProf()
|
||||
return ret, err
|
||||
}
|
||||
}
|
||||
|
||||
func decodeJsonUnmarshaler(vv interface{}, s string) error {
|
||||
return vv.(json.Unmarshaler).UnmarshalJSON(rt.Str2Mem(s))
|
||||
}
|
||||
|
||||
func decodeTextUnmarshaler(vv interface{}, s string) error {
|
||||
return vv.(encoding.TextUnmarshaler).UnmarshalText(rt.Str2Mem(s))
|
||||
}
|
217
vendor/github.com/bytedance/sonic/internal/decoder/stream.go
generated
vendored
Normal file
@ -0,0 +1,217 @@
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
`bytes`
|
||||
`io`
|
||||
`sync`
|
||||
|
||||
`github.com/bytedance/sonic/option`
|
||||
`github.com/bytedance/sonic/internal/native/types`
|
||||
)
|
||||
|
||||
var (
|
||||
minLeftBufferShift uint = 1
|
||||
)
|
||||
|
||||
// StreamDecoder is the decoder context object for streaming input.
|
||||
type StreamDecoder struct {
|
||||
r io.Reader
|
||||
buf []byte
|
||||
scanp int
|
||||
scanned int64
|
||||
err error
|
||||
Decoder
|
||||
}
|
||||
|
||||
var bufPool = sync.Pool{
|
||||
New: func () interface{} {
|
||||
return make([]byte, 0, option.DefaultDecoderBufferSize)
|
||||
},
|
||||
}
|
||||
|
||||
// NewStreamDecoder adapts to encoding/json.NewDecoder API.
|
||||
//
|
||||
// NewStreamDecoder returns a new decoder that reads from r.
|
||||
func NewStreamDecoder(r io.Reader) *StreamDecoder {
|
||||
return &StreamDecoder{r : r}
|
||||
}
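
// Usage sketch (hedged example; r and Foo are placeholders, not part of this package):
//
//	dec := NewStreamDecoder(r) // r is any io.Reader producing JSON values
//	for dec.More() {
//	    var v Foo
//	    if err := dec.Decode(&v); err != nil {
//	        break // io.EOF or the first recorded error
//	    }
//	}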
|
||||
|
||||
// Decode decodes the next JSON value from the input stream into val.
// Redundant bytes may be read and left in the buffer, to be used by the next call.
// Either an I/O error from the underlying io.Reader (except io.EOF)
// or a syntax error in the data will be recorded and stop subsequent decoding.
|
||||
func (self *StreamDecoder) Decode(val interface{}) (err error) {
|
||||
if self.err != nil {
|
||||
return self.err
|
||||
}
|
||||
|
||||
var buf = self.buf[self.scanp:]
|
||||
var p = 0
|
||||
var recycle bool
|
||||
if cap(buf) == 0 {
|
||||
buf = bufPool.Get().([]byte)
|
||||
recycle = true
|
||||
}
|
||||
|
||||
var first = true
|
||||
var repeat = true
|
||||
read_more:
|
||||
for {
|
||||
l := len(buf)
|
||||
realloc(&buf)
|
||||
n, err := self.r.Read(buf[l:cap(buf)])
|
||||
buf = buf[:l+n]
|
||||
if err != nil {
|
||||
repeat = false
|
||||
if err == io.EOF {
|
||||
if len(buf) == 0 {
|
||||
return err
|
||||
}
|
||||
break
|
||||
}
|
||||
self.err = err
|
||||
return err
|
||||
}
|
||||
if n > 0 || first {
|
||||
break
|
||||
}
|
||||
}
|
||||
first = false
|
||||
|
||||
l := len(buf)
|
||||
if l > 0 {
|
||||
self.Decoder.Reset(string(buf))
|
||||
err = self.Decoder.Decode(val)
|
||||
if err != nil {
|
||||
if repeat && self.repeatable(err) {
|
||||
goto read_more
|
||||
}
|
||||
self.err = err
|
||||
}
|
||||
|
||||
p = self.Decoder.Pos()
|
||||
self.scanned += int64(p)
|
||||
self.scanp = 0
|
||||
}
|
||||
|
||||
if l > p {
|
||||
// remaining undecoded bytes, so copy them into self.buf
|
||||
self.buf = append(self.buf[:0], buf[p:]...)
|
||||
} else {
|
||||
self.buf = nil
|
||||
recycle = true
|
||||
}
|
||||
|
||||
if recycle {
|
||||
buf = buf[:0]
|
||||
bufPool.Put(buf)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (self StreamDecoder) repeatable(err error) bool {
|
||||
if ee, ok := err.(SyntaxError); ok &&
|
||||
(ee.Code == types.ERR_EOF || (ee.Code == types.ERR_INVALID_CHAR && self.i >= len(self.s)-1)) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// InputOffset returns the input stream byte offset of the current decoder position.
|
||||
// The offset gives the location of the end of the most recently returned token and the beginning of the next token.
|
||||
func (self *StreamDecoder) InputOffset() int64 {
|
||||
return self.scanned + int64(self.scanp)
|
||||
}
|
||||
|
||||
// Buffered returns a reader of the data remaining in the Decoder's buffer.
|
||||
// The reader is valid until the next call to Decode.
|
||||
func (self *StreamDecoder) Buffered() io.Reader {
|
||||
return bytes.NewReader(self.buf[self.scanp:])
|
||||
}
|
||||
|
||||
// More reports whether there is another element in the
|
||||
// current array or object being parsed.
|
||||
func (self *StreamDecoder) More() bool {
|
||||
if self.err != nil {
|
||||
return false
|
||||
}
|
||||
c, err := self.peek()
|
||||
return err == nil && c != ']' && c != '}'
|
||||
}
|
||||
|
||||
func (self *StreamDecoder) peek() (byte, error) {
|
||||
var err error
|
||||
for {
|
||||
for i := self.scanp; i < len(self.buf); i++ {
|
||||
c := self.buf[i]
|
||||
if isSpace(c) {
|
||||
continue
|
||||
}
|
||||
self.scanp = i
|
||||
return c, nil
|
||||
}
|
||||
// buffer has been scanned, now report any error
|
||||
if err != nil {
|
||||
if err != io.EOF {
|
||||
self.err = err
|
||||
}
|
||||
return 0, err
|
||||
}
|
||||
err = self.refill()
|
||||
}
|
||||
}
|
||||
|
||||
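// isSpace reports whether c is JSON whitespace; SPACE_MASK is assumed to be a bitmask with
// the bits for ' ', '\t', '\n' and '\r' set, making this a branch-free membership test.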
func isSpace(c byte) bool {
|
||||
return types.SPACE_MASK & (1 << c) != 0
|
||||
}
|
||||
|
||||
func (self *StreamDecoder) refill() error {
|
||||
// Make room to read more into the buffer.
|
||||
// First slide down data already consumed.
|
||||
if self.scanp > 0 {
|
||||
self.scanned += int64(self.scanp)
|
||||
n := copy(self.buf, self.buf[self.scanp:])
|
||||
self.buf = self.buf[:n]
|
||||
self.scanp = 0
|
||||
}
|
||||
|
||||
// Grow buffer if not large enough.
|
||||
realloc(&self.buf)
|
||||
|
||||
// Read. Delay error for next iteration (after scan).
|
||||
n, err := self.r.Read(self.buf[len(self.buf):cap(self.buf)])
|
||||
self.buf = self.buf[0 : len(self.buf)+n]
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func realloc(buf *[]byte) {
|
||||
l := uint(len(*buf))
|
||||
c := uint(cap(*buf))
|
||||
if c - l <= c >> minLeftBufferShift {
|
||||
e := l+(l>>minLeftBufferShift)
|
||||
if e < option.DefaultDecoderBufferSize {
|
||||
e = option.DefaultDecoderBufferSize
|
||||
}
|
||||
tmp := make([]byte, l, e)
|
||||
copy(tmp, *buf)
|
||||
*buf = tmp
|
||||
}
|
||||
}
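
// Growth policy (a worked example of the check above, with minLeftBufferShift = 1): the
// buffer is reallocated only when the free space cap-len falls to cap/2 or less; the new
// capacity is len + len/2, raised to option.DefaultDecoderBufferSize if smaller. E.g. with
// len = 900 and cap = 1024: free = 124 <= 512, so grow to max(1350, the default size).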
|
||||
|
111
vendor/github.com/bytedance/sonic/internal/decoder/stubs_go115.go
generated
vendored
Normal file
@ -0,0 +1,111 @@
|
||||
// +build go1.15,!go1.20
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
`unsafe`
|
||||
`reflect`
|
||||
|
||||
_ `github.com/chenzhuoyu/base64x`
|
||||
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
//go:linkname _subr__b64decode github.com/chenzhuoyu/base64x._subr__b64decode
|
||||
var _subr__b64decode uintptr
|
||||
|
||||
// runtime.maxElementSize
|
||||
const _max_map_element_size uintptr = 128
|
||||
|
||||
func mapfast(vt reflect.Type) bool {
|
||||
return vt.Elem().Size() <= _max_map_element_size
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:linkname throw runtime.throw
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func throw(s string)
|
||||
|
||||
//go:linkname convT64 runtime.convT64
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func convT64(v uint64) unsafe.Pointer
|
||||
|
||||
//go:linkname convTslice runtime.convTslice
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func convTslice(v []byte) unsafe.Pointer
|
||||
|
||||
//go:linkname convTstring runtime.convTstring
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func convTstring(v string) unsafe.Pointer
|
||||
|
||||
//go:noescape
|
||||
//go:linkname memequal runtime.memequal
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func memequal(a unsafe.Pointer, b unsafe.Pointer, size uintptr) bool
|
||||
|
||||
//go:noescape
|
||||
//go:linkname memmove runtime.memmove
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
|
||||
|
||||
//go:linkname mallocgc runtime.mallocgc
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer
|
||||
|
||||
//go:linkname makeslice runtime.makeslice
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer
|
||||
|
||||
//go:noescape
|
||||
//go:linkname growslice runtime.growslice
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
|
||||
|
||||
//go:linkname makemap_small runtime.makemap_small
|
||||
func makemap_small() unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign runtime.mapassign
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mapassign(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign_fast32 runtime.mapassign_fast32
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mapassign_fast32(t *rt.GoType, h unsafe.Pointer, k uint32) unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign_fast64 runtime.mapassign_fast64
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mapassign_fast64(t *rt.GoType, h unsafe.Pointer, k uint64) unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign_fast64ptr runtime.mapassign_fast64ptr
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mapassign_fast64ptr(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign_faststr runtime.mapassign_faststr
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mapassign_faststr(t *rt.GoType, h unsafe.Pointer, s string) unsafe.Pointer
|
||||
|
||||
//go:nosplit
|
||||
//go:linkname memclrHasPointers runtime.memclrHasPointers
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func memclrHasPointers(ptr unsafe.Pointer, n uintptr)
|
||||
|
||||
//go:noescape
|
||||
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
|
111
vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go
generated
vendored
Normal file
@ -0,0 +1,111 @@
|
||||
// +build go1.20
|
||||
|
||||
/*
|
||||
* Copyright 2021 ByteDance Inc.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package decoder
|
||||
|
||||
import (
|
||||
`unsafe`
|
||||
`reflect`
|
||||
|
||||
_ `github.com/chenzhuoyu/base64x`
|
||||
|
||||
`github.com/bytedance/sonic/internal/rt`
|
||||
)
|
||||
|
||||
//go:linkname _subr__b64decode github.com/chenzhuoyu/base64x._subr__b64decode
|
||||
var _subr__b64decode uintptr
|
||||
|
||||
// runtime.maxElementSize
|
||||
const _max_map_element_size uintptr = 128
|
||||
|
||||
func mapfast(vt reflect.Type) bool {
|
||||
return vt.Elem().Size() <= _max_map_element_size
|
||||
}
|
||||
|
||||
//go:nosplit
|
||||
//go:linkname throw runtime.throw
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func throw(s string)
|
||||
|
||||
//go:linkname convT64 runtime.convT64
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func convT64(v uint64) unsafe.Pointer
|
||||
|
||||
//go:linkname convTslice runtime.convTslice
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func convTslice(v []byte) unsafe.Pointer
|
||||
|
||||
//go:linkname convTstring runtime.convTstring
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func convTstring(v string) unsafe.Pointer
|
||||
|
||||
//go:noescape
|
||||
//go:linkname memequal runtime.memequal
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func memequal(a unsafe.Pointer, b unsafe.Pointer, size uintptr) bool
|
||||
|
||||
//go:noescape
|
||||
//go:linkname memmove runtime.memmove
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
|
||||
|
||||
//go:linkname mallocgc runtime.mallocgc
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer
|
||||
|
||||
//go:linkname makeslice runtime.makeslice
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer
|
||||
|
||||
//go:noescape
|
||||
//go:linkname growslice reflect.growslice
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
|
||||
|
||||
//go:linkname makemap_small runtime.makemap_small
|
||||
func makemap_small() unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign runtime.mapassign
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mapassign(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign_fast32 runtime.mapassign_fast32
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mapassign_fast32(t *rt.GoType, h unsafe.Pointer, k uint32) unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign_fast64 runtime.mapassign_fast64
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mapassign_fast64(t *rt.GoType, h unsafe.Pointer, k uint64) unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign_fast64ptr runtime.mapassign_fast64ptr
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mapassign_fast64ptr(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
|
||||
|
||||
//go:linkname mapassign_faststr runtime.mapassign_faststr
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func mapassign_faststr(t *rt.GoType, h unsafe.Pointer, s string) unsafe.Pointer
|
||||
|
||||
//go:nosplit
|
||||
//go:linkname memclrHasPointers runtime.memclrHasPointers
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func memclrHasPointers(ptr unsafe.Pointer, n uintptr)
|
||||
|
||||
//go:noescape
|
||||
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
|
||||
//goland:noinspection GoUnusedParameter
|
||||
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
|
58
vendor/github.com/bytedance/sonic/internal/decoder/types.go
generated
vendored
Normal file
@ -0,0 +1,58 @@
/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package decoder

import (
    `encoding`
    `encoding/base64`
    `encoding/json`
    `reflect`
    `unsafe`

    `github.com/bytedance/sonic/internal/rt`
)

var (
    byteType                = reflect.TypeOf(byte(0))
    intType                 = reflect.TypeOf(int(0))
    int8Type                = reflect.TypeOf(int8(0))
    int16Type               = reflect.TypeOf(int16(0))
    int32Type               = reflect.TypeOf(int32(0))
    int64Type               = reflect.TypeOf(int64(0))
    uintType                = reflect.TypeOf(uint(0))
    uint8Type               = reflect.TypeOf(uint8(0))
    uint16Type              = reflect.TypeOf(uint16(0))
    uint32Type              = reflect.TypeOf(uint32(0))
    uint64Type              = reflect.TypeOf(uint64(0))
    float32Type             = reflect.TypeOf(float32(0))
    float64Type             = reflect.TypeOf(float64(0))
    stringType              = reflect.TypeOf("")
    bytesType               = reflect.TypeOf([]byte(nil))
    jsonNumberType          = reflect.TypeOf(json.Number(""))
    base64CorruptInputError = reflect.TypeOf(base64.CorruptInputError(0))
)

var (
    errorType                   = reflect.TypeOf((*error)(nil)).Elem()
    jsonUnmarshalerType         = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
    encodingTextUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
)

func rtype(t reflect.Type) (*rt.GoItab, *rt.GoType) {
    p := (*rt.GoIface)(unsafe.Pointer(&t))
    return p.Itab, (*rt.GoType)(p.Value)
}
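For context (this note and sketch are not part of the vendored file): rtype relies on the fact that a reflect.Type value is a non-empty interface, so in memory it is an itab pointer followed by a data pointer to the runtime type descriptor. A minimal standalone sketch under that assumption, using a hypothetical local iface struct in place of rt.GoIface:

package main

import (
    "fmt"
    "reflect"
    "unsafe"
)

// iface mirrors the assumed two-word layout of a non-empty interface value:
// an itab pointer plus a pointer to the concrete data (here, the runtime
// type descriptor behind reflect.Type).
type iface struct {
    itab  unsafe.Pointer
    value unsafe.Pointer
}

func main() {
    t := reflect.TypeOf(42)
    p := (*iface)(unsafe.Pointer(&t)) // reinterpret the interface header
    fmt.Printf("itab=%p type descriptor=%p\n", p.itab, p.value)
}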
39
vendor/github.com/bytedance/sonic/internal/decoder/utils.go
generated
vendored
Normal file
@ -0,0 +1,39 @@
/*
 * Copyright 2021 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package decoder

import (
    `unsafe`

    `github.com/bytedance/sonic/loader`
)

//go:nosplit
func pbool(v bool) uintptr {
    return freezeValue(unsafe.Pointer(&v))
}

//go:nosplit
func ptodec(p loader.Function) _Decoder {
    return *(*_Decoder)(unsafe.Pointer(&p))
}

func assert_eq(v int64, exp int64, msg string) {
    if v != exp {
        panic(msg)
    }
}
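A hedged usage sketch, not taken from the vendored source, of the assert_eq guard above: this is the kind of layout check a package could run at start-up to fail fast when a size assumption no longer holds. The names assertEq and the specific check are illustrative only:

package main

import "unsafe"

// assertEq mirrors assert_eq above: panic with msg when v != exp.
func assertEq(v int64, exp int64, msg string) {
    if v != exp {
        panic(msg)
    }
}

func main() {
    // Illustrative layout assumption: a Go string header is two machine words.
    assertEq(int64(unsafe.Sizeof("")), int64(2*unsafe.Sizeof(uintptr(0))), "unexpected string header size")
}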