[chore]: Bump github.com/gin-gonic/gin from 1.9.0 to 1.9.1 (#1855)

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

commit 55aacaf4b0 (parent 23705b31e4)
Author: dependabot[bot]
Date: 2023-06-01 22:20:16 +01:00, committed by GitHub

138 changed files with 27543 additions and 25484 deletions


File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,885 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`fmt`
`reflect`
`strconv`
`strings`
`unsafe`
`github.com/bytedance/sonic/internal/resolver`
`github.com/bytedance/sonic/internal/rt`
`github.com/bytedance/sonic/option`
)
type _Op uint8
const (
_OP_null _Op = iota + 1
_OP_empty_arr
_OP_empty_obj
_OP_bool
_OP_i8
_OP_i16
_OP_i32
_OP_i64
_OP_u8
_OP_u16
_OP_u32
_OP_u64
_OP_f32
_OP_f64
_OP_str
_OP_bin
_OP_quote
_OP_number
_OP_eface
_OP_iface
_OP_byte
_OP_text
_OP_deref
_OP_index
_OP_load
_OP_save
_OP_drop
_OP_drop_2
_OP_recurse
_OP_is_nil
_OP_is_nil_p1
_OP_is_zero_1
_OP_is_zero_2
_OP_is_zero_4
_OP_is_zero_8
_OP_is_zero_map
_OP_goto
_OP_map_iter
_OP_map_stop
_OP_map_check_key
_OP_map_write_key
_OP_map_value_next
_OP_slice_len
_OP_slice_next
_OP_marshal
_OP_marshal_p
_OP_marshal_text
_OP_marshal_text_p
_OP_cond_set
_OP_cond_testc
)
const (
_INT_SIZE = 32 << (^uint(0) >> 63)
_PTR_SIZE = 32 << (^uintptr(0) >> 63)
_PTR_BYTE = unsafe.Sizeof(uintptr(0))
)
const (
_MAX_ILBUF = 100000 // cutoff at 100k of IL instructions
_MAX_FIELDS = 50 // cutoff at 50 fields per struct
)
var _OpNames = [256]string {
_OP_null : "null",
_OP_empty_arr : "empty_arr",
_OP_empty_obj : "empty_obj",
_OP_bool : "bool",
_OP_i8 : "i8",
_OP_i16 : "i16",
_OP_i32 : "i32",
_OP_i64 : "i64",
_OP_u8 : "u8",
_OP_u16 : "u16",
_OP_u32 : "u32",
_OP_u64 : "u64",
_OP_f32 : "f32",
_OP_f64 : "f64",
_OP_str : "str",
_OP_bin : "bin",
_OP_quote : "quote",
_OP_number : "number",
_OP_eface : "eface",
_OP_iface : "iface",
_OP_byte : "byte",
_OP_text : "text",
_OP_deref : "deref",
_OP_index : "index",
_OP_load : "load",
_OP_save : "save",
_OP_drop : "drop",
_OP_drop_2 : "drop_2",
_OP_recurse : "recurse",
_OP_is_nil : "is_nil",
_OP_is_nil_p1 : "is_nil_p1",
_OP_is_zero_1 : "is_zero_1",
_OP_is_zero_2 : "is_zero_2",
_OP_is_zero_4 : "is_zero_4",
_OP_is_zero_8 : "is_zero_8",
_OP_is_zero_map : "is_zero_map",
_OP_goto : "goto",
_OP_map_iter : "map_iter",
_OP_map_stop : "map_stop",
_OP_map_check_key : "map_check_key",
_OP_map_write_key : "map_write_key",
_OP_map_value_next : "map_value_next",
_OP_slice_len : "slice_len",
_OP_slice_next : "slice_next",
_OP_marshal : "marshal",
_OP_marshal_p : "marshal_p",
_OP_marshal_text : "marshal_text",
_OP_marshal_text_p : "marshal_text_p",
_OP_cond_set : "cond_set",
_OP_cond_testc : "cond_testc",
}
func (self _Op) String() string {
if ret := _OpNames[self]; ret != "" {
return ret
} else {
return "<invalid>"
}
}
func _OP_int() _Op {
switch _INT_SIZE {
case 32: return _OP_i32
case 64: return _OP_i64
default: panic("unsupported int size")
}
}
func _OP_uint() _Op {
switch _INT_SIZE {
case 32: return _OP_u32
case 64: return _OP_u64
default: panic("unsupported uint size")
}
}
func _OP_uintptr() _Op {
switch _PTR_SIZE {
case 32: return _OP_u32
case 64: return _OP_u64
default: panic("unsupported pointer size")
}
}
func _OP_is_zero_ints() _Op {
switch _INT_SIZE {
case 32: return _OP_is_zero_4
case 64: return _OP_is_zero_8
default: panic("unsupported integer size")
}
}
type _Instr struct {
u uint64 // union {op: 8, _: 8, vi: 48}; vi may hold an int or len(str)
p unsafe.Pointer // may hold GoString.Ptr or a *GoType
}
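// Illustrative round-trip of the layout above (a sketch, assuming rt.PackInt
// stores the value in the low 48 bits):
//
//   ins := newInsVi(_OP_index, 24) // u == uint64(_OP_index)<<56 | 24
//   ins.op()                       // == _OP_index (top 8 bits)
//   ins.vi()                       // == 24        (low 48 bits)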
func packOp(op _Op) uint64 {
return uint64(op) << 56
}
func newInsOp(op _Op) _Instr {
return _Instr{u: packOp(op)}
}
func newInsVi(op _Op, vi int) _Instr {
return _Instr{u: packOp(op) | rt.PackInt(vi)}
}
func newInsVs(op _Op, vs string) _Instr {
return _Instr {
u: packOp(op) | rt.PackInt(len(vs)),
p: (*rt.GoString)(unsafe.Pointer(&vs)).Ptr,
}
}
func newInsVt(op _Op, vt reflect.Type) _Instr {
return _Instr {
u: packOp(op),
p: unsafe.Pointer(rt.UnpackType(vt)),
}
}
func newInsVp(op _Op, vt reflect.Type, pv bool) _Instr {
i := 0
if pv {
i = 1
}
return _Instr {
u: packOp(op) | rt.PackInt(i),
p: unsafe.Pointer(rt.UnpackType(vt)),
}
}
func (self _Instr) op() _Op {
return _Op(self.u >> 56)
}
func (self _Instr) vi() int {
return rt.UnpackInt(self.u)
}
func (self _Instr) vf() uint8 {
return (*rt.GoType)(self.p).KindFlags
}
func (self _Instr) vs() (v string) {
(*rt.GoString)(unsafe.Pointer(&v)).Ptr = self.p
(*rt.GoString)(unsafe.Pointer(&v)).Len = self.vi()
return
}
func (self _Instr) vk() reflect.Kind {
return (*rt.GoType)(self.p).Kind()
}
func (self _Instr) vt() reflect.Type {
return (*rt.GoType)(self.p).Pack()
}
func (self _Instr) vp() (vt reflect.Type, pv bool) {
return (*rt.GoType)(self.p).Pack(), rt.UnpackInt(self.u) == 1
}
func (self _Instr) i64() int64 {
return int64(self.vi())
}
func (self _Instr) vlen() int {
return int((*rt.GoType)(self.p).Size)
}
func (self _Instr) isBranch() bool {
switch self.op() {
case _OP_goto : fallthrough
case _OP_is_nil : fallthrough
case _OP_is_nil_p1 : fallthrough
case _OP_is_zero_1 : fallthrough
case _OP_is_zero_2 : fallthrough
case _OP_is_zero_4 : fallthrough
case _OP_is_zero_8 : fallthrough
case _OP_map_check_key : fallthrough
case _OP_map_write_key : fallthrough
case _OP_slice_next : fallthrough
case _OP_cond_testc : return true
default : return false
}
}
func (self _Instr) disassemble() string {
switch self.op() {
case _OP_byte : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.QuoteRune(rune(self.vi())))
case _OP_text : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.Quote(self.vs()))
case _OP_index : return fmt.Sprintf("%-18s%d", self.op().String(), self.vi())
case _OP_recurse : fallthrough
case _OP_map_iter : fallthrough
case _OP_marshal : fallthrough
case _OP_marshal_p : fallthrough
case _OP_marshal_text : fallthrough
case _OP_marshal_text_p : return fmt.Sprintf("%-18s%s", self.op().String(), self.vt())
case _OP_goto : fallthrough
case _OP_is_nil : fallthrough
case _OP_is_nil_p1 : fallthrough
case _OP_is_zero_1 : fallthrough
case _OP_is_zero_2 : fallthrough
case _OP_is_zero_4 : fallthrough
case _OP_is_zero_8 : fallthrough
case _OP_is_zero_map : fallthrough
case _OP_cond_testc : fallthrough
case _OP_map_check_key : fallthrough
case _OP_map_write_key : return fmt.Sprintf("%-18sL_%d", self.op().String(), self.vi())
case _OP_slice_next : return fmt.Sprintf("%-18sL_%d, %s", self.op().String(), self.vi(), self.vt())
default : return self.op().String()
}
}
type (
_Program []_Instr
)
func (self _Program) pc() int {
return len(self)
}
func (self _Program) tag(n int) {
if n >= _MaxStack {
panic("type nesting too deep")
}
}
func (self _Program) pin(i int) {
v := &self[i]
v.u &= 0xffff000000000000
v.u |= rt.PackInt(self.pc())
}
func (self _Program) rel(v []int) {
for _, i := range v {
self.pin(i)
}
}
func (self *_Program) add(op _Op) {
*self = append(*self, newInsOp(op))
}
func (self *_Program) key(op _Op) {
*self = append(*self,
newInsVi(_OP_byte, '"'),
newInsOp(op),
newInsVi(_OP_byte, '"'),
)
}
func (self *_Program) int(op _Op, vi int) {
*self = append(*self, newInsVi(op, vi))
}
func (self *_Program) str(op _Op, vs string) {
*self = append(*self, newInsVs(op, vs))
}
func (self *_Program) rtt(op _Op, vt reflect.Type) {
*self = append(*self, newInsVt(op, vt))
}
func (self *_Program) vp(op _Op, vt reflect.Type, pv bool) {
*self = append(*self, newInsVp(op, vt, pv))
}
func (self _Program) disassemble() string {
nb := len(self)
tab := make([]bool, nb + 1)
ret := make([]string, 0, nb + 1)
/* prescan to get all the labels */
for _, ins := range self {
if ins.isBranch() {
tab[ins.vi()] = true
}
}
/* disassemble each instruction */
for i, ins := range self {
if !tab[i] {
ret = append(ret, "\t" + ins.disassemble())
} else {
ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble()))
}
}
/* add the last label, if needed */
if tab[nb] {
ret = append(ret, fmt.Sprintf("L_%d:", nb))
}
/* add an "end" indicator, and join all the strings */
return strings.Join(append(ret, "\tend"), "\n")
}
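// For illustration (not part of the original source), the program compiled
// for a *int value disassembles to something like:
//
//   is_nil            L_6
//   save
//   deref
//   i64
//   drop
//   goto              L_7
// L_6:
//   null
// L_7:
//   end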
type _Compiler struct {
opts option.CompileOptions
pv bool
tab map[reflect.Type]bool
rec map[reflect.Type]uint8
}
func newCompiler() *_Compiler {
return &_Compiler {
opts: option.DefaultCompileOptions(),
tab: map[reflect.Type]bool{},
rec: map[reflect.Type]uint8{},
}
}
func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler {
self.opts = opts
if self.opts.RecursiveDepth > 0 {
self.rec = map[reflect.Type]uint8{}
}
return self
}
func (self *_Compiler) rescue(ep *error) {
if val := recover(); val != nil {
if err, ok := val.(error); ok {
*ep = err
} else {
panic(val)
}
}
}
func (self *_Compiler) compile(vt reflect.Type, pv bool) (ret _Program, err error) {
defer self.rescue(&err)
self.compileOne(&ret, 0, vt, pv)
return
}
func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type, pv bool) {
if self.tab[vt] {
p.vp(_OP_recurse, vt, pv)
} else {
self.compileRec(p, sp, vt, pv)
}
}
func (self *_Compiler) compileRec(p *_Program, sp int, vt reflect.Type, pv bool) {
pr := self.pv
pt := reflect.PtrTo(vt)
/* check for addressable `json.Marshaler` with pointer receiver */
if pv && pt.Implements(jsonMarshalerType) {
p.rtt(_OP_marshal_p, pt)
return
}
/* check for `json.Marshaler` */
if vt.Implements(jsonMarshalerType) {
self.compileMarshaler(p, _OP_marshal, vt, jsonMarshalerType)
return
}
/* check for addressable `encoding.TextMarshaler` with pointer receiver */
if pv && pt.Implements(encodingTextMarshalerType) {
p.rtt(_OP_marshal_text_p, pt)
return
}
/* check for `encoding.TextMarshaler` */
if vt.Implements(encodingTextMarshalerType) {
self.compileMarshaler(p, _OP_marshal_text, vt, encodingTextMarshalerType)
return
}
/* enter the recursion, and compile the type */
self.pv = pv
self.tab[vt] = true
self.compileOps(p, sp, vt)
/* exit the recursion */
self.pv = pr
delete(self.tab, vt)
}
func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) {
switch vt.Kind() {
case reflect.Bool : p.add(_OP_bool)
case reflect.Int : p.add(_OP_int())
case reflect.Int8 : p.add(_OP_i8)
case reflect.Int16 : p.add(_OP_i16)
case reflect.Int32 : p.add(_OP_i32)
case reflect.Int64 : p.add(_OP_i64)
case reflect.Uint : p.add(_OP_uint())
case reflect.Uint8 : p.add(_OP_u8)
case reflect.Uint16 : p.add(_OP_u16)
case reflect.Uint32 : p.add(_OP_u32)
case reflect.Uint64 : p.add(_OP_u64)
case reflect.Uintptr : p.add(_OP_uintptr())
case reflect.Float32 : p.add(_OP_f32)
case reflect.Float64 : p.add(_OP_f64)
case reflect.String : self.compileString (p, vt)
case reflect.Array : self.compileArray (p, sp, vt.Elem(), vt.Len())
case reflect.Interface : self.compileInterface (p, vt)
case reflect.Map : self.compileMap (p, sp, vt)
case reflect.Ptr : self.compilePtr (p, sp, vt.Elem())
case reflect.Slice : self.compileSlice (p, sp, vt.Elem())
case reflect.Struct : self.compileStruct (p, sp, vt)
default : panic (error_type(vt))
}
}
func (self *_Compiler) compileNil(p *_Program, sp int, vt reflect.Type, nil_op _Op, fn func(*_Program, int, reflect.Type)) {
x := p.pc()
p.add(_OP_is_nil)
fn(p, sp, vt)
e := p.pc()
p.add(_OP_goto)
p.pin(x)
p.add(nil_op)
p.pin(e)
}
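// compileNil emits a guard of the following shape (sketch): if the value is
// nil, the body is skipped and nil_op ("null", "[]" or "{}") is written
// instead:
//
//   x: is_nil ------> L_nil
//      <body emitted by fn>
//   e: goto --------> L_end
//   L_nil: <nil_op>
//   L_end: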
func (self *_Compiler) compilePtr(p *_Program, sp int, vt reflect.Type) {
self.compileNil(p, sp, vt, _OP_null, self.compilePtrBody)
}
func (self *_Compiler) compilePtrBody(p *_Program, sp int, vt reflect.Type) {
p.tag(sp)
p.add(_OP_save)
p.add(_OP_deref)
self.compileOne(p, sp + 1, vt, true)
p.add(_OP_drop)
}
func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) {
self.compileNil(p, sp, vt, _OP_empty_obj, self.compileMapBody)
}
func (self *_Compiler) compileMapBody(p *_Program, sp int, vt reflect.Type) {
p.tag(sp + 1)
p.int(_OP_byte, '{')
p.add(_OP_save)
p.rtt(_OP_map_iter, vt)
p.add(_OP_save)
i := p.pc()
p.add(_OP_map_check_key)
u := p.pc()
p.add(_OP_map_write_key)
self.compileMapBodyKey(p, vt.Key())
p.pin(u)
p.int(_OP_byte, ':')
p.add(_OP_map_value_next)
self.compileOne(p, sp + 2, vt.Elem(), false)
j := p.pc()
p.add(_OP_map_check_key)
p.int(_OP_byte, ',')
v := p.pc()
p.add(_OP_map_write_key)
self.compileMapBodyKey(p, vt.Key())
p.pin(v)
p.int(_OP_byte, ':')
p.add(_OP_map_value_next)
self.compileOne(p, sp + 2, vt.Elem(), false)
p.int(_OP_goto, j)
p.pin(i)
p.pin(j)
p.add(_OP_map_stop)
p.add(_OP_drop_2)
p.int(_OP_byte, '}')
}
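// Shape of the map loop emitted above (sketch): the first iteration writes
// its key without a leading comma; the loop beginning at j writes ',' before
// every subsequent key. Both map_check_key branches (i and j) jump past the
// loop to map_stop once the iterator is exhausted, after which the two saved
// stack slots are dropped and the closing '}' is written.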
func (self *_Compiler) compileMapBodyKey(p *_Program, vk reflect.Type) {
if !vk.Implements(encodingTextMarshalerType) {
self.compileMapBodyTextKey(p, vk)
} else {
self.compileMapBodyUtextKey(p, vk)
}
}
func (self *_Compiler) compileMapBodyTextKey(p *_Program, vk reflect.Type) {
switch vk.Kind() {
case reflect.Invalid : panic("map key is nil")
case reflect.Bool : p.key(_OP_bool)
case reflect.Int : p.key(_OP_int())
case reflect.Int8 : p.key(_OP_i8)
case reflect.Int16 : p.key(_OP_i16)
case reflect.Int32 : p.key(_OP_i32)
case reflect.Int64 : p.key(_OP_i64)
case reflect.Uint : p.key(_OP_uint())
case reflect.Uint8 : p.key(_OP_u8)
case reflect.Uint16 : p.key(_OP_u16)
case reflect.Uint32 : p.key(_OP_u32)
case reflect.Uint64 : p.key(_OP_u64)
case reflect.Uintptr : p.key(_OP_uintptr())
case reflect.Float32 : p.key(_OP_f32)
case reflect.Float64 : p.key(_OP_f64)
case reflect.String : self.compileString(p, vk)
default : panic(error_type(vk))
}
}
func (self *_Compiler) compileMapBodyUtextKey(p *_Program, vk reflect.Type) {
if vk.Kind() != reflect.Ptr {
p.rtt(_OP_marshal_text, vk)
} else {
self.compileMapBodyUtextPtr(p, vk)
}
}
func (self *_Compiler) compileMapBodyUtextPtr(p *_Program, vk reflect.Type) {
i := p.pc()
p.add(_OP_is_nil)
p.rtt(_OP_marshal_text, vk)
j := p.pc()
p.add(_OP_goto)
p.pin(i)
p.str(_OP_text, "\"\"")
p.pin(j)
}
func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) {
self.compileNil(p, sp, vt, _OP_empty_arr, self.compileSliceBody)
}
func (self *_Compiler) compileSliceBody(p *_Program, sp int, vt reflect.Type) {
if isSimpleByte(vt) {
p.add(_OP_bin)
} else {
self.compileSliceArray(p, sp, vt)
}
}
func (self *_Compiler) compileSliceArray(p *_Program, sp int, vt reflect.Type) {
p.tag(sp)
p.int(_OP_byte, '[')
p.add(_OP_save)
p.add(_OP_slice_len)
i := p.pc()
p.rtt(_OP_slice_next, vt)
self.compileOne(p, sp + 1, vt, true)
j := p.pc()
p.rtt(_OP_slice_next, vt)
p.int(_OP_byte, ',')
self.compileOne(p, sp + 1, vt, true)
p.int(_OP_goto, j)
p.pin(i)
p.pin(j)
p.add(_OP_drop)
p.int(_OP_byte, ']')
}
func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type, nb int) {
p.tag(sp)
p.int(_OP_byte, '[')
p.add(_OP_save)
/* first item */
if nb != 0 {
self.compileOne(p, sp + 1, vt, self.pv)
p.add(_OP_load)
}
/* remaining items */
for i := 1; i < nb; i++ {
p.int(_OP_byte, ',')
p.int(_OP_index, i * int(vt.Size()))
self.compileOne(p, sp + 1, vt, self.pv)
p.add(_OP_load)
}
/* end of array */
p.add(_OP_drop)
p.int(_OP_byte, ']')
}
func (self *_Compiler) compileString(p *_Program, vt reflect.Type) {
if vt != jsonNumberType {
p.add(_OP_str)
} else {
p.add(_OP_number)
}
}
func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) {
if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) {
p.vp(_OP_recurse, vt, self.pv)
if self.opts.RecursiveDepth > 0 {
if self.pv {
self.rec[vt] = 1
} else {
self.rec[vt] = 0
}
}
} else {
self.compileStructBody(p, sp, vt)
}
}
func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
p.tag(sp)
p.int(_OP_byte, '{')
p.add(_OP_save)
p.add(_OP_cond_set)
/* compile each field */
for _, fv := range resolver.ResolveStruct(vt) {
var s []int
var o resolver.Offset
/* "omitempty" for arrays */
if fv.Type.Kind() == reflect.Array {
if fv.Type.Len() == 0 && (fv.Opts & resolver.F_omitempty) != 0 {
continue
}
}
/* index to the field */
for _, o = range fv.Path {
if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref {
s = append(s, p.pc())
p.add(_OP_is_nil)
p.add(_OP_deref)
}
}
/* check for "omitempty" option */
if fv.Type.Kind() != reflect.Struct && fv.Type.Kind() != reflect.Array && (fv.Opts & resolver.F_omitempty) != 0 {
s = append(s, p.pc())
self.compileStructFieldZero(p, fv.Type)
}
/* add the comma if not the first element */
i := p.pc()
p.add(_OP_cond_testc)
p.int(_OP_byte, ',')
p.pin(i)
/* compile the key and value */
ft := fv.Type
p.str(_OP_text, Quote(fv.Name) + ":")
/* check for "stringnize" option */
if (fv.Opts & resolver.F_stringize) == 0 {
self.compileOne(p, sp + 1, ft, self.pv)
} else {
self.compileStructFieldStr(p, sp + 1, ft)
}
/* patch the skipping jumps and reload the struct pointer */
p.rel(s)
p.add(_OP_load)
}
/* end of object */
p.add(_OP_drop)
p.int(_OP_byte, '}')
}
func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
pc := -1
ft := vt
sv := false
/* dereference the pointer if needed */
if ft.Kind() == reflect.Ptr {
ft = ft.Elem()
}
/* check if it can be stringized */
switch ft.Kind() {
case reflect.Bool : sv = true
case reflect.Int : sv = true
case reflect.Int8 : sv = true
case reflect.Int16 : sv = true
case reflect.Int32 : sv = true
case reflect.Int64 : sv = true
case reflect.Uint : sv = true
case reflect.Uint8 : sv = true
case reflect.Uint16 : sv = true
case reflect.Uint32 : sv = true
case reflect.Uint64 : sv = true
case reflect.Uintptr : sv = true
case reflect.Float32 : sv = true
case reflect.Float64 : sv = true
case reflect.String : sv = true
}
/* if it's not, ignore the "string" and follow the regular path */
if !sv {
self.compileOne(p, sp, vt, self.pv)
return
}
/* dereference the pointer */
if vt.Kind() == reflect.Ptr {
pc = p.pc()
vt = vt.Elem()
p.add(_OP_is_nil)
p.add(_OP_deref)
}
/* special case of a double-quoted string */
if ft != jsonNumberType && ft.Kind() == reflect.String {
p.add(_OP_quote)
} else {
self.compileStructFieldQuoted(p, sp, vt)
}
/* the "null" case of the pointer */
if pc != -1 {
e := p.pc()
p.add(_OP_goto)
p.pin(pc)
p.add(_OP_null)
p.pin(e)
}
}
func (self *_Compiler) compileStructFieldZero(p *_Program, vt reflect.Type) {
switch vt.Kind() {
case reflect.Bool : p.add(_OP_is_zero_1)
case reflect.Int : p.add(_OP_is_zero_ints())
case reflect.Int8 : p.add(_OP_is_zero_1)
case reflect.Int16 : p.add(_OP_is_zero_2)
case reflect.Int32 : p.add(_OP_is_zero_4)
case reflect.Int64 : p.add(_OP_is_zero_8)
case reflect.Uint : p.add(_OP_is_zero_ints())
case reflect.Uint8 : p.add(_OP_is_zero_1)
case reflect.Uint16 : p.add(_OP_is_zero_2)
case reflect.Uint32 : p.add(_OP_is_zero_4)
case reflect.Uint64 : p.add(_OP_is_zero_8)
case reflect.Uintptr : p.add(_OP_is_nil)
case reflect.Float32 : p.add(_OP_is_zero_4)
case reflect.Float64 : p.add(_OP_is_zero_8)
case reflect.String : p.add(_OP_is_nil_p1)
case reflect.Interface : p.add(_OP_is_nil_p1)
case reflect.Map : p.add(_OP_is_zero_map)
case reflect.Ptr : p.add(_OP_is_nil)
case reflect.Slice : p.add(_OP_is_nil_p1)
default : panic(error_type(vt))
}
}
func (self *_Compiler) compileStructFieldQuoted(p *_Program, sp int, vt reflect.Type) {
p.int(_OP_byte, '"')
self.compileOne(p, sp, vt, self.pv)
p.int(_OP_byte, '"')
}
func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) {
x := p.pc()
p.add(_OP_is_nil_p1)
/* iface and efaces are different */
if vt.NumMethod() == 0 {
p.add(_OP_eface)
} else {
p.add(_OP_iface)
}
/* the "null" value */
e := p.pc()
p.add(_OP_goto)
p.pin(x)
p.add(_OP_null)
p.pin(e)
}
func (self *_Compiler) compileMarshaler(p *_Program, op _Op, vt reflect.Type, mt reflect.Type) {
pc := p.pc()
vk := vt.Kind()
/* direct receiver */
if vk != reflect.Ptr {
p.rtt(op, vt)
return
}
/* value receiver with a pointer type, check for nil before calling the marshaler */
p.add(_OP_is_nil)
p.rtt(op, vt)
i := p.pc()
p.add(_OP_goto)
p.pin(pc)
p.add(_OP_null)
p.pin(i)
}


@@ -0,0 +1,66 @@
// +build go1.15,!go1.17
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`os`
`strings`
`runtime`
`runtime/debug`
`github.com/bytedance/sonic/internal/jit`
)
var (
debugSyncGC = os.Getenv("SONIC_SYNC_GC") != ""
debugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == ""
)
var (
_Instr_End _Instr = newInsOp(_OP_null)
_F_gc = jit.Func(runtime.GC)
_F_force_gc = jit.Func(debug.FreeOSMemory)
_F_println = jit.Func(println_wrapper)
)
func println_wrapper(i int, op1 int, op2 int){
println(i, " Intrs ", op1, _OpNames[op1], "next: ", op2, _OpNames[op2])
}
func (self *_Assembler) force_gc() {
self.call_go(_F_gc)
self.call_go(_F_force_gc)
}
func (self *_Assembler) debug_instr(i int, v *_Instr) {
if debugSyncGC {
if (i+1 == len(self.p)) {
self.print_gc(i, v, &_Instr_End)
} else {
next := &(self.p[i+1])
self.print_gc(i, v, next)
name := _OpNames[next.op()]
if strings.Contains(name, "save") {
return
}
}
self.force_gc()
}
}


@@ -0,0 +1,205 @@
// +build go1.17,!go1.21
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`fmt`
`os`
`runtime`
`strings`
`unsafe`
`github.com/bytedance/sonic/internal/jit`
`github.com/twitchyliquid64/golang-asm/obj`
)
const _FP_debug = 128
var (
debugSyncGC = os.Getenv("SONIC_SYNC_GC") != ""
debugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == ""
debugCheckPtr = os.Getenv("SONIC_CHECK_POINTER") != ""
)
var (
_Instr_End = newInsOp(_OP_is_nil)
_F_gc = jit.Func(gc)
_F_println = jit.Func(println_wrapper)
_F_print = jit.Func(print)
)
func (self *_Assembler) dsave(r ...obj.Addr) {
for i, v := range r {
if i > _FP_debug / 8 - 1 {
panic("too many registers to save")
} else {
self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + _FP_saves + _FP_locals + int64(i) * 8))
}
}
}
func (self *_Assembler) dload(r ...obj.Addr) {
for i, v := range r {
if i > _FP_debug / 8 - 1 {
panic("too many registers to load")
} else {
self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + _FP_saves + _FP_locals + int64(i) * 8), v)
}
}
}
func println_wrapper(i int, op1 int, op2 int){
println(i, " Intrs ", op1, _OpNames[op1], "next: ", op2, _OpNames[op2])
}
func print(i int){
println(i)
}
func gc() {
if !debugSyncGC {
return
}
runtime.GC()
// debug.FreeOSMemory()
}
func (self *_Assembler) dcall(fn obj.Addr) {
self.Emit("MOVQ", fn, _R10) // MOVQ ${fn}, R10
self.Rjmp("CALL", _R10) // CALL R10
}
func (self *_Assembler) debug_gc() {
if !debugSyncGC {
return
}
self.dsave(_REG_debug...)
self.dcall(_F_gc)
self.dload(_REG_debug...)
}
func (self *_Assembler) debug_instr(i int, v *_Instr) {
if debugSyncGC {
if i+1 == len(self.p) {
self.print_gc(i, v, &_Instr_End)
} else {
next := &(self.p[i+1])
self.print_gc(i, v, next)
name := _OpNames[next.op()]
if strings.Contains(name, "save") {
return
}
}
// self.debug_gc()
}
}
//go:noescape
//go:linkname checkptrBase runtime.checkptrBase
func checkptrBase(p unsafe.Pointer) uintptr
//go:noescape
//go:linkname findObject runtime.findObject
func findObject(p, refBase, refOff uintptr) (base uintptr, s unsafe.Pointer, objIndex uintptr)
var (
_F_checkptr = jit.Func(checkptr)
_F_printptr = jit.Func(printptr)
)
var (
_R10 = jit.Reg("R10")
)
var _REG_debug = []obj.Addr {
jit.Reg("AX"),
jit.Reg("BX"),
jit.Reg("CX"),
jit.Reg("DX"),
jit.Reg("DI"),
jit.Reg("SI"),
jit.Reg("BP"),
jit.Reg("SP"),
jit.Reg("R8"),
jit.Reg("R9"),
jit.Reg("R10"),
jit.Reg("R11"),
jit.Reg("R12"),
jit.Reg("R13"),
jit.Reg("R14"),
jit.Reg("R15"),
}
func checkptr(ptr uintptr) {
if ptr == 0 {
return
}
fmt.Printf("pointer: %x\n", ptr)
f := checkptrBase(unsafe.Pointer(uintptr(ptr)))
if f == 0 {
fmt.Printf("! unknown-based pointer: %x\n", ptr)
} else if f == 1 {
fmt.Printf("! stack pointer: %x\n", ptr)
} else {
fmt.Printf("base: %x\n", f)
}
findobj(ptr)
}
func findobj(ptr uintptr) {
base, s, objIndex := findObject(ptr, 0, 0)
if s != nil && base == 0 {
fmt.Printf("! invalid pointer: %x\n", ptr)
}
fmt.Printf("objIndex: %d\n", objIndex)
}
func (self *_Assembler) check_ptr(ptr obj.Addr, lea bool) {
if !debugCheckPtr {
return
}
self.dsave(_REG_debug...)
if lea {
self.Emit("LEAQ", ptr, _R10)
} else {
self.Emit("MOVQ", ptr, _R10)
}
self.Emit("MOVQ", _R10, jit.Ptr(_SP, 0))
self.dcall(_F_checkptr)
self.dload(_REG_debug...)
}
func printptr(i int, ptr uintptr) {
fmt.Printf("[%d] ptr: %x\n", i, ptr)
}
func (self *_Assembler) print_ptr(i int, ptr obj.Addr, lea bool) {
self.dsave(_REG_debug...)
if lea {
self.Emit("LEAQ", ptr, _R10)
} else {
self.Emit("MOVQ", ptr, _R10)
}
self.Emit("MOVQ", jit.Imm(int64(i)), _AX)
self.Emit("MOVQ", _R10, _BX)
self.dcall(_F_printptr)
self.dload(_REG_debug...)
}


@@ -0,0 +1,328 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`bytes`
`encoding/json`
`reflect`
`runtime`
`unsafe`
`github.com/bytedance/sonic/internal/native`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
`github.com/bytedance/sonic/utf8`
`github.com/bytedance/sonic/option`
)
// Options is a set of encoding options.
type Options uint64
const (
bitSortMapKeys = iota
bitEscapeHTML
bitCompactMarshaler
bitNoQuoteTextMarshaler
bitNoNullSliceOrMap
bitValidateString
// used for recursive compile
bitPointerValue = 63
)
const (
// SortMapKeys indicates that the keys of a map need to be sorted
// before serializing into JSON.
// WARNING: This hurts performance A LOT, USE WITH CARE.
SortMapKeys Options = 1 << bitSortMapKeys
// EscapeHTML indicates that the encoder should escape all HTML characters
// after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape).
// WARNING: This hurts performance A LOT, USE WITH CARE.
EscapeHTML Options = 1 << bitEscapeHTML
// CompactMarshaler indicates that the output JSON from json.Marshaler
// is always compact and needs no validation
CompactMarshaler Options = 1 << bitCompactMarshaler
// NoQuoteTextMarshaler indicates that the output text from encoding.TextMarshaler
// is always an escaped string and needs no quoting
NoQuoteTextMarshaler Options = 1 << bitNoQuoteTextMarshaler
// NoNullSliceOrMap indicates that all empty arrays or objects are encoded as '[]' or '{}',
// instead of 'null'
NoNullSliceOrMap Options = 1 << bitNoNullSliceOrMap
// ValidateString indicates that the encoder should validate the input string
// before encoding it into JSON.
ValidateString Options = 1 << bitValidateString
// CompatibleWithStd combines the options needed to be compatible with the standard library encoder.
CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler
)
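// Options combine with bitwise OR; a minimal usage sketch (assuming this
// package is imported as "encoder"):
//
//   out, err := encoder.Encode(v, encoder.SortMapKeys|encoder.EscapeHTML)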
// Encoder represents a specific set of encoder configurations.
type Encoder struct {
Opts Options
prefix string
indent string
}
// Encode returns the JSON encoding of v.
func (self *Encoder) Encode(v interface{}) ([]byte, error) {
if self.indent != "" || self.prefix != "" {
return EncodeIndented(v, self.prefix, self.indent, self.Opts)
}
return Encode(v, self.Opts)
}
// SortKeys enables the SortMapKeys option.
func (self *Encoder) SortKeys() *Encoder {
self.Opts |= SortMapKeys
return self
}
// SetEscapeHTML specifies whether the EscapeHTML option is enabled.
func (self *Encoder) SetEscapeHTML(f bool) {
if f {
self.Opts |= EscapeHTML
} else {
self.Opts &= ^EscapeHTML
}
}
// SetValidateString specifies whether the ValidateString option is enabled.
func (self *Encoder) SetValidateString(f bool) {
if f {
self.Opts |= ValidateString
} else {
self.Opts &= ^ValidateString
}
}
// SetCompactMarshaler specifies whether the CompactMarshaler option is enabled.
func (self *Encoder) SetCompactMarshaler(f bool) {
if f {
self.Opts |= CompactMarshaler
} else {
self.Opts &= ^CompactMarshaler
}
}
// SetNoQuoteTextMarshaler specifies whether the NoQuoteTextMarshaler option is enabled.
func (self *Encoder) SetNoQuoteTextMarshaler(f bool) {
if f {
self.Opts |= NoQuoteTextMarshaler
} else {
self.Opts &= ^NoQuoteTextMarshaler
}
}
// SetIndent instructs the encoder to format each subsequent encoded
// value as if indented by the package-level function EncodeIndented().
// Calling SetIndent("", "") disables indentation.
func (enc *Encoder) SetIndent(prefix, indent string) {
enc.prefix = prefix
enc.indent = indent
}
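// A minimal usage sketch for Encoder (illustrative only):
//
//   var enc encoder.Encoder
//   enc.Opts = encoder.CompatibleWithStd
//   enc.SetIndent("", "  ")
//   out, err := enc.Encode(map[string]int{"a": 1}) // indented JSON bytes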
// Quote returns the JSON-quoted version of s.
func Quote(s string) string {
var n int
var p []byte
/* check for empty string */
if s == "" {
return `""`
}
/* allocate space for result */
n = len(s) + 2
p = make([]byte, 0, n)
/* call the encoder */
_ = encodeString(&p, s)
return rt.Mem2Str(p)
}
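// For example (illustrative): Quote(`a"b`) returns the JSON string literal
// `"a\"b"`, and Quote("") returns `""`.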
// Encode returns the JSON encoding of val, encoded with opts.
func Encode(val interface{}, opts Options) ([]byte, error) {
var ret []byte
buf := newBytes()
err := encodeInto(&buf, val, opts)
/* check for errors */
if err != nil {
freeBytes(buf)
return nil, err
}
/* HTML-escape or correct invalid UTF-8 if the options require it */
old := buf
buf = encodeFinish(old, opts)
pbuf := ((*rt.GoSlice)(unsafe.Pointer(&buf))).Ptr
pold := ((*rt.GoSlice)(unsafe.Pointer(&old))).Ptr
/* return directly if a new buffer was allocated */
if pbuf != pold {
freeBytes(old)
return buf, nil
}
/* make a copy of the result */
ret = make([]byte, len(buf))
copy(ret, buf)
freeBytes(buf)
/* return the buffer into pool */
return ret, nil
}
// EncodeInto is like Encode but uses a user-supplied buffer instead of allocating
// a new one.
func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
err := encodeInto(buf, val, opts)
if err != nil {
return err
}
*buf = encodeFinish(*buf, opts)
return err
}
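// A buffer-reuse sketch for EncodeInto (illustrative): the caller owns the
// buffer and may reuse it across calls to avoid allocations:
//
//   buf := make([]byte, 0, 1024)
//   if err := encoder.EncodeInto(&buf, v, 0); err == nil {
//       // buf now holds the JSON encoding of v
//   }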
func encodeInto(buf *[]byte, val interface{}, opts Options) error {
stk := newStack()
efv := rt.UnpackEface(val)
err := encodeTypedPointer(buf, efv.Type, &efv.Value, stk, uint64(opts))
/* return the stack into pool */
if err != nil {
resetStack(stk)
}
freeStack(stk)
/* keep buf and efv alive until encoding finishes */
runtime.KeepAlive(buf)
runtime.KeepAlive(efv)
return err
}
func encodeFinish(buf []byte, opts Options) []byte {
if opts & EscapeHTML != 0 {
buf = HTMLEscape(nil, buf)
}
if opts & ValidateString != 0 && !utf8.Validate(buf) {
buf = utf8.CorrectWith(nil, buf, `\ufffd`)
}
return buf
}
var typeByte = rt.UnpackType(reflect.TypeOf(byte(0)))
// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
// so that the JSON will be safe to embed inside HTML <script> tags.
// For historical reasons, web browsers don't honor standard HTML
// escaping within <script> tags, so an alternative JSON encoding must
// be used.
func HTMLEscape(dst []byte, src []byte) []byte {
return htmlEscape(dst, src)
}
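// For example (illustrative):
//
//   HTMLEscape(nil, []byte(`{"a":"<b>"}`)) // returns `{"a":"\u003cb\u003e"}`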
// EncodeIndented is like Encode but applies Indent to format the output.
// Each JSON element in the output will begin on a new line beginning with prefix
// followed by one or more copies of indent according to the indentation nesting.
func EncodeIndented(val interface{}, prefix string, indent string, opts Options) ([]byte, error) {
var err error
var out []byte
var buf *bytes.Buffer
/* encode into the buffer */
out = newBytes()
err = EncodeInto(&out, val, opts)
/* check for errors */
if err != nil {
freeBytes(out)
return nil, err
}
/* indent the JSON */
buf = newBuffer()
err = json.Indent(buf, out, prefix, indent)
/* check for errors */
if err != nil {
freeBytes(out)
freeBuffer(buf)
return nil, err
}
/* copy to the result buffer */
ret := make([]byte, buf.Len())
copy(ret, buf.Bytes())
/* return the buffers into pool */
freeBytes(out)
freeBuffer(buf)
return ret, nil
}
// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
// order to reduce the first-hit latency.
//
// opts are the compile options; for example, option.WithCompileRecursiveDepth
// sets the depth of recursive compilation for nested struct types.
func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
cfg := option.DefaultCompileOptions()
for _, opt := range opts {
opt(&cfg)
break
}
return pretouchRec(map[reflect.Type]uint8{vt: 0}, cfg)
}
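// A typical call site (sketch; MyType is a hypothetical user type):
//
//   err := encoder.Pretouch(reflect.TypeOf(MyType{}),
//       option.WithCompileRecursiveDepth(2))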
// Valid validates the JSON in data and, if it contains exactly one valid
// JSON value, returns the position of its first non-blank character.
// Otherwise it returns the position of the invalid character in start.
//
// Note: it does not check for invalid UTF-8 characters.
func Valid(data []byte) (ok bool, start int) {
n := len(data)
if n == 0 {
return false, -1
}
s := rt.Mem2Str(data)
p := 0
m := types.NewStateMachine()
ret := native.ValidateOne(&s, &p, m)
types.FreeStateMachine(m)
if ret < 0 {
return false, p-1
}
/* check for trailing spaces */
for ;p < n; p++ {
if (types.SPACE_MASK & (1 << data[p])) == 0 {
return false, p
}
}
return true, ret
}
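// Examples (illustrative):
//
//   Valid([]byte(`{"a":1}`))  // ok == true
//   Valid([]byte(`{"a":1}x`)) // ok == false, start points at the trailing 'x'
//   Valid(nil)                // ok == false, start == -1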


@@ -0,0 +1,65 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`encoding/json`
`fmt`
`reflect`
`strconv`
`unsafe`
`github.com/bytedance/sonic/internal/rt`
)
var _ERR_too_deep = &json.UnsupportedValueError {
Str : "Value nesting too deep",
Value : reflect.ValueOf("..."),
}
var _ERR_nan_or_infinite = &json.UnsupportedValueError {
Str : "NaN or ±Infinite",
Value : reflect.ValueOf("NaN or ±Infinite"),
}
func error_type(vtype reflect.Type) error {
return &json.UnsupportedTypeError{Type: vtype}
}
func error_number(number json.Number) error {
return &json.UnsupportedValueError {
Str : "invalid number literal: " + strconv.Quote(string(number)),
Value : reflect.ValueOf(number),
}
}
func error_marshaler(ret []byte, pos int) error {
return fmt.Errorf("invalid Marshaler output json syntax at %d: %q", pos, ret)
}
const (
panicNilPointerOfNonEmptyString int = 1 + iota
)
func goPanic(code int, val unsafe.Pointer) {
switch(code){
case panicNilPointerOfNonEmptyString:
panic(fmt.Sprintf("val: %#v has nil pointer while its length is not zero!", (*rt.GoString)(val)))
default:
panic("encoder error!")
}
}


@@ -0,0 +1,199 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
"encoding"
"reflect"
"sync"
"unsafe"
"github.com/bytedance/sonic/internal/native"
"github.com/bytedance/sonic/internal/rt"
)
type _MapPair struct {
k string // when the map key is an integer, k points into m
v unsafe.Pointer
m [32]byte
}
type _MapIterator struct {
it rt.GoMapIterator // must be the first field
kv rt.GoSlice // slice of _MapPair
ki int
}
var (
iteratorPool = sync.Pool{}
iteratorPair = rt.UnpackType(reflect.TypeOf(_MapPair{}))
)
func init() {
if unsafe.Offsetof(_MapIterator{}.it) != 0 {
panic("_MapIterator.it is not the first field")
}
}
func newIterator() *_MapIterator {
if v := iteratorPool.Get(); v == nil {
return new(_MapIterator)
} else {
return resetIterator(v.(*_MapIterator))
}
}
func resetIterator(p *_MapIterator) *_MapIterator {
p.ki = 0
p.it = rt.GoMapIterator{}
p.kv.Len = 0
return p
}
func (self *_MapIterator) at(i int) *_MapPair {
return (*_MapPair)(unsafe.Pointer(uintptr(self.kv.Ptr) + uintptr(i) * unsafe.Sizeof(_MapPair{})))
}
func (self *_MapIterator) add() (p *_MapPair) {
p = self.at(self.kv.Len)
self.kv.Len++
return
}
func (self *_MapIterator) data() (p []_MapPair) {
*(*rt.GoSlice)(unsafe.Pointer(&p)) = self.kv
return
}
func (self *_MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointer) (err error) {
p := self.add()
p.v = v
/* check for strings */
if tk := t.Kind(); tk != reflect.String {
return self.appendGeneric(p, t, tk, k)
}
/* fast path for strings */
p.k = *(*string)(k)
return nil
}
func (self *_MapIterator) appendGeneric(p *_MapPair, t *rt.GoType, v reflect.Kind, k unsafe.Pointer) error {
switch v {
case reflect.Int : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int)(k)))]) ; return nil
case reflect.Int8 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int8)(k)))]) ; return nil
case reflect.Int16 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int16)(k)))]) ; return nil
case reflect.Int32 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int32)(k)))]) ; return nil
case reflect.Int64 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], *(*int64)(k))]) ; return nil
case reflect.Uint : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint)(k)))]) ; return nil
case reflect.Uint8 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint8)(k)))]) ; return nil
case reflect.Uint16 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint16)(k)))]) ; return nil
case reflect.Uint32 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint32)(k)))]) ; return nil
case reflect.Uint64 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], *(*uint64)(k))]) ; return nil
case reflect.Uintptr : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uintptr)(k)))]) ; return nil
case reflect.Interface : return self.appendInterface(p, t, k)
case reflect.Struct, reflect.Ptr : return self.appendConcrete(p, t, k)
default : panic("unexpected map key type")
}
}
func (self *_MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
// the compiler has already checked that the type implements the encoding.TextMarshaler interface
if !t.Indirect() {
k = *(*unsafe.Pointer)(k)
}
eface := rt.GoEface{Value: k, Type: t}.Pack()
out, err := eface.(encoding.TextMarshaler).MarshalText()
if err != nil {
return err
}
p.k = rt.Mem2Str(out)
return
}
func (self *_MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
if len(rt.IfaceType(t).Methods) == 0 {
panic("unexpected map key type")
} else if p.k, err = asText(k); err == nil {
return nil
} else {
return
}
}
func iteratorStop(p *_MapIterator) {
iteratorPool.Put(p)
}
func iteratorNext(p *_MapIterator) {
i := p.ki
t := &p.it
/* check for unordered iteration */
if i < 0 {
mapiternext(t)
return
}
/* check for end of iteration */
if p.ki >= p.kv.Len {
t.K = nil
t.V = nil
return
}
/* update the key-value pair, and increase the pointer */
t.K = unsafe.Pointer(&p.at(p.ki).k)
t.V = p.at(p.ki).v
p.ki++
}
func iteratorStart(t *rt.GoMapType, m *rt.GoMap, fv uint64) (*_MapIterator, error) {
it := newIterator()
mapiterinit(t, m, &it.it)
/* check for key-sorting; an empty map doesn't need sorting */
if m.Count == 0 || (fv & uint64(SortMapKeys)) == 0 {
it.ki = -1
return it, nil
}
/* pre-allocate space if needed */
if m.Count > it.kv.Cap {
it.kv = growslice(iteratorPair, it.kv, m.Count)
}
/* dump all the key-value pairs */
for ; it.it.K != nil; mapiternext(&it.it) {
if err := it.append(t.Key, it.it.K, it.it.V); err != nil {
iteratorStop(it)
return nil, err
}
}
/* sort the keys; a map with only 1 item doesn't need sorting */
if it.ki = 1; m.Count > 1 {
radixQsort(it.data(), 0, maxDepth(it.kv.Len))
}
/* load the first pair into iterator */
it.it.V = it.at(0).v
it.it.K = unsafe.Pointer(&it.at(0).k)
return it, nil
}
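// Flow sketch for a sorted iteration over map{"b": 2, "a": 1} (illustrative):
//   1. the dump loop above collects the pairs into it.kv: [("b",2), ("a",1)]
//   2. radixQsort orders them by key:                      [("a",1), ("b",2)]
//   3. iteratorNext then serves pairs through it.ki until ki reaches kv.Len;
//      ki == -1 instead marks the unsorted, direct runtime-iterator path.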


@@ -0,0 +1,193 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`bytes`
`sync`
`unsafe`
`errors`
`reflect`
`github.com/bytedance/sonic/internal/caching`
`github.com/bytedance/sonic/option`
`github.com/bytedance/sonic/internal/rt`
)
const (
_MaxStack = 4096 // 4k states
_StackSize = unsafe.Sizeof(_Stack{})
)
var (
bytesPool = sync.Pool{}
stackPool = sync.Pool{}
bufferPool = sync.Pool{}
programCache = caching.CreateProgramCache()
)
type _State struct {
x int
f uint64
p unsafe.Pointer
q unsafe.Pointer
}
type _Stack struct {
sp uint64
sb [_MaxStack]_State
}
type _Encoder func(
rb *[]byte,
vp unsafe.Pointer,
sb *_Stack,
fv uint64,
) error
var _KeepAlive struct {
rb *[]byte
vp unsafe.Pointer
sb *_Stack
fv uint64
err error
frame [_FP_offs]byte
}
var errCallShadow = errors.New("DON'T CALL THIS!")
// Fake version of _Encoder, used to export its stack map as _Encoder's
func _Encoder_Shadow(rb *[]byte, vp unsafe.Pointer, sb *_Stack, fv uint64) (err error) {
// align to assembler_amd64.go: _FP_offs
var frame [_FP_offs]byte
// must keep all args and frames visible to the GC
_KeepAlive.rb = rb
_KeepAlive.vp = vp
_KeepAlive.sb = sb
_KeepAlive.fv = fv
_KeepAlive.err = err
_KeepAlive.frame = frame
return errCallShadow
}
func newBytes() []byte {
if ret := bytesPool.Get(); ret != nil {
return ret.([]byte)
} else {
return make([]byte, 0, option.DefaultEncoderBufferSize)
}
}
func newStack() *_Stack {
if ret := stackPool.Get(); ret == nil {
return new(_Stack)
} else {
return ret.(*_Stack)
}
}
func resetStack(p *_Stack) {
memclrNoHeapPointers(unsafe.Pointer(p), _StackSize)
}
func newBuffer() *bytes.Buffer {
if ret := bufferPool.Get(); ret != nil {
return ret.(*bytes.Buffer)
} else {
return bytes.NewBuffer(make([]byte, 0, option.DefaultEncoderBufferSize))
}
}
func freeBytes(p []byte) {
p = p[:0]
bytesPool.Put(p)
}
func freeStack(p *_Stack) {
p.sp = 0
stackPool.Put(p)
}
func freeBuffer(p *bytes.Buffer) {
p.Reset()
bufferPool.Put(p)
}
func makeEncoder(vt *rt.GoType, ex ...interface{}) (interface{}, error) {
if pp, err := newCompiler().compile(vt.Pack(), ex[0].(bool)); err != nil {
return nil, err
} else {
as := newAssembler(pp)
as.name = vt.String()
return as.Load(), nil
}
}
func findOrCompile(vt *rt.GoType, pv bool) (_Encoder, error) {
if val := programCache.Get(vt); val != nil {
return val.(_Encoder), nil
} else if ret, err := programCache.Compute(vt, makeEncoder, pv); err == nil {
return ret.(_Encoder), nil
} else {
return nil, err
}
}
func pretouchType(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error) {
/* compile function */
compiler := newCompiler().apply(opts)
encoder := func(vt *rt.GoType, ex ...interface{}) (interface{}, error) {
if pp, err := compiler.compile(_vt, ex[0].(bool)); err != nil {
return nil, err
} else {
as := newAssembler(pp)
as.name = vt.String()
return as.Load(), nil
}
}
/* find or compile */
vt := rt.UnpackType(_vt)
if val := programCache.Get(vt); val != nil {
return nil, nil
} else if _, err := programCache.Compute(vt, encoder, v == 1); err == nil {
return compiler.rec, nil
} else {
return nil, err
}
}
func pretouchRec(vtm map[reflect.Type]uint8, opts option.CompileOptions) error {
if opts.RecursiveDepth < 0 || len(vtm) == 0 {
return nil
}
next := make(map[reflect.Type]uint8)
for vt, v := range vtm {
sub, err := pretouchType(vt, opts, v)
if err != nil {
return err
}
for svt, v := range sub {
next[svt] = v
}
}
opts.RecursiveDepth -= 1
return pretouchRec(next, opts)
}


@@ -0,0 +1,168 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`encoding`
`encoding/json`
`unsafe`
`github.com/bytedance/sonic/internal/jit`
`github.com/bytedance/sonic/internal/native`
`github.com/bytedance/sonic/internal/rt`
)
/** Encoder Primitives **/
func encodeNil(rb *[]byte) error {
*rb = append(*rb, 'n', 'u', 'l', 'l')
return nil
}
func encodeString(buf *[]byte, val string) error {
var sidx int
var pbuf *rt.GoSlice
var pstr *rt.GoString
/* opening quote */
*buf = append(*buf, '"')
pbuf = (*rt.GoSlice)(unsafe.Pointer(buf))
pstr = (*rt.GoString)(unsafe.Pointer(&val))
/* encode with native library */
for sidx < pstr.Len {
sn := pstr.Len - sidx
dn := pbuf.Cap - pbuf.Len
sp := padd(pstr.Ptr, sidx)
dp := padd(pbuf.Ptr, pbuf.Len)
nb := native.Quote(sp, sn, dp, &dn, 0)
/* check for errors */
if pbuf.Len += dn; nb >= 0 {
break
}
/* not enough space, grow the slice and try again */
sidx += ^nb
*pbuf = growslice(rt.UnpackType(byteType), *pbuf, pbuf.Cap * 2)
}
/* closing quote */
*buf = append(*buf, '"')
return nil
}
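// For example (illustrative): after encodeString(&buf, `a"b`), buf ends with
// the quoted literal `"a\"b"`. A negative return from native.Quote means the
// destination ran out of space, so the loop grows the slice and retries.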
func encodeTypedPointer(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *_Stack, fv uint64) error {
if vt == nil {
return encodeNil(buf)
} else if fn, err := findOrCompile(vt, (fv&(1<<bitPointerValue)) != 0); err != nil {
return err
} else if vt.Indirect() {
rt.MoreStack(_FP_size + native.MaxFrameSize)
rt.StopProf()
err := fn(buf, *vp, sb, fv)
rt.StartProf()
return err
} else {
rt.MoreStack(_FP_size + native.MaxFrameSize)
rt.StopProf()
err := fn(buf, unsafe.Pointer(vp), sb, fv)
rt.StartProf()
return err
}
}
func encodeJsonMarshaler(buf *[]byte, val json.Marshaler, opt Options) error {
if ret, err := val.MarshalJSON(); err != nil {
return err
} else {
if opt & CompactMarshaler != 0 {
return compact(buf, ret)
}
if ok, s := Valid(ret); !ok {
return error_marshaler(ret, s)
}
*buf = append(*buf, ret...)
return nil
}
}
func encodeTextMarshaler(buf *[]byte, val encoding.TextMarshaler, opt Options) error {
if ret, err := val.MarshalText(); err != nil {
return err
} else {
if opt & NoQuoteTextMarshaler != 0 {
*buf = append(*buf, ret...)
return nil
}
return encodeString(buf, rt.Mem2Str(ret) )
}
}
func htmlEscape(dst []byte, src []byte) []byte {
var sidx int
dst = append(dst, src[:0]...) // avoid a nil check on dst
sbuf := (*rt.GoSlice)(unsafe.Pointer(&src))
dbuf := (*rt.GoSlice)(unsafe.Pointer(&dst))
/* grow dst if it is shorter */
if cap(dst) - len(dst) < len(src) + native.BufPaddingSize {
cap := len(src) * 3 / 2 + native.BufPaddingSize
*dbuf = growslice(typeByte, *dbuf, cap)
}
for sidx < sbuf.Len {
sp := padd(sbuf.Ptr, sidx)
dp := padd(dbuf.Ptr, dbuf.Len)
sn := sbuf.Len - sidx
dn := dbuf.Cap - dbuf.Len
nb := native.HTMLEscape(sp, sn, dp, &dn)
/* check for errors */
if dbuf.Len += dn; nb >= 0 {
break
}
/* not enough space, grow the slice and try again */
sidx += ^nb
*dbuf = growslice(typeByte, *dbuf, dbuf.Cap * 2)
}
return dst
}
var (
argPtrs = []bool { true, true, true, false }
localPtrs = []bool{}
)
var (
_F_assertI2I = jit.Func(assertI2I)
)
func asText(v unsafe.Pointer) (string, error) {
text := assertI2I(_T_encoding_TextMarshaler, *(*rt.GoIface)(v))
r, e := (*(*encoding.TextMarshaler)(unsafe.Pointer(&text))).MarshalText()
return rt.Mem2Str(r), e
}
func asJson(v unsafe.Pointer) (string, error) {
text := assertI2I(_T_json_Marshaler, *(*rt.GoIface)(v))
r, e := (*(*json.Marshaler)(unsafe.Pointer(&text))).MarshalJSON()
return rt.Mem2Str(r), e
}


@@ -0,0 +1,206 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
// Algorithm: 3-way radix quicksort; d is the current byte (radix) position.
// Reference: https://algs4.cs.princeton.edu/51radix/Quick3string.java.html
func radixQsort(kvs []_MapPair, d, maxDepth int) {
for len(kvs) > 11 {
// To avoid quicksort's worst case (O(n^2) time), use introsort here.
// Reference: https://en.wikipedia.org/wiki/Introsort and
// https://github.com/golang/go/issues/467
if maxDepth == 0 {
heapSort(kvs, 0, len(kvs))
return
}
maxDepth--
p := pivot(kvs, d)
lt, i, gt := 0, 0, len(kvs)
for i < gt {
c := byteAt(kvs[i].k, d)
if c < p {
swap(kvs, lt, i)
i++
lt++
} else if c > p {
gt--
swap(kvs, i, gt)
} else {
i++
}
}
// kvs[0:lt] < v = kvs[lt:gt] < kvs[gt:len(kvs)]
// Naive implementation:
// radixQsort(kvs[:lt], d, maxDepth)
// if p > -1 {
// radixQsort(kvs[lt:gt], d+1, maxDepth)
// }
// radixQsort(kvs[gt:], d, maxDepth)
// Optimize as follows: make recursive calls only for the smaller parts.
// Reference: https://www.geeksforgeeks.org/quicksort-tail-call-optimization-reducing-worst-case-space-log-n/
if p == -1 {
if lt > len(kvs) - gt {
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[:lt]
} else {
radixQsort(kvs[:lt], d, maxDepth)
kvs = kvs[gt:]
}
} else {
ml := maxThree(lt, gt-lt, len(kvs)-gt)
if ml == lt {
radixQsort(kvs[lt:gt], d+1, maxDepth)
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[:lt]
} else if ml == gt-lt {
radixQsort(kvs[:lt], d, maxDepth)
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[lt:gt]
d += 1
} else {
radixQsort(kvs[:lt], d, maxDepth)
radixQsort(kvs[lt:gt], d+1, maxDepth)
kvs = kvs[gt:]
}
}
}
insertRadixSort(kvs, d)
}
func insertRadixSort(kvs []_MapPair, d int) {
for i := 1; i < len(kvs); i++ {
for j := i; j > 0 && lessFrom(kvs[j].k, kvs[j-1].k, d); j-- {
swap(kvs, j, j-1)
}
}
}
func pivot(kvs []_MapPair, d int) int {
m := len(kvs) >> 1
if len(kvs) > 40 {
// Tukey's "ninther": the median of three medians of three.
t := len(kvs) / 8
return medianThree(
medianThree(byteAt(kvs[0].k, d), byteAt(kvs[t].k, d), byteAt(kvs[2*t].k, d)),
medianThree(byteAt(kvs[m].k, d), byteAt(kvs[m-t].k, d), byteAt(kvs[m+t].k, d)),
medianThree(byteAt(kvs[len(kvs)-1].k, d),
byteAt(kvs[len(kvs)-1-t].k, d),
byteAt(kvs[len(kvs)-1-2*t].k, d)))
}
return medianThree(byteAt(kvs[0].k, d), byteAt(kvs[m].k, d), byteAt(kvs[len(kvs)-1].k, d))
}
func medianThree(i, j, k int) int {
if i > j {
i, j = j, i
} // i < j
if k < i {
return i
}
if k > j {
return j
}
return k
}
func maxThree(i, j, k int) int {
max := i
if max < j {
max = j
}
if max < k {
max = k
}
return max
}
// maxDepth returns a threshold at which quicksort should switch
// to heapsort. It returns 2*ceil(lg(n+1)).
func maxDepth(n int) int {
var depth int
for i := n; i > 0; i >>= 1 {
depth++
}
return depth * 2
}
// siftDown implements the heap property on kvs[lo:hi].
// first is an offset into the array where the root of the heap lies.
func siftDown(kvs []_MapPair, lo, hi, first int) {
root := lo
for {
child := 2*root + 1
if child >= hi {
break
}
if child+1 < hi && kvs[first+child].k < kvs[first+child+1].k {
child++
}
if kvs[first+root].k >= kvs[first+child].k {
return
}
swap(kvs, first+root, first+child)
root = child
}
}
func heapSort(kvs []_MapPair, a, b int) {
first := a
lo := 0
hi := b - a
// Build heap with the greatest element at top.
for i := (hi - 1) / 2; i >= 0; i-- {
siftDown(kvs, i, hi, first)
}
// Pop elements, the largest first, into end of kvs.
for i := hi - 1; i >= 0; i-- {
swap(kvs, first, first+i)
siftDown(kvs, lo, i, first)
}
}
// Note that after a swap, _MapPair.k no longer points into _MapPair.m when the map key is an integer.
func swap(kvs []_MapPair, a, b int) {
kvs[a].k, kvs[b].k = kvs[b].k, kvs[a].k
kvs[a].v, kvs[b].v = kvs[b].v, kvs[a].v
}
// lessFrom compares two strings starting from byte position d.
func lessFrom(a, b string, d int) bool {
l := len(a)
if l > len(b) {
l = len(b)
}
for i := d; i < l; i++ {
if a[i] == b[i] {
continue
}
return a[i] < b[i]
}
return len(a) < len(b)
}
func byteAt(b string, p int) int {
if p < len(b) {
return int(b[p])
}
return -1
}
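// Worked example (illustrative): byteAt("ab", 1) == 'b' while
// byteAt("ab", 2) == -1, so a key that is a strict prefix of another sorts
// first ("ab" < "abc"), matching lessFrom's tie-break on length.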


@@ -0,0 +1,84 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`encoding/json`
`io`
)
// StreamEncoder writes JSON values to an io.Writer.
type StreamEncoder struct {
w io.Writer
Encoder
}
// NewStreamEncoder adapts to the encoding/json.NewEncoder API.
//
// NewStreamEncoder returns a new encoder that writes to w.
func NewStreamEncoder(w io.Writer) *StreamEncoder {
return &StreamEncoder{w: w}
}
// Encode writes the JSON encoding of val to the underlying io.Writer.
func (enc *StreamEncoder) Encode(val interface{}) (err error) {
out := newBytes()
/* encode into the buffer */
err = EncodeInto(&out, val, enc.Opts)
if err != nil {
goto free_bytes
}
if enc.indent != "" || enc.prefix != "" {
/* indent the JSON */
buf := newBuffer()
err = json.Indent(buf, out, enc.prefix, enc.indent)
if err != nil {
freeBuffer(buf)
goto free_bytes
}
// following the standard library, terminate each value with a newline
buf.WriteByte('\n')
/* copy into io.Writer */
_, err = io.Copy(enc.w, buf)
if err != nil {
freeBuffer(buf)
goto free_bytes
}
} else {
/* copy into io.Writer */
var n int
for len(out) > 0 {
n, err = enc.w.Write(out)
out = out[n:]
if err != nil {
goto free_bytes
}
}
// following the standard library, terminate each value with a newline
enc.w.Write([]byte{'\n'})
}
free_bytes:
freeBytes(out)
return err
}
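// A minimal usage sketch (illustrative; assumes "os" is imported):
//
//   enc := encoder.NewStreamEncoder(os.Stdout)
//   _ = enc.Encode(map[string]int{"a": 1}) // writes {"a":1}\n to stdout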


@@ -0,0 +1,65 @@
// +build go1.15,!go1.17
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`unsafe`
_ `github.com/chenzhuoyu/base64x`
`github.com/bytedance/sonic/internal/rt`
)
//go:linkname _subr__b64encode github.com/chenzhuoyu/base64x._subr__b64encode
var _subr__b64encode uintptr
//go:noescape
//go:linkname memmove runtime.memmove
//goland:noinspection GoUnusedParameter
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
//go:linkname growslice runtime.growslice
//goland:noinspection GoUnusedParameter
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
//go:linkname assertI2I runtime.assertI2I
//goland:noinspection GoUnusedParameter
func assertI2I(inter *rt.GoType, i rt.GoIface) rt.GoIface
//go:linkname mapiternext runtime.mapiternext
//goland:noinspection GoUnusedParameter
func mapiternext(it *rt.GoMapIterator)
//go:linkname mapiterinit runtime.mapiterinit
//goland:noinspection GoUnusedParameter
func mapiterinit(t *rt.GoMapType, m *rt.GoMap, it *rt.GoMapIterator)
//go:linkname isValidNumber encoding/json.isValidNumber
//goland:noinspection GoUnusedParameter
func isValidNumber(s string) bool
//go:noescape
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
//goland:noinspection GoUnusedParameter
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
var _runtime_writeBarrier uintptr = rt.GcwbAddr()
//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()
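
For reference, a minimal sketch of the go:linkname mechanism these files rely on: it binds a local declaration to an unexported symbol by name, which requires importing unsafe. runtime.nanotime is used here purely as a well-known example, and newer toolchains restrict which symbols may be linknamed, so treat this as illustrative:

package main

import (
	_ "unsafe" // required for go:linkname
)

//go:linkname nanotime runtime.nanotime
func nanotime() int64

func main() {
	println(nanotime()) // monotonic clock reading, in nanoseconds
}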

View File

@ -0,0 +1,66 @@
// +build go1.17,!go1.20

/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`unsafe`
_ `github.com/chenzhuoyu/base64x`
`github.com/bytedance/sonic/internal/rt`
)
//go:linkname _subr__b64encode github.com/chenzhuoyu/base64x._subr__b64encode
var _subr__b64encode uintptr
//go:noescape
//go:linkname memmove runtime.memmove
//goland:noinspection GoUnusedParameter
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
//go:linkname growslice runtime.growslice
//goland:noinspection GoUnusedParameter
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
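// NOTE: unlike the go1.15 build above, this binds the comma-ok variant runtime.assertI2I2, which returns a zero iface on failure instead of panicking.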
//go:linkname assertI2I runtime.assertI2I2
//goland:noinspection GoUnusedParameter
func assertI2I(inter *rt.GoType, i rt.GoIface) rt.GoIface
//go:linkname mapiternext runtime.mapiternext
//goland:noinspection GoUnusedParameter
func mapiternext(it *rt.GoMapIterator)
//go:linkname mapiterinit runtime.mapiterinit
//goland:noinspection GoUnusedParameter
func mapiterinit(t *rt.GoMapType, m *rt.GoMap, it *rt.GoMapIterator)
//go:linkname isValidNumber encoding/json.isValidNumber
//goland:noinspection GoUnusedParameter
func isValidNumber(s string) bool
//go:noescape
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
//goland:noinspection GoUnusedParameter
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
//go:linkname _runtime_writeBarrier runtime.writeBarrier
var _runtime_writeBarrier uintptr
//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

View File

@ -0,0 +1,66 @@
// +build go1.20

/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`unsafe`
_ `github.com/chenzhuoyu/base64x`
`github.com/bytedance/sonic/internal/rt`
)
//go:linkname _subr__b64encode github.com/chenzhuoyu/base64x._subr__b64encode
var _subr__b64encode uintptr
//go:noescape
//go:linkname memmove runtime.memmove
//goland:noinspection GoUnusedParameter
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
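// NOTE: runtime.growslice changed its signature in go1.20; reflect.growslice is a shim the runtime still exports with the older (type, slice, num) shape.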
//go:linkname growslice reflect.growslice
//goland:noinspection GoUnusedParameter
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
//go:linkname assertI2I runtime.assertI2I2
//goland:noinspection GoUnusedParameter
func assertI2I(inter *rt.GoType, i rt.GoIface) rt.GoIface
//go:linkname mapiternext runtime.mapiternext
//goland:noinspection GoUnusedParameter
func mapiternext(it *rt.GoMapIterator)
//go:linkname mapiterinit runtime.mapiterinit
//goland:noinspection GoUnusedParameter
func mapiterinit(t *rt.GoMapType, m *rt.GoMap, it *rt.GoMapIterator)
//go:linkname isValidNumber encoding/json.isValidNumber
//goland:noinspection GoUnusedParameter
func isValidNumber(s string) bool
//go:noescape
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
//goland:noinspection GoUnusedParameter
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
//go:linkname _runtime_writeBarrier runtime.writeBarrier
var _runtime_writeBarrier uintptr
//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

View File

@ -0,0 +1,47 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`encoding`
`encoding/json`
`reflect`
)
var (
byteType = reflect.TypeOf(byte(0))
jsonNumberType = reflect.TypeOf(json.Number(""))
jsonUnsupportedValueType = reflect.TypeOf(new(json.UnsupportedValueError))
)
var (
errorType = reflect.TypeOf((*error)(nil)).Elem()
jsonMarshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
encodingTextMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
)
func isSimpleByte(vt reflect.Type) bool {
if vt.Kind() != byteType.Kind() {
return false
} else {
return !isEitherMarshaler(vt) && !isEitherMarshaler(reflect.PtrTo(vt))
}
}
func isEitherMarshaler(vt reflect.Type) bool {
return vt.Implements(jsonMarshalerType) || vt.Implements(encodingTextMarshalerType)
}
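
This mirrors the encoding/json rule the helpers above encode: a byte-kind element type only gets the compact base64 fast path when neither it nor its pointer implements json.Marshaler or encoding.TextMarshaler. A standalone demonstration against the standard library (the Raw and Hex types are illustrative):

package main

import (
	"encoding/json"
	"fmt"
)

// Raw is a plain byte-kind type: a []Raw encodes as a base64 string.
type Raw byte

// Hex has its own MarshalJSON, so a []Hex must encode element by element.
type Hex byte

func (h Hex) MarshalJSON() ([]byte, error) {
	return []byte(fmt.Sprintf(`"%02x"`, byte(h))), nil
}

func main() {
	a, _ := json.Marshal([]Raw{1, 2, 3})
	b, _ := json.Marshal([]Hex{1, 2, 3})
	fmt.Println(string(a)) // "AQID"
	fmt.Println(string(b)) // ["01","02","03"]
}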

View File

@ -0,0 +1,52 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`encoding/json`
`unsafe`
`github.com/bytedance/sonic/loader`
)
//go:nosplit
func padd(p unsafe.Pointer, v int) unsafe.Pointer {
return unsafe.Pointer(uintptr(p) + uintptr(v))
}
//go:nosplit
func ptoenc(p loader.Function) _Encoder {
return *(*_Encoder)(unsafe.Pointer(&p))
}
func compact(p *[]byte, v []byte) error {
buf := newBuffer()
err := json.Compact(buf, v)
/* check for errors */
if err != nil {
freeBuffer(buf)
return err
}
/* add to result */
v = buf.Bytes()
*p = append(*p, v...)
/* return the buffer into pool */
freeBuffer(buf)
return nil
}
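
compact is a pooled wrapper around encoding/json.Compact; a standalone sketch of the same normalization without the buffer pool:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	var buf bytes.Buffer
	src := []byte("{\n  \"a\": [1, 2, 3]\n}")
	if err := json.Compact(&buf, src); err != nil {
		panic(err)
	}
	fmt.Println(buf.String()) // {"a":[1,2,3]}
}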