Mirror of https://github.com/superseriousbusiness/gotosocial, synced 2025-06-05 21:59:39 +02:00
[chore]: Bump github.com/gin-contrib/cors from 1.4.0 to 1.5.0 (#2388)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go | 130 (generated, vendored, new file)
@@ -0,0 +1,130 @@
// +build go1.16,!go1.17

// Copyright 2023 CloudWeGo Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package decoder

import (
    `strconv`
    _ `unsafe`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

var _runtime_writeBarrier uintptr = rt.GcwbAddr()

//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

var (
    _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier))

    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
)

func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    } else {
        self.Emit("MOVQ", ptr, _AX)
    }
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, _AX)
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
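The four helpers in this new file all JIT-compile the same guard: test runtime.writeBarrier, and either route the pointer store through the barrier routine (value in AX, destination address in DI) or fall through to a plain MOVQ. A rough Go-level model of that control flow, illustrative only and not part of the diff (storePtr and writeBarrierEnabled are invented names):

    package main

    import (
        "fmt"
        "unsafe"
    )

    // writeBarrierEnabled stands in for the runtime.writeBarrier flag that the
    // generated CMPL tests before every pointer store.
    var writeBarrierEnabled bool

    // storePtr mirrors the shape of WritePtrAX: a barrier path when the GC must
    // observe the store, a plain store otherwise. Both branches are ordinary Go
    // assignments here; the real generated code calls gcWriteBarrierAX on the
    // slow path.
    func storePtr(slot *unsafe.Pointer, val unsafe.Pointer) {
        if writeBarrierEnabled {
            *slot = val // slow path: CALL gcWriteBarrierAX in the emitted assembly
        } else {
            *slot = val // fast path: a single MOVQ
        }
    }

    func main() {
        var slot unsafe.Pointer
        v := 42
        storePtr(&slot, unsafe.Pointer(&v))
        fmt.Println(*(*int)(slot)) // 42
    }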
vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go117.go | 126 (generated, vendored, new file)
@@ -0,0 +1,126 @@
// +build go1.17,!go1.21

// Copyright 2023 CloudWeGo Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package decoder

import (
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

//go:linkname _runtime_writeBarrier runtime.writeBarrier
var _runtime_writeBarrier uintptr

//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

var (
    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))

    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
)

func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    } else {
        self.Emit("MOVQ", ptr, _AX)
    }
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _AX)
    self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, _AX)
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go121.go | 132 (generated, vendored, new file)
@@ -0,0 +1,132 @@
// +build go1.21,!go1.22

// Copyright 2023 CloudWeGo Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package decoder

import (
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

//go:linkname _runtime_writeBarrier runtime.writeBarrier
var _runtime_writeBarrier uintptr

//go:nosplit
//go:linkname gcWriteBarrier2 runtime.gcWriteBarrier2
func gcWriteBarrier2()

// Notice: gcWriteBarrier must use R11 register!!
var _R11 = _IC

var (
    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))

    _F_gcWriteBarrier2 = jit.Func(gcWriteBarrier2)
)

func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI, _R11)
    } else {
        self.save(_R11)
    }
    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
    self.Rjmp("CALL", _R11)
    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 0))
    self.Emit("MOVQ", rec, _DI)
    self.Emit("MOVQ", _DI, jit.Ptr(_R11, 8))
    if saveDI {
        self.load(_DI, _R11)
    } else {
        self.load(_R11)
    }
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
}

func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveAX {
        self.save(_AX, _R11)
    } else {
        self.save(_R11)
    }
    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
    self.Rjmp("CALL", _R11)
    self.Emit("MOVQ", ptr, jit.Ptr(_R11, 0))
    self.Emit("MOVQ", rec, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 8))
    if saveAX {
        self.load(_AX, _R11)
    } else {
        self.load(_R11)
    }
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
}

func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI, _R11)
    } else {
        self.save(_R11)
    }
    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
    self.Rjmp("CALL", _R11)
    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 0))
    self.Emit("MOVQ", rec, _DI)
    self.Emit("MOVQ", _DI, jit.Ptr(_R11, 8))
    if saveDI {
        self.load(_DI, _R11)
    } else {
        self.load(_R11)
    }
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
}

func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _AX)
    self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.save(_R11)
    self.Emit("MOVQ", _F_gcWriteBarrier2, _R11)
    self.Rjmp("CALL", _R11)
    self.Emit("MOVQ", ptr, jit.Ptr(_R11, 0))
    self.Emit("MOVQ", rec, _AX)
    self.Emit("MOVQ", _AX, jit.Ptr(_R11, 8))
    self.load(_R11)
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
}
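The go1.21 variant above differs from the older stubs in one notable way: instead of calling runtime.gcWriteBarrier with the value in AX, it calls runtime.gcWriteBarrier2 and then, using the buffer address kept in R11, stores the value being written at offset 0 and the destination's current contents at offset 8 before doing the ordinary MOVQ. A loose Go-level model of that bookkeeping, illustrative only and with invented names (wbSlots, recordStore); the semantic reading of the two slots is an assumption drawn from the MOVQs above:

    package main

    import "fmt"

    // wbSlots models the two-word buffer the generated code fills after the
    // CALL: index 0 the pointer being written, index 1 the pointer overwritten.
    type wbSlots [2]uintptr

    // recordStore mimics what the emitted MOVQs do around the barrier call:
    // fill the buffer, then perform the actual store into the destination slot.
    func recordStore(buf *[]wbSlots, slot *uintptr, newVal uintptr) {
        *buf = append(*buf, wbSlots{newVal, *slot}) // MOVQ AX, (R11); MOVQ DI, 8(R11)
        *slot = newVal                              // the real pointer store
    }

    func main() {
        var pending []wbSlots
        slot := uintptr(0x1000)
        recordStore(&pending, &slot, 0x2000)
        fmt.Printf("slot=%#x pending=%v\n", slot, pending)
    }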
@@ -1,5 +1,5 @@
//go:build go1.17 && !go1.21
// +build go1.17,!go1.21
//go:build go1.17 && !go1.22
// +build go1.17,!go1.22

/*
 * Copyright 2021 ByteDance Inc.
@@ -24,7 +24,6 @@ import (
    `fmt`
    `math`
    `reflect`
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/caching`
@@ -33,7 +32,6 @@ import (
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

/** Register Allocations
@@ -825,8 +823,8 @@ var (
)

var (
    _Vp_max_f32 = new(float64)
    _Vp_min_f32 = new(float64)
    _Vp_max_f32 = new(float32)
    _Vp_min_f32 = new(float32)
)

func init() {
@@ -835,17 +833,15 @@ func init() {
}

func (self *_Assembler) range_single_X0() {
    self.Emit("MOVSD"   , _VAR_st_Dv, _X0)             // MOVSD st.Dv, X0
    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)              // CVTSD2SS _VAR_st_Dv, X0
    self.Emit("MOVQ"    , _V_max_f32, _CX)              // MOVQ _max_f32, CX
    self.Emit("MOVQ"    , jit.Gitab(_I_float32), _ET)   // MOVQ ${itab(float32)}, ET
    self.Emit("MOVQ"    , jit.Gtype(_T_float32), _EP)   // MOVQ ${type(float32)}, EP
    self.Emit("UCOMISD" , jit.Ptr(_CX, 0), _X0)         // UCOMISD (CX), X0
    self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)         // UCOMISS (CX), X0
    self.Sjmp("JA"      , _LB_range_error)              // JA _range_error
    self.Emit("MOVQ"    , _V_min_f32, _CX)              // MOVQ _min_f32, CX
    self.Emit("MOVSD"   , jit.Ptr(_CX, 0), _X1)         // MOVSD (CX), X1
    self.Emit("UCOMISD" , _X0, _X1)                     // UCOMISD X0, X1
    self.Sjmp("JA"      , _LB_range_error)              // JA _range_error
    self.Emit("CVTSD2SS", _X0, _X0)                     // CVTSD2SS X0, X0
    self.Emit("UCOMISS" , jit.Ptr(_CX, 0), _X0)         // UCOMISS (CX), X0
    self.Sjmp("JB"      , _LB_range_error)              // JB _range_error
}

func (self *_Assembler) range_signed_CX(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
@@ -1931,62 +1927,3 @@ func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(i)), _AX)           // MOVQ $(i), (SP)
    self.call_go(_F_println)
}

//go:linkname _runtime_writeBarrier runtime.writeBarrier
var _runtime_writeBarrier uintptr

//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

var (
    _V_writeBarrier = jit.Imm(int64(uintptr(unsafe.Pointer(&_runtime_writeBarrier))))

    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
)

func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    } else {
        self.Emit("MOVQ", ptr, _AX)
    }
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
@@ -1,4 +1,4 @@
// +build go1.15,!go1.17
// +build go1.16,!go1.17

/*
 * Copyright 2021 ByteDance Inc.
@@ -23,7 +23,6 @@ import (
    `fmt`
    `math`
    `reflect`
    `strconv`
    `unsafe`

    `github.com/bytedance/sonic/internal/caching`
@@ -32,7 +31,6 @@ import (
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

/** Register Allocations
@@ -818,8 +816,8 @@ var (
)

var (
    _Vp_max_f32 = new(float64)
    _Vp_min_f32 = new(float64)
    _Vp_max_f32 = new(float32)
    _Vp_min_f32 = new(float32)
)

func init() {
@@ -828,17 +826,15 @@ func init() {
}

func (self *_Assembler) range_single() {
    self.Emit("MOVSD"   , _VAR_st_Dv, _X0)              // MOVSD st.Dv, X0
    self.Emit("CVTSD2SS", _VAR_st_Dv, _X0)               // CVTSD2SS st.Dv, X0
    self.Emit("MOVQ"    , _V_max_f32, _AX)               // MOVQ _max_f32, AX
    self.Emit("MOVQ"    , jit.Gitab(_I_float32), _ET)    // MOVQ ${itab(float32)}, ET
    self.Emit("MOVQ"    , jit.Gtype(_T_float32), _EP)    // MOVQ ${type(float32)}, EP
    self.Emit("UCOMISD" , jit.Ptr(_AX, 0), _X0)          // UCOMISD (AX), X0
    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0)          // UCOMISS (AX), X0
    self.Sjmp("JA"      , _LB_range_error)               // JA _range_error
    self.Emit("MOVQ"    , _V_min_f32, _AX)               // MOVQ _min_f32, AX
    self.Emit("MOVSD"   , jit.Ptr(_AX, 0), _X1)          // MOVSD (AX), X1
    self.Emit("UCOMISD" , _X0, _X1)                      // UCOMISD X0, X1
    self.Sjmp("JA"      , _LB_range_error)               // JA _range_error
    self.Emit("CVTSD2SS", _X0, _X0)                      // CVTSD2SS X0, X0
    self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0)          // UCOMISS (AX), X0
    self.Sjmp("JB"      , _LB_range_error)               // JB _range_error
}

func (self *_Assembler) range_signed(i *rt.GoItab, t *rt.GoType, a int64, b int64) {
@@ -1951,63 +1947,3 @@ func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) {
    self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0))  // MOVQ $(i), (SP)
    self.call_go(_F_println)
}

var _runtime_writeBarrier uintptr = rt.GcwbAddr()

//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

var (
    _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier))

    _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX)
)

func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    } else {
        self.Emit("MOVQ", ptr, _AX)
    }
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    if saveAX {
        self.Emit("XCHGQ", ptr, _AX)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
vendor/github.com/bytedance/sonic/internal/decoder/compiler.go | 2 (generated, vendored)
@@ -1152,4 +1152,4 @@ func (self *_Compiler) checkIfSkip(p *_Program, vt reflect.Type, c byte) int {
    p.pin(j)
    p.int(_OP_add, 1)
    return s
}
}
vendor/github.com/bytedance/sonic/internal/decoder/debug.go | 2 (generated, vendored)
@@ -67,4 +67,4 @@ func (self *_Assembler) debug_instr(i int, v *_Instr) {
        }
        self.force_gc()
    }
}
}
vendor/github.com/bytedance/sonic/internal/decoder/decoder.go | 16 (generated, vendored)
@@ -30,14 +30,14 @@ import (
)

const (
    _F_use_int64 = iota
    _F_use_number
    _F_disable_urc
    _F_disable_unknown
    _F_copy_string
    _F_validate_string
    _F_use_int64 = 0
    _F_disable_urc = 2
    _F_disable_unknown = 3
    _F_copy_string = 4

    _F_allow_control = 31
    _F_use_number = types.B_USE_NUMBER
    _F_validate_string = types.B_VALIDATE_STRING
    _F_allow_control = types.B_ALLOW_CONTROL
)

type Options uint64
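In the new constant block the decoder flags are explicit bit positions (with the string/number flags now taken from the types.B_* constants), so enabling an option at run time is a single bit-set, which is what the BTSQ on _F_allow_control does further down in this diff. A tiny illustration, not part of the vendored code (withCopyString is a made-up helper):

    package main

    import "fmt"

    const _F_copy_string = 4 // bit position, as in the hunk above

    // withCopyString shows the single bit-set that enabling a decoder option
    // amounts to once the flags are plain bit positions in one uint64.
    func withCopyString(fv uint64) uint64 {
        return fv | (1 << _F_copy_string) // what BTSQ does in the generated code
    }

    func main() {
        fmt.Printf("%b\n", withCopyString(0)) // 10000
    }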
@@ -252,4 +252,4 @@ func Skip(data []byte) (start int, end int) {
    ret := native.SkipOne(&s, &p, m, uint64(0))
    types.FreeStateMachine(m)
    return ret, p
}
}
vendor/github.com/bytedance/sonic/internal/decoder/errors.go | 70 (generated, vendored)
@@ -44,35 +44,12 @@ func (self SyntaxError) Description() string {
}

func (self SyntaxError) description() string {
    i := 16
    p := self.Pos - i
    q := self.Pos + i

    /* check for empty source */
    if self.Src == "" {
        return fmt.Sprintf("no sources available: %#v", self)
    }

    /* prevent slicing before the beginning */
    if p < 0 {
        p, q, i = 0, q - p, i + p
    }

    /* prevent slicing beyond the end */
    if n := len(self.Src); q > n {
        n = q - n
        q = len(self.Src)

        /* move the left bound if possible */
        if p > n {
            i += n
            p -= n
        }
    }

    /* left and right length */
    x := clamp_zero(i)
    y := clamp_zero(q - p - i - 1)
    p, x, q, y := calcBounds(len(self.Src), self.Pos)

    /* compose the error description */
    return fmt.Sprintf(
@@ -85,6 +62,39 @@ func (self SyntaxError) description() string {
    )
}

func calcBounds(size int, pos int) (lbound int, lwidth int, rbound int, rwidth int) {
    if pos >= size || pos < 0 {
        return 0, 0, size, 0
    }

    i := 16
    lbound = pos - i
    rbound = pos + i

    /* prevent slicing before the beginning */
    if lbound < 0 {
        lbound, rbound, i = 0, rbound - lbound, i + lbound
    }

    /* prevent slicing beyond the end */
    if n := size; rbound > n {
        n = rbound - n
        rbound = size

        /* move the left bound if possible */
        if lbound > n {
            i += n
            lbound -= n
        }
    }

    /* left and right length */
    lwidth = clamp_zero(i)
    rwidth = clamp_zero(rbound - lbound - i - 1)

    return
}

func (self SyntaxError) Message() string {
    if self.Msg == "" {
        return self.Code.Message()
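Worked example, derived by hand from calcBounds above (so an illustration, not part of the diff): with size = 100 and pos = 5, the initial window [-11, 21) is shifted right to [0, 32) and the left budget i shrinks from 16 to 5; the result is lbound = 0, lwidth = 5, rbound = 32, rwidth = 26, i.e. 5 bytes of context before the offending byte and 26 after it, 32 bytes in total.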
@@ -107,16 +117,19 @@ var stackOverflow = &json.UnsupportedValueError {
    Value : reflect.ValueOf("..."),
}

//go:nosplit
func error_wrap(src string, pos int, code types.ParsingError) error {
    return SyntaxError {
    return *error_wrap_heap(src, pos, code)
}

//go:noinline
func error_wrap_heap(src string, pos int, code types.ParsingError) *SyntaxError {
    return &SyntaxError {
        Pos  : pos,
        Src  : src,
        Code : code,
    }
}

//go:nosplit
func error_type(vt *rt.GoType) error {
    return &json.UnmarshalTypeError{Type: vt.Pack()}
}
@@ -158,7 +171,6 @@ func (self MismatchTypeError) Description() string {
    return fmt.Sprintf("Mismatch type %s with value %s %s", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description())
}

//go:nosplit
func error_mismatch(src string, pos int, vt *rt.GoType) error {
    return &MismatchTypeError {
        Pos : pos,
@@ -167,12 +179,10 @@ func error_mismatch(src string, pos int, vt *rt.GoType) error {
    }
}

//go:nosplit
func error_field(name string) error {
    return errors.New("json: unknown field " + strconv.Quote(name))
}

//go:nosplit
func error_value(value string, vtype reflect.Type) error {
    return &json.UnmarshalTypeError {
        Type : vtype,
@@ -1,5 +1,4 @@
//go:build go1.17 && !go1.21
// +build go1.17,!go1.21
// +build go1.17,!go1.22

/*
 * Copyright 2021 ByteDance Inc.
@@ -23,13 +22,11 @@ import (
    `encoding/json`
    `fmt`
    `reflect`
    `strconv`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

/** Crucial Registers:
@@ -286,7 +283,7 @@ func (self *_ValueDecoder) compile() {
    self.Emit("LEAQ", _VAR_ss, _CX)                     // LEAQ ss, CX
    self.Emit("MOVQ", _VAR_df, _R8)                     // MOVQ $df, R8
    self.Emit("BTSQ", jit.Imm(_F_allow_control), _R8)   // ANDQ $1<<_F_allow_control, R8
    self.callc(_F_value)                                // CALL value
    self.callc(_F_value)                                // CALL value
    self.Emit("MOVQ", _AX, _IC)                         // MOVQ AX, IC

    /* check for errors */
@@ -720,46 +717,6 @@ func (self *_ValueDecoder) compile() {
    }
}

func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R9)
    self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _AX)
    self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, _AX)
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.call(_F_gcWriteBarrierAX)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

/** Generic Decoder **/

var (
@@ -1,4 +1,4 @@
// +build go1.17,!go1.21
// +build go1.17,!go1.22

//
// Copyright 2021 ByteDance Inc.
@@ -1,4 +1,4 @@
// +build go1.15,!go1.17
// +build go1.16,!go1.17

/*
 * Copyright 2021 ByteDance Inc.
@@ -22,13 +22,11 @@ import (
    `encoding/json`
    `fmt`
    `reflect`
    `strconv`

    `github.com/bytedance/sonic/internal/jit`
    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/obj/x86`
)

/** Crucial Registers:
@@ -645,7 +643,8 @@ func (self *_ValueDecoder) compile() {
    self.Emit("MOVQ", _R8, _VAR_cs_p)
    self.Emit("MOVQ", _AX, _VAR_cs_n)
    self.Emit("MOVQ", _DI, _VAR_cs_LR)
    self.Emit("MOVQ", _T_byte, jit.Ptr(_SP, 0))
    self.Emit("MOVQ", _T_byte, _R8)
    self.Emit("MOVQ", _R8, jit.Ptr(_SP, 0))
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))
    self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
    self.call_go(_F_makeslice)
@@ -722,48 +721,6 @@ func (self *_ValueDecoder) compile() {
    }
}

func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", _AX, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
    if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
        panic("rec contains AX!")
    }
    self.Emit("MOVQ", _V_writeBarrier, _R10)
    self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
    self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, _AX)
    if saveDI {
        self.save(_DI)
    }
    self.Emit("LEAQ", rec, _DI)
    self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10)  // MOVQ ${fn}, AX
    self.Rjmp("CALL", _R10)
    if saveDI {
        self.load(_DI)
    }
    self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
    self.Emit("MOVQ", ptr, rec)
    self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}

/** Generic Decoder **/

var (
@@ -1,4 +1,4 @@
// +build go1.15,!go1.17
// +build go1.16,!go1.17

//
// Copyright 2021 ByteDance Inc.
vendor/github.com/bytedance/sonic/internal/decoder/pools.go | 4 (generated, vendored)
@@ -29,7 +29,7 @@ const (
    _MinSlice = 2
    _MaxStack = 4096 // 4k slots
    _MaxStackBytes = _MaxStack * _PtrBytes
    _MaxDigitNums = 800 // used in atof fallback algorithm
    _MaxDigitNums = types.MaxDigitNums // used in atof fallback algorithm
)

const (
@@ -140,4 +140,4 @@ func findOrCompile(vt *rt.GoType) (_Decoder, error) {
    } else {
        return nil, err
    }
}
}
vendor/github.com/bytedance/sonic/internal/decoder/stream.go | 19 (generated, vendored)
@@ -21,8 +21,9 @@ import (
    `io`
    `sync`

    `github.com/bytedance/sonic/option`
    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/option`
)

var (
@@ -71,6 +72,7 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) {

    var first = true
    var repeat = true

read_more:
    for {
        l := len(buf)
@@ -97,11 +99,20 @@ read_more:
    l := len(buf)
    if l > 0 {
        self.Decoder.Reset(string(buf))

        var x int
        if ret := native.SkipOneFast(&self.s, &x); ret < 0 {
            if repeat {
                goto read_more
            } else {
                err = SyntaxError{x, self.s, types.ParsingError(-ret), ""}
                self.err = err
                return
            }
        }

        err = self.Decoder.Decode(val)
        if err != nil {
            if repeat && self.repeatable(err) {
                goto read_more
            }
            self.err = err
        }
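The new Decode path above buffers input, uses native.SkipOneFast to confirm that a complete JSON value has arrived, and jumps back to read_more when it has not. From the caller's side this is the usual stream-decoding loop; for comparison only (this snippet uses the standard library, not the sonic API), the same driving pattern looks like this:

    package main

    import (
        "encoding/json"
        "fmt"
        "io"
        "strings"
    )

    func main() {
        // two concatenated JSON documents arriving on one stream
        r := strings.NewReader(`{"a":1} {"b":2}`)
        dec := json.NewDecoder(r)
        for {
            var v map[string]int
            if err := dec.Decode(&v); err == io.EOF {
                break // stream exhausted
            } else if err != nil {
                panic(err)
            }
            fmt.Println(v)
        }
    }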
@@ -1,4 +1,4 @@
// +build go1.15,!go1.20
// +build go1.16,!go1.20

/*
 * Copyright 2021 ByteDance Inc.
vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go | 12 (generated, vendored)
@@ -82,23 +82,23 @@ func makemap_small() unsafe.Pointer

//go:linkname mapassign runtime.mapassign
//goland:noinspection GoUnusedParameter
func mapassign(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
func mapassign(t *rt.GoMapType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer

//go:linkname mapassign_fast32 runtime.mapassign_fast32
//goland:noinspection GoUnusedParameter
func mapassign_fast32(t *rt.GoType, h unsafe.Pointer, k uint32) unsafe.Pointer
func mapassign_fast32(t *rt.GoMapType, h unsafe.Pointer, k uint32) unsafe.Pointer

//go:linkname mapassign_fast64 runtime.mapassign_fast64
//goland:noinspection GoUnusedParameter
func mapassign_fast64(t *rt.GoType, h unsafe.Pointer, k uint64) unsafe.Pointer
func mapassign_fast64(t *rt.GoMapType, h unsafe.Pointer, k uint64) unsafe.Pointer

//go:linkname mapassign_fast64ptr runtime.mapassign_fast64ptr
//goland:noinspection GoUnusedParameter
func mapassign_fast64ptr(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
func mapassign_fast64ptr(t *rt.GoMapType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer

//go:linkname mapassign_faststr runtime.mapassign_faststr
//goland:noinspection GoUnusedParameter
func mapassign_faststr(t *rt.GoType, h unsafe.Pointer, s string) unsafe.Pointer
func mapassign_faststr(t *rt.GoMapType, h unsafe.Pointer, s string) unsafe.Pointer

//go:nosplit
//go:linkname memclrHasPointers runtime.memclrHasPointers
@@ -108,4 +108,4 @@ func memclrHasPointers(ptr unsafe.Pointer, n uintptr)
//go:noescape
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
//goland:noinspection GoUnusedParameter
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)