// zeroX15 zeroes the X15 register.
func zeroX15(s *ssagen.State) {
+	if !buildcfg.Experiment.SIMD {
+		opregreg(s, x86.AXORPS, x86.REG_X15, x86.REG_X15)
+		return
+	}
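+	// The vxorps helper below emits a VEX-encoded VXORPS X15, X15, X15:
+	// unlike the legacy XORPS, which writes only the low 128 bits of X15,
+	// VXORPS also zeroes the upper bits of Y15/Z15.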
	vxorps := func(s *ssagen.State) {
		p := s.Prog(x86.AVXORPS)
		p.From.Type = obj.TYPE_REG
		p.From.Reg = x86.REG_X15
		p.AddRestSourceReg(x86.REG_X15) // second source (assumed 3-operand VEX setup; all operands are X15)
		p.To.Type = obj.TYPE_REG
		p.To.Reg = x86.REG_X15
	}
// there's no need to handle that. Clear R14 so that there's
// a bad value in there, in case needm tries to use it.
XORPS X15, X15
+#ifdef GOEXPERIMENT_simd
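+// XORPS above cleared only the low 128 bits of X15. If the CPU has AVX,
+// follow it with VXORPS, whose VEX encoding also zeroes the upper bits of
+// Y15/Z15; without AVX there are no upper bits to clear.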
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
XORQ R14, R14
MOVQ $runtime·needAndBindM<ABIInternal>(SB), AX
CALL AX
get_tls(R14)
MOVQ g(R14), R14
XORPS X15, X15
+#ifdef GOEXPERIMENT_simd
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
JMP ·sigpanic<ABIInternal>(SB)
// gcWriteBarrier informs the GC about heap pointer writes.
// Back to Go world, set special registers.
// The g register (R14) is preserved in C.
XORPS X15, X15
+#ifdef GOEXPERIMENT_simd
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
RET
// C->Go callback thunk that allows to call runtime·racesymbolize from C code.
get_tls(R12)
MOVQ g(R12), R14
PXOR X15, X15
+#ifdef GOEXPERIMENT_simd
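+// PXOR, like XORPS, writes only the low 128 bits of X15, so the same
+// AVX-guarded VXORPS follows here as well.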
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
// Reserve space for spill slots.
NOP SP // disable vet stack checking
get_tls(R12)
MOVQ g(R12), R14
PXOR X15, X15
+#ifdef GOEXPERIMENT_simd
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
// Reserve space for spill slots.
NOP SP // disable vet stack checking
get_tls(R12)
MOVQ g(R12), R14
PXOR X15, X15
+#ifdef GOEXPERIMENT_simd
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
// Reserve space for spill slots.
NOP SP // disable vet stack checking
get_tls(R12)
MOVQ g(R12), R14
PXOR X15, X15
+#ifdef GOEXPERIMENT_simd
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
// Reserve space for spill slots.
NOP SP // disable vet stack checking
get_tls(R12)
MOVQ g(R12), R14
PXOR X15, X15
+#ifdef GOEXPERIMENT_simd
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
// Reserve space for spill slots.
NOP SP // disable vet stack checking
get_tls(R12)
MOVQ g(R12), R14
PXOR X15, X15
+#ifdef GOEXPERIMENT_simd
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
// Reserve space for spill slots.
NOP SP // disable vet stack checking
get_tls(R12)
MOVQ g(R12), R14
PXOR X15, X15
+#ifdef GOEXPERIMENT_simd
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
// Reserve space for spill slots.
NOP SP // disable vet stack checking
get_tls(R12)
MOVQ g(R12), R14
PXOR X15, X15
+#ifdef GOEXPERIMENT_simd
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
// Reserve space for spill slots.
NOP SP // disable vet stack checking
// R14 is cleared in case there's a non-zero value in there
// if called from a non-go thread.
XORPS X15, X15
+#ifdef GOEXPERIMENT_simd
+CMPB internal∕cpu·X86+const_offsetX86HasAVX(SB), $1
+JNE 2(PC)
+VXORPS X15, X15, X15
+#endif
XORQ R14, R14
get_tls(AX)