panic("bad store type")
}
+// loadByType2 returns an opcode that can load consecutive memory locations into 2 registers with type t.
+// returns obj.AXXX if no such opcode exists.
+func loadByType2(t *types.Type) obj.As {
+ // Dispatch on element size first; LDP-family pair loads exist only
+ // for 4- and 8-byte elements.
+ isFloat := t.IsFloat()
+ switch t.Size() {
+ case 4:
+  if isFloat {
+   return arm64.AFLDPS
+  }
+  return arm64.ALDPW
+ case 8:
+  if isFloat {
+   return arm64.AFLDPD
+  }
+  return arm64.ALDP
+ }
+ return obj.AXXX
+}
+
+// storeByType2 returns an opcode that can store registers with type t into 2 consecutive memory locations.
+// returns obj.AXXX if no such opcode exists.
+func storeByType2(t *types.Type) obj.As {
+ // Dispatch on element size first; STP-family pair stores exist only
+ // for 4- and 8-byte elements.
+ isFloat := t.IsFloat()
+ switch t.Size() {
+ case 4:
+  if isFloat {
+   return arm64.AFSTPS
+  }
+  return arm64.ASTPW
+ case 8:
+  if isFloat {
+   return arm64.AFSTPD
+  }
+  return arm64.ASTP
+ }
+ return obj.AXXX
+}
+
// makeshift encodes a register shifted by a constant, used as an Offset in Prog.
func makeshift(v *ssa.Value, reg int16, typ int64, s int64) int64 {
if s < 0 || s >= 64 {
p.From.Reg = v.Args[0].Reg()
ssagen.AddrAuto(&p.To, v)
case ssa.OpArgIntReg, ssa.OpArgFloatReg:
+ ssagen.CheckArgReg(v)
// The assembler needs to wrap the entry safepoint/stack growth code with spill/unspill
// The loop only runs once.
- for _, a := range v.Block.Func.RegArgs {
- // Pass the spill/unspill information along to the assembler, offset by size of
- // the saved LR slot.
+ args := v.Block.Func.RegArgs
+ if len(args) == 0 {
+ break
+ }
+ v.Block.Func.RegArgs = nil // prevent from running again
+
+ for i := 0; i < len(args); i++ {
+ a := args[i]
+ // Offset by size of the saved LR slot.
addr := ssagen.SpillSlotAddr(a, arm64.REGSP, base.Ctxt.Arch.FixedFrameSize)
- s.FuncInfo().AddSpill(
- obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type), Spill: storeByType(a.Type)})
+ // Look for double-register operations if we can.
+ if i < len(args)-1 {
+ b := args[i+1]
+ if a.Type.Size() == b.Type.Size() &&
+ a.Type.IsFloat() == b.Type.IsFloat() &&
+ b.Offset == a.Offset+a.Type.Size() {
+ ld := loadByType2(a.Type)
+ st := storeByType2(a.Type)
+ if ld != obj.AXXX && st != obj.AXXX {
+ s.FuncInfo().AddSpill(obj.RegSpill{Reg: a.Reg, Reg2: b.Reg, Addr: addr, Unspill: ld, Spill: st})
+ i++ // b is done also, skip it.
+ continue
+ }
+ }
+ }
+ // Pass the spill/unspill information along to the assembler.
+ s.FuncInfo().AddSpill(obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type), Spill: storeByType(a.Type)})
}
- v.Block.Func.RegArgs = nil
- ssagen.CheckArgReg(v)
+
case ssa.OpARM64ADD,
ssa.OpARM64SUB,
ssa.OpARM64AND,
--- /dev/null
+// asmcheck
+
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package codegen
+
+// i64 checks that the two int64 register arguments, spilled around the
+// call to g, are saved/restored with a single STP/LDP pair.
+func i64(a, b int64) int64 { // arm64:`STP\s`,`LDP\s`
+ g()
+ return a + b
+}
+
+// i32 checks that the two int32 register arguments, spilled around the
+// call to g, are saved/restored with a single STPW/LDPW pair.
+func i32(a, b int32) int32 { // arm64:`STPW`,`LDPW`
+ g()
+ return a + b
+}
+
+// f64 checks that the two float64 register arguments, spilled around
+// the call to g, are saved/restored with a single FSTPD/FLDPD pair.
+func f64(a, b float64) float64 { // arm64:`FSTPD`,`FLDPD`
+ g()
+ return a + b
+}
+
+// f32 checks that the two float32 register arguments, spilled around
+// the call to g, are saved/restored with a single FSTPS/FLDPS pair.
+func f32(a, b float32) float32 { // arm64:`FSTPS`,`FLDPS`
+ g()
+ return a + b
+}
+
+// g is a call target that forces the callers above to spill their
+// register arguments across a call; noinline keeps the call (and hence
+// the spill/restore code being checked) in place.
+//go:noinline
+func g() {
+}