// This runs only for the first register-arg op in the function: RegArgs is cleared just below, so the loop is empty for later ones.
for _, ap := range v.Block.Func.RegArgs {
// Pass the spill/unspill information along to the assembler, offset by size of return PC pushed on stack.
- addr := ssagen.SpillSlotAddr(ap.Mem(), x86.REG_SP, v.Block.Func.Config.PtrSize)
+ addr := ssagen.SpillSlotAddr(ap, x86.REG_SP, v.Block.Func.Config.PtrSize)
s.FuncInfo().AddSpill(
- obj.RegSpill{Reg: ap.Reg(), Addr: addr, Unspill: loadByType(ap.Type()), Spill: storeByType(ap.Type())})
+ obj.RegSpill{Reg: ap.Reg, Addr: addr, Unspill: loadByType(ap.Type), Spill: storeByType(ap.Type)})
}
v.Block.Func.RegArgs = nil
ssagen.CheckArgReg(v)
return a
}
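These RegSpill records are what the assembler uses to wrap the prologue's morestack call: each Spill opcode stores the register to its frame slot before the call, and Unspill reloads it afterwards. For orientation, the shape of the record as implied by the call site above (a standalone sketch; field types are inferred from the arguments, not copied from cmd/internal/obj):

// Sketch only: the record built by the call above, with types inferred
// from ap.Reg (int16), SpillSlotAddr (obj.Addr), and loadByType/storeByType (obj.As).
type RegSpill struct {
	Addr           obj.Addr // stack slot built by SpillSlotAddr (SP-relative, before the frame exists)
	Reg            int16    // hardware register holding the argument on entry
	Spill, Unspill obj.As   // MOV opcodes picked per type, e.g. AMOVQ for ints, AMOVSD for float64
}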
-type ArgPair struct {
- reg *Register
- mem LocalSlot
-}
-
-func (ap *ArgPair) Reg() int16 {
- return ap.reg.objNum
-}
-
-func (ap *ArgPair) Type() *types.Type {
- return ap.mem.Type
-}
-
-func (ap *ArgPair) Mem() *LocalSlot {
- return &ap.mem
-}
-
-func (t ArgPair) String() string {
- n0 := "nil"
- if t.reg != nil {
- n0 = t.reg.String()
- }
- n1 := t.mem.String()
- return fmt.Sprintf("<%s,%s>", n0, n1)
+type Spill struct {
+ Type *types.Type
+ Offset int64
+ Reg int16
}
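The new Spill type replaces ArgPair's register-plus-LocalSlot pair with data the assembler can use directly: the frame offset is computed up front (in the ssagen hunk below) instead of being re-derived from a LocalSlot at assembly time. A hypothetical entry, with made-up register and offset, for an int64 argument arriving in AX:

// Hypothetical values for illustration only; nothing here is taken from the CL.
example := ssa.Spill{
	Type:   types.Types[types.TINT64], // drives the choice of spill/unspill opcode (MOVQ here)
	Offset: 8,                         // assumed frame offset of this argument's spill slot
	Reg:    x86.REG_AX,                // assumed register assignment
}
_ = example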
f.setHome(v, loc)
continue
}
-
- nameOff := v.Aux.(*AuxNameOffset)
- loc := LocalSlot{N: nameOff.Name, Type: v.Type, Off: nameOff.Offset}
- if f.pass.debug > stackDebug {
- fmt.Printf("stackalloc Op%s %s to %s\n", v.Op, v, loc)
- }
- // register args already allocated to registers, but need to know the stack allocation for later
- reg := f.getHome(v.ID).(*Register)
- f.RegArgs = append(f.RegArgs, ArgPair{reg: reg, mem: loc})
}
// For each type, we keep track of all the stack slots we
// have allocated for that type.
s.emitOpenDeferInfo()
}
+ // Record incoming parameter spill information for morestack calls emitted in the assembler.
+ // This is done here, using all the parameters (used, partially used, and unused) because
+ // it mimics the behavior of the former ABI (everything stored) and because it's not 100%
+ // clear if naming conventions are respected in autogenerated code.
+ // TODO figure out exactly what's unused, don't spill it. Make liveness fine-grained, also.
+ // TODO non-amd64 architectures have link registers etc that may require adjustment here.
+ for _, p := range params.InParams() {
+ typs, offs := p.RegisterTypesAndOffsets()
+ for i, t := range typs {
+ o := offs[i] // offset within parameter
+ fo := p.FrameOffset(params) // offset of parameter in frame
+ reg := ssa.ObjRegForAbiReg(p.Registers[i], s.f.Config)
+ s.f.RegArgs = append(s.f.RegArgs, ssa.Spill{Reg: reg, Offset: fo + o, Type: t})
+ }
+ }
+
return s.f
}
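To make the nested loop concrete: a parameter that occupies several registers contributes one Spill per register, each at the parameter's frame offset plus that piece's offset within the parameter. A sketch of the arithmetic for a hypothetical two-register parameter (a string: pointer word, then length word), with an assumed frame offset of 16 and assumed registers AX and BX:

// Illustrative only; the frame offset and register choices are assumptions.
fo := int64(16)                         // stands in for p.FrameOffset(params)
offs := []int64{0, 8}                   // offsets of the two words within the parameter
regs := []int16{x86.REG_AX, x86.REG_BX} // registers assigned to those words
var spills []ssa.Spill
for i, o := range offs {
	spills = append(spills, ssa.Spill{Reg: regs[i], Offset: fo + o})
}
// spills[0] covers frame offset 16, spills[1] covers frame offset 24.
_ = spills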
// The resulting addr is used in a non-standard context -- in the prologue
// of a function, before the frame has been constructed, so the standard
// addressing for the parameters will be wrong.
-func SpillSlotAddr(slot *ssa.LocalSlot, baseReg int16, extraOffset int64) obj.Addr {
- n, off := slot.N, slot.Off
- if n.Class != ir.PPARAM && n.Class != ir.PPARAMOUT {
- panic("Only expected to see param and returns here")
- }
+func SpillSlotAddr(spill ssa.Spill, baseReg int16, extraOffset int64) obj.Addr {
return obj.Addr{
Name: obj.NAME_NONE,
Type: obj.TYPE_MEM,
Reg: baseReg,
- Offset: off + extraOffset + n.FrameOffset(),
+ Offset: spill.Offset + extraOffset,
}
}
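Tying the two ends together: the call site in the first hunk passes v.Block.Func.Config.PtrSize as extraOffset because the prologue runs after the return PC has been pushed but before the frame is set up, so on amd64 a spill recorded at frame offset 24 is addressed as 32(SP). For illustration (the offset 24 is made up):

// Illustrative only: an assumed spill offset of 24 and amd64 PtrSize of 8.
addr := ssagen.SpillSlotAddr(ssa.Spill{Offset: 24}, x86.REG_SP, 8)
// addr is a TYPE_MEM operand meaning 32(SP): Reg == x86.REG_SP, Offset == 32.
_ = addr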