}
// For each type, we keep track of all the stack slots we
- // have allocated for that type.
- // TODO: share slots among equivalent types. We would need to
- // only share among types with the same GC signature. See the
- // type.Equal calls below for where this matters.
- locations := map[*types.Type][]LocalSlot{}
+ // have allocated for that type. This map is keyed by
+ // strings returned by types.LinkString. This guarantees
+ // type equality, but also lets us match the same type represented
+ // by two different types.Type structures. See issue 65783.
+ locations := map[string][]LocalSlot{}
// Each time we assign a stack slot to a value v, we remember
// the slot we used via an index into locations[v.Type].
noname:
// Set of stack slots we could reuse.
- locs := locations[v.Type]
+ typeKey := v.Type.LinkString()
+ locs := locations[typeKey]
// Mark all positions in locs used by interfering values.
for i := 0; i < len(locs); i++ {
used[i] = false
if i == len(locs) {
s.nAuto++
locs = append(locs, LocalSlot{N: f.NewLocal(v.Pos, v.Type), Type: v.Type, Off: 0})
- locations[v.Type] = locs
+ locations[typeKey] = locs
}
// Use the stack variable at that index for v.
loc := locs[i]
// amd64:`CALL\truntime\.deferprocStack`
defer func() {}()
}
+
+// Check that stack slots are shared among values of the same
+// type, but not pointer-identical types. See issue 65783.
+// (getp1 and getp2 return equal types — *[4]int — which the
+// compiler may represent with two distinct *types.Type values;
+// the allocator must still reuse one slot for both.)
+
+func spillSlotReuse() {
+	// The return values of getp1 and getp2 need to be
+	// spilled around the calls to nopInt. Make sure that
+	// spill slot gets reused.
+	// Both arm64 codegen directives below match the *same*
+	// autotmp slot (autotmp_2-8(SP)), so the test fails if the
+	// two spills land in different slots.
+
+	//arm64:`.*autotmp_2-8\(SP\)`
+	getp1()[nopInt()] = 0
+	//arm64:`.*autotmp_2-8\(SP\)`
+	getp2()[nopInt()] = 0
+}
+
+//go:noinline
+func nopInt() int {
+	// noinline keeps this an actual call, forcing the live
+	// getp1/getp2 results in spillSlotReuse to be spilled across it.
+	return 0
+}
+
+//go:noinline
+func getp1() *[4]int {
+	// noinline keeps the call opaque so its *[4]int result must be
+	// spilled around the following nopInt call in spillSlotReuse.
+	return nil
+}
+
+//go:noinline
+func getp2() *[4]int {
+	// Same shape as getp1 but a separate function, so its *[4]int
+	// result type may come from a distinct *types.Type structure.
+	return nil
+}