names []LocalSlot
slots []int
used []bool
+
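+ // Statistics for this stack allocation, logged when stats are enabled for the pass.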
+ nArgSlot, // Number of Values sourced to an arg slot
+ nNotNeed, // Number of Values not needing a stack slot
+ nNamedSlot, // Number of Values using a named stack slot
+ nReuse, // Number of Values reusing a stack slot
+ nAuto, // Number of autos allocated for stack slots
+ nSelfInterfere int32 // Number of self-interferences
}
func newStackAllocState(f *Func) *stackAllocState {
s.f.Config.stackAllocState = s
s.f = nil
s.live = nil
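+ // Zero the stat counters so the cached state starts fresh for the next function.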
+ s.nArgSlot, s.nNotNeed, s.nNamedSlot, s.nReuse, s.nAuto, s.nSelfInterfere = 0, 0, 0, 0, 0, 0
}
type stackValState struct {
defer putStackAllocState(s)
s.stackalloc()
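+ // Log the collected allocation statistics for this function if stats are enabled for the pass.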
+ if f.pass.stats > 0 {
+ f.logStat("stack_alloc_stats",
+ s.nArgSlot, "arg_slots", s.nNotNeed, "slot_not_needed",
+ s.nNamedSlot, "named_slots", s.nAuto, "auto_slots",
+ s.nReuse, "reused_slots", s.nSelfInterfere, "self_interfering")
+ }
+
return s.live
}
for _, b := range f.Blocks {
for _, v := range b.Values {
if !s.values[v.ID].needSlot {
+ s.nNotNeed++
continue
}
if v.Op == OpArg {
+ s.nArgSlot++
continue // already picked
}
if h != nil && h.(LocalSlot).N == name.N && h.(LocalSlot).Off == name.Off {
// A variable can interfere with itself.
// It is rare, but it can happen.
+ s.nSelfInterfere++
goto noname
}
}
if f.pass.debug > stackDebug {
fmt.Printf("stackalloc %s to %s\n", v, name.Name())
}
+ s.nNamedSlot++
f.setHome(v, name)
continue
}
var i int
for i = 0; i < len(locs); i++ {
if !used[i] {
+ s.nReuse++
break
}
}
// If there is no unused stack slot, allocate a new one.
if i == len(locs) {
+ s.nAuto++
locs = append(locs, LocalSlot{N: f.Config.fe.Auto(v.Type), Type: v.Type, Off: 0})
locations[v.Type] = locs
}