import (
"fmt"
+ "go/constant"
+ "go/token"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
// A batch holds escape analysis state that's shared across an entire
// batch of functions being analyzed at once.
type batch struct {
- allLocs []*location
- closures []closure
+ allLocs []*location
+ closures []closure
+ reassignOracles map[*ir.Func]*ir.ReassignOracle
heapLoc location
mutatorLoc location
b.heapLoc.attrs = attrEscapes | attrPersists | attrMutates | attrCalls
b.mutatorLoc.attrs = attrMutates
b.calleeLoc.attrs = attrCalls
+ b.reassignOracles = make(map[*ir.Func]*ir.ReassignOracle)
// Construct data-flow graph from syntax trees.
for _, fn := range fns {
b.closures = nil
for _, loc := range b.allLocs {
+ // Try to replace some non-constant expressions with literals.
+ b.rewriteWithLiterals(loc.n, loc.curfn)
+
+ // Check whether the node must be heap allocated for reasons
+ // such as an OMAKESLICE with a size too large for the stack.
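+ //
+ // For example (illustrative): if the rewrite above replaced
+ // make([]byte, n) with make([]byte, 64), HeapAllocReason now sees
+ // a small constant size rather than a non-constant one.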
if why := HeapAllocReason(loc.n); why != "" {
b.flow(b.heapHole().addr(loc.n, why), loc)
}
base.WarnfAt(pos, "%v does not escape, mutate, or call", name)
}
}
+
+// rewriteWithLiterals attempts to replace certain non-constant expressions
+// within n with a literal.
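+//
+// For example (an illustrative sketch, not from this CL): given
+//
+//	n := 64
+//	s := make([]byte, n)
+//
+// the size argument n can be resolved to the literal 64 via a
+// ReassignOracle, after which the rest of escape analysis treats the
+// call as make([]byte, 64).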
+func (b *batch) rewriteWithLiterals(n ir.Node, fn *ir.Func) {
+ if n == nil || fn == nil {
+ return
+ }
+ if n.Op() != ir.OMAKESLICE {
+ // TODO(thepudds): we handle more cases later in our CL stack.
+ return
+ }
+
+ // Look up a cached ReassignOracle for the function, lazily computing one if needed.
+ ro := b.reassignOracle(fn)
+ if ro == nil {
+ base.Fatalf("no ReassignOracle for function %v with closure parent %v", fn, fn.ClosureParent)
+ }
+
+ switch n.Op() {
+ case ir.OMAKESLICE:
+ // Check whether we can replace a non-constant argument to make with
+ // a literal, which may allow this slice to be stack allocated if it
+ // is otherwise eligible.
+ n := n.(*ir.MakeExpr)
+
+ r := &n.Cap
+ if n.Cap == nil {
+ r = &n.Len
+ }
+
+ if s := ro.StaticValue(*r); s.Op() == ir.OLITERAL {
+ lit, ok := s.(*ir.BasicLit)
+ if !ok || lit.Val().Kind() != constant.Int {
+ base.Fatalf("unexpected BasicLit Kind")
+ }
+ if constant.Compare(lit.Val(), token.GEQ, constant.MakeInt64(0)) {
+ *r = lit
+ }
+ }
+ }
+}
+
+// reassignOracle returns an initialized *ir.ReassignOracle for fn.
+// If fn is a closure, it returns the ReassignOracle for the ultimate parent.
+//
+// A new ReassignOracle is initialized lazily if needed, and the result
+// is cached to reduce duplicative work of preparing a ReassignOracle.
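+//
+// For example (hypothetical code, for illustration only):
+//
+//	func outer() {
+//		n := 64
+//		f := func() { _ = make([]byte, n) }
+//		f()
+//	}
+//
+// For the closure f, the oracle is initialized from outer (the
+// ultimate parent), so reassignments of n in outer are taken into
+// account, and the resulting oracle is cached under both f and outer.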
+func (b *batch) reassignOracle(fn *ir.Func) *ir.ReassignOracle {
+ if ro, ok := b.reassignOracles[fn]; ok {
+ return ro // Hit.
+ }
+
+ // For closures, we want the ultimate parent's ReassignOracle,
+ // so walk up the parent chain, if any.
+ f := fn
+ for f.ClosureParent != nil && !f.ClosureParent.IsPackageInit() {
+ f = f.ClosureParent
+ }
+
+ if f != fn {
+ // We found a parent.
+ ro := b.reassignOracles[f]
+ if ro != nil {
+ // Hit, via a parent. Before returning, store this ro for the original fn as well.
+ b.reassignOracles[fn] = ro
+ return ro
+ }
+ }
+
+ // Miss. We did not find a ReassignOracle for fn or a parent, so lazily create one.
+ ro := &ir.ReassignOracle{}
+ ro.Init(f)
+
+ // Cache the answer for the original fn.
+ b.reassignOracles[fn] = ro
+ if f != fn {
+ // Cache for the parent as well.
+ b.reassignOracles[f] = ro
+ }
+ return ro
+}
package escape
import (
- "cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
- "go/constant"
- "go/token"
)
func isSliceSelfAssign(dst, src ir.Node) bool {
if n.Op() == ir.OMAKESLICE {
n := n.(*ir.MakeExpr)
- r := &n.Cap
+ r := n.Cap
if n.Cap == nil {
- r = &n.Len
- }
-
- // Try to determine static values of make() calls, to avoid allocating them on the heap.
- // We are doing this in escape analysis, so that it happens after inlining and devirtualization.
- if s := ir.StaticValue(*r); s.Op() == ir.OLITERAL {
- lit, ok := s.(*ir.BasicLit)
- if !ok || lit.Val().Kind() != constant.Int {
- base.Fatalf("unexpected BasicLit Kind")
- }
- if constant.Compare(lit.Val(), token.GEQ, constant.MakeInt64(0)) {
- *r = lit
- }
+ r = n.Len
}
elem := n.Type().Elem()
if elem.Size() == 0 {
// TODO: stack allocate these? See #65685.
return "zero-sized element"
}
- if !ir.IsSmallIntConst(*r) {
+ if !ir.IsSmallIntConst(r) {
// For non-constant sizes, we do a hybrid approach:
//
// if cap <= K {
// Implementation is in ../walk/builtin.go:walkMakeSlice.
return ""
}
- if ir.Int64Val(*r) > ir.MaxImplicitStackVarSize/elem.Size() {
+ if ir.Int64Val(r) > ir.MaxImplicitStackVarSize/elem.Size() {
return "too large for stack"
}
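+
+ // The check above means, for example (with the default
+ // ir.MaxImplicitStackVarSize of 64 KiB), that make([]int64, 10000)
+ // needs 80000 bytes and is reported as "too large for stack".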
}