From e89791983a1330e467c8ba2cca9d7a581a5789cc Mon Sep 17 00:00:00 2001
From: thepudds
Date: Fri, 4 Apr 2025 15:51:16 -0400
Subject: [PATCH] cmd/compile/internal/escape: use an ir.ReassignOracle

Using the new-ish ir.ReassignOracle is more efficient than calling
ir.StaticValue repeatedly. This CL now uses an ir.ReassignOracle for
the recent make constant propagation introduced in CL 649035.

We also pull the main change from CL 649035 into a new function, which
we will update later in our stack. We will also use the ReassignOracles
introduced here later in our stack.

(We originally did most of this work in CL 649077, but we abandoned
that in favor of CL 649035).

We could also use an ir.ReassignOracle in the older processing of
ir.OCALLFUNC in (*escape).call, but for now, we just leave that as a
TODO.

Updates #71359

Change-Id: I6e02eeac269bde3a302622b4dfe0c8dc63ec9ffc
Reviewed-on: https://go-review.googlesource.com/c/go/+/673795
Reviewed-by: Keith Randall
Auto-Submit: Keith Randall
Reviewed-by: Keith Randall
LUCI-TryBot-Result: Go LUCI
Reviewed-by: David Chase
---
 src/cmd/compile/internal/escape/call.go   |  1 +
 src/cmd/compile/internal/escape/escape.go | 93 ++++++++++++++++++++++-
 src/cmd/compile/internal/escape/utils.go  | 23 +-----
 3 files changed, 96 insertions(+), 21 deletions(-)

diff --git a/src/cmd/compile/internal/escape/call.go b/src/cmd/compile/internal/escape/call.go
index bd2e923da1..a80e2707e2 100644
--- a/src/cmd/compile/internal/escape/call.go
+++ b/src/cmd/compile/internal/escape/call.go
@@ -40,6 +40,7 @@ func (e *escape) call(ks []hole, call ir.Node) {
 	var fn *ir.Name
 	switch call.Op() {
 	case ir.OCALLFUNC:
+		// TODO(thepudds): use an ir.ReassignOracle here.
 		v := ir.StaticValue(call.Fun)
 		fn = ir.StaticCalleeName(v)
 	}
diff --git a/src/cmd/compile/internal/escape/escape.go b/src/cmd/compile/internal/escape/escape.go
index 5bd3038a9c..43fe0b8af5 100644
--- a/src/cmd/compile/internal/escape/escape.go
+++ b/src/cmd/compile/internal/escape/escape.go
@@ -6,6 +6,8 @@ package escape
 
 import (
 	"fmt"
+	"go/constant"
+	"go/token"
 
 	"cmd/compile/internal/base"
 	"cmd/compile/internal/ir"
@@ -86,8 +88,9 @@ import (
 // A batch holds escape analysis state that's shared across an entire
 // batch of functions being analyzed at once.
 type batch struct {
-	allLocs  []*location
-	closures []closure
+	allLocs         []*location
+	closures        []closure
+	reassignOracles map[*ir.Func]*ir.ReassignOracle
 
 	heapLoc    location
 	mutatorLoc location
@@ -129,6 +132,7 @@ func Batch(fns []*ir.Func, recursive bool) {
 	b.heapLoc.attrs = attrEscapes | attrPersists | attrMutates | attrCalls
 	b.mutatorLoc.attrs = attrMutates
 	b.calleeLoc.attrs = attrCalls
+	b.reassignOracles = make(map[*ir.Func]*ir.ReassignOracle)
 
 	// Construct data-flow graph from syntax trees.
 	for _, fn := range fns {
@@ -154,6 +158,11 @@ func Batch(fns []*ir.Func, recursive bool) {
 	b.closures = nil
 
 	for _, loc := range b.allLocs {
+		// Try to replace some non-constant expressions with literals.
+		b.rewriteWithLiterals(loc.n, loc.curfn)
+
+		// Check if the node must be heap allocated for certain reasons
+		// such as OMAKESLICE for a large slice.
 		if why := HeapAllocReason(loc.n); why != "" {
 			b.flow(b.heapHole().addr(loc.n, why), loc)
 		}
@@ -515,3 +524,83 @@ func (b *batch) reportLeaks(pos src.XPos, name string, esc leaks, sig *types.Typ
 		base.WarnfAt(pos, "%v does not escape, mutate, or call", name)
 	}
 }
+
+// rewriteWithLiterals attempts to replace certain non-constant expressions
+// within n with a literal if possible.
+func (b *batch) rewriteWithLiterals(n ir.Node, fn *ir.Func) {
+	if n == nil || fn == nil {
+		return
+	}
+	if n.Op() != ir.OMAKESLICE {
+		// TODO(thepudds): we handle more cases later in our CL stack.
+		return
+	}
+
+	// Look up a cached ReassignOracle for the function, lazily computing one if needed.
+	ro := b.reassignOracle(fn)
+	if ro == nil {
+		base.Fatalf("no ReassignOracle for function %v with closure parent %v", fn, fn.ClosureParent)
+	}
+
+	switch n.Op() {
+	case ir.OMAKESLICE:
+		// Check if we can replace a non-constant argument to make with
+		// a literal to allow for this slice to be stack allocated if otherwise allowed.
+		n := n.(*ir.MakeExpr)
+
+		r := &n.Cap
+		if n.Cap == nil {
+			r = &n.Len
+		}
+
+		if s := ro.StaticValue(*r); s.Op() == ir.OLITERAL {
+			lit, ok := s.(*ir.BasicLit)
+			if !ok || lit.Val().Kind() != constant.Int {
+				base.Fatalf("unexpected BasicLit Kind")
+			}
+			if constant.Compare(lit.Val(), token.GEQ, constant.MakeInt64(0)) {
+				*r = lit
+			}
+		}
+	}
+}
+
+// reassignOracle returns an initialized *ir.ReassignOracle for fn.
+// If fn is a closure, it returns the ReassignOracle for the ultimate parent.
+//
+// A new ReassignOracle is initialized lazily if needed, and the result
+// is cached to reduce duplicative work of preparing a ReassignOracle.
+func (b *batch) reassignOracle(fn *ir.Func) *ir.ReassignOracle {
+	if ro, ok := b.reassignOracles[fn]; ok {
+		return ro // Hit.
+	}
+
+	// For closures, we want the ultimate parent's ReassignOracle,
+	// so walk up the parent chain, if any.
+	f := fn
+	for f.ClosureParent != nil && !f.ClosureParent.IsPackageInit() {
+		f = f.ClosureParent
+	}
+
+	if f != fn {
+		// We found a parent.
+		ro := b.reassignOracles[f]
+		if ro != nil {
+			// Hit, via a parent. Before returning, store this ro for the original fn as well.
+			b.reassignOracles[fn] = ro
+			return ro
+		}
+	}
+
+	// Miss. We did not find a ReassignOracle for fn or a parent, so lazily create one.
+	ro := &ir.ReassignOracle{}
+	ro.Init(f)
+
+	// Cache the answer for the original fn.
+	b.reassignOracles[fn] = ro
+	if f != fn {
+		// Cache for the parent as well.
+		b.reassignOracles[f] = ro
+	}
+	return ro
+}
diff --git a/src/cmd/compile/internal/escape/utils.go b/src/cmd/compile/internal/escape/utils.go
index b3ebe778f4..2718a7f841 100644
--- a/src/cmd/compile/internal/escape/utils.go
+++ b/src/cmd/compile/internal/escape/utils.go
@@ -5,12 +5,9 @@
 package escape
 
 import (
-	"cmd/compile/internal/base"
 	"cmd/compile/internal/ir"
 	"cmd/compile/internal/typecheck"
 	"cmd/compile/internal/types"
-	"go/constant"
-	"go/token"
 )
 
 func isSliceSelfAssign(dst, src ir.Node) bool {
@@ -210,21 +207,9 @@ func HeapAllocReason(n ir.Node) string {
 	if n.Op() == ir.OMAKESLICE {
 		n := n.(*ir.MakeExpr)
 
-		r := &n.Cap
+		r := n.Cap
 		if n.Cap == nil {
-			r = &n.Len
-		}
-
-		// Try to determine static values of make() calls, to avoid allocating them on the heap.
-		// We are doing this in escape analysis, so that it happens after inlining and devirtualization.
-		if s := ir.StaticValue(*r); s.Op() == ir.OLITERAL {
-			lit, ok := s.(*ir.BasicLit)
-			if !ok || lit.Val().Kind() != constant.Int {
-				base.Fatalf("unexpected BasicLit Kind")
-			}
-			if constant.Compare(lit.Val(), token.GEQ, constant.MakeInt64(0)) {
-				*r = lit
-			}
+			r = n.Len
 		}
 
 		elem := n.Type().Elem()
 		if elem.Size() == 0 {
 			// TODO: stack allocate these? See #65685.
return "zero-sized element" } - if !ir.IsSmallIntConst(*r) { + if !ir.IsSmallIntConst(r) { // For non-constant sizes, we do a hybrid approach: // // if cap <= K { @@ -249,7 +234,7 @@ func HeapAllocReason(n ir.Node) string { // Implementation is in ../walk/builtin.go:walkMakeSlice. return "" } - if ir.Int64Val(*r) > ir.MaxImplicitStackVarSize/elem.Size() { + if ir.Int64Val(r) > ir.MaxImplicitStackVarSize/elem.Size() { return "too large for stack" } } -- 2.50.0