From 65dfe4a772a4bc612219d93886e5c07290785ee6 Mon Sep 17 00:00:00 2001
From: David Chase
Date: Wed, 15 Jul 2020 09:07:00 -0400
Subject: [PATCH] cmd/compile: generate late-lowering static calls

This is testing for the limited case of zero or one SSA-able results.

One regression: the later expansion of "Dereference" into a Move into
argument slots thwarts the optimization that rewrites MOVE A -> B,
MOVE B -> C into MOVE A -> B, MOVE A -> C. At the phase where that
optimization runs, the second move is still written as a Dereference,
and because the target of the Dereference is not visible in the
Dereference itself, it is not possible to verify that A and B, or A
and C, do not overlap in some peculiar way (and for results fed to
args, they can).

The regression is repaired in a later CL by changing when calls are
expanded.

Change-Id: Ia0f48a9d483d5a54a99657a24b12b25b8edde55f
Reviewed-on: https://go-review.googlesource.com/c/go/+/242782
Trust: David Chase
Run-TryBot: David Chase
TryBot-Result: Go Bot
Reviewed-by: Cherry Zhang
---
 src/cmd/compile/internal/gc/ssa.go     | 94 ++++++++++++++++++++++----
 src/cmd/compile/internal/ssa/op.go     |  5 ++
 src/cmd/compile/internal/types/type.go |  6 +-
 3 files changed, 87 insertions(+), 18 deletions(-)

diff --git a/src/cmd/compile/internal/gc/ssa.go b/src/cmd/compile/internal/gc/ssa.go
index c59945f206..0bd87beb84 100644
--- a/src/cmd/compile/internal/gc/ssa.go
+++ b/src/cmd/compile/internal/gc/ssa.go
@@ -10,6 +10,7 @@ import (
 	"html"
 	"os"
 	"sort"
+	"strings"
 
 	"bufio"
 	"bytes"
@@ -651,6 +652,8 @@ type state struct {
 	lastDeferExit       *ssa.Block // Entry block of last defer exit code we generated
 	lastDeferFinalBlock *ssa.Block // Final block of last defer exit code we generated
 	lastDeferCount      int        // Number of defers encountered at that point
+
+	prevCall *ssa.Value // the previous call; use this to tie results to the call op.
 }
 
 type funcLine struct {
@@ -1076,7 +1079,7 @@ func (s *state) stmt(n *Node) {
 		fallthrough
 
 	case OCALLMETH, OCALLINTER:
-		s.callAddr(n, callNormal)
+		s.callResult(n, callNormal)
 		if n.Op == OCALLFUNC && n.Left.Op == ONAME && n.Left.Class() == PFUNC {
 			if fn := n.Left.Sym.Name; compiling_runtime && fn == "throw" ||
 				n.Left.Sym.Pkg == Runtimepkg && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") {
@@ -1108,10 +1111,10 @@ func (s *state) stmt(n *Node) {
 			if n.Esc == EscNever {
 				d = callDeferStack
 			}
-			s.callAddr(n.Left, d)
+			s.callResult(n.Left, d)
 		}
 	case OGO:
-		s.callAddr(n.Left, callGo)
+		s.callResult(n.Left, callGo)
 
 	case OAS2DOTTYPE:
 		res, resok := s.dottype(n.Right, true)
@@ -4340,6 +4343,7 @@ func (s *state) callAddr(n *Node, k callKind) *ssa.Value {
 // Calls the function n using the specified call type.
 // Returns the address of the return value (or nil if none).
 func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
+	s.prevCall = nil
 	var sym *types.Sym     // target symbol (if static)
 	var closure *ssa.Value // ptr to closure to run (if dynamic)
 	var codeptr *ssa.Value // ptr to target code (if dynamic)
@@ -4347,6 +4351,7 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 	fn := n.Left
 	var ACArgs []ssa.Param
 	var ACResults []ssa.Param
+	var callArgs []*ssa.Value
 	res := n.Left.Type.Results()
 	if k == callNormal {
 		nf := res.NumFields()
@@ -4356,10 +4361,15 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 		}
 	}
 
+	testLateExpansion := false
+
 	switch n.Op {
 	case OCALLFUNC:
 		if k == callNormal && fn.Op == ONAME && fn.Class() == PFUNC {
 			sym = fn.Sym
+			if !returnResultAddr && strings.Contains(sym.Name, "testLateExpansion") {
+				testLateExpansion = true
+			}
 			break
 		}
 		closure = s.expr(fn)
@@ -4374,6 +4384,9 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 		}
 		if k == callNormal {
 			sym = fn.Sym
+			if !returnResultAddr && strings.Contains(sym.Name, "testLateExpansion") {
+				testLateExpansion = true
+			}
 			break
 		}
 		closure = s.getMethodClosure(fn)
@@ -4470,12 +4483,20 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 		if k != callNormal {
 			// Write argsize and closure (args to newproc/deferproc).
 			argsize := s.constInt32(types.Types[TUINT32], int32(stksize))
-			addr := s.constOffPtrSP(s.f.Config.Types.UInt32Ptr, argStart)
 			ACArgs = append(ACArgs, ssa.Param{Type: types.Types[TUINT32], Offset: int32(argStart)})
-			s.store(types.Types[TUINT32], addr, argsize)
-			addr = s.constOffPtrSP(s.f.Config.Types.UintptrPtr, argStart+int64(Widthptr))
+			if testLateExpansion {
+				callArgs = append(callArgs, argsize)
+			} else {
+				addr := s.constOffPtrSP(s.f.Config.Types.UInt32Ptr, argStart)
+				s.store(types.Types[TUINT32], addr, argsize)
+			}
 			ACArgs = append(ACArgs, ssa.Param{Type: types.Types[TUINTPTR], Offset: int32(argStart) + int32(Widthptr)})
-			s.store(types.Types[TUINTPTR], addr, closure)
+			if testLateExpansion {
+				callArgs = append(callArgs, closure)
+			} else {
+				addr := s.constOffPtrSP(s.f.Config.Types.UintptrPtr, argStart+int64(Widthptr))
+				s.store(types.Types[TUINTPTR], addr, closure)
+			}
 			stksize += 2 * int64(Widthptr)
 			argStart += 2 * int64(Widthptr)
 		}
@@ -4484,7 +4505,11 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 		if rcvr != nil {
 			addr := s.constOffPtrSP(s.f.Config.Types.UintptrPtr, argStart)
 			ACArgs = append(ACArgs, ssa.Param{Type: types.Types[TUINTPTR], Offset: int32(argStart)})
-			s.store(types.Types[TUINTPTR], addr, rcvr)
+			if testLateExpansion {
+				callArgs = append(callArgs, rcvr)
+			} else {
+				s.store(types.Types[TUINTPTR], addr, rcvr)
+			}
 		}
 
 		// Write args.
@@ -4492,14 +4517,20 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 		args := n.Rlist.Slice()
 		if n.Op == OCALLMETH {
 			f := t.Recv()
-			ACArgs = append(ACArgs, s.storeArg(args[0], f.Type, argStart+f.Offset))
+			ACArg, arg := s.putArg(args[0], f.Type, argStart+f.Offset, testLateExpansion)
+			ACArgs = append(ACArgs, ACArg)
+			callArgs = append(callArgs, arg)
 			args = args[1:]
 		}
 		for i, n := range args {
 			f := t.Params().Field(i)
-			ACArgs = append(ACArgs, s.storeArg(n, f.Type, argStart+f.Offset))
+			ACArg, arg := s.putArg(n, f.Type, argStart+f.Offset, testLateExpansion)
+			ACArgs = append(ACArgs, ACArg)
+			callArgs = append(callArgs, arg)
 		}
 
+		callArgs = append(callArgs, s.mem())
+
 		// call target
 		switch {
 		case k == callDefer:
@@ -4517,13 +4548,29 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 		case codeptr != nil:
 			call = s.newValue2A(ssa.OpInterCall, types.TypeMem, ssa.InterfaceAuxCall(ACArgs, ACResults), codeptr, s.mem())
 		case sym != nil:
-			call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, ssa.StaticAuxCall(sym.Linksym(), ACArgs, ACResults), s.mem())
+			if testLateExpansion {
+				var tys []*types.Type
+				aux := ssa.StaticAuxCall(sym.Linksym(), ACArgs, ACResults)
+				for i := int64(0); i < aux.NResults(); i++ {
+					tys = append(tys, aux.TypeOfResult(i))
+				}
+				tys = append(tys, types.TypeMem)
+				call = s.newValue0A(ssa.OpStaticLECall, types.NewResults(tys), aux)
+				call.AddArgs(callArgs...)
+			} else {
+				call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, ssa.StaticAuxCall(sym.Linksym(), ACArgs, ACResults), s.mem())
+			}
 		default:
 			s.Fatalf("bad call type %v %v", n.Op, n)
 		}
 		call.AuxInt = stksize // Call operations carry the argsize of the callee along with them
 	}
-	s.vars[&memVar] = call
+	if testLateExpansion {
+		s.prevCall = call
+		s.vars[&memVar] = s.newValue1I(ssa.OpSelectN, types.TypeMem, int64(len(ACResults)), call)
+	} else {
+		s.vars[&memVar] = call
+	}
 
 	// Insert OVARLIVE nodes
 	s.stmtList(n.Nbody)
@@ -4551,6 +4598,10 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value {
 	if returnResultAddr {
 		return s.constOffPtrSP(types.NewPtr(fp.Type), fp.Offset+Ctxt.FixedFrameSize())
 	}
+
+	if testLateExpansion {
+		return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call)
+	}
 	return s.load(n.Type, s.constOffPtrSP(types.NewPtr(fp.Type), fp.Offset+Ctxt.FixedFrameSize()))
 }
 
@@ -4939,6 +4990,7 @@ func (s *state) intDivide(n *Node, a, b *ssa.Value) *ssa.Value {
 // The call is added to the end of the current block.
 // If returns is false, the block is marked as an exit block.
 func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value {
+	s.prevCall = nil
 	// Write args to the stack
 	off := Ctxt.FixedFrameSize()
 	var ACArgs []ssa.Param
@@ -5098,9 +5150,21 @@ func (s *state) storeTypePtrs(t *types.Type, left, right *ssa.Value) {
 	}
 }
 
-func (s *state) storeArg(n *Node, t *types.Type, off int64) ssa.Param {
-	s.storeArgWithBase(n, t, s.sp, off)
-	return ssa.Param{Type: t, Offset: int32(off)}
+// putArg evaluates n for the purpose of passing it as an argument to a function and returns the corresponding Param for the call.
+// If forLateExpandedCall is true, it returns the argument value to pass to the call operation.
+// If forLateExpandedCall is false, then the value is stored at the specified stack offset, and the returned value is nil.
+func (s *state) putArg(n *Node, t *types.Type, off int64, forLateExpandedCall bool) (ssa.Param, *ssa.Value) {
+	var a *ssa.Value
+	if forLateExpandedCall {
+		if !canSSAType(t) {
+			a = s.newValue2(ssa.OpDereference, t, s.addr(n), s.mem())
+		} else {
+			a = s.expr(n)
+		}
+	} else {
+		s.storeArgWithBase(n, t, s.sp, off)
+	}
+	return ssa.Param{Type: t, Offset: int32(off)}, a
 }
 
 func (s *state) storeArgWithBase(n *Node, t *types.Type, base *ssa.Value, off int64) {
diff --git a/src/cmd/compile/internal/ssa/op.go b/src/cmd/compile/internal/ssa/op.go
index b8f80f7ea4..1ab53cf285 100644
--- a/src/cmd/compile/internal/ssa/op.go
+++ b/src/cmd/compile/internal/ssa/op.go
@@ -83,22 +83,27 @@ type AuxCall struct {
 func (a *AuxCall) OffsetOfResult(which int64) int64 {
 	return int64(a.results[which].Offset)
 }
+
 // OffsetOfArg returns the SP offset of argument which (indexed 0, 1, etc).
 func (a *AuxCall) OffsetOfArg(which int64) int64 {
 	return int64(a.args[which].Offset)
 }
+
 // TypeOfResult returns the type of result which (indexed 0, 1, etc).
 func (a *AuxCall) TypeOfResult(which int64) *types.Type {
 	return a.results[which].Type
 }
+
 // TypeOfArg returns the type of argument which (indexed 0, 1, etc).
 func (a *AuxCall) TypeOfArg(which int64) *types.Type {
 	return a.args[which].Type
 }
+
 // SizeOfResult returns the size of result which (indexed 0, 1, etc).
 func (a *AuxCall) SizeOfResult(which int64) int64 {
 	return a.TypeOfResult(which).Width
 }
+
 // SizeOfArg returns the size of argument which (indexed 0, 1, etc).
 func (a *AuxCall) SizeOfArg(which int64) int64 {
 	return a.TypeOfArg(which).Width
diff --git a/src/cmd/compile/internal/types/type.go b/src/cmd/compile/internal/types/type.go
index 9b05aef429..5d1d5d4008 100644
--- a/src/cmd/compile/internal/types/type.go
+++ b/src/cmd/compile/internal/types/type.go
@@ -68,7 +68,7 @@ const (
 	// SSA backend types
 	TSSA     // internal types used by SSA backend (flags, memory, etc.)
 	TTUPLE   // a pair of types, used by SSA backend
-	TRESULTS // multiuple types; the resulting of calling a function or method, plus a memory at the end.
+	TRESULTS // multiple types; the result of calling a function or method, with a memory at the end.
 
 	NTYPE
 )
@@ -331,9 +331,9 @@ type Tuple struct {
 	// Any tuple with a memory type must put that memory type second.
 }
 
+// Results are the output from calls that will be late-expanded.
 type Results struct {
-	Types []*Type
-	// Any Results with a memory type must put that memory type last.
+	Types []*Type // Last element is memory output from call.
 }
 
 // Array contains Type fields specific to array types.
-- 
2.50.0
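
Editorial note, not part of the patch: a minimal sketch of the kind of call the new path is meant to exercise. The gate in s.call selects direct (static) calls whose symbol name contains "testLateExpansion" and for which the caller does not need the result's address; per the commit message, the late-expanded form is limited to zero or one SSA-able results. The function names and bodies below are invented for illustration only.

package main

//go:noinline
func testLateExpansionSum(a, b int) int { // one SSA-able result
	return a + b
}

//go:noinline
func testLateExpansionBump(p *int) { // zero results
	*p++
}

func main() {
	x := 1
	testLateExpansionBump(&x)
	println(testLateExpansionSum(x, 2))
}

For such a call the SSA builder now emits a single OpStaticLECall whose type is a Results list (the result types followed by memory, per types.NewResults in the patch); the memory is recovered with OpSelectN at index len(ACResults) and a single SSA-able result with OpSelectN at index 0, instead of storing arguments and loading results through fixed SP offsets.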
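
Also not part of the patch: a rough guess at the source shape behind the regression described in the commit message, where one call's result is fed directly to another call's argument. The type and functions here are invented for illustration.

package main

type big struct{ buf [64]byte } // contains a multi-element array, so not SSA-able

//go:noinline
func f(p *big) big { return *p } // result is returned in memory

//go:noinline
func g(x big) byte { return x.buf[0] } // argument is passed in memory

func main() {
	var b big
	b.buf[0] = 7
	println(g(f(&b))) // f's result memory feeds g's argument memory
}

Under late expansion the copy of f's result into g's argument slot is still an OpDereference at the phase that used to rewrite MOVE A -> B, MOVE B -> C into MOVE A -> B, MOVE A -> C; since the Dereference does not expose its target, the no-overlap check cannot be made and the rewrite is skipped. That is the regression the commit message says is repaired in a later CL by expanding calls at a different point.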