From 7bda6154caa6f0c527f4a8302e38d450b44ae68b Mon Sep 17 00:00:00 2001
From: David Chase <drchase@google.com>
Date: Wed, 12 Aug 2020 23:47:57 -0400
Subject: [PATCH] cmd/compile: add generic optimization patterns for
 late-expanded calls.

Repeats existing patterns for old calls, so that these will apply
during the optimization phases that precede call expansion.

Change-Id: I1ca0a78c159aa1a51004db217edde4ecc772b646
Reviewed-on: https://go-review.googlesource.com/c/go/+/248190
Trust: David Chase <drchase@google.com>
Run-TryBot: David Chase <drchase@google.com>
Reviewed-by: Cherry Zhang <cherryyz@google.com>
TryBot-Result: Go Bot <gobot@golang.org>
---
 .../compile/internal/ssa/gen/generic.rules    |  37 ++++
 src/cmd/compile/internal/ssa/rewrite.go       |   3 +-
 .../compile/internal/ssa/rewritegeneric.go    | 158 ++++++++++++++++++
 3 files changed, 197 insertions(+), 1 deletion(-)

diff --git a/src/cmd/compile/internal/ssa/gen/generic.rules b/src/cmd/compile/internal/ssa/gen/generic.rules
index 588077422c..4351ef5bdd 100644
--- a/src/cmd/compile/internal/ssa/gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/gen/generic.rules
@@ -1961,6 +1961,31 @@
   && warnRule(fe.Debug_checknil(), v, "removed nil check")
   => (Invalid)
 
+// for late-expanded calls
+(Zero (SelectN [0] call:(StaticLECall _ _)) mem:(SelectN [1] call))
+  && isSameCall(call.Aux, "runtime.newobject")
+  => mem
+
+(Store (SelectN [0] call:(StaticLECall _ _)) x mem:(SelectN [1] call))
+  && isConstZero(x)
+  && isSameCall(call.Aux, "runtime.newobject")
+  => mem
+
+(Store (OffPtr (SelectN [0] call:(StaticLECall _ _))) x mem:(SelectN [1] call))
+  && isConstZero(x)
+  && isSameCall(call.Aux, "runtime.newobject")
+  => mem
+
+(NilCheck (SelectN [0] call:(StaticLECall _ _)) (SelectN [1] call))
+  && isSameCall(call.Aux, "runtime.newobject")
+  && warnRule(fe.Debug_checknil(), v, "removed nil check")
+  => (Invalid)
+
+(NilCheck (OffPtr (SelectN [0] call:(StaticLECall _ _))) (SelectN [1] call))
+  && isSameCall(call.Aux, "runtime.newobject")
+  && warnRule(fe.Debug_checknil(), v, "removed nil check")
+  => (Invalid)
+
 // Evaluate constant address comparisons.
 (EqPtr x x) => (ConstBool [true])
 (NeqPtr x x) => (ConstBool [false])
@@ -2017,6 +2042,17 @@
   && clobber(s1, s2, s3)
   => (Move {t.Elem()} [int64(sz)] dst src mem)
 
+// Inline small or disjoint runtime.memmove calls with constant length.
+// See the comment in op Move in genericOps.go for discussion of the type.
+(SelectN [0] call:(StaticLECall {sym} dst src (Const(64|32) [sz]) mem))
+  && sz >= 0
+  && call.Uses == 1 // this will exclude all calls with results
+  && isSameCall(sym, "runtime.memmove")
+  && dst.Type.IsPtr() // avoids TUINTPTR, see issue 30061
+  && isInlinableMemmove(dst, src, int64(sz), config)
+  && clobber(call)
+  => (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
+
 // De-virtualize interface calls into static calls.
 // Note that (ITab (IMake)) doesn't get
 // rewritten until after the first opt pass,
@@ -2411,6 +2447,7 @@
     (Store {t5} (OffPtr <tt5> [o5] dst) d4
       (Zero {t1} [n] dst mem)))))
 
+// TODO this does not fire before call expansion; is that acceptable?
 (StaticCall {sym} x) && needRaceCleanup(sym, v) => x
 
 // Collapse moving A -> B -> C into just A -> C.
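The rules above all match the late-expanded call shape, in which the call is a
StaticLECall and its results are projected out with SelectN: for
runtime.newobject, SelectN [0] is the returned pointer and SelectN [1] is the
memory. A minimal sketch of Go source whose SSA these newobject rules simplify
(illustrative only; the type T and function f are hypothetical, not part of
this CL):

	package p

	type T struct{ x, y int64 }

	func f() *T {
		p := new(T) // lowered to a StaticLECall to runtime.newobject
		p.x = 0     // constant-zero store into freshly zeroed memory: removed by the Store rule
		p.y = 0     // the same store through (OffPtr ...): removed by the Store/OffPtr rule
		return p
	}

The NilCheck rules drop the nil checks that guard such stores, since
runtime.newobject never returns nil.
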
diff --git a/src/cmd/compile/internal/ssa/rewrite.go b/src/cmd/compile/internal/ssa/rewrite.go
index 5d8b3ddc4e..e5f858a339 100644
--- a/src/cmd/compile/internal/ssa/rewrite.go
+++ b/src/cmd/compile/internal/ssa/rewrite.go
@@ -395,7 +395,8 @@ func canMergeLoad(target, load *Value) bool {
 
 // isSameCall reports whether sym is the same as the given named symbol
 func isSameCall(sym interface{}, name string) bool {
-	return sym.(*AuxCall).Fn.String() == name
+	fn := sym.(*AuxCall).Fn
+	return fn != nil && fn.String() == name
 }
 
 // nlz returns the number of leading zeros.
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index ade0a69a10..11f4cc7c58 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -368,6 +368,8 @@ func rewriteValuegeneric(v *Value) bool {
 		return rewriteValuegeneric_OpSelect0(v)
 	case OpSelect1:
 		return rewriteValuegeneric_OpSelect1(v)
+	case OpSelectN:
+		return rewriteValuegeneric_OpSelectN(v)
 	case OpSignExt16to32:
 		return rewriteValuegeneric_OpSignExt16to32(v)
 	case OpSignExt16to64:
@@ -16124,6 +16126,38 @@ func rewriteValuegeneric_OpNilCheck(v *Value) bool {
 		v.reset(OpInvalid)
 		return true
 	}
+	// match: (NilCheck (SelectN [0] call:(StaticLECall _ _)) (SelectN [1] call))
+	// cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
+	// result: (Invalid)
+	for {
+		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
+			break
+		}
+		call := v_0.Args[0]
+		if call.Op != OpStaticLECall || len(call.Args) != 2 || v_1.Op != OpSelectN || auxIntToInt64(v_1.AuxInt) != 1 || call != v_1.Args[0] || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
+			break
+		}
+		v.reset(OpInvalid)
+		return true
+	}
+	// match: (NilCheck (OffPtr (SelectN [0] call:(StaticLECall _ _))) (SelectN [1] call))
+	// cond: isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")
+	// result: (Invalid)
+	for {
+		if v_0.Op != OpOffPtr {
+			break
+		}
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 {
+			break
+		}
+		call := v_0_0.Args[0]
+		if call.Op != OpStaticLECall || len(call.Args) != 2 || v_1.Op != OpSelectN || auxIntToInt64(v_1.AuxInt) != 1 || call != v_1.Args[0] || !(isSameCall(call.Aux, "runtime.newobject") && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
+			break
+		}
+		v.reset(OpInvalid)
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpNot(v *Value) bool {
@@ -20669,6 +20703,70 @@ func rewriteValuegeneric_OpSelect1(v *Value) bool {
 	}
 	return false
 }
+func rewriteValuegeneric_OpSelectN(v *Value) bool {
+	v_0 := v.Args[0]
+	b := v.Block
+	config := b.Func.Config
+	// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const64 [sz]) mem))
+	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
+	// result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
+	for {
+		if auxIntToInt64(v.AuxInt) != 0 {
+			break
+		}
+		call := v_0
+		if call.Op != OpStaticLECall || len(call.Args) != 4 {
+			break
+		}
+		sym := auxToCall(call.Aux)
+		mem := call.Args[3]
+		dst := call.Args[0]
+		src := call.Args[1]
+		call_2 := call.Args[2]
+		if call_2.Op != OpConst64 {
+			break
+		}
+		sz := auxIntToInt64(call_2.AuxInt)
+		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
+			break
+		}
+		v.reset(OpMove)
+		v.AuxInt = int64ToAuxInt(int64(sz))
+		v.Aux = typeToAux(dst.Type.Elem())
+		v.AddArg3(dst, src, mem)
+		return true
+	}
+	// match: (SelectN [0] call:(StaticLECall {sym} dst src (Const32 [sz]) mem))
+	// cond: sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)
+	// result: (Move {dst.Type.Elem()} [int64(sz)] dst src mem)
+	for {
+		if auxIntToInt64(v.AuxInt) != 0 {
+			break
+		}
+		call := v_0
+		if call.Op != OpStaticLECall || len(call.Args) != 4 {
+			break
+		}
+		sym := auxToCall(call.Aux)
+		mem := call.Args[3]
+		dst := call.Args[0]
+		src := call.Args[1]
+		call_2 := call.Args[2]
+		if call_2.Op != OpConst32 {
+			break
+		}
+		sz := auxIntToInt32(call_2.AuxInt)
+		if !(sz >= 0 && call.Uses == 1 && isSameCall(sym, "runtime.memmove") && dst.Type.IsPtr() && isInlinableMemmove(dst, src, int64(sz), config) && clobber(call)) {
+			break
+		}
+		v.reset(OpMove)
+		v.AuxInt = int64ToAuxInt(int64(sz))
+		v.Aux = typeToAux(dst.Type.Elem())
+		v.AddArg3(dst, src, mem)
+		return true
+	}
+	return false
+}
 func rewriteValuegeneric_OpSignExt16to32(v *Value) bool {
 	v_0 := v.Args[0]
 	// match: (SignExt16to32 (Const16 [c]))
@@ -21714,6 +21812,48 @@ func rewriteValuegeneric_OpStore(v *Value) bool {
 		v.copyOf(mem)
 		return true
 	}
+	// match: (Store (SelectN [0] call:(StaticLECall _ _)) x mem:(SelectN [1] call))
+	// cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
+	// result: mem
+	for {
+		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
+			break
+		}
+		call := v_0.Args[0]
+		if call.Op != OpStaticLECall || len(call.Args) != 2 {
+			break
+		}
+		x := v_1
+		mem := v_2
+		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
+			break
+		}
+		v.copyOf(mem)
+		return true
+	}
+	// match: (Store (OffPtr (SelectN [0] call:(StaticLECall _ _))) x mem:(SelectN [1] call))
+	// cond: isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")
+	// result: mem
+	for {
+		if v_0.Op != OpOffPtr {
+			break
+		}
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpSelectN || auxIntToInt64(v_0_0.AuxInt) != 0 {
+			break
+		}
+		call := v_0_0.Args[0]
+		if call.Op != OpStaticLECall || len(call.Args) != 2 {
+			break
+		}
+		x := v_1
+		mem := v_2
+		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isConstZero(x) && isSameCall(call.Aux, "runtime.newobject")) {
+			break
+		}
+		v.copyOf(mem)
+		return true
+	}
 	// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [0] p2) d2 m3:(Move [n] p3 _ mem)))
 	// cond: m2.Uses == 1 && m3.Uses == 1 && o1 == t2.Size() && n == t2.Size() + t1.Size() && isSamePtr(p1, p2) && isSamePtr(p2, p3) && clobber(m2, m3)
 	// result: (Store {t1} op1 d1 (Store {t2} op2 d2 mem))
@@ -24411,6 +24551,24 @@ func rewriteValuegeneric_OpZero(v *Value) bool {
 		v.copyOf(mem)
 		return true
 	}
+	// match: (Zero (SelectN [0] call:(StaticLECall _ _)) mem:(SelectN [1] call))
+	// cond: isSameCall(call.Aux, "runtime.newobject")
+	// result: mem
+	for {
+		if v_0.Op != OpSelectN || auxIntToInt64(v_0.AuxInt) != 0 {
+			break
+		}
+		call := v_0.Args[0]
+		if call.Op != OpStaticLECall || len(call.Args) != 2 {
+			break
+		}
+		mem := v_1
+		if mem.Op != OpSelectN || auxIntToInt64(mem.AuxInt) != 1 || call != mem.Args[0] || !(isSameCall(call.Aux, "runtime.newobject")) {
+			break
+		}
+		v.copyOf(mem)
+		return true
+	}
 	// match: (Zero {t1} [n] p1 store:(Store {t2} (OffPtr [o2] p2) _ mem))
 	// cond: isSamePtr(p1, p2) && store.Uses == 1 && n >= o2 + t2.Size() && clobber(store)
 	// result: (Zero {t1} [n] p1 mem)
-- 
2.50.0
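As a closing illustration (not part of the patch): the memmove rule fires only
when the call's single use is the SelectN [0] projection, which for the
result-less runtime.memmove is its memory, so calls whose results are consumed
are excluded. A hypothetical function g that produces the matched shape:

	package p

	// The constant-length copy is lowered to a runtime.memmove call; the new
	// SelectN rule rewrites that StaticLECall to (Move [16] dst src mem)
	// whenever isInlinableMemmove approves the size for the target.
	func g(dst, src *[16]byte) {
		copy(dst[:], src[:])
	}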