// succ* fields must be variables
// For now, the generated successors must be a permutation of the matched successors.
+// Simplify nil checks.
+// These are inserted by the compiler when lowering loops like: for _, e := range a {}
+(NilCheck (Phi x (Add64 (Const64 [c]) y)) mem) && c > 0 && v.Args[0] == y -> (NilCheck x mem)
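+// Why this is safe (a sketch, based on the shape above): the matched Phi is a
+// pointer induction variable, p = Phi(x, p+c). The condition v.Args[0] == y
+// requires the Add64 to feed the Phi itself, and c > 0 means p only strides
+// forward from its starting value x, so a nil check on p can be forwarded to
+// the loop-invariant x.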
+
// constant folding
(Trunc16to8 (Const16 [c])) -> (Const8 [int64(int8(c))])
(Trunc32to8 (Const32 [c])) -> (Const8 [int64(int8(c))])
return rewriteValuegeneric_OpNeqPtr(v, config)
case OpNeqSlice:
return rewriteValuegeneric_OpNeqSlice(v, config)
+ case OpNilCheck:
+ return rewriteValuegeneric_OpNilCheck(v, config)
case OpOffPtr:
return rewriteValuegeneric_OpOffPtr(v, config)
case OpOr16:
}
return false
}
+func rewriteValuegeneric_OpNilCheck(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (NilCheck (Phi x (Add64 (Const64 [c]) y)) mem)
+ // cond: c > 0 && v.Args[0] == y
+ // result: (NilCheck x mem)
+ for {
+ if v.Args[0].Op != OpPhi {
+ break
+ }
+ // Check the Phi's arity before indexing Args[1] below.
+ if len(v.Args[0].Args) != 2 {
+ break
+ }
+ x := v.Args[0].Args[0]
+ if v.Args[0].Args[1].Op != OpAdd64 {
+ break
+ }
+ if v.Args[0].Args[1].Args[0].Op != OpConst64 {
+ break
+ }
+ c := v.Args[0].Args[1].Args[0].AuxInt
+ y := v.Args[0].Args[1].Args[1]
+ mem := v.Args[1]
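+ // The cond requires y to be the Phi itself: a self-referential,
+ // forward-striding induction variable.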
+ if !(c > 0 && v.Args[0] == y) {
+ break
+ }
+ v.reset(OpNilCheck)
+ v.AddArg(x)
+ v.AddArg(mem)
+ return true
+ }
+ return false
+}
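+
+// Illustrative note (an editorial sketch, not rulegen output): the shape
+// matched above arises from loops such as
+//
+//	func f(a []int) {
+//		for _, e := range a { // element pointer: p = Phi(&a[0], p+elemsize)
+//			_ = e
+//		}
+//	}
+//
+// Forwarding the nil check to the loop-invariant base pointer lets later
+// passes deduplicate it or hoist it out of the loop.
+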
func rewriteValuegeneric_OpOffPtr(v *Value, config *Config) bool {
b := v.Block
_ = b