(Sub8 x (Const8 <t> [c])) && x.Op != OpConst8 -> (Add8 (Const8 <t> [int64(int8(-c))]) x)
// fold negation into comparison operators
-(Not (Eq(64|32|16|8|B) x y)) -> (Neq(64|32|16|8|B) x y)
-(Not (Neq(64|32|16|8|B) x y)) -> (Eq(64|32|16|8|B) x y)
+(Not (Eq(64|32|16|8|B|Ptr|64F|32F) x y)) -> (Neq(64|32|16|8|B|Ptr|64F|32F) x y)
+(Not (Neq(64|32|16|8|B|Ptr|64F|32F) x y)) -> (Eq(64|32|16|8|B|Ptr|64F|32F) x y)
(Not (Greater(64|32|16|8) x y)) -> (Leq(64|32|16|8) x y)
(Not (Greater(64|32|16|8)U x y)) -> (Leq(64|32|16|8)U x y)
(Not (Leq(64|32|16|8) x y)) -> (Greater(64|32|16|8) x y)
(Not (Leq(64|32|16|8)U x y)) -> (Greater(64|32|16|8)U x y)
-
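At the Go source level, these rules collapse a negated comparison into the opposite comparison in a single rewrite, so no explicit NOT is emitted. A minimal sketch of code that now folds under the new Ptr rule (hypothetical function, not from this CL):

	// differ is compiled as if it were written `return p != q`:
	// Not(EqPtr p q) rewrites to NeqPtr p q.
	func differ(p, q *int) bool {
		return !(p == q)
	}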
// Distribute multiplication c * (d+x) -> c*d + c*x. Useful for:
// a[i].b = ...; a[i+1].b = ...
(Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x)) ->
  (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
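The motivating case in the comment is address arithmetic for stores to consecutive elements: &a[i+1].b is base + size*(i+1) + off, and distributing the multiply turns size*(i+1) into size*i + size, so the size*i term is the same value already computed for &a[i].b and CSE evaluates it once. A rough illustration, assuming a hypothetical 16-byte element type:

	type elem struct{ a, b int64 } // b at offset 8 (assumed layout)

	func stores(a []elem, i int) {
		a[i].b = 1   // addr: base + 16*i + 8
		a[i+1].b = 2 // addr: base + 16*(i+1) + 8
		             //     = (base + 16*i + 8) + 16 after distribution
	}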
v.AddArg(y)
return true
}
+ // match: (Not (EqPtr x y))
+ // result: (NeqPtr x y)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpEqPtr {
+ break
+ }
+ y := v_0.Args[1]
+ x := v_0.Args[0]
+ v.reset(OpNeqPtr)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
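Each rule becomes one for block that makes a single match attempt; the loop body never iterates, and break falls through to the next rule's block. The shape, re-annotated here for exposition (the generated file itself carries only the match/result comments):

	for {
		v_0 := v.Args[0] // the operand of the Not
		if v_0.Op != OpEqPtr {
			break // pattern does not match; try the next rule
		}
		y := v_0.Args[1] // bind the pattern variables
		x := v_0.Args[0]
		v.reset(OpNeqPtr) // rewrite v in place to the result op
		v.AddArg(x)
		v.AddArg(y)
		return true // a rewrite fired
	}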
+ // match: (Not (Eq64F x y))
+ // result: (Neq64F x y)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpEq64F {
+ break
+ }
+ y := v_0.Args[1]
+ x := v_0.Args[0]
+ v.reset(OpNeq64F)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Not (Eq32F x y))
+ // result: (Neq32F x y)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpEq32F {
+ break
+ }
+ y := v_0.Args[1]
+ x := v_0.Args[0]
+ v.reset(OpNeq32F)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
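The float cases are sound even with NaN operands: IEEE 754 defines != as the exact complement of ==, so Not(Eq64F x y) and Neq64F x y agree on every input. The same does not hold for the ordered comparisons, which is why no float Less/Leq folds appear above. A quick check of the NaN case:

	package main

	import (
		"fmt"
		"math"
	)

	func main() {
		x := math.NaN()
		fmt.Println(!(x == x)) // true: Not(Eq64F) ...
		fmt.Println(x != x)    // true: ... agrees with Neq64F
	}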
	// match: (Not (Neq64 x y))
	// result: (Eq64 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpNeq64 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpEq64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
+ return false
+}
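The boundary churn below is mechanical: rulegen splits the matchers for one opcode into generated functions of ten rules each (_0, _10, _20, ...), so inserting six new cases pushes later matches across the seams, and the return false } / func lines move accordingly; the same shifts recur at _20, _30, and _40 further down. The rewrite driver tries the chunks in order, roughly (abridged sketch of the generated switch):

	case OpNot:
		return rewriteValuegeneric_OpNot_0(v) ||
			rewriteValuegeneric_OpNot_10(v) ||
			rewriteValuegeneric_OpNot_20(v) ||
			rewriteValuegeneric_OpNot_30(v) ||
			rewriteValuegeneric_OpNot_40(v)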
+func rewriteValuegeneric_OpNot_10(v *Value) bool {
	// match: (Not (Neq32 x y))
	// result: (Eq32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpNeq32 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpEq32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
- return false
-}
-func rewriteValuegeneric_OpNot_10(v *Value) bool {
	// match: (Not (NeqB x y))
	// result: (EqB x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpNeqB {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpEqB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
+ // match: (Not (NeqPtr x y))
+ // result: (EqPtr x y)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpNeqPtr {
+ break
+ }
+ y := v_0.Args[1]
+ x := v_0.Args[0]
+ v.reset(OpEqPtr)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Not (Neq64F x y))
+ // result: (Eq64F x y)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpNeq64F {
+ break
+ }
+ y := v_0.Args[1]
+ x := v_0.Args[0]
+ v.reset(OpEq64F)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Not (Neq32F x y))
+ // result: (Eq32F x y)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpNeq32F {
+ break
+ }
+ y := v_0.Args[1]
+ x := v_0.Args[0]
+ v.reset(OpEq32F)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
	// match: (Not (Greater64 x y))
	// result: (Leq64 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGreater64 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLeq64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
+ return false
+}
+func rewriteValuegeneric_OpNot_20(v *Value) bool {
	// match: (Not (Greater8 x y))
	// result: (Leq8 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGreater8 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLeq8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
- return false
-}
-func rewriteValuegeneric_OpNot_20(v *Value) bool {
	// match: (Not (Geq32 x y))
	// result: (Less32 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGeq32 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLess32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
+ return false
+}
+func rewriteValuegeneric_OpNot_30(v *Value) bool {
	// match: (Not (Geq32U x y))
	// result: (Less32U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpGeq32U {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpLess32U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
- return false
-}
-func rewriteValuegeneric_OpNot_30(v *Value) bool {
	// match: (Not (Less8 x y))
	// result: (Geq8 x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLess8 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpGeq8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
+ return false
+}
+func rewriteValuegeneric_OpNot_40(v *Value) bool {
	// match: (Not (Less8U x y))
	// result: (Geq8U x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpLess8U {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpGeq8U)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
- return false
-}
-func rewriteValuegeneric_OpNot_40(v *Value) bool {
// match: (Not (Leq32U x y))
// result: (Greater32U x y)
for {