((Equal|NotEqual) (CMPW x z:(NEG y))) && z.Uses == 1 => ((Equal|NotEqual) (CMNW x y))
// For conditional instructions such as CSET, CSEL.
-// TODO: add support for LT, LE, GT, GE, overflow needs to be considered.
-((Equal|NotEqual) (CMPconst [0] x:(ADDconst [c] y))) && x.Uses == 1 => ((Equal|NotEqual) (CMNconst [c] y))
-((Equal|NotEqual) (CMPWconst [0] x:(ADDconst [c] y))) && x.Uses == 1 => ((Equal|NotEqual) (CMNWconst [int32(c)] y))
-((Equal|NotEqual) (CMPconst [0] z:(ADD x y))) && z.Uses == 1 => ((Equal|NotEqual) (CMN x y))
-((Equal|NotEqual) (CMPWconst [0] z:(ADD x y))) && z.Uses == 1 => ((Equal|NotEqual) (CMNW x y))
-((Equal|NotEqual) (CMPconst [0] z:(MADD a x y))) && z.Uses == 1 => ((Equal|NotEqual) (CMN a (MUL <x.Type> x y)))
-((Equal|NotEqual) (CMPconst [0] z:(MSUB a x y))) && z.Uses == 1 => ((Equal|NotEqual) (CMP a (MUL <x.Type> x y)))
-((Equal|NotEqual) (CMPWconst [0] z:(MADDW a x y))) && z.Uses == 1 => ((Equal|NotEqual) (CMNW a (MULW <x.Type> x y)))
-((Equal|NotEqual) (CMPWconst [0] z:(MSUBW a x y))) && z.Uses == 1 => ((Equal|NotEqual) (CMPW a (MULW <x.Type> x y)))
+// TODO: add support for LE and GT; overflow needs to be considered.
+((Equal|NotEqual|LessThan|GreaterEqual) (CMPconst [0] x:(ADDconst [c] y))) && x.Uses == 1 => ((Equal|NotEqual|LessThanNoov|GreaterEqualNoov) (CMNconst [c] y))
+((Equal|NotEqual|LessThan|GreaterEqual) (CMPWconst [0] x:(ADDconst [c] y))) && x.Uses == 1 => ((Equal|NotEqual|LessThanNoov|GreaterEqualNoov) (CMNWconst [int32(c)] y))
+((Equal|NotEqual|LessThan|GreaterEqual) (CMPconst [0] z:(ADD x y))) && z.Uses == 1 => ((Equal|NotEqual|LessThanNoov|GreaterEqualNoov) (CMN x y))
+((Equal|NotEqual|LessThan|GreaterEqual) (CMPWconst [0] z:(ADD x y))) && z.Uses == 1 => ((Equal|NotEqual|LessThanNoov|GreaterEqualNoov) (CMNW x y))
+((Equal|NotEqual|LessThan|GreaterEqual) (CMPconst [0] z:(MADD a x y))) && z.Uses == 1 => ((Equal|NotEqual|LessThanNoov|GreaterEqualNoov) (CMN a (MUL <x.Type> x y)))
+((Equal|NotEqual|LessThan|GreaterEqual) (CMPconst [0] z:(MSUB a x y))) && z.Uses == 1 => ((Equal|NotEqual|LessThanNoov|GreaterEqualNoov) (CMP a (MUL <x.Type> x y)))
+((Equal|NotEqual|LessThan|GreaterEqual) (CMPWconst [0] z:(MADDW a x y))) && z.Uses == 1 => ((Equal|NotEqual|LessThanNoov|GreaterEqualNoov) (CMNW a (MULW <x.Type> x y)))
+((Equal|NotEqual|LessThan|GreaterEqual) (CMPWconst [0] z:(MSUBW a x y))) && z.Uses == 1 => ((Equal|NotEqual|LessThanNoov|GreaterEqualNoov) (CMPW a (MULW <x.Type> x y)))
((CMPconst|CMNconst) [c] y) && c < 0 && c != -1<<63 => ((CMNconst|CMPconst) [-c] y)
((CMPWconst|CMNWconst) [c] y) && c < 0 && c != -1<<31 => ((CMNWconst|CMPWconst) [-c] y)
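For context, the Go source patterns these new LessThan/GreaterEqual rules target can be sketched in the style of a codegen test. This is an illustrative sketch only, not part of the change: the function names and the expected instruction sequences are assumptions, but the shape matches the rules above — a compare of an add (or multiply-add) result against zero folds into a flag-setting CMN/CMP, and the LessThanNoov/GreaterEqualNoov conditions read the signed result without honoring overflow.

// Illustrative sketch (not part of this change): functions whose SSA form
// should match the new LessThan/GreaterEqual rules above.
package codegen

func cmnLessThanW(a, b int32) bool {
	// Hypothetical expectation: the add and the compare against zero fold
	// into a single flag-setting CMNW, read through the LessThanNoov
	// condition instead of ADD + CMPW $0 + plain LT.
	return a+b < 0
}

func cmnGreaterEqualMADD(a, b, c int64) bool {
	// Hypothetical expectation: the multiply-add feeding a compare with
	// zero becomes CMN a, (MUL b c) read through GreaterEqualNoov.
	return a+b*c >= 0
}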
v.AddArg(v0)
return true
}
+ // match: (GreaterEqual (CMPconst [0] x:(ADDconst [c] y)))
+ // cond: x.Uses == 1
+ // result: (GreaterEqualNoov (CMNconst [c] y))
+ for {
+ if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
+ break
+ }
+ x := v_0.Args[0]
+ if x.Op != OpARM64ADDconst {
+ break
+ }
+ c := auxIntToInt64(x.AuxInt)
+ y := x.Args[0]
+ if !(x.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64GreaterEqualNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
+ v0.AuxInt = int64ToAuxInt(c)
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (GreaterEqual (CMPWconst [0] x:(ADDconst [c] y)))
+ // cond: x.Uses == 1
+ // result: (GreaterEqualNoov (CMNWconst [int32(c)] y))
+ for {
+ if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
+ break
+ }
+ x := v_0.Args[0]
+ if x.Op != OpARM64ADDconst {
+ break
+ }
+ c := auxIntToInt64(x.AuxInt)
+ y := x.Args[0]
+ if !(x.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64GreaterEqualNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
+ v0.AuxInt = int32ToAuxInt(int32(c))
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (GreaterEqual (CMPconst [0] z:(ADD x y)))
+ // cond: z.Uses == 1
+ // result: (GreaterEqualNoov (CMN x y))
+ for {
+ if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64ADD {
+ break
+ }
+ y := z.Args[1]
+ x := z.Args[0]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64GreaterEqualNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
+ v0.AddArg2(x, y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (GreaterEqual (CMPWconst [0] z:(ADD x y)))
+ // cond: z.Uses == 1
+ // result: (GreaterEqualNoov (CMNW x y))
+ for {
+ if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64ADD {
+ break
+ }
+ y := z.Args[1]
+ x := z.Args[0]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64GreaterEqualNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
+ v0.AddArg2(x, y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (GreaterEqual (CMPconst [0] z:(MADD a x y)))
+ // cond: z.Uses == 1
+ // result: (GreaterEqualNoov (CMN a (MUL <x.Type> x y)))
+ for {
+ if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64MADD {
+ break
+ }
+ y := z.Args[2]
+ a := z.Args[0]
+ x := z.Args[1]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64GreaterEqualNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
+ v1.AddArg2(x, y)
+ v0.AddArg2(a, v1)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (GreaterEqual (CMPconst [0] z:(MSUB a x y)))
+ // cond: z.Uses == 1
+ // result: (GreaterEqualNoov (CMP a (MUL <x.Type> x y)))
+ for {
+ if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64MSUB {
+ break
+ }
+ y := z.Args[2]
+ a := z.Args[0]
+ x := z.Args[1]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64GreaterEqualNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
+ v1.AddArg2(x, y)
+ v0.AddArg2(a, v1)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (GreaterEqual (CMPWconst [0] z:(MADDW a x y)))
+ // cond: z.Uses == 1
+ // result: (GreaterEqualNoov (CMNW a (MULW <x.Type> x y)))
+ for {
+ if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64MADDW {
+ break
+ }
+ y := z.Args[2]
+ a := z.Args[0]
+ x := z.Args[1]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64GreaterEqualNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
+ v1.AddArg2(x, y)
+ v0.AddArg2(a, v1)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (GreaterEqual (CMPWconst [0] z:(MSUBW a x y)))
+ // cond: z.Uses == 1
+ // result: (GreaterEqualNoov (CMPW a (MULW <x.Type> x y)))
+ for {
+ if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64MSUBW {
+ break
+ }
+ y := z.Args[2]
+ a := z.Args[0]
+ x := z.Args[1]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64GreaterEqualNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
+ v1.AddArg2(x, y)
+ v0.AddArg2(a, v1)
+ v.AddArg(v0)
+ return true
+ }
// match: (GreaterEqual (FlagConstant [fc]))
// result: (MOVDconst [b2i(fc.ge())])
for {
v.AddArg(v0)
return true
}
+ // match: (LessThan (CMPconst [0] x:(ADDconst [c] y)))
+ // cond: x.Uses == 1
+ // result: (LessThanNoov (CMNconst [c] y))
+ for {
+ if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
+ break
+ }
+ x := v_0.Args[0]
+ if x.Op != OpARM64ADDconst {
+ break
+ }
+ c := auxIntToInt64(x.AuxInt)
+ y := x.Args[0]
+ if !(x.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64LessThanNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags)
+ v0.AuxInt = int64ToAuxInt(c)
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LessThan (CMPWconst [0] x:(ADDconst [c] y)))
+ // cond: x.Uses == 1
+ // result: (LessThanNoov (CMNWconst [int32(c)] y))
+ for {
+ if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
+ break
+ }
+ x := v_0.Args[0]
+ if x.Op != OpARM64ADDconst {
+ break
+ }
+ c := auxIntToInt64(x.AuxInt)
+ y := x.Args[0]
+ if !(x.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64LessThanNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags)
+ v0.AuxInt = int32ToAuxInt(int32(c))
+ v0.AddArg(y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LessThan (CMPconst [0] z:(ADD x y)))
+ // cond: z.Uses == 1
+ // result: (LessThanNoov (CMN x y))
+ for {
+ if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64ADD {
+ break
+ }
+ y := z.Args[1]
+ x := z.Args[0]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64LessThanNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
+ v0.AddArg2(x, y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LessThan (CMPWconst [0] z:(ADD x y)))
+ // cond: z.Uses == 1
+ // result: (LessThanNoov (CMNW x y))
+ for {
+ if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64ADD {
+ break
+ }
+ y := z.Args[1]
+ x := z.Args[0]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64LessThanNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
+ v0.AddArg2(x, y)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LessThan (CMPconst [0] z:(MADD a x y)))
+ // cond: z.Uses == 1
+ // result: (LessThanNoov (CMN a (MUL <x.Type> x y)))
+ for {
+ if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64MADD {
+ break
+ }
+ y := z.Args[2]
+ a := z.Args[0]
+ x := z.Args[1]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64LessThanNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
+ v1.AddArg2(x, y)
+ v0.AddArg2(a, v1)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LessThan (CMPconst [0] z:(MSUB a x y)))
+ // cond: z.Uses == 1
+ // result: (LessThanNoov (CMP a (MUL <x.Type> x y)))
+ for {
+ if v_0.Op != OpARM64CMPconst || auxIntToInt64(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64MSUB {
+ break
+ }
+ y := z.Args[2]
+ a := z.Args[0]
+ x := z.Args[1]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64LessThanNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
+ v1.AddArg2(x, y)
+ v0.AddArg2(a, v1)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LessThan (CMPWconst [0] z:(MADDW a x y)))
+ // cond: z.Uses == 1
+ // result: (LessThanNoov (CMNW a (MULW <x.Type> x y)))
+ for {
+ if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64MADDW {
+ break
+ }
+ y := z.Args[2]
+ a := z.Args[0]
+ x := z.Args[1]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64LessThanNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
+ v1.AddArg2(x, y)
+ v0.AddArg2(a, v1)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (LessThan (CMPWconst [0] z:(MSUBW a x y)))
+ // cond: z.Uses == 1
+ // result: (LessThanNoov (CMPW a (MULW <x.Type> x y)))
+ for {
+ if v_0.Op != OpARM64CMPWconst || auxIntToInt32(v_0.AuxInt) != 0 {
+ break
+ }
+ z := v_0.Args[0]
+ if z.Op != OpARM64MSUBW {
+ break
+ }
+ y := z.Args[2]
+ a := z.Args[0]
+ x := z.Args[1]
+ if !(z.Uses == 1) {
+ break
+ }
+ v.reset(OpARM64LessThanNoov)
+ v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
+ v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
+ v1.AddArg2(x, y)
+ v0.AddArg2(a, v1)
+ v.AddArg(v0)
+ return true
+ }
// match: (LessThan (FlagConstant [fc]))
// result: (MOVDconst [b2i(fc.lt())])
for {