v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
+ // match: (Leq16U x zero:(MOVDconst [0]))
+ // result: (Eq16 x zero)
+ for {
+ x := v_0
+ zero := v_1
+ if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
+ break
+ }
+ v.reset(OpEq16)
+ v.AddArg2(x, zero)
+ return true
+ }
+ // match: (Leq16U (MOVDconst [1]) x)
+ // result: (Neq16 (MOVDconst [0]) x)
+ for {
+ if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
+ break
+ }
+ x := v_1
+ v.reset(OpNeq16)
+ v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
+ v0.AuxInt = int64ToAuxInt(0)
+ v.AddArg2(v0, x)
+ return true
+ }
// match: (Leq16U x y)
// result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
for {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Leq32U x zero:(MOVDconst [0]))
+ // result: (Eq32 x zero)
+ for {
+ x := v_0
+ zero := v_1
+ if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
+ break
+ }
+ v.reset(OpEq32)
+ v.AddArg2(x, zero)
+ return true
+ }
+ // match: (Leq32U (MOVDconst [1]) x)
+ // result: (Neq32 (MOVDconst [0]) x)
+ for {
+ if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
+ break
+ }
+ x := v_1
+ v.reset(OpNeq32)
+ v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
+ v0.AuxInt = int64ToAuxInt(0)
+ v.AddArg2(v0, x)
+ return true
+ }
// match: (Leq32U x y)
// result: (LessEqualU (CMPW x y))
for {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Leq64U x zero:(MOVDconst [0]))
+ // result: (Eq64 x zero)
+ for {
+ x := v_0
+ zero := v_1
+ if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
+ break
+ }
+ v.reset(OpEq64)
+ v.AddArg2(x, zero)
+ return true
+ }
+ // match: (Leq64U (MOVDconst [1]) x)
+ // result: (Neq64 (MOVDconst [0]) x)
+ for {
+ if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
+ break
+ }
+ x := v_1
+ v.reset(OpNeq64)
+ v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
+ v0.AuxInt = int64ToAuxInt(0)
+ v.AddArg2(v0, x)
+ return true
+ }
// match: (Leq64U x y)
// result: (LessEqualU (CMP x y))
for {
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
+ // match: (Leq8U x zero:(MOVDconst [0]))
+ // result: (Eq8 x zero)
+ for {
+ x := v_0
+ zero := v_1
+ if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
+ break
+ }
+ v.reset(OpEq8)
+ v.AddArg2(x, zero)
+ return true
+ }
+ // match: (Leq8U (MOVDconst [1]) x)
+ // result: (Neq8 (MOVDconst [0]) x)
+ for {
+ if v_0.Op != OpARM64MOVDconst || auxIntToInt64(v_0.AuxInt) != 1 {
+ break
+ }
+ x := v_1
+ v.reset(OpNeq8)
+ v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
+ v0.AuxInt = int64ToAuxInt(0)
+ v.AddArg2(v0, x)
+ return true
+ }
// match: (Leq8U x y)
// result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
for {
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
+ // match: (Less16U zero:(MOVDconst [0]) x)
+ // result: (Neq16 zero x)
+ for {
+ zero := v_0
+ if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
+ break
+ }
+ x := v_1
+ v.reset(OpNeq16)
+ v.AddArg2(zero, x)
+ return true
+ }
+ // match: (Less16U x (MOVDconst [1]))
+ // result: (Eq16 x (MOVDconst [0]))
+ for {
+ x := v_0
+ if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
+ v0.AuxInt = int64ToAuxInt(0)
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Less16U x y)
// result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
for {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Less32U zero:(MOVDconst [0]) x)
+ // result: (Neq32 zero x)
+ for {
+ zero := v_0
+ if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
+ break
+ }
+ x := v_1
+ v.reset(OpNeq32)
+ v.AddArg2(zero, x)
+ return true
+ }
+ // match: (Less32U x (MOVDconst [1]))
+ // result: (Eq32 x (MOVDconst [0]))
+ for {
+ x := v_0
+ if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
+ v0.AuxInt = int64ToAuxInt(0)
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Less32U x y)
// result: (LessThanU (CMPW x y))
for {
v_1 := v.Args[1]
v_0 := v.Args[0]
b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Less64U zero:(MOVDconst [0]) x)
+ // result: (Neq64 zero x)
+ for {
+ zero := v_0
+ if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
+ break
+ }
+ x := v_1
+ v.reset(OpNeq64)
+ v.AddArg2(zero, x)
+ return true
+ }
+ // match: (Less64U x (MOVDconst [1]))
+ // result: (Eq64 x (MOVDconst [0]))
+ for {
+ x := v_0
+ if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
+ v0.AuxInt = int64ToAuxInt(0)
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Less64U x y)
// result: (LessThanU (CMP x y))
for {
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
+ // match: (Less8U zero:(MOVDconst [0]) x)
+ // result: (Neq8 zero x)
+ for {
+ zero := v_0
+ if zero.Op != OpARM64MOVDconst || auxIntToInt64(zero.AuxInt) != 0 {
+ break
+ }
+ x := v_1
+ v.reset(OpNeq8)
+ v.AddArg2(zero, x)
+ return true
+ }
+ // match: (Less8U x (MOVDconst [1]))
+ // result: (Eq8 x (MOVDconst [0]))
+ for {
+ x := v_0
+ if v_1.Op != OpARM64MOVDconst || auxIntToInt64(v_1.AuxInt) != 1 {
+ break
+ }
+ v.reset(OpEq8)
+ v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
+ v0.AuxInt = int64ToAuxInt(0)
+ v.AddArg2(x, v0)
+ return true
+ }
// match: (Less8U x y)
// result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
for {
}
return 0
}
+
+// UintGtZero checks that unsigned x > 0 compiles to a zero-test.
+// Since x > 0 is equivalent to x != 0 for unsigned values, the compiler
+// should emit compare-and-branch-on-zero instructions (CBZW/CBNZW/CBNZ)
+// rather than an explicit CMP followed by a conditional branch.
+// NOTE(review): the `// arm64:` line below appears to be an asmcheck
+// directive parsed by the codegen test harness — do not reformat it.
+func UintGtZero(a uint8, b uint16, c uint32, d uint64) int {
+	// arm64: `CBZW`, `CBNZW`, `CBNZ`, -`(CMPW|CMP|BLS|BHI)`
+	if a > 0 || b > 0 || c > 0 || d > 0 {
+		return 1
+	}
+	return 0
+}
+
+// UintLeqZero checks that unsigned x <= 0 compiles to a zero-test.
+// For unsigned values x <= 0 is equivalent to x == 0, so the compiler
+// should emit CBZ/CBNZ-style instructions instead of CMP plus a
+// conditional branch (the negated BHI/BLS forms must not appear).
+// NOTE(review): the `// arm64:` line below appears to be an asmcheck
+// directive parsed by the codegen test harness — do not reformat it.
+func UintLeqZero(a uint8, b uint16, c uint32, d uint64) int {
+	// arm64: `CBNZW`, `CBZW`, `CBZ`, -`(CMPW|CMP|BHI|BLS)`
+	if a <= 0 || b <= 0 || c <= 0 || d <= 0 {
+		return 1
+	}
+	return 0
+}
+
+// UintLtOne checks that unsigned x < 1 compiles to a zero-test.
+// For unsigned values x < 1 is equivalent to x == 0, so the compiler
+// should emit CBZ/CBNZ-style instructions instead of comparing against
+// the constant 1 with CMP and branching on BLO/BHS.
+// NOTE(review): the `// arm64:` line below appears to be an asmcheck
+// directive parsed by the codegen test harness — do not reformat it.
+func UintLtOne(a uint8, b uint16, c uint32, d uint64) int {
+	// arm64: `CBNZW`, `CBZW`, `CBZW`, `CBZ`, -`(CMPW|CMP|BHS|BLO)`
+	if a < 1 || b < 1 || c < 1 || d < 1 {
+		return 1
+	}
+	return 0
+}
+
+// UintGeqOne checks that unsigned x >= 1 compiles to a zero-test.
+// For unsigned values x >= 1 is equivalent to x != 0, so the compiler
+// should emit CBZ/CBNZ-style instructions instead of comparing against
+// the constant 1 with CMP and branching on BHS/BLO.
+// NOTE(review): the `// arm64:` line below appears to be an asmcheck
+// directive parsed by the codegen test harness — do not reformat it.
+func UintGeqOne(a uint8, b uint16, c uint32, d uint64) int {
+	// arm64: `CBZW`, `CBNZW`, `CBNZ`, -`(CMPW|CMP|BLO|BHS)`
+	if a >= 1 || b >= 1 || c >= 1 || d >= 1 {
+		return 1
+	}
+	return 0
+}