case OpDiv8u:
return rewriteValuegeneric_OpDiv8u_0(v)
case OpEq16:
- return rewriteValuegeneric_OpEq16_0(v) || rewriteValuegeneric_OpEq16_10(v)
+ return rewriteValuegeneric_OpEq16_0(v) || rewriteValuegeneric_OpEq16_10(v) || rewriteValuegeneric_OpEq16_20(v) || rewriteValuegeneric_OpEq16_30(v) || rewriteValuegeneric_OpEq16_40(v)
case OpEq32:
- return rewriteValuegeneric_OpEq32_0(v) || rewriteValuegeneric_OpEq32_10(v)
+ return rewriteValuegeneric_OpEq32_0(v) || rewriteValuegeneric_OpEq32_10(v) || rewriteValuegeneric_OpEq32_20(v) || rewriteValuegeneric_OpEq32_30(v) || rewriteValuegeneric_OpEq32_40(v) || rewriteValuegeneric_OpEq32_50(v) || rewriteValuegeneric_OpEq32_60(v)
case OpEq32F:
return rewriteValuegeneric_OpEq32F_0(v)
case OpEq64:
- return rewriteValuegeneric_OpEq64_0(v) || rewriteValuegeneric_OpEq64_10(v)
+ return rewriteValuegeneric_OpEq64_0(v) || rewriteValuegeneric_OpEq64_10(v) || rewriteValuegeneric_OpEq64_20(v) || rewriteValuegeneric_OpEq64_30(v)
case OpEq64F:
return rewriteValuegeneric_OpEq64F_0(v)
case OpEq8:
- return rewriteValuegeneric_OpEq8_0(v) || rewriteValuegeneric_OpEq8_10(v)
+ return rewriteValuegeneric_OpEq8_0(v) || rewriteValuegeneric_OpEq8_10(v) || rewriteValuegeneric_OpEq8_20(v)
case OpEqB:
return rewriteValuegeneric_OpEqB_0(v)
case OpEqInter:
return rewriteValuegeneric_OpEqInter_0(v)
case OpPtrIndex:
return rewriteValuegeneric_OpPtrIndex_0(v)
+ case OpRotateLeft16:
+ return rewriteValuegeneric_OpRotateLeft16_0(v)
+ case OpRotateLeft32:
+ return rewriteValuegeneric_OpRotateLeft32_0(v)
+ case OpRotateLeft64:
+ return rewriteValuegeneric_OpRotateLeft64_0(v)
+ case OpRotateLeft8:
+ return rewriteValuegeneric_OpRotateLeft8_0(v)
case OpRound32F:
return rewriteValuegeneric_OpRound32F_0(v)
case OpRound64F:
return rewriteValuegeneric_OpRound64F_0(v)
}
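// Illustrative sketch (hand-written, not generated): the RotateLeft cases and
// the extra Eq16/Eq32/Eq64/Eq8 helpers dispatched above implement an unsigned
// divisibility rewrite: x%c == 0 becomes rotateRight(x*m, k) <= max, with m,
// k, and max taken from udivisible(n,c). For n = 16 and c = 6 that gives
// k = 1 (trailing zeros of 6), m = 0xaaab (inverse of 3 mod 2^16), and
// max = (2^16-1)/6 = 0x2aaa; the RotateLeft16 by 16-k emitted below is a
// rotate right by k. A standalone check of those constants (divisibleBy6 is
// a hypothetical helper, not part of this file):
//
//	package main
//
//	import (
//		"fmt"
//		"math/bits"
//	)
//
//	func divisibleBy6(x uint16) bool {
//		// x*0xaaab wraps mod 2^16; rotate right by 1; compare with max.
//		return bits.RotateLeft16(x*0xaaab, -1) <= 0x2aaa
//	}
//
//	func main() {
//		fmt.Println(divisibleBy6(12), divisibleBy6(13)) // true false
//	}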
func rewriteValuegeneric_OpEq16_0(v *Value) bool {
b := v.Block
+ config := b.Func.Config
typ := &b.Func.Config.Types
// match: (Eq16 x x)
// cond:
// result: (ConstBool [1])
for {
x := v.Args[1]
if x != v.Args[0] {
break
}
v.reset(OpConstBool)
v.AuxInt = 1
return true
}
- // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 15 && kbar == 16 - k
- // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ // match: (Eq16 (Mod16u x (Const16 [c])) (Const16 [0]))
+ // cond: x.Op != OpConst16 && udivisibleOK(16,c) && !hasSmallRotate(config)
+ // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt16to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xffff])) (Const32 <typ.UInt32> [0]))
for {
_ = v.Args[1]
- n := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpLsh16x64 {
+ v_0 := v.Args[0]
+ if v_0.Op != OpMod16u {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh16x64 {
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
break
}
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd16 {
+ c := v_0_1.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst16 {
break
}
- t := v_1_0_0.Type
- _ = v_1_0_0.Args[1]
- if n != v_1_0_0.Args[0] {
+ if v_1.AuxInt != 0 {
break
}
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpRsh16Ux64 {
+ if !(x.Op != OpConst16 && udivisibleOK(16, c) && !hasSmallRotate(config)) {
break
}
- if v_1_0_0_1.Type != t {
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = c & 0xffff
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = 0
+ v.AddArg(v3)
+ return true
+ }
+ // match: (Eq16 (Const16 [0]) (Mod16u x (Const16 [c])))
+ // cond: x.Op != OpConst16 && udivisibleOK(16,c) && !hasSmallRotate(config)
+ // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt16to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xffff])) (Const32 <typ.UInt32> [0]))
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
break
}
- _ = v_1_0_0_1.Args[1]
- v_1_0_0_1_0 := v_1_0_0_1.Args[0]
- if v_1_0_0_1_0.Op != OpRsh16x64 {
+ if v_0.AuxInt != 0 {
break
}
- if v_1_0_0_1_0.Type != t {
+ v_1 := v.Args[1]
+ if v_1.Op != OpMod16u {
break
}
- _ = v_1_0_0_1_0.Args[1]
- if n != v_1_0_0_1_0.Args[0] {
+ _ = v_1.Args[1]
+ x := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
break
}
- v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
- if v_1_0_0_1_0_1.Op != OpConst64 {
+ c := v_1_1.AuxInt
+ if !(x.Op != OpConst16 && udivisibleOK(16, c) && !hasSmallRotate(config)) {
break
}
- if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = c & 0xffff
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = 0
+ v.AddArg(v3)
+ return true
+ }
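// Note (illustrative, not generated): the two Mod16u matches above widen a
// 16-bit unsigned modulus to 32 bits when the target lacks sub-register
// rotates (hasSmallRotate(config) is false), so the divisibility rewrite can
// later fire on the 32-bit form using RotateLeft32 instead of RotateLeft16.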
+ // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
break
}
- if v_1_0_0_1_0_1.AuxInt != 15 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst16 {
break
}
- v_1_0_0_1_1 := v_1_0_0_1.Args[1]
- if v_1_0_0_1_1.Op != OpConst64 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to16 {
break
}
- if v_1_0_0_1_1.Type != typ.UInt64 {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
break
}
- kbar := v_1_0_0_1_1.AuxInt
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
break
}
- k := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to64 {
break
}
- if v_1_1.Type != typ.UInt64 {
+ if x != mul_1.Args[0] {
break
}
- if v_1_1.AuxInt != k {
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if !(k > 0 && k < 15 && kbar == 16-k) {
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- v.reset(OpEq16)
- v0 := b.NewValue0(v.Pos, OpAnd16, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst16, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst16, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 15 && kbar == 16 - k
- // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ return false
+}
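// Note (illustrative, not generated): the commuted matches above all target
// one shape. During opt, x%c == 0 is first rewritten to x == c*(x/c), and
// x/c is expanded with umagic(16,c) into the Mul64/Rsh64Ux64/Trunc64to16
// subtree matched here; e.g. for x = 13, c = 6: x/c = 2 and c*(x/c) = 12 != 13,
// so the comparison is exactly the divisibility test. The pass.name != "opt"
// guard appears to defer these rules until that expansion is complete, and
// mul.Uses == 1 ensures the magic multiply feeds only this comparison, so the
// Leq16U/RotateLeft16 replacement leaves no live remainder computation behind.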
+func rewriteValuegeneric_OpEq16_10(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (ZeroExt16to64 x) (Const64 [m])) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
_ = v.Args[1]
- n := v.Args[0]
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpLsh16x64 {
+ if v_1.Op != OpMul16 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh16x64 {
+ if v_1_0.Op != OpConst16 {
break
}
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd16 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to16 {
break
}
- t := v_1_0_0.Type
- _ = v_1_0_0.Args[1]
- v_1_0_0_0 := v_1_0_0.Args[0]
- if v_1_0_0_0.Op != OpRsh16Ux64 {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
break
}
- if v_1_0_0_0.Type != t {
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- _ = v_1_0_0_0.Args[1]
- v_1_0_0_0_0 := v_1_0_0_0.Args[0]
- if v_1_0_0_0_0.Op != OpRsh16x64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to64 {
break
}
- if v_1_0_0_0_0.Type != t {
+ if x != mul_0.Args[0] {
break
}
- _ = v_1_0_0_0_0.Args[1]
- if n != v_1_0_0_0_0.Args[0] {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
break
}
- v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
- if v_1_0_0_0_0_1.Op != OpConst64 {
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- if v_1_0_0_0_0_1.AuxInt != 15 {
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 x (Mul16 (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s]))) (Const16 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
break
}
- v_1_0_0_0_1 := v_1_0_0_0.Args[1]
- if v_1_0_0_0_1.Op != OpConst64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to16 {
break
}
- if v_1_0_0_0_1.Type != typ.UInt64 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
break
}
- kbar := v_1_0_0_0_1.AuxInt
- if n != v_1_0_0.Args[1] {
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to64 {
break
}
- k := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if x != mul_1.Args[0] {
break
}
- if v_1_1.Type != typ.UInt64 {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- if v_1_1.AuxInt != k {
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
break
}
- if !(k > 0 && k < 15 && kbar == 16-k) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- v.reset(OpEq16)
- v0 := b.NewValue0(v.Pos, OpAnd16, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst16, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst16, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 15 && kbar == 16 - k
- // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ // match: (Eq16 x (Mul16 (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (ZeroExt16to64 x) (Const64 [m])) (Const64 [s]))) (Const16 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
- n := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpLsh16x64 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh16x64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to16 {
break
}
- _ = v_0_0.Args[1]
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd16 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
break
}
- t := v_0_0_0.Type
- _ = v_0_0_0.Args[1]
- if n != v_0_0_0.Args[0] {
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpRsh16Ux64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to64 {
break
}
- if v_0_0_0_1.Type != t {
+ if x != mul_0.Args[0] {
break
}
- _ = v_0_0_0_1.Args[1]
- v_0_0_0_1_0 := v_0_0_0_1.Args[0]
- if v_0_0_0_1_0.Op != OpRsh16x64 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
break
}
- if v_0_0_0_1_0.Type != t {
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- _ = v_0_0_0_1_0.Args[1]
- if n != v_0_0_0_1_0.Args[0] {
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
break
}
- v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
- if v_0_0_0_1_0_1.Op != OpConst64 {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
break
}
- if v_0_0_0_1_0_1.AuxInt != 15 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst16 {
break
}
- v_0_0_0_1_1 := v_0_0_0_1.Args[1]
- if v_0_0_0_1_1.Op != OpConst64 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to16 {
break
}
- if v_0_0_0_1_1.Type != typ.UInt64 {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
break
}
- kbar := v_0_0_0_1_1.AuxInt
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
break
}
- k := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to64 {
break
}
- if v_0_1.Type != typ.UInt64 {
+ if x != mul_1.Args[0] {
break
}
- if v_0_1.AuxInt != k {
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- if !(k > 0 && k < 15 && kbar == 16-k) {
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- v.reset(OpEq16)
- v0 := b.NewValue0(v.Pos, OpAnd16, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst16, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst16, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- return false
-}
-func rewriteValuegeneric_OpEq16_10(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 15 && kbar == 16 - k
- // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ // match: (Eq16 (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (ZeroExt16to64 x) (Const64 [m])) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
- n := v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpLsh16x64 {
+ if v_0.Op != OpMul16 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh16x64 {
+ if v_0_0.Op != OpConst16 {
break
}
- _ = v_0_0.Args[1]
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd16 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to16 {
break
}
- t := v_0_0_0.Type
- _ = v_0_0_0.Args[1]
- v_0_0_0_0 := v_0_0_0.Args[0]
- if v_0_0_0_0.Op != OpRsh16Ux64 {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
break
}
- if v_0_0_0_0.Type != t {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- _ = v_0_0_0_0.Args[1]
- v_0_0_0_0_0 := v_0_0_0_0.Args[0]
- if v_0_0_0_0_0.Op != OpRsh16x64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to64 {
break
}
- if v_0_0_0_0_0.Type != t {
+ if x != mul_0.Args[0] {
break
}
- _ = v_0_0_0_0_0.Args[1]
- if n != v_0_0_0_0_0.Args[0] {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
break
}
- v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
- if v_0_0_0_0_0_1.Op != OpConst64 {
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- if v_0_0_0_0_0_1.AuxInt != 15 {
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 (Mul16 (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s]))) (Const16 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
break
}
- v_0_0_0_0_1 := v_0_0_0_0.Args[1]
- if v_0_0_0_0_1.Op != OpConst64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc64to16 {
break
}
- if v_0_0_0_0_1.Type != typ.UInt64 {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
break
}
- kbar := v_0_0_0_0_1.AuxInt
- if n != v_0_0_0.Args[1] {
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to64 {
break
}
- k := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ if x != mul_1.Args[0] {
break
}
- if v_0_1.Type != typ.UInt64 {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- if v_0_1.AuxInt != k {
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
break
}
- if !(k > 0 && k < 15 && kbar == 16-k) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- v.reset(OpEq16)
- v0 := b.NewValue0(v.Pos, OpAnd16, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst16, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst16, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq16 s:(Sub16 x y) (Const16 [0]))
- // cond: s.Uses == 1
- // result: (Eq16 x y)
+ // match: (Eq16 (Mul16 (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (ZeroExt16to64 x) (Const64 [m])) (Const64 [s]))) (Const16 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
- _ = v.Args[1]
- s := v.Args[0]
- if s.Op != OpSub16 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
break
}
- y := s.Args[1]
- x := s.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst16 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc64to16 {
break
}
- if v_1.AuxInt != 0 {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
break
}
- if !(s.Uses == 1) {
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- v.reset(OpEq16)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- // match: (Eq16 (Const16 [0]) s:(Sub16 x y))
- // cond: s.Uses == 1
- // result: (Eq16 x y)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst16 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to64 {
break
}
- if v_0.AuxInt != 0 {
+ if x != mul_0.Args[0] {
break
}
- s := v.Args[1]
- if s.Op != OpSub16 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
break
}
- y := s.Args[1]
- x := s.Args[0]
- if !(s.Uses == 1) {
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- v.reset(OpEq16)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq32_0(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq32 x x)
- // cond:
- // result: (ConstBool [1])
- for {
- x := v.Args[1]
- if x != v.Args[0] {
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
break
}
- v.reset(OpConstBool)
- v.AuxInt = 1
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
- // cond:
- // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
_ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
- break
- }
- t := v_0.Type
- c := v_0.AuxInt
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpAdd32 {
+ if v_1.Op != OpMul16 {
break
}
- x := v_1.Args[1]
+ _ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
+ if v_1_0.Op != OpConst16 {
break
}
- if v_1_0.Type != t {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc32to16 {
break
}
- d := v_1_0.AuxInt
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpConst32, t)
- v0.AuxInt = int64(int32(c - d))
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d])))
- // cond:
- // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
_ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
- break
- }
- t := v_0.Type
- c := v_0.AuxInt
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpAdd32 {
+ if v_1.Op != OpMul16 {
break
}
_ = v_1.Args[1]
- x := v_1.Args[0]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst16 {
+ break
+ }
+ c := v_1_0.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ if v_1_1.Op != OpTrunc32to16 {
break
}
- if v_1_1.Type != t {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32Ux64 {
break
}
- d := v_1_1.AuxInt
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpConst32, t)
- v0.AuxInt = int64(int32(c - d))
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq32 (Add32 (Const32 <t> [d]) x) (Const32 <t> [c]))
- // cond:
- // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpAdd32 {
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- x := v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to32 {
break
}
- t := v_0_0.Type
- d := v_0_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ if x != mul_0.Args[0] {
break
}
- if v_1.Type != t {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- c := v_1.AuxInt
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpConst32, t)
- v0.AuxInt = int64(int32(c - d))
- v.AddArg(v0)
- v.AddArg(x)
- return true
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
}
- // match: (Eq32 (Add32 x (Const32 <t> [d])) (Const32 <t> [c]))
- // cond:
- // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ // match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s]))) (Const16 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
_ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpAdd32 {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
break
}
- _ = v_0.Args[1]
- x := v_0.Args[0]
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc32to16 {
break
}
- t := v_0_1.Type
- d := v_0_1.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32Ux64 {
break
}
- if v_1.Type != t {
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- c := v_1.AuxInt
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpConst32, t)
- v0.AuxInt = int64(int32(c - d))
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq32 (Const32 [c]) (Const32 [d]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to32 {
break
}
- d := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
- return true
- }
- // match: (Eq32 (Const32 [d]) (Const32 [c]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ if x != mul_1.Args[0] {
break
}
- d := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 31 && kbar == 32 - k
- // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ return false
+}
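// Note (illustrative, not generated): the Mul32-based variants above and
// below mirror the alternative Div16u expansions that use a 32-bit multiply.
// When umagic(16,c).m is even, the magic constant and shift can be halved:
//
//	((1<<16+m)*x) >> (16+s) == ((1<<15+m/2)*x) >> (16+s-1)
//
// which is the m/2, s-1 form matched here; the (m+1)/2, s-2 form pairs with
// an extra Rsh32Ux64-by-1 of x and covers the case where c itself is even.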
+func rewriteValuegeneric_OpEq16_20(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s]))) (Const16 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
_ = v.Args[1]
- n := v.Args[0]
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpLsh32x64 {
+ if v_1.Op != OpMul16 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh32x64 {
+ if v_1_0.Op != OpTrunc32to16 {
break
}
- _ = v_1_0.Args[1]
v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd32 {
+ if v_1_0_0.Op != OpRsh32Ux64 {
break
}
- t := v_1_0_0.Type
_ = v_1_0_0.Args[1]
- if n != v_1_0_0.Args[0] {
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpRsh32Ux64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to32 {
break
}
- if v_1_0_0_1.Type != t {
+ if x != mul_0.Args[0] {
break
}
- _ = v_1_0_0_1.Args[1]
- v_1_0_0_1_0 := v_1_0_0_1.Args[0]
- if v_1_0_0_1_0.Op != OpRsh32x64 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- if v_1_0_0_1_0.Type != t {
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- _ = v_1_0_0_1_0.Args[1]
- if n != v_1_0_0_1_0.Args[0] {
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
break
}
- v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
- if v_1_0_0_1_0_1.Op != OpConst64 {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
break
}
- if v_1_0_0_1_0_1.AuxInt != 31 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst16 {
break
}
- v_1_0_0_1_1 := v_1_0_0_1.Args[1]
- if v_1_0_0_1_1.Op != OpConst64 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc32to16 {
break
}
- if v_1_0_0_1_1.Type != typ.UInt64 {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32Ux64 {
break
}
- kbar := v_1_0_0_1_1.AuxInt
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- k := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to32 {
break
}
- if v_1_1.Type != typ.UInt64 {
+ if x != mul_1.Args[0] {
break
}
- if v_1_1.AuxInt != k {
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- if !(k > 0 && k < 31 && kbar == 32-k) {
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpAnd32, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst32, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst32, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 31 && kbar == 32 - k
- // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ // match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
- _ = v.Args[1]
- n := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpLsh32x64 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh32x64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst16 {
break
}
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd32 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc32to16 {
break
}
- t := v_1_0_0.Type
- _ = v_1_0_0.Args[1]
- v_1_0_0_0 := v_1_0_0.Args[0]
- if v_1_0_0_0.Op != OpRsh32Ux64 {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32Ux64 {
break
}
- if v_1_0_0_0.Type != t {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- _ = v_1_0_0_0.Args[1]
- v_1_0_0_0_0 := v_1_0_0_0.Args[0]
- if v_1_0_0_0_0.Op != OpRsh32x64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to32 {
break
}
- if v_1_0_0_0_0.Type != t {
+ if x != mul_0.Args[0] {
break
}
- _ = v_1_0_0_0_0.Args[1]
- if n != v_1_0_0_0_0.Args[0] {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
- if v_1_0_0_0_0_1.Op != OpConst64 {
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- if v_1_0_0_0_0_1.AuxInt != 31 {
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s]))) (Const16 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
break
}
- v_1_0_0_0_1 := v_1_0_0_0.Args[1]
- if v_1_0_0_0_1.Op != OpConst64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc32to16 {
break
}
- if v_1_0_0_0_1.Type != typ.UInt64 {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32Ux64 {
break
}
- kbar := v_1_0_0_0_1.AuxInt
- if n != v_1_0_0.Args[1] {
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to32 {
break
}
- k := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if x != mul_1.Args[0] {
break
}
- if v_1_1.Type != typ.UInt64 {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- if v_1_1.AuxInt != k {
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
break
}
- if !(k > 0 && k < 31 && kbar == 32-k) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpAnd32, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst32, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst32, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 31 && kbar == 32 - k
- // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ // match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s]))) (Const16 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
- n := v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpLsh32x64 {
+ if v_0.Op != OpMul16 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh32x64 {
+ if v_0_0.Op != OpTrunc32to16 {
break
}
- _ = v_0_0.Args[1]
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd32 {
+ if v_0_0_0.Op != OpRsh32Ux64 {
break
}
- t := v_0_0_0.Type
_ = v_0_0_0.Args[1]
- if n != v_0_0_0.Args[0] {
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
+ m := mul_1.AuxInt
v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpRsh32Ux64 {
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- if v_0_0_0_1.Type != t {
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
break
}
- _ = v_0_0_0_1.Args[1]
- v_0_0_0_1_0 := v_0_0_0_1.Args[0]
- if v_0_0_0_1_0.Op != OpRsh32x64 {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- if v_0_0_0_1_0.Type != t {
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
break
}
- _ = v_0_0_0_1_0.Args[1]
- if n != v_0_0_0_1_0.Args[0] {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst16 {
break
}
- v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
- if v_0_0_0_1_0_1.Op != OpConst64 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc32to16 {
break
}
- if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32Ux64 {
break
}
- if v_0_0_0_1_0_1.AuxInt != 31 {
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- v_0_0_0_1_1 := v_0_0_0_1.Args[1]
- if v_0_0_0_1_1.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- if v_0_0_0_1_1.Type != typ.UInt64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh32Ux64 {
break
}
- kbar := v_0_0_0_1_1.AuxInt
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt16to32 {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ if x != mul_1_0.Args[0] {
break
}
- k := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
break
}
- if v_0_1.Type != typ.UInt64 {
+ if mul_1_1.AuxInt != 1 {
break
}
- if v_0_1.AuxInt != k {
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if !(k > 0 && k < 31 && kbar == 32-k) {
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpAnd32, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst32, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst32, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- return false
-}
-func rewriteValuegeneric_OpEq32_10(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 31 && kbar == 32 - k
- // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
- n := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpLsh32x64 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh32x64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst16 {
break
}
- _ = v_0_0.Args[1]
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd32 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc32to16 {
break
}
- t := v_0_0_0.Type
- _ = v_0_0_0.Args[1]
- v_0_0_0_0 := v_0_0_0.Args[0]
- if v_0_0_0_0.Op != OpRsh32Ux64 {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32Ux64 {
break
}
- if v_0_0_0_0.Type != t {
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- _ = v_0_0_0_0.Args[1]
- v_0_0_0_0_0 := v_0_0_0_0.Args[0]
- if v_0_0_0_0_0.Op != OpRsh32x64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh32Ux64 {
break
}
- if v_0_0_0_0_0.Type != t {
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt16to32 {
break
}
- _ = v_0_0_0_0_0.Args[1]
- if n != v_0_0_0_0_0.Args[0] {
+ if x != mul_0_0.Args[0] {
break
}
- v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
- if v_0_0_0_0_0_1.Op != OpConst64 {
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
break
}
- if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ if mul_0_1.AuxInt != 1 {
break
}
- if v_0_0_0_0_0_1.AuxInt != 31 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- v_0_0_0_0_1 := v_0_0_0_0.Args[1]
- if v_0_0_0_0_1.Op != OpConst64 {
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if v_0_0_0_0_1.Type != typ.UInt64 {
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- kbar := v_0_0_0_0_1.AuxInt
- if n != v_0_0_0.Args[1] {
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s]))) (Const16 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
break
}
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc32to16 {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32Ux64 {
break
}
- k := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- if v_0_1.Type != typ.UInt64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- if v_0_1.AuxInt != k {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh32Ux64 {
break
}
- if !(k > 0 && k < 31 && kbar == 32-k) {
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt16to32 {
break
}
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpAnd32, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst32, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ if x != mul_1_0.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst32, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq32 s:(Sub32 x y) (Const32 [0]))
- // cond: s.Uses == 1
- // result: (Eq32 x y)
+ // match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s]))) (Const16 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
_ = v.Args[1]
- s := v.Args[0]
- if s.Op != OpSub32 {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
break
}
- y := s.Args[1]
- x := s.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc32to16 {
break
}
- if v_1.AuxInt != 0 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32Ux64 {
break
}
- if !(s.Uses == 1) {
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- v.reset(OpEq32)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- // match: (Eq32 (Const32 [0]) s:(Sub32 x y))
- // cond: s.Uses == 1
- // result: (Eq32 x y)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh32Ux64 {
break
}
- if v_0.AuxInt != 0 {
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt16to32 {
break
}
- s := v.Args[1]
- if s.Op != OpSub32 {
+ if x != mul_0_0.Args[0] {
break
}
- y := s.Args[1]
- x := s.Args[0]
- if !(s.Uses == 1) {
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
break
}
- v.reset(OpEq32)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq32F_0(v *Value) bool {
- // match: (Eq32F (Const32F [c]) (Const32F [d]))
- // cond:
- // result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32F {
+ if mul_0_1.AuxInt != 1 {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32F {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- d := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq32F (Const32F [d]) (Const32F [c]))
- // cond:
- // result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
+ // match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst32F {
+ if v_0.Op != OpMul16 {
break
}
- d := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32F {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst16 {
break
}
- c := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq64_0(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq64 x x)
- // cond:
- // result: (ConstBool [1])
- for {
- x := v.Args[1]
- if x != v.Args[0] {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc32to16 {
break
}
- v.reset(OpConstBool)
- v.AuxInt = 1
- return true
- }
- // match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
- // cond:
- // result: (Eq64 (Const64 <t> [c-d]) x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32Ux64 {
break
}
- t := v_0.Type
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpAdd64 {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- x := v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- if v_1_0.Type != t {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh32Ux64 {
break
}
- d := v_1_0.AuxInt
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = c - d
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq64 (Const64 <t> [c]) (Add64 x (Const64 <t> [d])))
- // cond:
- // result: (Eq64 (Const64 <t> [c-d]) x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt16to32 {
break
}
- t := v_0.Type
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpAdd64 {
+ if x != mul_1_0.Args[0] {
break
}
- _ = v_1.Args[1]
- x := v_1.Args[0]
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
break
}
- if v_1_1.Type != t {
+ if mul_1_1.AuxInt != 1 {
break
}
- d := v_1_1.AuxInt
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = c - d
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq64 (Add64 (Const64 <t> [d]) x) (Const64 <t> [c]))
- // cond:
- // result: (Eq64 (Const64 <t> [c-d]) x)
+ return false
+}
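+// udivisible6by16 is an illustrative, hand-written sketch (not generated,
+// and not used by the compiler) of the check the Eq16 rules above emit for
+// x%6 == 0: multiply by the inverse of the odd part of the divisor, rotate
+// right by its trailing-zero count, and compare against the largest
+// possible quotient. The constants are udivisible(16,6).m, .k and .max.
+// For example, udivisible6by16(65532) is true and udivisible6by16(3) is false.
+func udivisible6by16(x uint16) bool {
+	const m = 43691       // inverse of 3 == 6>>1 modulo 1<<16 (3*43691 == 2<<16 + 1)
+	const k = 1           // trailing zeros of 6
+	const max = 65535 / 6 // 10922, the largest possible quotient
+	r := x * m            // wraps modulo 1<<16
+	r = r>>k | r<<(16-k)  // RotateLeft16(r, 16-k), i.e. rotate right by k
+	return r <= max
+}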
+func rewriteValuegeneric_OpEq16_30(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpAdd64 {
+ if v_0.Op != OpMul16 {
break
}
- x := v_0.Args[1]
+ _ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst64 {
+ if v_0_0.Op != OpConst16 {
break
}
- t := v_0_0.Type
- d := v_0_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc32to16 {
break
}
- if v_1.Type != t {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32Ux64 {
break
}
- c := v_1.AuxInt
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = c - d
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq64 (Add64 x (Const64 <t> [d])) (Const64 <t> [c]))
- // cond:
- // result: (Eq64 (Const64 <t> [c-d]) x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpAdd64 {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- x := v_0.Args[0]
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh32Ux64 {
break
}
- t := v_0_1.Type
- d := v_0_1.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt16to32 {
break
}
- if v_1.Type != t {
+ if x != mul_0_0.Args[0] {
break
}
- c := v_1.AuxInt
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = c - d
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq64 (Const64 [c]) (Const64 [d]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
+ // match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s]))) (Const16 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if v_0.Op != OpMul16 {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc32to16 {
break
}
- d := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
- return true
- }
- // match: (Eq64 (Const64 [d]) (Const64 [c]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32Ux64 {
break
}
- d := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- c := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
- return true
- }
- // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 63 && kbar == 64 - k
- // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
- for {
- _ = v.Args[1]
- n := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpLsh64x64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh64x64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh32Ux64 {
break
}
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd64 {
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt16to32 {
break
}
- t := v_1_0_0.Type
- _ = v_1_0_0.Args[1]
- if n != v_1_0_0.Args[0] {
+ if x != mul_1_0.Args[0] {
break
}
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpRsh64Ux64 {
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
break
}
- if v_1_0_0_1.Type != t {
+ if mul_1_1.AuxInt != 1 {
break
}
- _ = v_1_0_0_1.Args[1]
- v_1_0_0_1_0 := v_1_0_0_1.Args[0]
- if v_1_0_0_1_0.Op != OpRsh64x64 {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- if v_1_0_0_1_0.Type != t {
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
break
}
- _ = v_1_0_0_1_0.Args[1]
- if n != v_1_0_0_1_0.Args[0] {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
- if v_1_0_0_1_0_1.Op != OpConst64 {
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s]))) (Const16 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
break
}
- if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc32to16 {
break
}
- if v_1_0_0_1_0_1.AuxInt != 63 {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32Ux64 {
break
}
- v_1_0_0_1_1 := v_1_0_0_1.Args[1]
- if v_1_0_0_1_1.Op != OpConst64 {
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- if v_1_0_0_1_1.Type != typ.UInt64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh32Ux64 {
break
}
- kbar := v_1_0_0_1_1.AuxInt
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt16to32 {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ if x != mul_0_0.Args[0] {
break
}
- k := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
break
}
- if v_1_1.Type != typ.UInt64 {
+ if mul_0_1.AuxInt != 1 {
break
}
- if v_1_1.AuxInt != k {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- if !(k > 0 && k < 63 && kbar == 64-k) {
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpAnd64, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 63 && kbar == 64 - k
- // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
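+	// The Avg32u forms below undo the general Div16u lowering, used when
+	// the 17-bit magic multiplier umagic(16,c).m cannot be halved: the
+	// quotient is computed as Avg32u(x<<16, x*m) >> (16+s-1), with Avg32u
+	// carrying the seventeenth bit of the intermediate product, so the
+	// divisibility rule has to match that shape as well.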
+ // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
_ = v.Args[1]
- n := v.Args[0]
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpLsh64x64 {
+ if v_1.Op != OpMul16 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh64x64 {
+ if v_1_0.Op != OpConst16 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc32to16 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_1_1_0_0.Args[1]
+ v_1_1_0_0_0 := v_1_1_0_0.Args[0]
+ if v_1_1_0_0_0.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_1_1_0_0_0.Args[1]
+ v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
+ if v_1_1_0_0_0_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != v_1_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
+ if v_1_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_0_0_0_1.AuxInt != 16 {
+ break
+ }
+ mul := v_1_1_0_0.Args[1]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst16 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc32to16 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_1_1_0_0.Args[1]
+ v_1_1_0_0_0 := v_1_1_0_0.Args[0]
+ if v_1_1_0_0_0.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_1_1_0_0_0.Args[1]
+ v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
+ if v_1_1_0_0_0_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != v_1_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
+ if v_1_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_0_0_0_1.AuxInt != 16 {
+ break
+ }
+ mul := v_1_1_0_0.Args[1]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s]))) (Const16 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc32to16 {
break
}
- _ = v_1_0.Args[1]
v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd64 {
+ if v_1_0_0.Op != OpRsh32Ux64 {
break
}
- t := v_1_0_0.Type
_ = v_1_0_0.Args[1]
v_1_0_0_0 := v_1_0_0.Args[0]
- if v_1_0_0_0.Op != OpRsh64Ux64 {
- break
- }
- if v_1_0_0_0.Type != t {
+ if v_1_0_0_0.Op != OpAvg32u {
break
}
_ = v_1_0_0_0.Args[1]
v_1_0_0_0_0 := v_1_0_0_0.Args[0]
- if v_1_0_0_0_0.Op != OpRsh64x64 {
+ if v_1_0_0_0_0.Op != OpLsh32x64 {
break
}
- if v_1_0_0_0_0.Type != t {
+ _ = v_1_0_0_0_0.Args[1]
+ v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
+ if v_1_0_0_0_0_0.Op != OpZeroExt16to32 {
break
}
- _ = v_1_0_0_0_0.Args[1]
- if n != v_1_0_0_0_0.Args[0] {
+ if x != v_1_0_0_0_0_0.Args[0] {
break
}
v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
if v_1_0_0_0_0_1.Op != OpConst64 {
break
}
- if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ if v_1_0_0_0_0_1.AuxInt != 16 {
break
}
- if v_1_0_0_0_0_1.AuxInt != 63 {
+ mul := v_1_0_0_0.Args[1]
+ if mul.Op != OpMul32 {
break
}
- v_1_0_0_0_1 := v_1_0_0_0.Args[1]
- if v_1_0_0_0_1.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- if v_1_0_0_0_1.Type != typ.UInt64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to32 {
break
}
- kbar := v_1_0_0_0_1.AuxInt
- if n != v_1_0_0.Args[1] {
+ if x != mul_1.Args[0] {
break
}
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
break
}
- k := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
break
}
- if v_1_1.Type != typ.UInt64 {
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s]))) (Const16 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
break
}
- if v_1_1.AuxInt != k {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc32to16 {
break
}
- if !(k > 0 && k < 63 && kbar == 64-k) {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32Ux64 {
break
}
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpAnd64, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
+ if v_1_0_0_0_0_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != v_1_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 16 {
+ break
+ }
+ mul := v_1_0_0_0.Args[1]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 63 && kbar == 64 - k
- // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ // match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
for {
- n := v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpLsh64x64 {
+ if v_0.Op != OpMul16 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh64x64 {
+ if v_0_0.Op != OpConst16 {
break
}
- _ = v_0_0.Args[1]
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd64 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc32to16 {
break
}
- t := v_0_0_0.Type
- _ = v_0_0_0.Args[1]
- if n != v_0_0_0.Args[0] {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32Ux64 {
break
}
- v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpRsh64Ux64 {
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAvg32u {
break
}
- if v_0_0_0_1.Type != t {
+ _ = v_0_1_0_0.Args[1]
+ v_0_1_0_0_0 := v_0_1_0_0.Args[0]
+ if v_0_1_0_0_0.Op != OpLsh32x64 {
break
}
- _ = v_0_0_0_1.Args[1]
- v_0_0_0_1_0 := v_0_0_0_1.Args[0]
- if v_0_0_0_1_0.Op != OpRsh64x64 {
+ _ = v_0_1_0_0_0.Args[1]
+ v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
+ if v_0_1_0_0_0_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != v_0_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
+ if v_0_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_0_0_0_1.AuxInt != 16 {
+ break
+ }
+ mul := v_0_1_0_0.Args[1]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst16 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc32to16 {
+ break
+ }
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_0_1_0_0.Args[1]
+ v_0_1_0_0_0 := v_0_1_0_0.Args[0]
+ if v_0_1_0_0_0.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_0_1_0_0_0.Args[1]
+ v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
+ if v_0_1_0_0_0_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != v_0_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
+ if v_0_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_0_0_0_1.AuxInt != 16 {
+ break
+ }
+ mul := v_0_1_0_0.Args[1]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s]))) (Const16 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc32to16 {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
+ if v_0_0_0_0_0_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != v_0_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 16 {
+ break
+ }
+ mul := v_0_0_0_0.Args[1]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ return false
+}
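+// Each divisibility pattern above appears once per argument order: rulegen
+// expands matches on the commutative Eq16, Mul16 and Mul32 into explicit
+// permutations, and splits the output into chained helpers of at most ten
+// rules each (_0, _10, _20, ...), which is why a single rule family spans
+// several functions.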
+func rewriteValuegeneric_OpEq16_40(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s]))) (Const16 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc32to16 {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
+ if v_0_0_0_0_0_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != v_0_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 16 {
+ break
+ }
+ mul := v_0_0_0_0.Args[1]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt16to32 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v2.AuxInt = int64(int16(udivisible(16, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(16 - udivisible(16, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(udivisible(16, c).max))
+ v.AddArg(v4)
+ return true
+ }
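+	// The shift round-trip rules below handle signed remainders by powers
+	// of two: n == n>>k<<k, with the Add16 supplying the usual bias that
+	// rounds a signed division toward zero, is the lowered form of
+	// n%(1<<k) == 0, and reduces to masking the low k bits:
+	// n & (1<<k - 1) == 0.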
+ // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh16x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh16x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd16 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ if n != v_1_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpRsh16Ux64 {
+ break
+ }
+ if v_1_0_0_1.Type != t {
+ break
+ }
+ _ = v_1_0_0_1.Args[1]
+ v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+ if v_1_0_0_1_0.Op != OpRsh16x64 {
+ break
+ }
+ if v_1_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_1_0.Args[1]
+ if n != v_1_0_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+ if v_1_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_1_0_1.AuxInt != 15 {
+ break
+ }
+ v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+ if v_1_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_1_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 15 && kbar == 16-k) {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh16x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh16x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd16 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpRsh16Ux64 {
+ break
+ }
+ if v_1_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpRsh16x64 {
+ break
+ }
+ if v_1_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ if n != v_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 15 {
+ break
+ }
+ v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+ if v_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_0_1.AuxInt
+ if n != v_1_0_0.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 15 && kbar == 16-k) {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh16x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh16x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd16 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ if n != v_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpRsh16Ux64 {
+ break
+ }
+ if v_0_0_0_1.Type != t {
+ break
+ }
+ _ = v_0_0_0_1.Args[1]
+ v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+ if v_0_0_0_1_0.Op != OpRsh16x64 {
+ break
+ }
+ if v_0_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_1_0.Args[1]
+ if n != v_0_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+ if v_0_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_1_0_1.AuxInt != 15 {
+ break
+ }
+ v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+ if v_0_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_1_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 15 && kbar == 16-k) {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh16x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh16x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd16 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpRsh16Ux64 {
+ break
+ }
+ if v_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpRsh16x64 {
+ break
+ }
+ if v_0_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ if n != v_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 15 {
+ break
+ }
+ v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+ if v_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_0_1.AuxInt
+ if n != v_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 15 && kbar == 16-k) {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
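+	// Comparing a single-use difference against zero is the same as
+	// comparing its operands. Besides the obvious simplification, this is
+	// the step that turns Eq16 (Sub16 x (Mul16 (Div16u x c) c)) [0], the
+	// expanded x%c == 0, into the x == c*(x/c) shape the divisibility
+	// rules above consume.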
+ // match: (Eq16 s:(Sub16 x y) (Const16 [0]))
+ // cond: s.Uses == 1
+ // result: (Eq16 x y)
+ for {
+ _ = v.Args[1]
+ s := v.Args[0]
+ if s.Op != OpSub16 {
+ break
+ }
+ y := s.Args[1]
+ x := s.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst16 {
+ break
+ }
+ if v_1.AuxInt != 0 {
+ break
+ }
+ if !(s.Uses == 1) {
+ break
+ }
+ v.reset(OpEq16)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Eq16 (Const16 [0]) s:(Sub16 x y))
+ // cond: s.Uses == 1
+ // result: (Eq16 x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ s := v.Args[1]
+ if s.Op != OpSub16 {
+ break
+ }
+ y := s.Args[1]
+ x := s.Args[0]
+ if !(s.Uses == 1) {
+ break
+ }
+ v.reset(OpEq16)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
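+// The Eq32 rules repeat the same divisibility scheme at 32 bits, once per
+// Div32u lowering: the Hmul32u high-multiply forms appear first below; the
+// halved-multiplier, Avg32u and 64-bit widened variants follow in the
+// later helpers.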
+func rewriteValuegeneric_OpEq32_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 x x)
+ // cond:
+ // result: (ConstBool [1])
+ for {
+ x := v.Args[1]
+ if x != v.Args[0] {
+ break
+ }
+ v.reset(OpConstBool)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
+ // cond:
+ // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ t := v_0.Type
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpAdd32 {
+ break
+ }
+ x := v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ if v_1_0.Type != t {
+ break
+ }
+ d := v_1_0.AuxInt
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpConst32, t)
+ v0.AuxInt = int64(int32(c - d))
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d])))
+ // cond:
+ // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ t := v_0.Type
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpAdd32 {
+ break
+ }
+ _ = v_1.Args[1]
+ x := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ if v_1_1.Type != t {
+ break
+ }
+ d := v_1_1.AuxInt
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpConst32, t)
+ v0.AuxInt = int64(int32(c - d))
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq32 (Add32 (Const32 <t> [d]) x) (Const32 <t> [c]))
+ // cond:
+ // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpAdd32 {
+ break
+ }
+ x := v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ t := v_0_0.Type
+ d := v_0_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32 {
+ break
+ }
+ if v_1.Type != t {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpConst32, t)
+ v0.AuxInt = int64(int32(c - d))
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq32 (Add32 x (Const32 <t> [d])) (Const32 <t> [c]))
+ // cond:
+ // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpAdd32 {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ t := v_0_1.Type
+ d := v_0_1.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32 {
+ break
+ }
+ if v_1.Type != t {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpConst32, t)
+ v0.AuxInt = int64(int32(c - d))
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq32 (Const32 [c]) (Const32 [d]))
+ // cond:
+ // result: (ConstBool [b2i(c == d)])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32 {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(c == d)
+ return true
+ }
+ // match: (Eq32 (Const32 [d]) (Const32 [c]))
+ // cond:
+ // result: (ConstBool [b2i(c == d)])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ d := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32 {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(c == d)
+ return true
+ }
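+	// Here the quotient was lowered through Hmul32u, which yields the
+	// high 32 bits of the 32x32 product, so when the magic multiplier is
+	// even the constant appears as 1<<31 + umagic(32,c).m/2 and the shift
+	// as umagic(32,c).s-1.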
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ return false
+}
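+// Editorial note on the OpEq32 chunks below: these rules recognize the
+// expanded form of x%c == 0 for unsigned x, i.e. x == c*(x/c) after the
+// magic-number division lowering, and replace the whole
+// multiply/shift/compare tree with the multiply-and-rotate divisibility
+// test (the "exact remainder" trick described in Hacker's Delight):
+//	RotateLeft32(x*udivisible(32,c).m, 32-udivisible(32,c).k) <= udivisible(32,c).max
+// Worked example (editorial, not generated): for c = 6, k = 1 (trailing
+// zeros of 6), m = 0xaaaaaaab (the inverse of 3 mod 1<<32, since
+// 3*0xaaaaaaab == 1<<33 + 1), and max = (1<<32-1)/6 = 0x2aaaaaaa, so
+// x%6 == 0 becomes RotateLeft32(x*0xaaaaaaab, 31) <= 0x2aaaaaaa.
+// The v.Block.Func.pass.name != "opt" guard keeps the rules out of the
+// early opt pass; they are meant to fire only once the division
+// expansion they match has already been generated.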
+func rewriteValuegeneric_OpEq32_10(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
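+ // Editorial note: the variants below match the even-divisor strategy of
+ // the unsigned magic-division lowering, which pre-shifts x right by one
+ // (Rsh32Ux64 x (Const64 [1])) before the high multiply; hence the
+ // adjusted checks m == 1<<31+(umagic(32,c).m+1)/2 and s == umagic(32,c).s-2.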
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ if mul_0.Type != typ.UInt32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ if mul_1.Type != typ.UInt32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ if mul_0.Type != typ.UInt32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ if mul_1.Type != typ.UInt32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ if mul_0.Type != typ.UInt32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ return false
+}
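+// Editorial note: the rule generator splits an op's rules across several
+// functions, naming each continuation by the index of its first rule
+// (_0, _10, _20, ...); rewriteValuegeneric_OpEq32_20 below simply
+// continues the same permutation enumeration.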
+func rewriteValuegeneric_OpEq32_20(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ if mul_1.Type != typ.UInt32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ if mul_0.Type != typ.UInt32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ if mul_1.Type != typ.UInt32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
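+ // Editorial note: the Avg32u forms below match the general strategy of
+ // the magic-division lowering, where the magic constant needs 33 bits:
+ // Avg32u x (Hmul32u <m> x) computes (x + hi32(x*m)) >> 1 without
+ // overflow, recovering the implicit high bit; hence m is the low 32 bits
+ // of umagic(32,c).m and the shift check is the unadjusted s-1.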
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ if x != v_1_1_0.Args[0] {
+ break
+ }
+ mul := v_1_1_0.Args[1]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ if x != v_1_1_0.Args[0] {
+ break
+ }
+ mul := v_1_1_0.Args[1]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ if x != v_1_0_0.Args[0] {
+ break
+ }
+ mul := v_1_0_0.Args[1]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ if x != v_1_0_0.Args[0] {
+ break
+ }
+ mul := v_1_0_0.Args[1]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ if x != v_0_1_0.Args[0] {
+ break
+ }
+ mul := v_0_1_0.Args[1]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ if x != v_0_1_0.Args[0] {
+ break
+ }
+ mul := v_0_1_0.Args[1]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ if x != v_0_0_0.Args[0] {
+ break
+ }
+ mul := v_0_0_0.Args[1]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq32_30(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAvg32u {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ if x != v_0_0_0.Args[0] {
+ break
+ }
+ mul := v_0_0_0.Args[1]
+ if mul.Op != OpHmul32u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
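+ // Editorial note: the Trunc64to32/Mul64 forms below match the expansion
+ // used on 64-bit targets, where x/c is computed with a full 64-bit
+ // multiply of ZeroExt32to64 x by the widened magic constant and a shift
+ // of 32+umagic(32,c).s-1, then truncated back to 32 bits.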
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1_0.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ return false
+}
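+// Note on the Eq32 divisibility rules above and below: they match the
+// magic-multiply expansion of x == c*(x/c) that earlier rewrites produce
+// for x%c == 0 and replace it with a multiply-and-rotate test. As a rough
+// sketch of the identity (assuming udivisible(32,c) supplies m, k and max
+// as in magic.go): for c = d<<k with d odd, x%c == 0 iff
+// rotr32(x*m, k) <= max, where m is the multiplicative inverse of d
+// mod 2^32 and max = (2^32-1)/c; RotateLeft32 by 32-k is that right
+// rotation. The pass.name != "opt" guard appears to defer the rule until
+// the magic-division expansion is final, and mul.Uses == 1 keeps it from
+// firing when the quotient itself is still needed elsewhere.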
+func rewriteValuegeneric_OpEq32_40(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1_0.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1_0.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1_0.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
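+	// The Avg64u rules below handle the other expansion of 32-bit magic
+	// division: when the magic constant umagic(32,c).m is odd it cannot
+	// be halved, so the quotient is computed as
+	// (Avg64u (x<<32) (x*m)) >> s, with Avg64u supplying the implicit
+	// 33rd multiplier bit without overflowing 64 bits.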
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_1_1_0_0.Args[1]
+ v_1_1_0_0_0 := v_1_1_0_0.Args[0]
+ if v_1_1_0_0_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_1_1_0_0_0.Args[1]
+ v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
+ if v_1_1_0_0_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != v_1_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
+ if v_1_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_1_1_0_0.Args[1]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_1_1_0_0.Args[1]
+ v_1_1_0_0_0 := v_1_1_0_0.Args[0]
+ if v_1_1_0_0_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_1_1_0_0_0.Args[1]
+ v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
+ if v_1_1_0_0_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != v_1_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
+ if v_1_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_1_1_0_0.Args[1]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
+ if v_1_0_0_0_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != v_1_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_1_0_0_0.Args[1]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ return false
+}
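+// rewriteValuegeneric_OpEq32_50 continues with the remaining commuted
+// operand orders of the Avg64u divisibility pattern; rulegen emits one
+// matcher per permutation of the commutative Mul32/Mul64 arguments.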
+func rewriteValuegeneric_OpEq32_50(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
+ if v_1_0_0_0_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != v_1_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_1_0_0_0.Args[1]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_0_1_0_0.Args[1]
+ v_0_1_0_0_0 := v_0_1_0_0.Args[0]
+ if v_0_1_0_0_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0_1_0_0_0.Args[1]
+ v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
+ if v_0_1_0_0_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != v_0_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
+ if v_0_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_0_1_0_0.Args[1]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_0_1_0_0.Args[1]
+ v_0_1_0_0_0 := v_0_1_0_0.Args[0]
+ if v_0_1_0_0_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0_1_0_0_0.Args[1]
+ v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
+ if v_0_1_0_0_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != v_0_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
+ if v_0_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_0_1_0_0.Args[1]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
+ if v_0_0_0_0_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != v_0_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_0_0_0_0.Args[1]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc64to32 {
+ break
+ }
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
+ if v_0_0_0_0_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != v_0_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_0_0_0_0.Args[1]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
+ return true
+ }
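+	// The next four rules undo the signed power-of-two remainder check:
+	// n % (1<<k) == 0 was expanded earlier into
+	// n == ((n + ((n>>31) >>> (32-k))) >> k) << k, and each operand
+	// permutation of that shape is reduced to the mask test
+	// (n & (1<<k - 1)) == 0.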
+ // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 31 && kbar == 32 - k
+ // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd32 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ if n != v_1_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ if v_1_0_0_1.Type != t {
+ break
+ }
+ _ = v_1_0_0_1.Args[1]
+ v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+ if v_1_0_0_1_0.Op != OpRsh32x64 {
+ break
+ }
+ if v_1_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_1_0.Args[1]
+ if n != v_1_0_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+ if v_1_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_1_0_1.AuxInt != 31 {
+ break
+ }
+ v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+ if v_1_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_1_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 31 && kbar == 32-k) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpAnd32, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst32, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 31 && kbar == 32 - k
+ // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd32 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ if v_1_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpRsh32x64 {
+ break
+ }
+ if v_1_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ if n != v_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 31 {
+ break
+ }
+ v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+ if v_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_0_1.AuxInt
+ if n != v_1_0_0.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 31 && kbar == 32-k) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpAnd32, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst32, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 31 && kbar == 32 - k
+ // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd32 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ if n != v_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ if v_0_0_0_1.Type != t {
+ break
+ }
+ _ = v_0_0_0_1.Args[1]
+ v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+ if v_0_0_0_1_0.Op != OpRsh32x64 {
+ break
+ }
+ if v_0_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_1_0.Args[1]
+ if n != v_0_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+ if v_0_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_1_0_1.AuxInt != 31 {
+ break
+ }
+ v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+ if v_0_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_1_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 31 && kbar == 32-k) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpAnd32, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst32, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 31 && kbar == 32 - k
+ // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd32 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ if v_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpRsh32x64 {
+ break
+ }
+ if v_0_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ if n != v_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 31 {
+ break
+ }
+ v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+ if v_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_0_1.AuxInt
+ if n != v_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 31 && kbar == 32-k) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpAnd32, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst32, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
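+	// (Eq32 (Sub32 x y) (Const32 [0])) simplifies to (Eq32 x y). The
+	// s.Uses == 1 guard skips the rewrite when the difference is needed
+	// elsewhere anyway, in which case comparing it against zero directly
+	// costs nothing extra.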
+ // match: (Eq32 s:(Sub32 x y) (Const32 [0]))
+ // cond: s.Uses == 1
+ // result: (Eq32 x y)
+ for {
+ _ = v.Args[1]
+ s := v.Args[0]
+ if s.Op != OpSub32 {
+ break
+ }
+ y := s.Args[1]
+ x := s.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32 {
+ break
+ }
+ if v_1.AuxInt != 0 {
+ break
+ }
+ if !(s.Uses == 1) {
+ break
+ }
+ v.reset(OpEq32)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq32_60(v *Value) bool {
+ // match: (Eq32 (Const32 [0]) s:(Sub32 x y))
+ // cond: s.Uses == 1
+ // result: (Eq32 x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ s := v.Args[1]
+ if s.Op != OpSub32 {
+ break
+ }
+ y := s.Args[1]
+ x := s.Args[0]
+ if !(s.Uses == 1) {
+ break
+ }
+ v.reset(OpEq32)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
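+// Eq32F only folds constant comparisons. auxTo32F decodes the AuxInt
+// back into a float32 so the comparison follows IEEE semantics (for
+// example, +0 == -0 and NaN != NaN) rather than comparing raw bit
+// patterns.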
+func rewriteValuegeneric_OpEq32F_0(v *Value) bool {
+ // match: (Eq32F (Const32F [c]) (Const32F [d]))
+ // cond:
+ // result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32F {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32F {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
+ return true
+ }
+ // match: (Eq32F (Const32F [d]) (Const32F [c]))
+ // cond:
+ // result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32F {
+ break
+ }
+ d := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32F {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
+ return true
+ }
+ return false
+}
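+// The Eq64 rules mirror the Eq32 set: trivial and constant folds plus
+// Add64 re-association first, then the unsigned divisibility rewrite.
+// For 64-bit operands the magic division is expanded with Hmul64u (the
+// high 64 bits of the full 128-bit product) rather than a widening
+// multiply, but the replacement is the same multiply-and-rotate test.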
+func rewriteValuegeneric_OpEq64_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq64 x x)
+ // cond:
+ // result: (ConstBool [1])
+ for {
+ x := v.Args[1]
+ if x != v.Args[0] {
+ break
+ }
+ v.reset(OpConstBool)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
+ // cond:
+ // result: (Eq64 (Const64 <t> [c-d]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ t := v_0.Type
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpAdd64 {
+ break
+ }
+ x := v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ if v_1_0.Type != t {
+ break
+ }
+ d := v_1_0.AuxInt
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = c - d
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq64 (Const64 <t> [c]) (Add64 x (Const64 <t> [d])))
+ // cond:
+ // result: (Eq64 (Const64 <t> [c-d]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ t := v_0.Type
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpAdd64 {
+ break
+ }
+ _ = v_1.Args[1]
+ x := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != t {
+ break
+ }
+ d := v_1_1.AuxInt
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = c - d
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq64 (Add64 (Const64 <t> [d]) x) (Const64 <t> [c]))
+ // cond:
+ // result: (Eq64 (Const64 <t> [c-d]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpAdd64 {
+ break
+ }
+ x := v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ t := v_0_0.Type
+ d := v_0_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ if v_1.Type != t {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = c - d
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq64 (Add64 x (Const64 <t> [d])) (Const64 <t> [c]))
+ // cond:
+ // result: (Eq64 (Const64 <t> [c-d]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ t := v_0_1.Type
+ d := v_0_1.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ if v_1.Type != t {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = c - d
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq64 (Const64 [c]) (Const64 [d]))
+ // cond:
+ // result: (ConstBool [b2i(c == d)])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(c == d)
+ return true
+ }
+ // match: (Eq64 (Const64 [d]) (Const64 [c]))
+ // cond:
+ // result: (ConstBool [b2i(c == d)])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ d := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(c == d)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq64_10(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
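+	// These rules continue the x == c*(x/c) family. The replacement relies
+	// on the identity (Hacker's Delight, ch. 10):
+	//   x%c == 0  <=>  RotateLeft64(x*m, 64-k) <= max
+	// where, per udivisible, m is the multiplicative inverse of c>>k modulo
+	// 2^64, k is the number of trailing zero bits in c, and
+	// max = (2^64-1)/c. Illustrative sketch for c == 3 (odd, so k == 0 and
+	// the rotate is a no-op):
+	//   x%3 == 0  <=>  x*0xaaaaaaaaaaaaaaab <= 0x5555555555555555
+	// because 0xaaaaaaaaaaaaaaab is the inverse of 3 mod 2^64.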
+ // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s])) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s])) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
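+	// The next group matches the "pre-shift" lowering variant, recognizable
+	// by m == 2^63 + (umagic(64,c).m+1)/2 and s == umagic(64,c).s-2, with x
+	// shifted right by one before the high multiply. The compiler emits it
+	// when the halved magic constant alone would not be exact (roughly, for
+	// even divisors); the replacement divisibility test is unchanged.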
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s])) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq64_20(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s])) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
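+	// The Avg64u group matches the most general division lowering, used when
+	// neither shortcut above applies: the effective multiplier needs 65 bits
+	// (an implicit leading 1 above umagic(64,c).m), so the lowering computes
+	//   x/c = (Avg64u x (Hmul64u m x)) >> (umagic(64,c).s - 1)
+	// with Avg64u adding x back in without dropping the carry bit.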
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ if x != v_1_1_0.Args[0] {
+ break
+ }
+ mul := v_1_1_0.Args[1]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ if x != v_1_1_0.Args[0] {
+ break
+ }
+ mul := v_1_1_0.Args[1]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ if x != v_1_0_0.Args[0] {
+ break
+ }
+ mul := v_1_0_0.Args[1]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s])) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ if x != v_1_0_0.Args[0] {
+ break
+ }
+ mul := v_1_0_0.Args[1]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ if x != v_0_1_0.Args[0] {
+ break
+ }
+ mul := v_0_1_0.Args[1]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ if x != v_0_1_0.Args[0] {
+ break
+ }
+ mul := v_0_1_0.Args[1]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ if x != v_0_0_0.Args[0] {
+ break
+ }
+ mul := v_0_0_0.Args[1]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq64_30(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq64 (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s])) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ if x != v_0_0_0.Args[0] {
+ break
+ }
+ mul := v_0_0_0.Args[1]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
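+	// The shift-based rules below undo the divide-and-multiply-back pattern
+	// for signed powers of two: n == (n/2^k)*2^k, with the division lowered
+	// to a rounding adjustment (the kbar == 64-k shifts) followed by an
+	// arithmetic shift, reduces to a test of the low k bits, n&(2^k-1) == 0.
+	// For k == 3, for example, the whole tree becomes n&7 == 0.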
+ // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ if n != v_1_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_1_0_0_1.Type != t {
+ break
+ }
+ _ = v_1_0_0_1.Args[1]
+ v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+ if v_1_0_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_1_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_1_0.Args[1]
+ if n != v_1_0_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+ if v_1_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_1_0_1.AuxInt != 63 {
+ break
+ }
+ v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+ if v_1_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_1_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_1_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_1_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ if n != v_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 63 {
+ break
+ }
+ v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+ if v_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_0_1.AuxInt
+ if n != v_1_0_0.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ if n != v_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_0_0_0_1.Type != t {
+ break
+ }
+ _ = v_0_0_0_1.Args[1]
+ v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+ if v_0_0_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_0_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_1_0.Args[1]
+ if n != v_0_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+ if v_0_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_1_0_1.AuxInt != 63 {
+ break
+ }
+ v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+ if v_0_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_1_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_0_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ if n != v_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 63 {
+ break
+ }
+ v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+ if v_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_0_1.AuxInt
+ if n != v_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
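+	// A subtraction compared against zero is just an equality test on its
+	// operands; the s.Uses == 1 restriction ensures the Sub64 itself becomes
+	// dead and can be removed.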
+ // match: (Eq64 s:(Sub64 x y) (Const64 [0]))
+ // cond: s.Uses == 1
+ // result: (Eq64 x y)
+ for {
+ _ = v.Args[1]
+ s := v.Args[0]
+ if s.Op != OpSub64 {
+ break
+ }
+ y := s.Args[1]
+ x := s.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ if v_1.AuxInt != 0 {
+ break
+ }
+ if !(s.Uses == 1) {
+ break
+ }
+ v.reset(OpEq64)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (Eq64 (Const64 [0]) s:(Sub64 x y))
+ // cond: s.Uses == 1
+ // result: (Eq64 x y)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ s := v.Args[1]
+ if s.Op != OpSub64 {
+ break
+ }
+ y := s.Args[1]
+ x := s.Args[0]
+ if !(s.Uses == 1) {
+ break
+ }
+ v.reset(OpEq64)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq64F_0(v *Value) bool {
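+	// Constant folding goes through auxTo64F so IEEE semantics are
+	// preserved: the AuxInt bit patterns are converted back to float64
+	// before comparing, keeping NaN != NaN and -0.0 == 0.0.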
+ // match: (Eq64F (Const64F [c]) (Const64F [d]))
+ // cond:
+ // result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64F {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64F {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d))
+ return true
+ }
+ // match: (Eq64F (Const64F [d]) (Const64F [c]))
+ // cond:
+ // result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst64F {
+ break
+ }
+ d := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64F {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d))
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq8_0(v *Value) bool {
+ b := v.Block
+ config := b.Func.Config
+ typ := &b.Func.Config.Types
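+	// The Eq8 rules mirror the wider widths. The 8-bit divisibility rewrite
+	// needs a cheap RotateLeft8; on targets without small rotates
+	// (!hasSmallRotate(config)) the Mod8u rules below instead widen the
+	// operands to 32 bits so the 32-bit divisibility rules can fire.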
+ // match: (Eq8 x x)
+ // cond:
+ // result: (ConstBool [1])
+ for {
+ x := v.Args[1]
+ if x != v.Args[0] {
+ break
+ }
+ v.reset(OpConstBool)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
+ // cond:
+ // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ t := v_0.Type
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpAdd8 {
+ break
+ }
+ x := v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst8 {
+ break
+ }
+ if v_1_0.Type != t {
+ break
+ }
+ d := v_1_0.AuxInt
+ v.reset(OpEq8)
+ v0 := b.NewValue0(v.Pos, OpConst8, t)
+ v0.AuxInt = int64(int8(c - d))
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq8 (Const8 <t> [c]) (Add8 x (Const8 <t> [d])))
+ // cond:
+ // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ t := v_0.Type
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpAdd8 {
+ break
+ }
+ _ = v_1.Args[1]
+ x := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst8 {
+ break
+ }
+ if v_1_1.Type != t {
+ break
+ }
+ d := v_1_1.AuxInt
+ v.reset(OpEq8)
+ v0 := b.NewValue0(v.Pos, OpConst8, t)
+ v0.AuxInt = int64(int8(c - d))
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq8 (Add8 (Const8 <t> [d]) x) (Const8 <t> [c]))
+ // cond:
+ // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpAdd8 {
+ break
+ }
+ x := v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst8 {
+ break
+ }
+ t := v_0_0.Type
+ d := v_0_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ if v_1.Type != t {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpEq8)
+ v0 := b.NewValue0(v.Pos, OpConst8, t)
+ v0.AuxInt = int64(int8(c - d))
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq8 (Add8 x (Const8 <t> [d])) (Const8 <t> [c]))
+ // cond:
+ // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpAdd8 {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst8 {
+ break
+ }
+ t := v_0_1.Type
+ d := v_0_1.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ if v_1.Type != t {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpEq8)
+ v0 := b.NewValue0(v.Pos, OpConst8, t)
+ v0.AuxInt = int64(int8(c - d))
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq8 (Const8 [c]) (Const8 [d]))
+ // cond:
+ // result: (ConstBool [b2i(c == d)])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(c == d)
+ return true
+ }
+ // match: (Eq8 (Const8 [d]) (Const8 [c]))
+ // cond:
+ // result: (ConstBool [b2i(c == d)])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ d := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(c == d)
+ return true
+ }
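+	// Widening rule: with no small rotate available, x%c == 0 is rewritten
+	// over zero-extended 32-bit values. c is masked with 0xff because the
+	// AuxInt of a Const8 is stored sign-extended to int64.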
+ // match: (Eq8 (Mod8u x (Const8 [c])) (Const8 [0]))
+ // cond: x.Op != OpConst8 && udivisibleOK(8,c) && !hasSmallRotate(config)
+ // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xff])) (Const32 <typ.UInt32> [0]))
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMod8u {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst8 {
+ break
+ }
+ c := v_0_1.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ if v_1.AuxInt != 0 {
+ break
+ }
+ if !(x.Op != OpConst8 && udivisibleOK(8, c) && !hasSmallRotate(config)) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = c & 0xff
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = 0
+ v.AddArg(v3)
+ return true
+ }
+ // match: (Eq8 (Const8 [0]) (Mod8u x (Const8 [c])))
+ // cond: x.Op != OpConst8 && udivisibleOK(8,c) && !hasSmallRotate(config)
+ // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xff])) (Const32 <typ.UInt32> [0]))
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
break
}
- if v_0_0_0_1_0.Type != t {
+ v_1 := v.Args[1]
+ if v_1.Op != OpMod8u {
break
}
- _ = v_0_0_0_1_0.Args[1]
- if n != v_0_0_0_1_0.Args[0] {
+ _ = v_1.Args[1]
+ x := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst8 {
break
}
- v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
- if v_0_0_0_1_0_1.Op != OpConst64 {
+ c := v_1_1.AuxInt
+ if !(x.Op != OpConst8 && udivisibleOK(8, c) && !hasSmallRotate(config)) {
break
}
- if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = c & 0xff
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = 0
+ v.AddArg(v3)
+ return true
+ }
+ // match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul8 {
break
}
- if v_0_0_0_1_0_1.AuxInt != 63 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst8 {
break
}
- v_0_0_0_1_1 := v_0_0_0_1.Args[1]
- if v_0_0_0_1_1.Op != OpConst64 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc32to8 {
break
}
- if v_0_0_0_1_1.Type != typ.UInt64 {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32Ux64 {
break
}
- kbar := v_0_0_0_1_1.AuxInt
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- k := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt8to32 {
break
}
- if v_0_1.Type != typ.UInt64 {
+ if x != mul_1.Args[0] {
break
}
- if v_0_1.AuxInt != k {
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if !(k > 0 && k < 63 && kbar == 64-k) {
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
break
}
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpAnd64, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
return true
}
return false
}
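+// rewriteValuegeneric_OpEq8_10 continues the divisibility rules above:
+// the generated matcher spells out each commutative ordering of the
+// Eq8, Mul8, and Mul32 operands as a separate case, but every variant
+// rewrites to the same (Leq8U (RotateLeft8 (Mul8 m x) (8-k)) max) form.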
-func rewriteValuegeneric_OpEq64_10(v *Value) bool {
+func rewriteValuegeneric_OpEq8_10(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
- // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 63 && kbar == 64 - k
- // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ // match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
for {
- n := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpLsh64x64 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul8 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh64x64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst8 {
break
}
- _ = v_0_0.Args[1]
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd64 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc32to8 {
break
}
- t := v_0_0_0.Type
- _ = v_0_0_0.Args[1]
- v_0_0_0_0 := v_0_0_0.Args[0]
- if v_0_0_0_0.Op != OpRsh64Ux64 {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32Ux64 {
break
}
- if v_0_0_0_0.Type != t {
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- _ = v_0_0_0_0.Args[1]
- v_0_0_0_0_0 := v_0_0_0_0.Args[0]
- if v_0_0_0_0_0.Op != OpRsh64x64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt8to32 {
break
}
- if v_0_0_0_0_0.Type != t {
+ if x != mul_0.Args[0] {
break
}
- _ = v_0_0_0_0_0.Args[1]
- if n != v_0_0_0_0_0.Args[0] {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
- if v_0_0_0_0_0_1.Op != OpConst64 {
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
break
}
- if v_0_0_0_0_0_1.AuxInt != 63 {
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq8 x (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) (Const8 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul8 {
break
}
- v_0_0_0_0_1 := v_0_0_0_0.Args[1]
- if v_0_0_0_0_1.Op != OpConst64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc32to8 {
break
}
- if v_0_0_0_0_1.Type != typ.UInt64 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32Ux64 {
break
}
- kbar := v_0_0_0_0_1.AuxInt
- if n != v_0_0_0.Args[1] {
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt8to32 {
break
}
- k := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ if x != mul_1.Args[0] {
break
}
- if v_0_1.Type != typ.UInt64 {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- if v_0_1.AuxInt != k {
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst8 {
break
}
- if !(k > 0 && k < 63 && kbar == 64-k) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
break
}
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpAnd64, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq64 s:(Sub64 x y) (Const64 [0]))
- // cond: s.Uses == 1
- // result: (Eq64 x y)
+ // match: (Eq8 x (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s]))) (Const8 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
for {
_ = v.Args[1]
- s := v.Args[0]
- if s.Op != OpSub64 {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul8 {
break
}
- y := s.Args[1]
- x := s.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc32to8 {
break
}
- if v_1.AuxInt != 0 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32Ux64 {
break
}
- if !(s.Uses == 1) {
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- v.reset(OpEq64)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- // match: (Eq64 (Const64 [0]) s:(Sub64 x y))
- // cond: s.Uses == 1
- // result: (Eq64 x y)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt8to32 {
break
}
- if v_0.AuxInt != 0 {
+ if x != mul_0.Args[0] {
break
}
- s := v.Args[1]
- if s.Op != OpSub64 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- y := s.Args[1]
- x := s.Args[0]
- if !(s.Uses == 1) {
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- v.reset(OpEq64)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq64F_0(v *Value) bool {
- // match: (Eq64F (Const64F [c]) (Const64F [d]))
- // cond:
- // result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64F {
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst8 {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64F {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
break
}
- d := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d))
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq64F (Const64F [d]) (Const64F [c]))
- // cond:
- // result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
+ // match: (Eq8 (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst64F {
- break
- }
- d := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64F {
+ if v_0.Op != OpMul8 {
break
}
- c := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d))
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq8_0(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq8 x x)
- // cond:
- // result: (ConstBool [1])
- for {
- x := v.Args[1]
- if x != v.Args[0] {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst8 {
break
}
- v.reset(OpConstBool)
- v.AuxInt = 1
- return true
- }
- // match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
- // cond:
- // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc32to8 {
break
}
- t := v_0.Type
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpAdd8 {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32Ux64 {
break
}
- x := v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst8 {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- if v_1_0.Type != t {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- d := v_1_0.AuxInt
- v.reset(OpEq8)
- v0 := b.NewValue0(v.Pos, OpConst8, t)
- v0.AuxInt = int64(int8(c - d))
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq8 (Const8 <t> [c]) (Add8 x (Const8 <t> [d])))
- // cond:
- // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt8to32 {
break
}
- t := v_0.Type
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpAdd8 {
+ if x != mul_1.Args[0] {
break
}
- _ = v_1.Args[1]
- x := v_1.Args[0]
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst8 {
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- if v_1_1.Type != t {
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
break
}
- d := v_1_1.AuxInt
- v.reset(OpEq8)
- v0 := b.NewValue0(v.Pos, OpConst8, t)
- v0.AuxInt = int64(int8(c - d))
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq8 (Add8 (Const8 <t> [d]) x) (Const8 <t> [c]))
- // cond:
- // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+ // match: (Eq8 (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpAdd8 {
+ if v_0.Op != OpMul8 {
break
}
- x := v_0.Args[1]
+ _ = v_0.Args[1]
v_0_0 := v_0.Args[0]
if v_0_0.Op != OpConst8 {
break
}
- t := v_0_0.Type
- d := v_0_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc32to8 {
break
}
- if v_1.Type != t {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32Ux64 {
break
}
- c := v_1.AuxInt
- v.reset(OpEq8)
- v0 := b.NewValue0(v.Pos, OpConst8, t)
- v0.AuxInt = int64(int8(c - d))
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt8to32 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ break
+ }
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq8 (Add8 x (Const8 <t> [d])) (Const8 <t> [c]))
- // cond:
- // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+ // match: (Eq8 (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) (Const8 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpAdd8 {
+ if v_0.Op != OpMul8 {
break
}
_ = v_0.Args[1]
- x := v_0.Args[0]
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst8 {
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc32to8 {
break
}
- t := v_0_1.Type
- d := v_0_1.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32Ux64 {
break
}
- if v_1.Type != t {
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- c := v_1.AuxInt
- v.reset(OpEq8)
- v0 := b.NewValue0(v.Pos, OpConst8, t)
- v0.AuxInt = int64(int8(c - d))
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt8to32 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst8 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ break
+ }
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq8 (Const8 [c]) (Const8 [d]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
+ // match: (Eq8 (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s]))) (Const8 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ if v_0.Op != OpMul8 {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc32to8 {
break
}
- d := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
- return true
- }
- // match: (Eq8 (Const8 [d]) (Const8 [c]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32Ux64 {
break
}
- d := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- c := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt8to32 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst8 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ break
+ }
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
return true
}
// match: (Eq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
}
return false
}
-func rewriteValuegeneric_OpEq8_10(v *Value) bool {
+func rewriteValuegeneric_OpEq8_20(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
// match: (Eq8 (Lsh8x64 (Rsh8x64 (Add8 <t> (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
return true
}
// match: (Mod8u <t> x (Const8 [c]))
- // cond: x.Op != OpConst8 && c > 0 && umagicOK(8 ,c)
+ // cond: x.Op != OpConst8 && c > 0 && umagicOK(8,c)
// result: (Sub8 x (Mul8 <t> (Div8u <t> x (Const8 <t> [c])) (Const8 <t> [c])))
for {
t := v.Type
}
return false
}
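+// A rotation by a multiple of the operand width is a no-op, so each
+// RotateLeft rule below replaces such a rotation with a copy of its
+// input. In particular, for an odd divisor c, udivisible(n,c).k is
+// zero, the divisibility rules above emit a rotation by exactly n
+// bits, and these rules delete it again.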
+func rewriteValuegeneric_OpRotateLeft16_0(v *Value) bool {
+ // match: (RotateLeft16 x (Const16 [c]))
+ // cond: c%16 == 0
+ // result: x
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst16 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(c%16 == 0) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpRotateLeft32_0(v *Value) bool {
+ // match: (RotateLeft32 x (Const32 [c]))
+ // cond: c%32 == 0
+ // result: x
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(c%32 == 0) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpRotateLeft64_0(v *Value) bool {
+ // match: (RotateLeft64 x (Const64 [c]))
+ // cond: c%64 == 0
+ // result: x
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(c%64 == 0) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpRotateLeft8_0(v *Value) bool {
+ // match: (RotateLeft8 x (Const8 [c]))
+ // cond: c%8 == 0
+ // result: x
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst8 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(c%8 == 0) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
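+// Rotations by other constant amounts are left intact here; lowering
+// them to the target's rotate instructions (or shift pairs) is the job
+// of the architecture-specific rewrite rules.
+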
func rewriteValuegeneric_OpRound32F_0(v *Value) bool {
// match: (Round32F x:(Const32F))
// cond: