case OpDiv8u:
return rewriteValuegeneric_OpDiv8u_0(v)
case OpEq16:
- return rewriteValuegeneric_OpEq16_0(v) || rewriteValuegeneric_OpEq16_10(v) || rewriteValuegeneric_OpEq16_20(v) || rewriteValuegeneric_OpEq16_30(v) || rewriteValuegeneric_OpEq16_40(v)
+ return rewriteValuegeneric_OpEq16_0(v) || rewriteValuegeneric_OpEq16_10(v) || rewriteValuegeneric_OpEq16_20(v) || rewriteValuegeneric_OpEq16_30(v) || rewriteValuegeneric_OpEq16_40(v) || rewriteValuegeneric_OpEq16_50(v)
case OpEq32:
- return rewriteValuegeneric_OpEq32_0(v) || rewriteValuegeneric_OpEq32_10(v) || rewriteValuegeneric_OpEq32_20(v) || rewriteValuegeneric_OpEq32_30(v) || rewriteValuegeneric_OpEq32_40(v) || rewriteValuegeneric_OpEq32_50(v) || rewriteValuegeneric_OpEq32_60(v)
+ return rewriteValuegeneric_OpEq32_0(v) || rewriteValuegeneric_OpEq32_10(v) || rewriteValuegeneric_OpEq32_20(v) || rewriteValuegeneric_OpEq32_30(v) || rewriteValuegeneric_OpEq32_40(v) || rewriteValuegeneric_OpEq32_50(v) || rewriteValuegeneric_OpEq32_60(v) || rewriteValuegeneric_OpEq32_70(v) || rewriteValuegeneric_OpEq32_80(v) || rewriteValuegeneric_OpEq32_90(v)
case OpEq32F:
return rewriteValuegeneric_OpEq32F_0(v)
case OpEq64:
- return rewriteValuegeneric_OpEq64_0(v) || rewriteValuegeneric_OpEq64_10(v) || rewriteValuegeneric_OpEq64_20(v) || rewriteValuegeneric_OpEq64_30(v)
+ return rewriteValuegeneric_OpEq64_0(v) || rewriteValuegeneric_OpEq64_10(v) || rewriteValuegeneric_OpEq64_20(v) || rewriteValuegeneric_OpEq64_30(v) || rewriteValuegeneric_OpEq64_40(v) || rewriteValuegeneric_OpEq64_50(v) || rewriteValuegeneric_OpEq64_60(v)
case OpEq64F:
return rewriteValuegeneric_OpEq64F_0(v)
case OpEq8:
- return rewriteValuegeneric_OpEq8_0(v) || rewriteValuegeneric_OpEq8_10(v) || rewriteValuegeneric_OpEq8_20(v)
+ return rewriteValuegeneric_OpEq8_0(v) || rewriteValuegeneric_OpEq8_10(v) || rewriteValuegeneric_OpEq8_20(v) || rewriteValuegeneric_OpEq8_30(v)
case OpEqB:
return rewriteValuegeneric_OpEqB_0(v)
case OpEqInter:
v.AddArg(v3)
return true
}
+ // match: (Eq16 (Mod16 x (Const16 [c])) (Const16 [0]))
+ // cond: x.Op != OpConst16 && sdivisibleOK(16,c) && !hasSmallRotate(config)
+ // result: (Eq32 (Mod32 <typ.Int32> (SignExt16to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMod16 {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
+ break
+ }
+ c := v_0_1.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst16 {
+ break
+ }
+ if v_1.AuxInt != 0 {
+ break
+ }
+ if !(x.Op != OpConst16 && sdivisibleOK(16, c) && !hasSmallRotate(config)) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
+ v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+ v2.AuxInt = c
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+ v3.AuxInt = 0
+ v.AddArg(v3)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq16_10(v *Value) bool {
+ b := v.Block
+ config := b.Func.Config
+ typ := &b.Func.Config.Types
+ // match: (Eq16 (Const16 [0]) (Mod16 x (Const16 [c])))
+ // cond: x.Op != OpConst16 && sdivisibleOK(16,c) && !hasSmallRotate(config)
+ // result: (Eq32 (Mod32 <typ.Int32> (SignExt16to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v_1 := v.Args[1]
+ if v_1.Op != OpMod16 {
+ break
+ }
+ _ = v_1.Args[1]
+ x := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(x.Op != OpConst16 && sdivisibleOK(16, c) && !hasSmallRotate(config)) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
+ v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+ v2.AuxInt = c
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+ v3.AuxInt = 0
+ v.AddArg(v3)
+ return true
+ }
// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s])))))
// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
v.AddArg(v4)
return true
}
- return false
-}
-func rewriteValuegeneric_OpEq16_10(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (ZeroExt16to64 x) (Const64 [m])) (Const64 [s])))))
// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
v.AddArg(v4)
return true
}
+ return false
+}
+func rewriteValuegeneric_OpEq16_20(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s])))))
// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
v.AddArg(v4)
return true
}
- return false
-}
-func rewriteValuegeneric_OpEq16_20(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
// match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s]))) (Const16 [c])))
// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
v.AddArg(v4)
return true
}
+ return false
+}
+func rewriteValuegeneric_OpEq16_30(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
// match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s]))) (Const16 [c])))
// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
v.AddArg(v4)
return true
}
- return false
-}
-func rewriteValuegeneric_OpEq16_30(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
// match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s])))) x)
// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
v.AddArg(v4)
return true
}
+ return false
+}
+func rewriteValuegeneric_OpEq16_40(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
// match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s])))) x)
// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
v.AddArg(v4)
return true
}
- return false
-}
-func rewriteValuegeneric_OpEq16_40(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
// match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s]))) (Const16 [c])) x)
// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
v.AddArg(v4)
return true
}
- // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 15 && kbar == 16 - k
- // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ // match: (Eq16 x (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
for {
_ = v.Args[1]
- n := v.Args[0]
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpLsh16x64 {
+ if v_1.Op != OpMul16 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh16x64 {
- break
- }
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd16 {
- break
- }
- t := v_1_0_0.Type
- _ = v_1_0_0.Args[1]
- if n != v_1_0_0.Args[0] {
- break
- }
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpRsh16Ux64 {
- break
- }
- if v_1_0_0_1.Type != t {
- break
- }
- _ = v_1_0_0_1.Args[1]
- v_1_0_0_1_0 := v_1_0_0_1.Args[0]
- if v_1_0_0_1_0.Op != OpRsh16x64 {
+ if v_1_0.Op != OpConst16 {
break
}
- if v_1_0_0_1_0.Type != t {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpSub16 {
break
}
- _ = v_1_0_0_1_0.Args[1]
- if n != v_1_0_0_1_0.Args[0] {
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32x64 {
break
}
- v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
- if v_1_0_0_1_0_1.Op != OpConst64 {
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- if v_1_0_0_1_0_1.AuxInt != 15 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpSignExt16to32 {
break
}
- v_1_0_0_1_1 := v_1_0_0_1.Args[1]
- if v_1_0_0_1_1.Op != OpConst64 {
+ if x != mul_1.Args[0] {
break
}
- if v_1_0_0_1_1.Type != typ.UInt64 {
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- kbar := v_1_0_0_1_1.AuxInt
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh32x64 {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ _ = v_1_1_1.Args[1]
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpSignExt16to32 {
break
}
- k := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if x != v_1_1_1_0.Args[0] {
break
}
- if v_1_1.Type != typ.UInt64 {
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
break
}
- if v_1_1.AuxInt != k {
+ if v_1_1_1_1.AuxInt != 31 {
break
}
- if !(k > 0 && k < 15 && kbar == 16-k) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
break
}
- v.reset(OpEq16)
- v0 := b.NewValue0(v.Pos, OpAnd16, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst16, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(int16(sdivisible(16, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(sdivisible(16, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v5.AuxInt = int64(16 - sdivisible(16, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst16, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v6.AuxInt = int64(int16(sdivisible(16, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 15 && kbar == 16 - k
- // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ // match: (Eq16 x (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
for {
_ = v.Args[1]
- n := v.Args[0]
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpLsh16x64 {
+ if v_1.Op != OpMul16 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh16x64 {
- break
- }
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd16 {
- break
- }
- t := v_1_0_0.Type
- _ = v_1_0_0.Args[1]
- v_1_0_0_0 := v_1_0_0.Args[0]
- if v_1_0_0_0.Op != OpRsh16Ux64 {
- break
- }
- if v_1_0_0_0.Type != t {
- break
- }
- _ = v_1_0_0_0.Args[1]
- v_1_0_0_0_0 := v_1_0_0_0.Args[0]
- if v_1_0_0_0_0.Op != OpRsh16x64 {
- break
- }
- if v_1_0_0_0_0.Type != t {
+ if v_1_0.Op != OpConst16 {
break
}
- _ = v_1_0_0_0_0.Args[1]
- if n != v_1_0_0_0_0.Args[0] {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpSub16 {
break
}
- v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
- if v_1_0_0_0_0_1.Op != OpConst64 {
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32x64 {
break
}
- if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- if v_1_0_0_0_0_1.AuxInt != 15 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpSignExt16to32 {
break
}
- v_1_0_0_0_1 := v_1_0_0_0.Args[1]
- if v_1_0_0_0_1.Op != OpConst64 {
+ if x != mul_0.Args[0] {
break
}
- if v_1_0_0_0_1.Type != typ.UInt64 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- kbar := v_1_0_0_0_1.AuxInt
- if n != v_1_0_0.Args[1] {
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh32x64 {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ _ = v_1_1_1.Args[1]
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpSignExt16to32 {
break
}
- k := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if x != v_1_1_1_0.Args[0] {
break
}
- if v_1_1.Type != typ.UInt64 {
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
break
}
- if v_1_1.AuxInt != k {
+ if v_1_1_1_1.AuxInt != 31 {
break
}
- if !(k > 0 && k < 15 && kbar == 16-k) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
break
}
- v.reset(OpEq16)
- v0 := b.NewValue0(v.Pos, OpAnd16, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst16, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(int16(sdivisible(16, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(sdivisible(16, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v5.AuxInt = int64(16 - sdivisible(16, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst16, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v6.AuxInt = int64(int16(sdivisible(16, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 15 && kbar == 16 - k
- // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ // match: (Eq16 x (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
for {
- n := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpLsh16x64 {
- break
- }
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh16x64 {
- break
- }
- _ = v_0_0.Args[1]
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd16 {
- break
- }
- t := v_0_0_0.Type
- _ = v_0_0_0.Args[1]
- if n != v_0_0_0.Args[0] {
- break
- }
- v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpRsh16Ux64 {
- break
- }
- if v_0_0_0_1.Type != t {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
break
}
- _ = v_0_0_0_1.Args[1]
- v_0_0_0_1_0 := v_0_0_0_1.Args[0]
- if v_0_0_0_1_0.Op != OpRsh16x64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSub16 {
break
}
- if v_0_0_0_1_0.Type != t {
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32x64 {
break
}
- _ = v_0_0_0_1_0.Args[1]
- if n != v_0_0_0_1_0.Args[0] {
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
- if v_0_0_0_1_0_1.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpSignExt16to32 {
break
}
- if v_0_0_0_1_0_1.AuxInt != 15 {
+ if x != mul_1.Args[0] {
break
}
- v_0_0_0_1_1 := v_0_0_0_1.Args[1]
- if v_0_0_0_1_1.Op != OpConst64 {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- if v_0_0_0_1_1.Type != typ.UInt64 {
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh32x64 {
break
}
- kbar := v_0_0_0_1_1.AuxInt
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ _ = v_1_0_1.Args[1]
+ v_1_0_1_0 := v_1_0_1.Args[0]
+ if v_1_0_1_0.Op != OpSignExt16to32 {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ if x != v_1_0_1_0.Args[0] {
break
}
- k := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
break
}
- if v_0_1.Type != typ.UInt64 {
+ if v_1_0_1_1.AuxInt != 31 {
break
}
- if v_0_1.AuxInt != k {
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
break
}
- if !(k > 0 && k < 15 && kbar == 16-k) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
break
}
- v.reset(OpEq16)
- v0 := b.NewValue0(v.Pos, OpAnd16, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst16, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(int16(sdivisible(16, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(sdivisible(16, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v5.AuxInt = int64(16 - sdivisible(16, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst16, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v6.AuxInt = int64(int16(sdivisible(16, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 15 && kbar == 16 - k
- // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+ // match: (Eq16 x (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
for {
- n := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpLsh16x64 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul16 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh16x64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSub16 {
break
}
- _ = v_0_0.Args[1]
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd16 {
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32x64 {
break
}
- t := v_0_0_0.Type
- _ = v_0_0_0.Args[1]
- v_0_0_0_0 := v_0_0_0.Args[0]
- if v_0_0_0_0.Op != OpRsh16Ux64 {
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- if v_0_0_0_0.Type != t {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpSignExt16to32 {
break
}
- _ = v_0_0_0_0.Args[1]
- v_0_0_0_0_0 := v_0_0_0_0.Args[0]
- if v_0_0_0_0_0.Op != OpRsh16x64 {
+ if x != mul_0.Args[0] {
break
}
- if v_0_0_0_0_0.Type != t {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- _ = v_0_0_0_0_0.Args[1]
- if n != v_0_0_0_0_0.Args[0] {
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
- if v_0_0_0_0_0_1.Op != OpConst64 {
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh32x64 {
break
}
- if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ _ = v_1_0_1.Args[1]
+ v_1_0_1_0 := v_1_0_1.Args[0]
+ if v_1_0_1_0.Op != OpSignExt16to32 {
break
}
- if v_0_0_0_0_0_1.AuxInt != 15 {
+ if x != v_1_0_1_0.Args[0] {
break
}
- v_0_0_0_0_1 := v_0_0_0_0.Args[1]
- if v_0_0_0_0_1.Op != OpConst64 {
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
break
}
- if v_0_0_0_0_1.Type != typ.UInt64 {
+ if v_1_0_1_1.AuxInt != 31 {
break
}
- kbar := v_0_0_0_0_1.AuxInt
- if n != v_0_0_0.Args[1] {
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst16 {
break
}
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(int16(sdivisible(16, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(sdivisible(16, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v5.AuxInt = int64(16 - sdivisible(16, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v6.AuxInt = int64(int16(sdivisible(16, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq16 (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
break
}
- k := v_0_0_1.AuxInt
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst16 {
+ break
+ }
+ c := v_0_0.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ if v_0_1.Op != OpSub16 {
break
}
- if v_0_1.Type != typ.UInt64 {
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32x64 {
break
}
- if v_0_1.AuxInt != k {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- if !(k > 0 && k < 15 && kbar == 16-k) {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- v.reset(OpEq16)
- v0 := b.NewValue0(v.Pos, OpAnd16, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst16, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
- v0.AddArg(v1)
- v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst16, t)
- v2.AuxInt = 0
- v.AddArg(v2)
- return true
- }
- // match: (Eq16 s:(Sub16 x y) (Const16 [0]))
- // cond: s.Uses == 1
- // result: (Eq16 x y)
- for {
- _ = v.Args[1]
- s := v.Args[0]
- if s.Op != OpSub16 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpSignExt16to32 {
break
}
- y := s.Args[1]
- x := s.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst16 {
+ if x != mul_1.Args[0] {
break
}
- if v_1.AuxInt != 0 {
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- if !(s.Uses == 1) {
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh32x64 {
break
}
- v.reset(OpEq16)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- // match: (Eq16 (Const16 [0]) s:(Sub16 x y))
- // cond: s.Uses == 1
- // result: (Eq16 x y)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst16 {
+ _ = v_0_1_1.Args[1]
+ v_0_1_1_0 := v_0_1_1.Args[0]
+ if v_0_1_1_0.Op != OpSignExt16to32 {
break
}
- if v_0.AuxInt != 0 {
+ if x != v_0_1_1_0.Args[0] {
break
}
- s := v.Args[1]
- if s.Op != OpSub16 {
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
break
}
- y := s.Args[1]
- x := s.Args[0]
- if !(s.Uses == 1) {
+ if v_0_1_1_1.AuxInt != 31 {
break
}
- v.reset(OpEq16)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq32_0(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq32 x x)
- // cond:
- // result: (ConstBool [1])
- for {
- x := v.Args[1]
- if x != v.Args[0] {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
break
}
- v.reset(OpConstBool)
- v.AuxInt = 1
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(int16(sdivisible(16, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(sdivisible(16, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v5.AuxInt = int64(16 - sdivisible(16, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v6.AuxInt = int64(int16(sdivisible(16, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
- // cond:
- // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ // match: (Eq16 (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ if v_0.Op != OpMul16 {
break
}
- t := v_0.Type
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpAdd32 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst16 {
break
}
- x := v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpSub16 {
break
}
- if v_1_0.Type != t {
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32x64 {
break
}
- d := v_1_0.AuxInt
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpConst32, t)
- v0.AuxInt = int64(int32(c - d))
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d])))
- // cond:
- // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- t := v_0.Type
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpAdd32 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpSignExt16to32 {
break
}
- _ = v_1.Args[1]
- x := v_1.Args[0]
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ if x != mul_0.Args[0] {
break
}
- if v_1_1.Type != t {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- d := v_1_1.AuxInt
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpConst32, t)
- v0.AuxInt = int64(int32(c - d))
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq32 (Add32 (Const32 <t> [d]) x) (Const32 <t> [c]))
- // cond:
- // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpAdd32 {
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- x := v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh32x64 {
break
}
- t := v_0_0.Type
- d := v_0_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ _ = v_0_1_1.Args[1]
+ v_0_1_1_0 := v_0_1_1.Args[0]
+ if v_0_1_1_0.Op != OpSignExt16to32 {
break
}
- if v_1.Type != t {
+ if x != v_0_1_1_0.Args[0] {
break
}
- c := v_1.AuxInt
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpConst32, t)
- v0.AuxInt = int64(int32(c - d))
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(int16(sdivisible(16, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(sdivisible(16, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v5.AuxInt = int64(16 - sdivisible(16, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v.AddArg(x)
+ v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v6.AuxInt = int64(int16(sdivisible(16, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq32 (Add32 x (Const32 <t> [d])) (Const32 <t> [c]))
- // cond:
- // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ // match: (Eq16 (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpAdd32 {
+ if v_0.Op != OpMul16 {
break
}
_ = v_0.Args[1]
- x := v_0.Args[0]
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
- break
- }
- t := v_0_1.Type
- d := v_0_1.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub16 {
break
}
- if v_1.Type != t {
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32x64 {
break
}
- c := v_1.AuxInt
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpConst32, t)
- v0.AuxInt = int64(int32(c - d))
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq32 (Const32 [c]) (Const32 [d]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- d := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
- return true
- }
- // match: (Eq32 (Const32 [d]) (Const32 [c]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpSignExt16to32 {
break
}
- d := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ if x != mul_1.Args[0] {
break
}
- c := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
- return true
- }
- // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh32x64 {
break
}
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh32Ux64 {
+ _ = v_0_0_1.Args[1]
+ v_0_0_1_0 := v_0_0_1.Args[0]
+ if v_0_0_1_0.Op != OpSignExt16to32 {
break
}
- _ = v_1_1.Args[1]
- mul := v_1_1.Args[0]
- if mul.Op != OpHmul32u {
+ if x != v_0_0_1_0.Args[0] {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpConst32 {
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
break
}
- m := mul_0.AuxInt
- if x != mul.Args[1] {
+ if v_0_0_1_1.AuxInt != 31 {
break
}
- v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
break
}
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(int16(sdivisible(16, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(sdivisible(16, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v5.AuxInt = int64(16 - sdivisible(16, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v6.AuxInt = int64(int16(sdivisible(16, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
+ return false
+}
+func rewriteValuegeneric_OpEq16_50(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq16 (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+ // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul16 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub16 {
break
}
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh32Ux64 {
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32x64 {
break
}
- _ = v_1_1.Args[1]
- mul := v_1_1.Args[0]
- if mul.Op != OpHmul32u {
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
_ = mul.Args[1]
- if x != mul.Args[0] {
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpSignExt16to32 {
+ break
+ }
+ if x != mul_0.Args[0] {
break
}
mul_1 := mul.Args[1]
break
}
m := mul_1.AuxInt
- v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh32x64 {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
+ _ = v_0_0_1.Args[1]
+ v_0_0_1_0 := v_0_0_1.Args[0]
+ if v_0_0_1_0.Op != OpSignExt16to32 {
+ break
+ }
+ if x != v_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 31 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst16 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
+ break
+ }
+ v.reset(OpLeq16U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+ v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+ v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+ v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v3.AuxInt = int64(int16(sdivisible(16, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v4.AuxInt = int64(int16(sdivisible(16, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v5.AuxInt = int64(16 - sdivisible(16, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+ v6.AuxInt = int64(int16(sdivisible(16, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
for {
_ = v.Args[1]
- x := v.Args[0]
+ n := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
+ if v_1.Op != OpLsh16x64 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh32Ux64 {
+ if v_1_0.Op != OpRsh16x64 {
break
}
_ = v_1_0.Args[1]
- mul := v_1_0.Args[0]
- if mul.Op != OpHmul32u {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd16 {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpConst32 {
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ if n != v_1_0_0.Args[0] {
break
}
- m := mul_0.AuxInt
- if x != mul.Args[1] {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpRsh16Ux64 {
break
}
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ if v_1_0_0_1.Type != t {
break
}
- s := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ _ = v_1_0_0_1.Args[1]
+ v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+ if v_1_0_0_1_0.Op != OpRsh16x64 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ if v_1_0_0_1_0.Type != t {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
- v1.AddArg(v2)
- v1.AddArg(x)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
- v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq32_10(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s])) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
+ _ = v_1_0_0_1_0.Args[1]
+ if n != v_1_0_0_1_0.Args[0] {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh32Ux64 {
+ v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+ if v_1_0_0_1_0_1.Op != OpConst64 {
break
}
- _ = v_1_0.Args[1]
- mul := v_1_0.Args[0]
- if mul.Op != OpHmul32u {
+ if v_1_0_0_1_0_1.Type != typ.UInt64 {
break
}
- _ = mul.Args[1]
- if x != mul.Args[0] {
+ if v_1_0_0_1_0_1.AuxInt != 15 {
break
}
- mul_1 := mul.Args[1]
- if mul_1.Op != OpConst32 {
+ v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+ if v_1_0_0_1_1.Op != OpConst64 {
break
}
- m := mul_1.AuxInt
+ if v_1_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_1_1.AuxInt
v_1_0_1 := v_1_0.Args[1]
if v_1_0_1.Op != OpConst64 {
break
}
- s := v_1_0_1.AuxInt
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ if v_1_1.Op != OpConst64 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ if v_1_1.Type != typ.UInt64 {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
- v1.AddArg(v2)
- v1.AddArg(x)
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 15 && kbar == 16-k) {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
return true
}
- // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh16x64 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh16x64 {
break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh32Ux64 {
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd16 {
break
}
- _ = v_0_1.Args[1]
- mul := v_0_1.Args[0]
- if mul.Op != OpHmul32u {
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpRsh16Ux64 {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpConst32 {
+ if v_1_0_0_0.Type != t {
break
}
- m := mul_0.AuxInt
- if x != mul.Args[1] {
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpRsh16x64 {
break
}
- v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ if v_1_0_0_0_0.Type != t {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ _ = v_1_0_0_0_0.Args[1]
+ if n != v_1_0_0_0_0.Args[0] {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
- v1.AddArg(v2)
- v1.AddArg(x)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
- v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
- return true
- }
- // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
- for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ if v_1_0_0_0_0_1.Type != typ.UInt64 {
break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh32Ux64 {
+ if v_1_0_0_0_0_1.AuxInt != 15 {
break
}
- _ = v_0_1.Args[1]
- mul := v_0_1.Args[0]
- if mul.Op != OpHmul32u {
+ v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+ if v_1_0_0_0_1.Op != OpConst64 {
break
}
- _ = mul.Args[1]
- if x != mul.Args[0] {
+ if v_1_0_0_0_1.Type != typ.UInt64 {
break
}
- mul_1 := mul.Args[1]
- if mul_1.Op != OpConst32 {
+ kbar := v_1_0_0_0_1.AuxInt
+ if n != v_1_0_0.Args[1] {
break
}
- m := mul_1.AuxInt
- v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ if v_1_0_1.Type != typ.UInt64 {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
- v1.AddArg(v2)
- v1.AddArg(x)
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 15 && kbar == 16-k) {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
return true
}
- // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
for {
- x := v.Args[1]
+ n := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ if v_0.Op != OpLsh16x64 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh32Ux64 {
+ if v_0_0.Op != OpRsh16x64 {
break
}
_ = v_0_0.Args[1]
- mul := v_0_0.Args[0]
- if mul.Op != OpHmul32u {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd16 {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpConst32 {
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ if n != v_0_0_0.Args[0] {
break
}
- m := mul_0.AuxInt
- if x != mul.Args[1] {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpRsh16Ux64 {
break
}
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ if v_0_0_0_1.Type != t {
break
}
- s := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ _ = v_0_0_0_1.Args[1]
+ v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+ if v_0_0_0_1_0.Op != OpRsh16x64 {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ if v_0_0_0_1_0.Type != t {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
- v1.AddArg(v2)
- v1.AddArg(x)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
- v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
- return true
- }
- // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s])) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
- for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v_0_0_0_1_0.Args[1]
+ if n != v_0_0_0_1_0.Args[0] {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh32Ux64 {
+ v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+ if v_0_0_0_1_0_1.Op != OpConst64 {
break
}
- _ = v_0_0.Args[1]
- mul := v_0_0.Args[0]
- if mul.Op != OpHmul32u {
+ if v_0_0_0_1_0_1.Type != typ.UInt64 {
break
}
- _ = mul.Args[1]
- if x != mul.Args[0] {
+ if v_0_0_0_1_0_1.AuxInt != 15 {
break
}
- mul_1 := mul.Args[1]
- if mul_1.Op != OpConst32 {
+ v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+ if v_0_0_0_1_1.Op != OpConst64 {
break
}
- m := mul_1.AuxInt
+ if v_0_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_1_1.AuxInt
v_0_0_1 := v_0_0.Args[1]
if v_0_0_1.Op != OpConst64 {
break
}
- s := v_0_0_1.AuxInt
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ if v_0_1.Op != OpConst64 {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ if v_0_1.Type != typ.UInt64 {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
- v1.AddArg(v2)
- v1.AddArg(x)
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 15 && kbar == 16-k) {
+ break
+ }
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
return true
}
- // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 15 && kbar == 16 - k
+ // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh16x64 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh16x64 {
break
}
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh32Ux64 {
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd16 {
break
}
- _ = v_1_1.Args[1]
- mul := v_1_1.Args[0]
- if mul.Op != OpHmul32u {
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpRsh16Ux64 {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpConst32 {
+ if v_0_0_0_0.Type != t {
break
}
- if mul_0.Type != typ.UInt32 {
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpRsh16x64 {
break
}
- m := mul_0.AuxInt
- mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh32Ux64 {
+ if v_0_0_0_0_0.Type != t {
break
}
- _ = mul_1.Args[1]
- if x != mul_1.Args[0] {
+ _ = v_0_0_0_0_0.Args[1]
+ if n != v_0_0_0_0_0.Args[0] {
break
}
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
- break
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
}
- if mul_1_1.AuxInt != 1 {
+ if v_0_0_0_0_0_1.Type != typ.UInt64 {
break
}
- v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
+ if v_0_0_0_0_0_1.AuxInt != 15 {
break
}
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+ if v_0_0_0_0_1.Op != OpConst64 {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
- v1.AddArg(v2)
- v1.AddArg(x)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
- v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
- return true
- }
- // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
+ if v_0_0_0_0_1.Type != typ.UInt64 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
+ kbar := v_0_0_0_0_1.AuxInt
+ if n != v_0_0_0.Args[1] {
break
}
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh32Ux64 {
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
break
}
- _ = v_1_1.Args[1]
- mul := v_1_1.Args[0]
- if mul.Op != OpHmul32u {
+ if v_0_0_1.Type != typ.UInt64 {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh32Ux64 {
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
break
}
- _ = mul_0.Args[1]
- if x != mul_0.Args[0] {
+ if v_0_1.Type != typ.UInt64 {
break
}
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
+ if v_0_1.AuxInt != k {
break
}
- if mul_0_1.AuxInt != 1 {
+ if !(k > 0 && k < 15 && kbar == 16-k) {
break
}
- mul_1 := mul.Args[1]
- if mul_1.Op != OpConst32 {
+ v.reset(OpEq16)
+ v0 := b.NewValue0(v.Pos, OpAnd16, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst16, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst16, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq16 s:(Sub16 x y) (Const16 [0]))
+ // cond: s.Uses == 1
+ // result: (Eq16 x y)
+ for {
+ _ = v.Args[1]
+ s := v.Args[0]
+ if s.Op != OpSub16 {
break
}
- if mul_1.Type != typ.UInt32 {
+ y := s.Args[1]
+ x := s.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst16 {
break
}
- m := mul_1.AuxInt
- v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
+ if v_1.AuxInt != 0 {
break
}
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ if !(s.Uses == 1) {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
- v1.AddArg(v2)
- v1.AddArg(x)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
- v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
+ v.reset(OpEq16)
+ v.AddArg(x)
+ v.AddArg(y)
return true
}
- // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ // match: (Eq16 (Const16 [0]) s:(Sub16 x y))
+ // cond: s.Uses == 1
+ // result: (Eq16 x y)
for {
_ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst16 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh32Ux64 {
+ if v_0.AuxInt != 0 {
break
}
- _ = v_1_0.Args[1]
- mul := v_1_0.Args[0]
- if mul.Op != OpHmul32u {
+ s := v.Args[1]
+ if s.Op != OpSub16 {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpConst32 {
+ y := s.Args[1]
+ x := s.Args[0]
+ if !(s.Uses == 1) {
break
}
- if mul_0.Type != typ.UInt32 {
+ v.reset(OpEq16)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq32_0(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 x x)
+ // cond:
+ // result: (ConstBool [1])
+ for {
+ x := v.Args[1]
+ if x != v.Args[0] {
break
}
- m := mul_0.AuxInt
- mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh32Ux64 {
+ v.reset(OpConstBool)
+ v.AuxInt = 1
+ return true
+ }
+ // match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
+ // cond:
+ // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
break
}
- _ = mul_1.Args[1]
- if x != mul_1.Args[0] {
+ t := v_0.Type
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpAdd32 {
break
}
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
+ x := v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- if mul_1_1.AuxInt != 1 {
+ if v_1_0.Type != t {
break
}
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ d := v_1_0.AuxInt
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpConst32, t)
+ v0.AuxInt = int64(int32(c - d))
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d])))
+ // cond:
+ // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
break
}
- s := v_1_0_1.AuxInt
+ t := v_0.Type
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpAdd32 {
+ break
+ }
+ _ = v_1.Args[1]
+ x := v_1.Args[0]
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst32 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ if v_1_1.Type != t {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
- v1.AddArg(v2)
- v1.AddArg(x)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
+ d := v_1_1.AuxInt
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpConst32, t)
+ v0.AuxInt = int64(int32(c - d))
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
+ v.AddArg(x)
return true
}
- // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s])) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
- // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+ // match: (Eq32 (Add32 (Const32 <t> [d]) x) (Const32 <t> [c]))
+ // cond:
+ // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
for {
_ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAdd32 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh32Ux64 {
+ x := v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- _ = v_1_0.Args[1]
- mul := v_1_0.Args[0]
- if mul.Op != OpHmul32u {
- break
+ t := v_0_0.Type
+ d := v_0_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32 {
+ break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh32Ux64 {
+ if v_1.Type != t {
break
}
- _ = mul_0.Args[1]
- if x != mul_0.Args[0] {
+ c := v_1.AuxInt
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpConst32, t)
+ v0.AuxInt = int64(int32(c - d))
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq32 (Add32 x (Const32 <t> [d])) (Const32 <t> [c]))
+ // cond:
+ // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpAdd32 {
break
}
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
break
}
- if mul_0_1.AuxInt != 1 {
+ t := v_0_1.Type
+ d := v_0_1.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32 {
break
}
- mul_1 := mul.Args[1]
- if mul_1.Op != OpConst32 {
+ if v_1.Type != t {
break
}
- if mul_1.Type != typ.UInt32 {
+ c := v_1.AuxInt
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpConst32, t)
+ v0.AuxInt = int64(int32(c - d))
+ v.AddArg(v0)
+ v.AddArg(x)
+ return true
+ }
+ // match: (Eq32 (Const32 [c]) (Const32 [d]))
+ // cond:
+ // result: (ConstBool [b2i(c == d)])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
break
}
- m := mul_1.AuxInt
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32 {
break
}
- s := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ d := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(c == d)
+ return true
+ }
+ // match: (Eq32 (Const32 [d]) (Const32 [c]))
+ // cond:
+ // result: (ConstBool [b2i(c == d)])
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst32 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ d := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst32 {
break
}
- v.reset(OpLeq32U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = int64(int32(udivisible(32, c).m))
- v1.AddArg(v2)
- v1.AddArg(x)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = int64(32 - udivisible(32, c).k)
- v0.AddArg(v3)
- v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v4.AuxInt = int64(int32(udivisible(32, c).max))
- v.AddArg(v4)
+ c := v_1.AuxInt
+ v.reset(OpConstBool)
+ v.AuxInt = b2i(c == d)
return true
}
- // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh32Ux64 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
break
}
- _ = v_0_1.Args[1]
- mul := v_0_1.Args[0]
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
if mul.Op != OpHmul32u {
break
}
if mul_0.Op != OpConst32 {
break
}
- if mul_0.Type != typ.UInt32 {
- break
- }
m := mul_0.AuxInt
- mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh32Ux64 {
- break
- }
- _ = mul_1.Args[1]
- if x != mul_1.Args[0] {
- break
- }
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
- break
- }
- if mul_1_1.AuxInt != 1 {
+ if x != mul.Args[1] {
break
}
- v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- return false
-}
-func rewriteValuegeneric_OpEq32_20(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh32Ux64 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
break
}
- _ = v_0_1.Args[1]
- mul := v_0_1.Args[0]
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
if mul.Op != OpHmul32u {
break
}
_ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh32Ux64 {
- break
- }
- _ = mul_0.Args[1]
- if x != mul_0.Args[0] {
- break
- }
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
- break
- }
- if mul_0_1.AuxInt != 1 {
+ if x != mul.Args[0] {
break
}
mul_1 := mul.Args[1]
if mul_1.Op != OpConst32 {
break
}
- if mul_1.Type != typ.UInt32 {
- break
- }
m := mul_1.AuxInt
- v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh32Ux64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
break
}
- _ = v_0_0.Args[1]
- mul := v_0_0.Args[0]
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
if mul.Op != OpHmul32u {
break
}
if mul_0.Op != OpConst32 {
break
}
- if mul_0.Type != typ.UInt32 {
- break
- }
m := mul_0.AuxInt
- mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh32Ux64 {
- break
- }
- _ = mul_1.Args[1]
- if x != mul_1.Args[0] {
+ if x != mul.Args[1] {
break
}
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
break
}
- if mul_1_1.AuxInt != 1 {
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
break
}
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
- break
- }
- s := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
- break
- }
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s])) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ return false
+}
+func rewriteValuegeneric_OpEq32_10(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh32Ux64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
break
}
- _ = v_0_0.Args[1]
- mul := v_0_0.Args[0]
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
if mul.Op != OpHmul32u {
break
}
_ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh32Ux64 {
- break
- }
- _ = mul_0.Args[1]
- if x != mul_0.Args[0] {
- break
- }
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
- break
- }
- if mul_0_1.AuxInt != 1 {
+ if x != mul.Args[0] {
break
}
mul_1 := mul.Args[1]
if mul_1.Op != OpConst32 {
break
}
- if mul_1.Type != typ.UInt32 {
- break
- }
m := mul_1.AuxInt
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
break
}
- s := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
- break
- }
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh32Ux64 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- _ = v_1_1.Args[1]
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpAvg32u {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- _ = v_1_1_0.Args[1]
- if x != v_1_1_0.Args[0] {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
break
}
- mul := v_1_1_0.Args[1]
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
if mul.Op != OpHmul32u {
break
}
if x != mul.Args[1] {
break
}
- v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
break
}
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
- break
- }
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh32Ux64 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- _ = v_1_1.Args[1]
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpAvg32u {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- _ = v_1_1_0.Args[1]
- if x != v_1_1_0.Args[0] {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
break
}
- mul := v_1_1_0.Args[1]
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
if mul.Op != OpHmul32u {
break
}
break
}
m := mul_1.AuxInt
- v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
break
}
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh32Ux64 {
- break
- }
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAvg32u {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- _ = v_1_0_0.Args[1]
- if x != v_1_0_0.Args[0] {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
break
}
- mul := v_1_0_0.Args[1]
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
if mul.Op != OpHmul32u {
break
}
if x != mul.Args[1] {
break
}
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
break
}
- s := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s])) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh32Ux64 {
- break
- }
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAvg32u {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- _ = v_1_0_0.Args[1]
- if x != v_1_0_0.Args[0] {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
break
}
- mul := v_1_0_0.Args[1]
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
if mul.Op != OpHmul32u {
break
}
break
}
m := mul_1.AuxInt
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
break
}
- s := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
- break
- }
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh32Ux64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- _ = v_0_1.Args[1]
- v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpAvg32u {
- break
- }
- _ = v_0_1_0.Args[1]
- if x != v_0_1_0.Args[0] {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
break
}
- mul := v_0_1_0.Args[1]
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
if mul.Op != OpHmul32u {
break
}
if mul_0.Op != OpConst32 {
break
}
+ if mul_0.Type != typ.UInt32 {
+ break
+ }
m := mul_0.AuxInt
- if x != mul.Args[1] {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh32Ux64 {
break
}
- v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh32Ux64 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
break
}
- _ = v_0_1.Args[1]
- v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpAvg32u {
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
+ if mul.Op != OpHmul32u {
break
}
- _ = v_0_1_0.Args[1]
- if x != v_0_1_0.Args[0] {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh32Ux64 {
break
}
- mul := v_0_1_0.Args[1]
- if mul.Op != OpHmul32u {
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
break
}
- _ = mul.Args[1]
- if x != mul.Args[0] {
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
break
}
mul_1 := mul.Args[1]
if mul_1.Op != OpConst32 {
break
}
+ if mul_1.Type != typ.UInt32 {
+ break
+ }
m := mul_1.AuxInt
- v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
- break
- }
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh32Ux64 {
- break
- }
- _ = v_0_0.Args[1]
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAvg32u {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0_0_0.Args[1]
- if x != v_0_0_0.Args[0] {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
break
}
- mul := v_0_0_0.Args[1]
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
if mul.Op != OpHmul32u {
break
}
if mul_0.Op != OpConst32 {
break
}
+ if mul_0.Type != typ.UInt32 {
+ break
+ }
m := mul_0.AuxInt
- if x != mul.Args[1] {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh32Ux64 {
break
}
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
break
}
- s := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- return false
-}
-func rewriteValuegeneric_OpEq32_30(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq32 (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s])) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh32Ux64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
break
}
- _ = v_0_0.Args[1]
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAvg32u {
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul32u {
break
}
- _ = v_0_0_0.Args[1]
- if x != v_0_0_0.Args[0] {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh32Ux64 {
break
}
- mul := v_0_0_0.Args[1]
- if mul.Op != OpHmul32u {
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
break
}
- _ = mul.Args[1]
- if x != mul.Args[0] {
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
break
}
mul_1 := mul.Args[1]
if mul_1.Op != OpConst32 {
break
}
+ if mul_1.Type != typ.UInt32 {
+ break
+ }
m := mul_1.AuxInt
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
break
}
- s := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s])))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpTrunc64to32 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpRsh64Ux64 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
break
}
- _ = v_1_1_0.Args[1]
- mul := v_1_1_0.Args[0]
- if mul.Op != OpMul64 {
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul32u {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpConst64 {
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ if mul_0.Type != typ.UInt32 {
break
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpZeroExt32to64 {
+ if mul_1.Op != OpRsh32Ux64 {
break
}
+ _ = mul_1.Args[1]
if x != mul_1.Args[0] {
break
}
- v_1_1_0_1 := v_1_1_0.Args[1]
- if v_1_1_0_1.Op != OpConst64 {
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
break
}
- s := v_1_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s])))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ return false
+}
+func rewriteValuegeneric_OpEq32_20(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpTrunc64to32 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpRsh64Ux64 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
break
}
- _ = v_1_1_0.Args[1]
- mul := v_1_1_0.Args[0]
- if mul.Op != OpMul64 {
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul32u {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpZeroExt32to64 {
+ if mul_0.Op != OpRsh32Ux64 {
break
}
+ _ = mul_0.Args[1]
if x != mul_0.Args[0] {
break
}
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ if mul_1.Type != typ.UInt32 {
break
}
m := mul_1.AuxInt
- v_1_1_0_1 := v_1_1_0.Args[1]
- if v_1_1_0_1.Op != OpConst64 {
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
break
}
- s := v_1_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpTrunc64to32 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpRsh64Ux64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
break
}
- _ = v_1_0_0.Args[1]
- mul := v_1_0_0.Args[0]
- if mul.Op != OpMul64 {
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul32u {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpConst64 {
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ if mul_0.Type != typ.UInt32 {
break
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpZeroExt32to64 {
+ if mul_1.Op != OpRsh32Ux64 {
break
}
+ _ = mul_1.Args[1]
if x != mul_1.Args[0] {
break
}
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpConst64 {
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
break
}
- s := v_1_0_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ if mul_1_1.AuxInt != 1 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s]))) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpTrunc64to32 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpRsh64Ux64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
break
}
- _ = v_1_0_0.Args[1]
- mul := v_1_0_0.Args[0]
- if mul.Op != OpMul64 {
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul32u {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpZeroExt32to64 {
+ if mul_0.Op != OpRsh32Ux64 {
break
}
+ _ = mul_0.Args[1]
if x != mul_0.Args[0] {
break
}
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ if mul_1.Type != typ.UInt32 {
break
}
m := mul_1.AuxInt
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpConst64 {
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
break
}
- s := v_1_0_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s])))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpTrunc64to32 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
break
}
- v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpRsh64Ux64 {
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpAvg32u {
break
}
- _ = v_0_1_0.Args[1]
- mul := v_0_1_0.Args[0]
- if mul.Op != OpMul64 {
+ _ = v_1_1_0.Args[1]
+ if x != v_1_1_0.Args[0] {
+ break
+ }
+ mul := v_1_1_0.Args[1]
+ if mul.Op != OpHmul32u {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpConst64 {
+ if mul_0.Op != OpConst32 {
break
}
m := mul_0.AuxInt
- mul_1 := mul.Args[1]
- if mul_1.Op != OpZeroExt32to64 {
- break
- }
- if x != mul_1.Args[0] {
+ if x != mul.Args[1] {
break
}
- v_0_1_0_1 := v_0_1_0.Args[1]
- if v_0_1_0_1.Op != OpConst64 {
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
break
}
- s := v_0_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s])))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
- break
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpTrunc64to32 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh32Ux64 {
break
}
- v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpRsh64Ux64 {
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpAvg32u {
break
}
- _ = v_0_1_0.Args[1]
- mul := v_0_1_0.Args[0]
- if mul.Op != OpMul64 {
+ _ = v_1_1_0.Args[1]
+ if x != v_1_1_0.Args[0] {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpZeroExt32to64 {
+ mul := v_1_1_0.Args[1]
+ if mul.Op != OpHmul32u {
break
}
- if x != mul_0.Args[0] {
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
break
}
mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ if mul_1.Op != OpConst32 {
break
}
m := mul_1.AuxInt
- v_0_1_0_1 := v_0_1_0.Args[1]
- if v_0_1_0_1.Op != OpConst64 {
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
break
}
- s := v_0_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpTrunc64to32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
break
}
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpRsh64Ux64 {
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAvg32u {
break
}
- _ = v_0_0_0.Args[1]
- mul := v_0_0_0.Args[0]
- if mul.Op != OpMul64 {
+ _ = v_1_0_0.Args[1]
+ if x != v_1_0_0.Args[0] {
+ break
+ }
+ mul := v_1_0_0.Args[1]
+ if mul.Op != OpHmul32u {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpConst64 {
+ if mul_0.Op != OpConst32 {
break
}
m := mul_0.AuxInt
- mul_1 := mul.Args[1]
- if mul_1.Op != OpZeroExt32to64 {
- break
- }
- if x != mul_1.Args[0] {
+ if x != mul.Args[1] {
break
}
- v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpConst64 {
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
break
}
- s := v_0_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s]))) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s])) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpTrunc64to32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32Ux64 {
break
}
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpRsh64Ux64 {
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAvg32u {
break
}
- _ = v_0_0_0.Args[1]
- mul := v_0_0_0.Args[0]
- if mul.Op != OpMul64 {
+ _ = v_1_0_0.Args[1]
+ if x != v_1_0_0.Args[0] {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpZeroExt32to64 {
+ mul := v_1_0_0.Args[1]
+ if mul.Op != OpHmul32u {
break
}
- if x != mul_0.Args[0] {
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
break
}
mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ if mul_1.Op != OpConst32 {
break
}
m := mul_1.AuxInt
- v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpConst64 {
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
break
}
- s := v_0_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s])))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpTrunc64to32 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
break
}
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpRsh64Ux64 {
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpAvg32u {
break
}
- _ = v_1_1_0.Args[1]
- mul := v_1_1_0.Args[0]
- if mul.Op != OpMul64 {
+ _ = v_0_1_0.Args[1]
+ if x != v_0_1_0.Args[0] {
+ break
+ }
+ mul := v_0_1_0.Args[1]
+ if mul.Op != OpHmul32u {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpConst64 {
+ if mul_0.Op != OpConst32 {
break
}
m := mul_0.AuxInt
- mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh64Ux64 {
- break
- }
- _ = mul_1.Args[1]
- mul_1_0 := mul_1.Args[0]
- if mul_1_0.Op != OpZeroExt32to64 {
- break
- }
- if x != mul_1_0.Args[0] {
- break
- }
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
- break
- }
- if mul_1_1.AuxInt != 1 {
+ if x != mul.Args[1] {
break
}
- v_1_1_0_1 := v_1_1_0.Args[1]
- if v_1_1_0_1.Op != OpConst64 {
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
break
}
- s := v_1_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- return false
-}
-func rewriteValuegeneric_OpEq32_40(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s])))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpTrunc64to32 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh32Ux64 {
break
}
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpRsh64Ux64 {
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpAvg32u {
break
}
- _ = v_1_1_0.Args[1]
- mul := v_1_1_0.Args[0]
- if mul.Op != OpMul64 {
+ _ = v_0_1_0.Args[1]
+ if x != v_0_1_0.Args[0] {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh64Ux64 {
+ mul := v_0_1_0.Args[1]
+ if mul.Op != OpHmul32u {
break
}
- _ = mul_0.Args[1]
- mul_0_0 := mul_0.Args[0]
- if mul_0_0.Op != OpZeroExt32to64 {
- break
- }
- if x != mul_0_0.Args[0] {
- break
- }
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
- break
- }
- if mul_0_1.AuxInt != 1 {
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
break
}
mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ if mul_1.Op != OpConst32 {
break
}
m := mul_1.AuxInt
- v_1_1_0_1 := v_1_1_0.Args[1]
- if v_1_1_0_1.Op != OpConst64 {
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
break
}
- s := v_1_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpTrunc64to32 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
break
}
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpRsh64Ux64 {
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAvg32u {
break
}
- _ = v_1_0_0.Args[1]
- mul := v_1_0_0.Args[0]
- if mul.Op != OpMul64 {
+ _ = v_0_0_0.Args[1]
+ if x != v_0_0_0.Args[0] {
+ break
+ }
+ mul := v_0_0_0.Args[1]
+ if mul.Op != OpHmul32u {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpConst64 {
+ if mul_0.Op != OpConst32 {
break
}
m := mul_0.AuxInt
- mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh64Ux64 {
- break
- }
- _ = mul_1.Args[1]
- mul_1_0 := mul_1.Args[0]
- if mul_1_0.Op != OpZeroExt32to64 {
- break
- }
- if x != mul_1_0.Args[0] {
- break
- }
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
- break
- }
- if mul_1_1.AuxInt != 1 {
+ if x != mul.Args[1] {
break
}
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpConst64 {
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
break
}
- s := v_1_0_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s]))) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ return false
+}
+func rewriteValuegeneric_OpEq32_30(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s])) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpTrunc64to32 {
- break
- }
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpRsh64Ux64 {
- break
- }
- _ = v_1_0_0.Args[1]
- mul := v_1_0_0.Args[0]
- if mul.Op != OpMul64 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh64Ux64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32Ux64 {
break
}
- _ = mul_0.Args[1]
- mul_0_0 := mul_0.Args[0]
- if mul_0_0.Op != OpZeroExt32to64 {
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAvg32u {
break
}
- if x != mul_0_0.Args[0] {
+ _ = v_0_0_0.Args[1]
+ if x != v_0_0_0.Args[0] {
break
}
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
+ mul := v_0_0_0.Args[1]
+ if mul.Op != OpHmul32u {
break
}
- if mul_0_1.AuxInt != 1 {
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
break
}
mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ if mul_1.Op != OpConst32 {
break
}
m := mul_1.AuxInt
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpConst64 {
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
break
}
- s := v_1_0_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s])))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpTrunc64to32 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to32 {
break
}
- v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpRsh64Ux64 {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
break
}
- _ = v_0_1_0.Args[1]
- mul := v_0_1_0.Args[0]
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
if mul.Op != OpMul64 {
break
}
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh64Ux64 {
- break
- }
- _ = mul_1.Args[1]
- mul_1_0 := mul_1.Args[0]
- if mul_1_0.Op != OpZeroExt32to64 {
- break
- }
- if x != mul_1_0.Args[0] {
- break
- }
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
+ if mul_1.Op != OpZeroExt32to64 {
break
}
- if mul_1_1.AuxInt != 1 {
+ if x != mul_1.Args[0] {
break
}
- v_0_1_0_1 := v_0_1_0.Args[1]
- if v_0_1_0_1.Op != OpConst64 {
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- s := v_0_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s])))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpTrunc64to32 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to32 {
break
}
- v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpRsh64Ux64 {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
break
}
- _ = v_0_1_0.Args[1]
- mul := v_0_1_0.Args[0]
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh64Ux64 {
+ if mul_0.Op != OpZeroExt32to64 {
break
}
- _ = mul_0.Args[1]
- mul_0_0 := mul_0.Args[0]
- if mul_0_0.Op != OpZeroExt32to64 {
+ if x != mul_0.Args[0] {
break
}
- if x != mul_0_0.Args[0] {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
break
}
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if mul_0_1.AuxInt != 1 {
- break
- }
- mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
- break
- }
- m := mul_1.AuxInt
- v_0_1_0_1 := v_0_1_0.Args[1]
- if v_0_1_0_1.Op != OpConst64 {
- break
- }
- s := v_0_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpTrunc64to32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to32 {
break
}
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpRsh64Ux64 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
break
}
- _ = v_0_0_0.Args[1]
- mul := v_0_0_0.Args[0]
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
if mul.Op != OpMul64 {
break
}
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh64Ux64 {
- break
- }
- _ = mul_1.Args[1]
- mul_1_0 := mul_1.Args[0]
- if mul_1_0.Op != OpZeroExt32to64 {
- break
- }
- if x != mul_1_0.Args[0] {
- break
- }
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
+ if mul_1.Op != OpZeroExt32to64 {
break
}
- if mul_1_1.AuxInt != 1 {
+ if x != mul_1.Args[0] {
break
}
- v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpConst64 {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- s := v_0_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s]))) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpTrunc64to32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to32 {
break
}
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpRsh64Ux64 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
break
}
- _ = v_0_0_0.Args[1]
- mul := v_0_0_0.Args[0]
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh64Ux64 {
- break
- }
- _ = mul_0.Args[1]
- mul_0_0 := mul_0.Args[0]
- if mul_0_0.Op != OpZeroExt32to64 {
- break
- }
- if x != mul_0_0.Args[0] {
- break
- }
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
+ if mul_0.Op != OpZeroExt32to64 {
break
}
- if mul_0_1.AuxInt != 1 {
+ if x != mul_0.Args[0] {
break
}
mul_1 := mul.Args[1]
break
}
m := mul_1.AuxInt
- v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpConst64 {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- s := v_0_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s])))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
- break
- }
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpTrunc64to32 {
- break
- }
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpRsh64Ux64 {
- break
- }
- _ = v_1_1_0.Args[1]
- v_1_1_0_0 := v_1_1_0.Args[0]
- if v_1_1_0_0.Op != OpAvg64u {
- break
- }
- _ = v_1_1_0_0.Args[1]
- v_1_1_0_0_0 := v_1_1_0_0.Args[0]
- if v_1_1_0_0_0.Op != OpLsh64x64 {
- break
- }
- _ = v_1_1_0_0_0.Args[1]
- v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
- if v_1_1_0_0_0_0.Op != OpZeroExt32to64 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- if x != v_1_1_0_0_0_0.Args[0] {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
- if v_1_1_0_0_0_1.Op != OpConst64 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to32 {
break
}
- if v_1_1_0_0_0_1.AuxInt != 32 {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
break
}
- mul := v_1_1_0_0.Args[1]
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
if mul.Op != OpMul64 {
break
}
if x != mul_1.Args[0] {
break
}
- v_1_1_0_1 := v_1_1_0.Args[1]
- if v_1_1_0_1.Op != OpConst64 {
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- s := v_1_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s])))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst32 {
- break
- }
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpTrunc64to32 {
- break
- }
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpRsh64Ux64 {
- break
- }
- _ = v_1_1_0.Args[1]
- v_1_1_0_0 := v_1_1_0.Args[0]
- if v_1_1_0_0.Op != OpAvg64u {
- break
- }
- _ = v_1_1_0_0.Args[1]
- v_1_1_0_0_0 := v_1_1_0_0.Args[0]
- if v_1_1_0_0_0.Op != OpLsh64x64 {
- break
- }
- _ = v_1_1_0_0_0.Args[1]
- v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
- if v_1_1_0_0_0_0.Op != OpZeroExt32to64 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- if x != v_1_1_0_0_0_0.Args[0] {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
- if v_1_1_0_0_0_1.Op != OpConst64 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to32 {
break
}
- if v_1_1_0_0_0_1.AuxInt != 32 {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
break
}
- mul := v_1_1_0_0.Args[1]
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
if mul.Op != OpMul64 {
break
}
break
}
m := mul_1.AuxInt
- v_1_1_0_1 := v_1_1_0.Args[1]
- if v_1_1_0_1.Op != OpConst64 {
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- s := v_1_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpTrunc64to32 {
- break
- }
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpRsh64Ux64 {
- break
- }
- _ = v_1_0_0.Args[1]
- v_1_0_0_0 := v_1_0_0.Args[0]
- if v_1_0_0_0.Op != OpAvg64u {
- break
- }
- _ = v_1_0_0_0.Args[1]
- v_1_0_0_0_0 := v_1_0_0_0.Args[0]
- if v_1_0_0_0_0.Op != OpLsh64x64 {
- break
- }
- _ = v_1_0_0_0_0.Args[1]
- v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
- if v_1_0_0_0_0_0.Op != OpZeroExt32to64 {
- break
- }
- if x != v_1_0_0_0_0_0.Args[0] {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
- if v_1_0_0_0_0_1.Op != OpConst64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc64to32 {
break
}
- if v_1_0_0_0_0_1.AuxInt != 32 {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
break
}
- mul := v_1_0_0_0.Args[1]
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
if mul.Op != OpMul64 {
break
}
if x != mul_1.Args[0] {
break
}
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpConst64 {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- s := v_1_0_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- return false
-}
-func rewriteValuegeneric_OpEq32_50(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s]))) (Const32 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul32 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpTrunc64to32 {
- break
- }
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpRsh64Ux64 {
- break
- }
- _ = v_1_0_0.Args[1]
- v_1_0_0_0 := v_1_0_0.Args[0]
- if v_1_0_0_0.Op != OpAvg64u {
- break
- }
- _ = v_1_0_0_0.Args[1]
- v_1_0_0_0_0 := v_1_0_0_0.Args[0]
- if v_1_0_0_0_0.Op != OpLsh64x64 {
- break
- }
- _ = v_1_0_0_0_0.Args[1]
- v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
- if v_1_0_0_0_0_0.Op != OpZeroExt32to64 {
- break
- }
- if x != v_1_0_0_0_0_0.Args[0] {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
- if v_1_0_0_0_0_1.Op != OpConst64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc64to32 {
break
}
- if v_1_0_0_0_0_1.AuxInt != 32 {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
break
}
- mul := v_1_0_0_0.Args[1]
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
if mul.Op != OpMul64 {
break
}
break
}
m := mul_1.AuxInt
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpConst64 {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- s := v_1_0_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst32 {
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s])))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
- break
- }
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
- break
- }
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpTrunc64to32 {
- break
- }
- v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpRsh64Ux64 {
- break
- }
- _ = v_0_1_0.Args[1]
- v_0_1_0_0 := v_0_1_0.Args[0]
- if v_0_1_0_0.Op != OpAvg64u {
- break
- }
- _ = v_0_1_0_0.Args[1]
- v_0_1_0_0_0 := v_0_1_0_0.Args[0]
- if v_0_1_0_0_0.Op != OpLsh64x64 {
- break
- }
- _ = v_0_1_0_0_0.Args[1]
- v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
- if v_0_1_0_0_0_0.Op != OpZeroExt32to64 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- if x != v_0_1_0_0_0_0.Args[0] {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
- if v_0_1_0_0_0_1.Op != OpConst64 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to32 {
break
}
- if v_0_1_0_0_0_1.AuxInt != 32 {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
break
}
- mul := v_0_1_0_0.Args[1]
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
if mul.Op != OpMul64 {
break
}
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpZeroExt32to64 {
+ if mul_1.Op != OpRsh64Ux64 {
break
}
- if x != mul_1.Args[0] {
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt32to64 {
break
}
- v_0_1_0_1 := v_0_1_0.Args[1]
- if v_0_1_0_1.Op != OpConst64 {
+ if x != mul_1_0.Args[0] {
break
}
- s := v_0_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s])))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ return false
+}
+func rewriteValuegeneric_OpEq32_40(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst32 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpTrunc64to32 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to32 {
break
}
- v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpRsh64Ux64 {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
break
}
- _ = v_0_1_0.Args[1]
- v_0_1_0_0 := v_0_1_0.Args[0]
- if v_0_1_0_0.Op != OpAvg64u {
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- _ = v_0_1_0_0.Args[1]
- v_0_1_0_0_0 := v_0_1_0_0.Args[0]
- if v_0_1_0_0_0.Op != OpLsh64x64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
break
}
- _ = v_0_1_0_0_0.Args[1]
- v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
- if v_0_1_0_0_0_0.Op != OpZeroExt32to64 {
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt32to64 {
break
}
- if x != v_0_1_0_0_0_0.Args[0] {
+ if x != mul_0_0.Args[0] {
break
}
- v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
- if v_0_1_0_0_0_1.Op != OpConst64 {
- break
- }
- if v_0_1_0_0_0_1.AuxInt != 32 {
- break
- }
- mul := v_0_1_0_0.Args[1]
- if mul.Op != OpMul64 {
- break
- }
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpZeroExt32to64 {
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
break
}
- if x != mul_0.Args[0] {
+ if mul_0_1.AuxInt != 1 {
break
}
mul_1 := mul.Args[1]
break
}
m := mul_1.AuxInt
- v_0_1_0_1 := v_0_1_0.Args[1]
- if v_0_1_0_1.Op != OpConst64 {
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- s := v_0_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
- break
- }
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpTrunc64to32 {
- break
- }
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpRsh64Ux64 {
- break
- }
- _ = v_0_0_0.Args[1]
- v_0_0_0_0 := v_0_0_0.Args[0]
- if v_0_0_0_0.Op != OpAvg64u {
- break
- }
- _ = v_0_0_0_0.Args[1]
- v_0_0_0_0_0 := v_0_0_0_0.Args[0]
- if v_0_0_0_0_0.Op != OpLsh64x64 {
- break
- }
- _ = v_0_0_0_0_0.Args[1]
- v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
- if v_0_0_0_0_0_0.Op != OpZeroExt32to64 {
- break
- }
- if x != v_0_0_0_0_0_0.Args[0] {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
- if v_0_0_0_0_0_1.Op != OpConst64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to32 {
break
}
- if v_0_0_0_0_0_1.AuxInt != 32 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
break
}
- mul := v_0_0_0_0.Args[1]
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
if mul.Op != OpMul64 {
break
}
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpZeroExt32to64 {
+ if mul_1.Op != OpRsh64Ux64 {
break
}
- if x != mul_1.Args[0] {
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt32to64 {
break
}
- v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpConst64 {
+ if x != mul_1_0.Args[0] {
break
}
- s := v_0_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s]))) (Const32 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul32 {
- break
- }
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpTrunc64to32 {
- break
- }
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpRsh64Ux64 {
- break
- }
- _ = v_0_0_0.Args[1]
- v_0_0_0_0 := v_0_0_0.Args[0]
- if v_0_0_0_0.Op != OpAvg64u {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- _ = v_0_0_0_0.Args[1]
- v_0_0_0_0_0 := v_0_0_0_0.Args[0]
- if v_0_0_0_0_0.Op != OpLsh64x64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc64to32 {
break
}
- _ = v_0_0_0_0_0.Args[1]
- v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
- if v_0_0_0_0_0_0.Op != OpZeroExt32to64 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
break
}
- if x != v_0_0_0_0_0_0.Args[0] {
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
- if v_0_0_0_0_0_1.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
break
}
- if v_0_0_0_0_0_1.AuxInt != 32 {
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt32to64 {
break
}
- mul := v_0_0_0_0.Args[1]
- if mul.Op != OpMul64 {
+ if x != mul_0_0.Args[0] {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpZeroExt32to64 {
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
break
}
- if x != mul_0.Args[0] {
+ if mul_0_1.AuxInt != 1 {
break
}
mul_1 := mul.Args[1]
break
}
m := mul_1.AuxInt
- v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpConst64 {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- s := v_0_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst32 {
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
v.reset(OpLeq32U)
v.AddArg(v4)
return true
}
- // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 31 && kbar == 32 - k
- // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- n := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpLsh32x64 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh32x64 {
- break
- }
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd32 {
- break
- }
- t := v_1_0_0.Type
- _ = v_1_0_0.Args[1]
- if n != v_1_0_0.Args[0] {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpRsh32Ux64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- if v_1_0_0_1.Type != t {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to32 {
break
}
- _ = v_1_0_0_1.Args[1]
- v_1_0_0_1_0 := v_1_0_0_1.Args[0]
- if v_1_0_0_1_0.Op != OpRsh32x64 {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
break
}
- if v_1_0_0_1_0.Type != t {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- _ = v_1_0_0_1_0.Args[1]
- if n != v_1_0_0_1_0.Args[0] {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
break
}
- v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
- if v_1_0_0_1_0_1.Op != OpConst64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
break
}
- if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt32to64 {
break
}
- if v_1_0_0_1_0_1.AuxInt != 31 {
+ if x != mul_1_0.Args[0] {
break
}
- v_1_0_0_1_1 := v_1_0_0_1.Args[1]
- if v_1_0_0_1_1.Op != OpConst64 {
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
break
}
- if v_1_0_0_1_1.Type != typ.UInt64 {
+ if mul_1_1.AuxInt != 1 {
break
}
- kbar := v_1_0_0_1_1.AuxInt
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
- k := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
- break
- }
- if v_1_1.Type != typ.UInt64 {
- break
- }
- if v_1_1.AuxInt != k {
- break
- }
- if !(k > 0 && k < 31 && kbar == 32-k) {
- break
- }
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpAnd32, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst32, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst32, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 31 && kbar == 32 - k
- // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- n := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpLsh32x64 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh32x64 {
- break
- }
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd32 {
- break
- }
- t := v_1_0_0.Type
- _ = v_1_0_0.Args[1]
- v_1_0_0_0 := v_1_0_0.Args[0]
- if v_1_0_0_0.Op != OpRsh32Ux64 {
- break
- }
- if v_1_0_0_0.Type != t {
- break
- }
- _ = v_1_0_0_0.Args[1]
- v_1_0_0_0_0 := v_1_0_0_0.Args[0]
- if v_1_0_0_0_0.Op != OpRsh32x64 {
- break
- }
- if v_1_0_0_0_0.Type != t {
- break
- }
- _ = v_1_0_0_0_0.Args[1]
- if n != v_1_0_0_0_0.Args[0] {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
- if v_1_0_0_0_0_1.Op != OpConst64 {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to32 {
break
}
- if v_1_0_0_0_0_1.AuxInt != 31 {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
break
}
- v_1_0_0_0_1 := v_1_0_0_0.Args[1]
- if v_1_0_0_0_1.Op != OpConst64 {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- if v_1_0_0_0_1.Type != typ.UInt64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
break
}
- kbar := v_1_0_0_0_1.AuxInt
- if n != v_1_0_0.Args[1] {
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt32to64 {
break
}
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ if x != mul_0_0.Args[0] {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
break
}
- k := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if mul_0_1.AuxInt != 1 {
break
}
- if v_1_1.Type != typ.UInt64 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
break
}
- if v_1_1.AuxInt != k {
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- if !(k > 0 && k < 31 && kbar == 32-k) {
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpAnd32, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst32, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst32, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 31 && kbar == 32 - k
- // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- n := v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpLsh32x64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh32x64 {
+ if v_0_0.Op != OpTrunc64to32 {
break
}
- _ = v_0_0.Args[1]
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd32 {
+ if v_0_0_0.Op != OpRsh64Ux64 {
break
}
- t := v_0_0_0.Type
_ = v_0_0_0.Args[1]
- if n != v_0_0_0.Args[0] {
- break
- }
- v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpRsh32Ux64 {
- break
- }
- if v_0_0_0_1.Type != t {
- break
- }
- _ = v_0_0_0_1.Args[1]
- v_0_0_0_1_0 := v_0_0_0_1.Args[0]
- if v_0_0_0_1_0.Op != OpRsh32x64 {
- break
- }
- if v_0_0_0_1_0.Type != t {
- break
- }
- _ = v_0_0_0_1_0.Args[1]
- if n != v_0_0_0_1_0.Args[0] {
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
- if v_0_0_0_1_0_1.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
break
}
- if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
break
}
- if v_0_0_0_1_0_1.AuxInt != 31 {
+ _ = mul_1.Args[1]
+ mul_1_0 := mul_1.Args[0]
+ if mul_1_0.Op != OpZeroExt32to64 {
break
}
- v_0_0_0_1_1 := v_0_0_0_1.Args[1]
- if v_0_0_0_1_1.Op != OpConst64 {
+ if x != mul_1_0.Args[0] {
break
}
- if v_0_0_0_1_1.Type != typ.UInt64 {
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
break
}
- kbar := v_0_0_0_1_1.AuxInt
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ if mul_1_1.AuxInt != 1 {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- k := v_0_0_1.AuxInt
+ s := v_0_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
- break
- }
- if v_0_1.Type != typ.UInt64 {
- break
- }
- if v_0_1.AuxInt != k {
+ if v_0_1.Op != OpConst32 {
break
}
- if !(k > 0 && k < 31 && kbar == 32-k) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpAnd32, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst32, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst32, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 31 && kbar == 32 - k
- // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- n := v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpLsh32x64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh32x64 {
+ if v_0_0.Op != OpTrunc64to32 {
break
}
- _ = v_0_0.Args[1]
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd32 {
+ if v_0_0_0.Op != OpRsh64Ux64 {
break
}
- t := v_0_0_0.Type
_ = v_0_0_0.Args[1]
- v_0_0_0_0 := v_0_0_0.Args[0]
- if v_0_0_0_0.Op != OpRsh32Ux64 {
- break
- }
- if v_0_0_0_0.Type != t {
- break
- }
- _ = v_0_0_0_0.Args[1]
- v_0_0_0_0_0 := v_0_0_0_0.Args[0]
- if v_0_0_0_0_0.Op != OpRsh32x64 {
- break
- }
- if v_0_0_0_0_0.Type != t {
- break
- }
- _ = v_0_0_0_0_0.Args[1]
- if n != v_0_0_0_0_0.Args[0] {
- break
- }
- v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
- if v_0_0_0_0_0_1.Op != OpConst64 {
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
break
}
- if v_0_0_0_0_0_1.AuxInt != 31 {
+ _ = mul_0.Args[1]
+ mul_0_0 := mul_0.Args[0]
+ if mul_0_0.Op != OpZeroExt32to64 {
break
}
- v_0_0_0_0_1 := v_0_0_0_0.Args[1]
- if v_0_0_0_0_1.Op != OpConst64 {
+ if x != mul_0_0.Args[0] {
break
}
- if v_0_0_0_0_1.Type != typ.UInt64 {
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
break
}
- kbar := v_0_0_0_0_1.AuxInt
- if n != v_0_0_0.Args[1] {
+ if mul_0_1.AuxInt != 1 {
break
}
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- k := v_0_0_1.AuxInt
+ s := v_0_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
- break
- }
- if v_0_1.Type != typ.UInt64 {
- break
- }
- if v_0_1.AuxInt != k {
+ if v_0_1.Op != OpConst32 {
break
}
- if !(k > 0 && k < 31 && kbar == 32-k) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpAnd32, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst32, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst32, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq32 s:(Sub32 x y) (Const32 [0]))
- // cond: s.Uses == 1
- // result: (Eq32 x y)
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
_ = v.Args[1]
- s := v.Args[0]
- if s.Op != OpSub32 {
- break
- }
- y := s.Args[1]
- x := s.Args[0]
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpConst32 {
+ if v_1.Op != OpMul32 {
break
}
- if v_1.AuxInt != 0 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- if !(s.Uses == 1) {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc64to32 {
break
}
- v.reset(OpEq32)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq32_60(v *Value) bool {
- // match: (Eq32 (Const32 [0]) s:(Sub32 x y))
- // cond: s.Uses == 1
- // result: (Eq32 x y)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32 {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
break
}
- if v_0.AuxInt != 0 {
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAvg64u {
break
}
- s := v.Args[1]
- if s.Op != OpSub32 {
+ _ = v_1_1_0_0.Args[1]
+ v_1_1_0_0_0 := v_1_1_0_0.Args[0]
+ if v_1_1_0_0_0.Op != OpLsh64x64 {
break
}
- y := s.Args[1]
- x := s.Args[0]
- if !(s.Uses == 1) {
+ _ = v_1_1_0_0_0.Args[1]
+ v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
+ if v_1_1_0_0_0_0.Op != OpZeroExt32to64 {
break
}
- v.reset(OpEq32)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq32F_0(v *Value) bool {
- // match: (Eq32F (Const32F [c]) (Const32F [d]))
- // cond:
- // result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32F {
+ if x != v_1_1_0_0_0_0.Args[0] {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32F {
+ v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
+ if v_1_1_0_0_0_1.Op != OpConst64 {
break
}
- d := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
- return true
- }
- // match: (Eq32F (Const32F [d]) (Const32F [c]))
- // cond:
- // result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst32F {
+ if v_1_1_0_0_0_1.AuxInt != 32 {
break
}
- d := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst32F {
+ mul := v_1_1_0_0.Args[1]
+ if mul.Op != OpMul64 {
break
}
- c := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq64_0(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq64 x x)
- // cond:
- // result: (ConstBool [1])
- for {
- x := v.Args[1]
- if x != v.Args[0] {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
break
}
- v.reset(OpConstBool)
- v.AuxInt = 1
- return true
- }
- // match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
- // cond:
- // result: (Eq64 (Const64 <t> [c-d]) x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
break
}
- t := v_0.Type
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpAdd64 {
+ if x != mul_1.Args[0] {
break
}
- x := v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst64 {
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if v_1_0.Type != t {
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
- d := v_1_0.AuxInt
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = c - d
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq64 (Const64 <t> [c]) (Add64 x (Const64 <t> [d])))
- // cond:
- // result: (Eq64 (Const64 <t> [c-d]) x)
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
_ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
- break
- }
- t := v_0.Type
- c := v_0.AuxInt
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpAdd64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
- x := v_1.Args[0]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if v_1_1.Op != OpTrunc64to32 {
break
}
- if v_1_1.Type != t {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64Ux64 {
break
}
- d := v_1_1.AuxInt
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = c - d
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq64 (Add64 (Const64 <t> [d]) x) (Const64 <t> [c]))
- // cond:
- // result: (Eq64 (Const64 <t> [c-d]) x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpAdd64 {
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAvg64u {
break
}
- x := v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst64 {
+ _ = v_1_1_0_0.Args[1]
+ v_1_1_0_0_0 := v_1_1_0_0.Args[0]
+ if v_1_1_0_0_0.Op != OpLsh64x64 {
break
}
- t := v_0_0.Type
- d := v_0_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ _ = v_1_1_0_0_0.Args[1]
+ v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
+ if v_1_1_0_0_0_0.Op != OpZeroExt32to64 {
break
}
- if v_1.Type != t {
+ if x != v_1_1_0_0_0_0.Args[0] {
break
}
- c := v_1.AuxInt
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = c - d
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq64 (Add64 x (Const64 <t> [d])) (Const64 <t> [c]))
- // cond:
- // result: (Eq64 (Const64 <t> [c-d]) x)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpAdd64 {
+ v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
+ if v_1_1_0_0_0_1.Op != OpConst64 {
break
}
- _ = v_0.Args[1]
- x := v_0.Args[0]
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ if v_1_1_0_0_0_1.AuxInt != 32 {
break
}
- t := v_0_1.Type
- d := v_0_1.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ mul := v_1_1_0_0.Args[1]
+ if mul.Op != OpMul64 {
break
}
- if v_1.Type != t {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt32to64 {
break
}
- c := v_1.AuxInt
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpConst64, t)
- v0.AuxInt = c - d
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq64 (Const64 [c]) (Const64 [d]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ if x != mul_0.Args[0] {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
break
}
- d := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
- return true
- }
- // match: (Eq64 (Const64 [d]) (Const64 [c]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- d := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
- c := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst64 {
+ if v_1_0.Op != OpTrunc64to32 {
break
}
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh64Ux64 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
break
}
- _ = v_1_1.Args[1]
- mul := v_1_1.Args[0]
- if mul.Op != OpHmul64u {
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
+ if v_1_0_0_0_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != v_1_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_1_0_0_0.Args[1]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
break
}
m := mul_0.AuxInt
- if x != mul.Args[1] {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
break
}
- v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
+ if x != mul_1.Args[0] {
break
}
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
v1.AddArg(v2)
v1.AddArg(x)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
v0.AddArg(v3)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
v.AddArg(v4)
return true
}
- // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ return false
+}
+func rewriteValuegeneric_OpEq32_50(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst64 {
+ if v_1_0.Op != OpTrunc64to32 {
break
}
- c := v_1_0.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh64Ux64 {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64Ux64 {
break
}
- _ = v_1_1.Args[1]
- mul := v_1_1.Args[0]
- if mul.Op != OpHmul64u {
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAvg64u {
break
}
- _ = mul.Args[1]
- if x != mul.Args[0] {
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpLsh64x64 {
break
}
- mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ _ = v_1_0_0_0_0.Args[1]
+ v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
+ if v_1_0_0_0_0_0.Op != OpZeroExt32to64 {
break
}
- m := mul_1.AuxInt
- v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
- break
- }
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if x != v_1_0_0_0_0_0.Args[0] {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
- v1.AddArg(v2)
- v1.AddArg(x)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
- v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
- return true
- }
- // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) (Const64 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
break
}
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh64Ux64 {
+ if v_1_0_0_0_0_1.AuxInt != 32 {
break
}
- _ = v_1_0.Args[1]
- mul := v_1_0.Args[0]
- if mul.Op != OpHmul64u {
+ mul := v_1_0_0_0.Args[1]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpConst64 {
+ if mul_0.Op != OpZeroExt32to64 {
break
}
- m := mul_0.AuxInt
- if x != mul.Args[1] {
+ if x != mul_0.Args[0] {
break
}
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
break
}
- s := v_1_0_1.AuxInt
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if v_1_1.Op != OpConst32 {
break
}
c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
v1.AddArg(v2)
v1.AddArg(x)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
v0.AddArg(v3)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
v.AddArg(v4)
return true
}
- return false
-}
-func rewriteValuegeneric_OpEq64_10(v *Value) bool {
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s])) (Const64 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh64Ux64 {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
break
}
- _ = v_1_0.Args[1]
- mul := v_1_0.Args[0]
- if mul.Op != OpHmul64u {
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
break
}
- _ = mul.Args[1]
- if x != mul.Args[0] {
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc64to32 {
break
}
- mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
break
}
- m := mul_1.AuxInt
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAvg64u {
break
}
- s := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ _ = v_0_1_0_0.Args[1]
+ v_0_1_0_0_0 := v_0_1_0_0.Args[0]
+ if v_0_1_0_0_0.Op != OpLsh64x64 {
break
}
- c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ _ = v_0_1_0_0_0.Args[1]
+ v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
+ if v_0_1_0_0_0_0.Op != OpZeroExt32to64 {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
- v1.AddArg(v2)
- v1.AddArg(x)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
- v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
- return true
- }
- // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
- for {
- x := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if x != v_0_1_0_0_0_0.Args[0] {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst64 {
+ v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
+ if v_0_1_0_0_0_1.Op != OpConst64 {
break
}
- c := v_0_0.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh64Ux64 {
+ if v_0_1_0_0_0_1.AuxInt != 32 {
break
}
- _ = v_0_1.Args[1]
- mul := v_0_1.Args[0]
- if mul.Op != OpHmul64u {
+ mul := v_0_1_0_0.Args[1]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
break
}
m := mul_0.AuxInt
- if x != mul.Args[1] {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
break
}
- v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ if x != mul_1.Args[0] {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
v1.AddArg(v2)
v1.AddArg(x)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
v0.AddArg(v3)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
v.AddArg(v4)
return true
}
- // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst64 {
+ if v_0_0.Op != OpConst32 {
break
}
c := v_0_0.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh64Ux64 {
+ if v_0_1.Op != OpTrunc64to32 {
break
}
- _ = v_0_1.Args[1]
- mul := v_0_1.Args[0]
- if mul.Op != OpHmul64u {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64Ux64 {
break
}
- _ = mul.Args[1]
- if x != mul.Args[0] {
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAvg64u {
break
}
- mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ _ = v_0_1_0_0.Args[1]
+ v_0_1_0_0_0 := v_0_1_0_0.Args[0]
+ if v_0_1_0_0_0.Op != OpLsh64x64 {
break
}
- m := mul_1.AuxInt
- v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ _ = v_0_1_0_0_0.Args[1]
+ v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
+ if v_0_1_0_0_0_0.Op != OpZeroExt32to64 {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if x != v_0_1_0_0_0_0.Args[0] {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
- v1.AddArg(v2)
- v1.AddArg(x)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
- v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
+ if v_0_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_0_1_0_0.Args[1]
+ if mul.Op != OpMul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) (Const64 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh64Ux64 {
+ if v_0_0.Op != OpTrunc64to32 {
break
}
- _ = v_0_0.Args[1]
- mul := v_0_0.Args[0]
- if mul.Op != OpHmul64u {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
+ if v_0_0_0_0_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != v_0_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_0_0_0_0.Args[1]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
break
}
m := mul_0.AuxInt
- if x != mul.Args[1] {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt32to64 {
break
}
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ if x != mul_1.Args[0] {
break
}
- s := v_0_0_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ if v_0_1.Op != OpConst32 {
break
}
c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
v1.AddArg(v2)
v1.AddArg(x)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
v0.AddArg(v3)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
v.AddArg(v4)
return true
}
- // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s])) (Const64 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh64Ux64 {
+ if v_0_0.Op != OpTrunc64to32 {
break
}
- _ = v_0_0.Args[1]
- mul := v_0_0.Args[0]
- if mul.Op != OpHmul64u {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
+ if v_0_0_0_0_0_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != v_0_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 32 {
+ break
+ }
+ mul := v_0_0_0_0.Args[1]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
- if x != mul.Args[0] {
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt32to64 {
+ break
+ }
+ if x != mul_0.Args[0] {
break
}
mul_1 := mul.Args[1]
break
}
m := mul_1.AuxInt
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- s := v_0_0_1.AuxInt
+ s := v_0_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ if v_0_1.Op != OpConst32 {
break
}
c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v2.AuxInt = int64(int32(udivisible(32, c).m))
v1.AddArg(v2)
v1.AddArg(x)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(32 - udivisible(32, c).k)
v0.AddArg(v3)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(udivisible(32, c).max))
v.AddArg(v4)
return true
}
- // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst64 {
+ if v_1_0.Op != OpConst32 {
break
}
c := v_1_0.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh64Ux64 {
+ if v_1_1.Op != OpSub32 {
break
}
_ = v_1_1.Args[1]
- mul := v_1_1.Args[0]
- if mul.Op != OpHmul64u {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh64Ux64 {
+ if mul_1.Op != OpSignExt32to64 {
break
}
- _ = mul_1.Args[1]
if x != mul_1.Args[0] {
break
}
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if mul_1_1.AuxInt != 1 {
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh64x64 {
break
}
- v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
+ _ = v_1_1_1.Args[1]
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpSignExt32to64 {
break
}
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if x != v_1_1_1_0.Args[0] {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (SignExt32to64 x) (Const64 [m])) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst64 {
+ if v_1_0.Op != OpConst32 {
break
}
c := v_1_0.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh64Ux64 {
+ if v_1_1.Op != OpSub32 {
break
}
_ = v_1_1.Args[1]
- mul := v_1_1.Args[0]
- if mul.Op != OpHmul64u {
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh64Ux64 {
+ if mul_0.Op != OpSignExt32to64 {
break
}
- _ = mul_0.Args[1]
if x != mul_0.Args[0] {
break
}
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
- break
- }
- if mul_0_1.AuxInt != 1 {
- break
- }
mul_1 := mul.Args[1]
if mul_1.Op != OpConst64 {
break
}
m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
+ if v_1_1_1.Op != OpRsh64x64 {
break
}
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ _ = v_1_1_1.Args[1]
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpSignExt32to64 {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ if x != v_1_1_1_0.Args[0] {
+ break
+ }
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) (Const64 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 x (Mul32 (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh64Ux64 {
+ if v_1_0.Op != OpSub32 {
break
}
_ = v_1_0.Args[1]
- mul := v_1_0.Args[0]
- if mul.Op != OpHmul64u {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh64Ux64 {
+ if mul_1.Op != OpSignExt32to64 {
break
}
- _ = mul_1.Args[1]
if x != mul_1.Args[0] {
break
}
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- if mul_1_1.AuxInt != 1 {
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh64x64 {
break
}
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ _ = v_1_0_1.Args[1]
+ v_1_0_1_0 := v_1_0_1.Args[0]
+ if v_1_0_1_0.Op != OpSignExt32to64 {
+ break
+ }
+ if x != v_1_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 63 {
break
}
- s := v_1_0_1.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if v_1_1.Op != OpConst32 {
break
}
c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s])) (Const64 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 x (Mul32 (Sub32 (Rsh64x64 mul:(Mul64 (SignExt32to64 x) (Const64 [m])) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh64Ux64 {
+ if v_1_0.Op != OpSub32 {
break
}
_ = v_1_0.Args[1]
- mul := v_1_0.Args[0]
- if mul.Op != OpHmul64u {
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh64Ux64 {
+ if mul_0.Op != OpSignExt32to64 {
break
}
- _ = mul_0.Args[1]
if x != mul_0.Args[0] {
break
}
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
- break
- }
- if mul_0_1.AuxInt != 1 {
- break
- }
mul_1 := mul.Args[1]
if mul_1.Op != OpConst64 {
break
}
m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ if v_1_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ v_1_0_1_0 := v_1_0_1.Args[0]
+ if v_1_0_1_0.Op != OpSignExt32to64 {
+ break
+ }
+ if x != v_1_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 63 {
break
}
- s := v_1_0_1.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if v_1_1.Op != OpConst32 {
break
}
c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst64 {
+ if v_0_0.Op != OpConst32 {
break
}
c := v_0_0.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh64Ux64 {
+ if v_0_1.Op != OpSub32 {
break
}
_ = v_0_1.Args[1]
- mul := v_0_1.Args[0]
- if mul.Op != OpHmul64u {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh64Ux64 {
+ if mul_1.Op != OpSignExt32to64 {
break
}
- _ = mul_1.Args[1]
if x != mul_1.Args[0] {
break
}
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
break
}
- if mul_1_1.AuxInt != 1 {
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh64x64 {
break
}
- v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ _ = v_0_1_1.Args[1]
+ v_0_1_1_0 := v_0_1_1.Args[0]
+ if v_0_1_1_0.Op != OpSignExt32to64 {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if x != v_0_1_1_0.Args[0] {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
return false
}
-func rewriteValuegeneric_OpEq64_20(v *Value) bool {
+func rewriteValuegeneric_OpEq32_60(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
- // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (SignExt32to64 x) (Const64 [m])) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst64 {
+ if v_0_0.Op != OpConst32 {
break
}
c := v_0_0.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh64Ux64 {
+ if v_0_1.Op != OpSub32 {
break
}
_ = v_0_1.Args[1]
- mul := v_0_1.Args[0]
- if mul.Op != OpHmul64u {
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64x64 {
break
}
- _ = mul.Args[1]
- mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh64Ux64 {
- break
- }
- _ = mul_0.Args[1]
- if x != mul_0.Args[0] {
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpSignExt32to64 {
break
}
- if mul_0_1.AuxInt != 1 {
+ if x != mul_0.Args[0] {
break
}
mul_1 := mul.Args[1]
break
}
m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ if v_0_1_1.Op != OpRsh64x64 {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ _ = v_0_1_1.Args[1]
+ v_0_1_1_0 := v_0_1_1.Args[0]
+ if v_0_1_1_0.Op != OpSignExt32to64 {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ if x != v_0_1_1_0.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) (Const64 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh64Ux64 {
+ if v_0_0.Op != OpSub32 {
break
}
_ = v_0_0.Args[1]
- mul := v_0_0.Args[0]
- if mul.Op != OpHmul64u {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpRsh64Ux64 {
+ if mul_1.Op != OpSignExt32to64 {
break
}
- _ = mul_1.Args[1]
if x != mul_1.Args[0] {
break
}
- mul_1_1 := mul_1.Args[1]
- if mul_1_1.Op != OpConst64 {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- if mul_1_1.AuxInt != 1 {
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh64x64 {
break
}
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ _ = v_0_0_1.Args[1]
+ v_0_0_1_0 := v_0_0_1.Args[0]
+ if v_0_0_1_0.Op != OpSignExt32to64 {
+ break
+ }
+ if x != v_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 63 {
break
}
- s := v_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ if v_0_1.Op != OpConst32 {
break
}
c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s])) (Const64 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Sub32 (Rsh64x64 mul:(Mul64 (SignExt32to64 x) (Const64 [m])) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh64Ux64 {
+ if v_0_0.Op != OpSub32 {
break
}
_ = v_0_0.Args[1]
- mul := v_0_0.Args[0]
- if mul.Op != OpHmul64u {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul64 {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpRsh64Ux64 {
+ if mul_0.Op != OpSignExt32to64 {
break
}
- _ = mul_0.Args[1]
if x != mul_0.Args[0] {
break
}
- mul_0_1 := mul_0.Args[1]
- if mul_0_1.Op != OpConst64 {
- break
- }
- if mul_0_1.AuxInt != 1 {
- break
- }
mul_1 := mul.Args[1]
if mul_1.Op != OpConst64 {
break
}
m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ if v_0_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ v_0_0_1_0 := v_0_0_1.Args[0]
+ if v_0_0_1_0.Op != OpSignExt32to64 {
+ break
+ }
+ if x != v_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 63 {
break
}
- s := v_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ if v_0_1.Op != OpConst32 {
break
}
c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst64 {
+ if v_1_0.Op != OpConst32 {
break
}
c := v_1_0.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh64Ux64 {
+ if v_1_1.Op != OpSub32 {
break
}
_ = v_1_1.Args[1]
v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpAvg64u {
+ if v_1_1_0.Op != OpRsh32x64 {
break
}
_ = v_1_1_0.Args[1]
- if x != v_1_1_0.Args[0] {
- break
- }
- mul := v_1_1_0.Args[1]
- if mul.Op != OpHmul64u {
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpConst64 {
+ if mul_0.Op != OpConst32 {
break
}
m := mul_0.AuxInt
if x != mul.Args[1] {
break
}
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
+ if v_1_1_1.Op != OpRsh32x64 {
break
}
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s]))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 x (Const32 [m])) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst64 {
+ if v_1_0.Op != OpConst32 {
break
}
c := v_1_0.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpRsh64Ux64 {
+ if v_1_1.Op != OpSub32 {
break
}
_ = v_1_1.Args[1]
v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpAvg64u {
+ if v_1_1_0.Op != OpRsh32x64 {
break
}
_ = v_1_1_0.Args[1]
- if x != v_1_1_0.Args[0] {
- break
- }
- mul := v_1_1_0.Args[1]
- if mul.Op != OpHmul64u {
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
_ = mul.Args[1]
break
}
mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ if mul_1.Op != OpConst32 {
break
}
m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
v_1_1_1 := v_1_1.Args[1]
- if v_1_1_1.Op != OpConst64 {
+ if v_1_1_1.Op != OpRsh32x64 {
break
}
- s := v_1_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
- v1.AddArg(v2)
- v1.AddArg(x)
- v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
- v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 x (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) (Const64 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh64Ux64 {
+ if v_1_0.Op != OpSub32 {
break
}
_ = v_1_0.Args[1]
v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAvg64u {
+ if v_1_0_0.Op != OpRsh32x64 {
break
}
_ = v_1_0_0.Args[1]
- if x != v_1_0_0.Args[0] {
- break
- }
- mul := v_1_0_0.Args[1]
- if mul.Op != OpHmul64u {
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpConst64 {
+ if mul_0.Op != OpConst32 {
break
}
m := mul_0.AuxInt
if x != mul.Args[1] {
break
}
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ if v_1_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 31 {
break
}
- s := v_1_0_1.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if v_1_1.Op != OpConst32 {
break
}
c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 x (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s])) (Const64 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 mul:(Hmul32 x (Const32 [m])) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpMul64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh64Ux64 {
+ if v_1_0.Op != OpSub32 {
break
}
_ = v_1_0.Args[1]
v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAvg64u {
+ if v_1_0_0.Op != OpRsh32x64 {
break
}
_ = v_1_0_0.Args[1]
- if x != v_1_0_0.Args[0] {
- break
- }
- mul := v_1_0_0.Args[1]
- if mul.Op != OpHmul64u {
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
_ = mul.Args[1]
break
}
mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ if mul_1.Op != OpConst32 {
break
}
m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ if v_1_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 31 {
break
}
- s := v_1_0_1.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if v_1_1.Op != OpConst32 {
break
}
c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst64 {
+ if v_0_0.Op != OpConst32 {
break
}
c := v_0_0.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh64Ux64 {
+ if v_0_1.Op != OpSub32 {
break
}
_ = v_0_1.Args[1]
v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpAvg64u {
+ if v_0_1_0.Op != OpRsh32x64 {
break
}
_ = v_0_1_0.Args[1]
- if x != v_0_1_0.Args[0] {
- break
- }
- mul := v_0_1_0.Args[1]
- if mul.Op != OpHmul64u {
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpConst64 {
+ if mul_0.Op != OpConst32 {
break
}
m := mul_0.AuxInt
if x != mul.Args[1] {
break
}
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ if v_0_1_1.Op != OpRsh32x64 {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s]))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 x (Const32 [m])) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst64 {
+ if v_0_0.Op != OpConst32 {
break
}
c := v_0_0.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpRsh64Ux64 {
+ if v_0_1.Op != OpSub32 {
break
}
_ = v_0_1.Args[1]
v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpAvg64u {
+ if v_0_1_0.Op != OpRsh32x64 {
break
}
_ = v_0_1_0.Args[1]
- if x != v_0_1_0.Args[0] {
- break
- }
- mul := v_0_1_0.Args[1]
- if mul.Op != OpHmul64u {
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
_ = mul.Args[1]
break
}
mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ if mul_1.Op != OpConst32 {
break
}
m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
v_0_1_1 := v_0_1.Args[1]
- if v_0_1_1.Op != OpConst64 {
+ if v_0_1_1.Op != OpRsh32x64 {
break
}
- s := v_0_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) (Const64 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh64Ux64 {
+ if v_0_0.Op != OpSub32 {
break
}
_ = v_0_0.Args[1]
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAvg64u {
+ if v_0_0_0.Op != OpRsh32x64 {
break
}
_ = v_0_0_0.Args[1]
- if x != v_0_0_0.Args[0] {
- break
- }
- mul := v_0_0_0.Args[1]
- if mul.Op != OpHmul64u {
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpConst64 {
+ if mul_0.Op != OpConst32 {
break
}
m := mul_0.AuxInt
if x != mul.Args[1] {
break
}
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ if v_0_0_1.Op != OpRsh32x64 {
break
}
- s := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
break
}
- c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ if v_0_0_1_1.AuxInt != 31 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
return false
}
-func rewriteValuegeneric_OpEq64_30(v *Value) bool {
+func rewriteValuegeneric_OpEq32_70(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
- // match: (Eq64 (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s])) (Const64 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
- // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 mul:(Hmul32 x (Const32 [m])) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMul64 {
+ if v_0.Op != OpMul32 {
break
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh64Ux64 {
+ if v_0_0.Op != OpSub32 {
break
}
_ = v_0_0.Args[1]
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAvg64u {
+ if v_0_0_0.Op != OpRsh32x64 {
break
}
_ = v_0_0_0.Args[1]
- if x != v_0_0_0.Args[0] {
- break
- }
- mul := v_0_0_0.Args[1]
- if mul.Op != OpHmul64u {
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
_ = mul.Args[1]
break
}
mul_1 := mul.Args[1]
- if mul_1.Op != OpConst64 {
+ if mul_1.Op != OpConst32 {
break
}
m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ if v_0_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 31 {
break
}
- s := v_0_0_1.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ if v_0_1.Op != OpConst32 {
break
}
c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- v.reset(OpLeq64U)
- v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
- v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v2.AuxInt = int64(udivisible(64, c).m)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v3.AuxInt = int64(64 - udivisible(64, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
- v4.AuxInt = int64(udivisible(64, c).max)
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 63 && kbar == 64 - k
- // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
- n := v.Args[0]
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpLsh64x64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh64x64 {
- break
- }
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd64 {
- break
- }
- t := v_1_0_0.Type
- _ = v_1_0_0.Args[1]
- if n != v_1_0_0.Args[0] {
- break
- }
- v_1_0_0_1 := v_1_0_0.Args[1]
- if v_1_0_0_1.Op != OpRsh64Ux64 {
- break
- }
- if v_1_0_0_1.Type != t {
- break
- }
- _ = v_1_0_0_1.Args[1]
- v_1_0_0_1_0 := v_1_0_0_1.Args[0]
- if v_1_0_0_1_0.Op != OpRsh64x64 {
+ if v_1_0.Op != OpConst32 {
break
}
- if v_1_0_0_1_0.Type != t {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpSub32 {
break
}
- _ = v_1_0_0_1_0.Args[1]
- if n != v_1_0_0_1_0.Args[0] {
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32x64 {
break
}
- v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
- if v_1_0_0_1_0_1.Op != OpConst64 {
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAdd32 {
break
}
- if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ _ = v_1_1_0_0.Args[1]
+ mul := v_1_1_0_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
- if v_1_0_0_1_0_1.AuxInt != 63 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- v_1_0_0_1_1 := v_1_0_0_1.Args[1]
- if v_1_0_0_1_1.Op != OpConst64 {
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
break
}
- if v_1_0_0_1_1.Type != typ.UInt64 {
+ if x != v_1_1_0_0.Args[1] {
break
}
- kbar := v_1_0_0_1_1.AuxInt
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh32x64 {
break
}
- k := v_1_0_1.AuxInt
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
break
}
- if v_1_1.Type != typ.UInt64 {
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
break
}
- if v_1_1.AuxInt != k {
+ if v_1_1_1_1.AuxInt != 31 {
break
}
- if !(k > 0 && k < 63 && kbar == 64-k) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpAnd64, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
- // cond: k > 0 && k < 63 && kbar == 64 - k
- // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 x (Const32 [m])) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
- n := v.Args[0]
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpLsh64x64 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpRsh64x64 {
+ if v_1_0.Op != OpConst32 {
break
}
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpAdd64 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpSub32 {
break
}
- t := v_1_0_0.Type
- _ = v_1_0_0.Args[1]
- v_1_0_0_0 := v_1_0_0.Args[0]
- if v_1_0_0_0.Op != OpRsh64Ux64 {
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32x64 {
break
}
- if v_1_0_0_0.Type != t {
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAdd32 {
break
}
- _ = v_1_0_0_0.Args[1]
- v_1_0_0_0_0 := v_1_0_0_0.Args[0]
- if v_1_0_0_0_0.Op != OpRsh64x64 {
+ _ = v_1_1_0_0.Args[1]
+ mul := v_1_1_0_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
- if v_1_0_0_0_0.Type != t {
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
break
}
- _ = v_1_0_0_0_0.Args[1]
- if n != v_1_0_0_0_0.Args[0] {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
- if v_1_0_0_0_0_1.Op != OpConst64 {
+ m := mul_1.AuxInt
+ if x != v_1_1_0_0.Args[1] {
break
}
- if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if v_1_0_0_0_0_1.AuxInt != 63 {
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh32x64 {
break
}
- v_1_0_0_0_1 := v_1_0_0_0.Args[1]
- if v_1_0_0_0_1.Op != OpConst64 {
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
break
}
- if v_1_0_0_0_1.Type != typ.UInt64 {
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
break
}
- kbar := v_1_0_0_0_1.AuxInt
- if n != v_1_0_0.Args[1] {
+ if v_1_1_1_1.AuxInt != 31 {
break
}
- v_1_0_1 := v_1_0.Args[1]
- if v_1_0_1.Op != OpConst64 {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- if v_1_0_1.Type != typ.UInt64 {
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 (Const32 [m]) x)) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- k := v_1_0_1.AuxInt
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
+ break
+ }
+ c := v_1_0.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst64 {
+ if v_1_1.Op != OpSub32 {
break
}
- if v_1_1.Type != typ.UInt64 {
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32x64 {
break
}
- if v_1_1.AuxInt != k {
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAdd32 {
break
}
- if !(k > 0 && k < 63 && kbar == 64-k) {
+ _ = v_1_1_0_0.Args[1]
+ if x != v_1_1_0_0.Args[0] {
break
}
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpAnd64, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
- v0.AddArg(v1)
- v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
- v2.AuxInt = 0
- v.AddArg(v2)
- return true
- }
- // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 63 && kbar == 64 - k
- // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
- for {
- n := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpLsh64x64 {
- break
- }
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh64x64 {
+ mul := v_1_1_0_0.Args[1]
+ if mul.Op != OpHmul32 {
break
}
- _ = v_0_0.Args[1]
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd64 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- t := v_0_0_0.Type
- _ = v_0_0_0.Args[1]
- if n != v_0_0_0.Args[0] {
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
break
}
- v_0_0_0_1 := v_0_0_0.Args[1]
- if v_0_0_0_1.Op != OpRsh64Ux64 {
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- if v_0_0_0_1.Type != t {
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh32x64 {
break
}
- _ = v_0_0_0_1.Args[1]
- v_0_0_0_1_0 := v_0_0_0_1.Args[0]
- if v_0_0_0_1_0.Op != OpRsh64x64 {
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
break
}
- if v_0_0_0_1_0.Type != t {
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
break
}
- _ = v_0_0_0_1_0.Args[1]
- if n != v_0_0_0_1_0.Args[0] {
+ if v_1_1_1_1.AuxInt != 31 {
break
}
- v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
- if v_0_0_0_1_0_1.Op != OpConst64 {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 x (Const32 [m]))) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- if v_0_0_0_1_0_1.AuxInt != 63 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst32 {
break
}
- v_0_0_0_1_1 := v_0_0_0_1.Args[1]
- if v_0_0_0_1_1.Op != OpConst64 {
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpSub32 {
break
}
- if v_0_0_0_1_1.Type != typ.UInt64 {
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32x64 {
break
}
- kbar := v_0_0_0_1_1.AuxInt
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAdd32 {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ _ = v_1_1_0_0.Args[1]
+ if x != v_1_1_0_0.Args[0] {
break
}
- k := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ mul := v_1_1_0_0.Args[1]
+ if mul.Op != OpHmul32 {
break
}
- if v_0_1.Type != typ.UInt64 {
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
break
}
- if v_0_1.AuxInt != k {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- if !(k > 0 && k < 63 && kbar == 64-k) {
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
break
}
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpAnd64, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
- v0.AddArg(v1)
- v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
- v2.AuxInt = 0
- v.AddArg(v2)
- return true
- }
- // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
- // cond: k > 0 && k < 63 && kbar == 64 - k
- // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
- for {
- n := v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpLsh64x64 {
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh32x64 {
break
}
- _ = v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpRsh64x64 {
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
break
}
- _ = v_0_0.Args[1]
- v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpAdd64 {
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
break
}
- t := v_0_0_0.Type
- _ = v_0_0_0.Args[1]
- v_0_0_0_0 := v_0_0_0.Args[0]
- if v_0_0_0_0.Op != OpRsh64Ux64 {
+ if v_1_1_1_1.AuxInt != 31 {
break
}
- if v_0_0_0_0.Type != t {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- _ = v_0_0_0_0.Args[1]
- v_0_0_0_0_0 := v_0_0_0_0.Args[0]
- if v_0_0_0_0_0.Op != OpRsh64x64 {
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- if v_0_0_0_0_0.Type != t {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSub32 {
break
}
- _ = v_0_0_0_0_0.Args[1]
- if n != v_0_0_0_0_0.Args[0] {
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32x64 {
break
}
- v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
- if v_0_0_0_0_0_1.Op != OpConst64 {
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAdd32 {
break
}
- if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ _ = v_1_0_0_0.Args[1]
+ mul := v_1_0_0_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
- if v_0_0_0_0_0_1.AuxInt != 63 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- v_0_0_0_0_1 := v_0_0_0_0.Args[1]
- if v_0_0_0_0_1.Op != OpConst64 {
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
break
}
- if v_0_0_0_0_1.Type != typ.UInt64 {
+ if x != v_1_0_0_0.Args[1] {
break
}
- kbar := v_0_0_0_0_1.AuxInt
- if n != v_0_0_0.Args[1] {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- v_0_0_1 := v_0_0.Args[1]
- if v_0_0_1.Op != OpConst64 {
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh32x64 {
break
}
- if v_0_0_1.Type != typ.UInt64 {
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
break
}
- k := v_0_0_1.AuxInt
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst64 {
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
break
}
- if v_0_1.Type != typ.UInt64 {
+ if v_1_0_1_1.AuxInt != 31 {
break
}
- if v_0_1.AuxInt != k {
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
break
}
- if !(k > 0 && k < 63 && kbar == 64-k) {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- v.reset(OpEq64)
- v0 := b.NewValue0(v.Pos, OpAnd64, t)
- v0.AddArg(n)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
- v1.AuxInt = int64(1<<uint(k) - 1)
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
- v2.AuxInt = 0
- v.AddArg(v2)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq64 s:(Sub64 x y) (Const64 [0]))
- // cond: s.Uses == 1
- // result: (Eq64 x y)
+ // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 x (Const32 [m])) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
- s := v.Args[0]
- if s.Op != OpSub64 {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul32 {
break
}
- y := s.Args[1]
- x := s.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpConst64 {
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSub32 {
break
}
- if v_1.AuxInt != 0 {
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32x64 {
break
}
- if !(s.Uses == 1) {
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAdd32 {
break
}
- v.reset(OpEq64)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- // match: (Eq64 (Const64 [0]) s:(Sub64 x y))
- // cond: s.Uses == 1
- // result: (Eq64 x y)
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64 {
+ _ = v_1_0_0_0.Args[1]
+ mul := v_1_0_0_0.Args[0]
+ if mul.Op != OpHmul32 {
break
}
- if v_0.AuxInt != 0 {
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
break
}
- s := v.Args[1]
- if s.Op != OpSub64 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- y := s.Args[1]
- x := s.Args[0]
- if !(s.Uses == 1) {
+ m := mul_1.AuxInt
+ if x != v_1_0_0_0.Args[1] {
break
}
- v.reset(OpEq64)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq64F_0(v *Value) bool {
- // match: (Eq64F (Const64F [c]) (Const64F [d]))
- // cond:
- // result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64F {
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64F {
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh32x64 {
break
}
- d := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d))
- return true
- }
- // match: (Eq64F (Const64F [d]) (Const64F [c]))
- // cond:
- // result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst64F {
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
break
}
- d := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst64F {
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d))
- return true
- }
- return false
-}
-func rewriteValuegeneric_OpEq8_0(v *Value) bool {
- b := v.Block
- config := b.Func.Config
- typ := &b.Func.Config.Types
- // match: (Eq8 x x)
- // cond:
- // result: (ConstBool [1])
- for {
- x := v.Args[1]
- if x != v.Args[0] {
+ if v_1_0_1_1.AuxInt != 31 {
break
}
- v.reset(OpConstBool)
- v.AuxInt = 1
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
- // cond:
- // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+ // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 (Const32 [m]) x)) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
- break
- }
- t := v_0.Type
- c := v_0.AuxInt
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpAdd8 {
+ if v_1.Op != OpMul32 {
break
}
- x := v_1.Args[1]
+ _ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpConst8 {
+ if v_1_0.Op != OpSub32 {
break
}
- if v_1_0.Type != t {
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32x64 {
break
}
- d := v_1_0.AuxInt
- v.reset(OpEq8)
- v0 := b.NewValue0(v.Pos, OpConst8, t)
- v0.AuxInt = int64(int8(c - d))
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAdd32 {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ if x != v_1_0_0_0.Args[0] {
+ break
+ }
+ mul := v_1_0_0_0.Args[1]
+ if mul.Op != OpHmul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 31 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst32 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v.AddArg(x)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq8 (Const8 <t> [c]) (Add8 x (Const8 <t> [d])))
- // cond:
- // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+ // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 x (Const32 [m]))) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
_ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
- break
- }
- t := v_0.Type
- c := v_0.AuxInt
+ x := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpAdd8 {
+ if v_1.Op != OpMul32 {
break
}
_ = v_1.Args[1]
- x := v_1.Args[0]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSub32 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAdd32 {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ if x != v_1_0_0_0.Args[0] {
+ break
+ }
+ mul := v_1_0_0_0.Args[1]
+ if mul.Op != OpHmul32 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 31 {
+ break
+ }
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst8 {
+ if v_1_1.Op != OpConst32 {
break
}
- if v_1_1.Type != t {
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
break
}
- d := v_1_1.AuxInt
- v.reset(OpEq8)
- v0 := b.NewValue0(v.Pos, OpConst8, t)
- v0.AuxInt = int64(int8(c - d))
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v.AddArg(x)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq8 (Add8 (Const8 <t> [d]) x) (Const8 <t> [c]))
- // cond:
- // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+ // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpAdd8 {
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpSub32 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAdd32 {
+ break
+ }
+ _ = v_0_1_0_0.Args[1]
+ mul := v_0_1_0_0.Args[0]
+ if mul.Op != OpHmul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ if x != v_0_1_0_0.Args[1] {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq32_80(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 x (Const32 [m])) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpSub32 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAdd32 {
+ break
+ }
+ _ = v_0_1_0_0.Args[1]
+ mul := v_0_1_0_0.Args[0]
+ if mul.Op != OpHmul32 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ if x != v_0_1_0_0.Args[1] {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 (Const32 [m]) x)) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpSub32 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAdd32 {
+ break
+ }
+ _ = v_0_1_0_0.Args[1]
+ if x != v_0_1_0_0.Args[0] {
+ break
+ }
+ mul := v_0_1_0_0.Args[1]
+ if mul.Op != OpHmul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 x (Const32 [m]))) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst32 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpSub32 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAdd32 {
+ break
+ }
+ _ = v_0_1_0_0.Args[1]
+ if x != v_0_1_0_0.Args[0] {
+ break
+ }
+ mul := v_0_1_0_0.Args[1]
+ if mul.Op != OpHmul32 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub32 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAdd32 {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ mul := v_0_0_0_0.Args[0]
+ if mul.Op != OpHmul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ if x != v_0_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 31 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 x (Const32 [m])) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub32 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAdd32 {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ mul := v_0_0_0_0.Args[0]
+ if mul.Op != OpHmul32 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ if x != v_0_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 31 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 (Const32 [m]) x)) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub32 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAdd32 {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ if x != v_0_0_0_0.Args[0] {
+ break
+ }
+ mul := v_0_0_0_0.Args[1]
+ if mul.Op != OpHmul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 31 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 x (Const32 [m]))) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+ // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul32 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub32 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAdd32 {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ if x != v_0_0_0_0.Args[0] {
+ break
+ }
+ mul := v_0_0_0_0.Args[1]
+ if mul.Op != OpHmul32 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 31 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst32 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+ break
+ }
+ v.reset(OpLeq32U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+ v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+ v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v3.AuxInt = int64(int32(sdivisible(32, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v4.AuxInt = int64(int32(sdivisible(32, c).a))
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v5.AuxInt = int64(32 - sdivisible(32, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+ v6.AuxInt = int64(int32(sdivisible(32, c).max))
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 31 && kbar == 32 - k
+ // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd32 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ if n != v_1_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ if v_1_0_0_1.Type != t {
+ break
+ }
+ _ = v_1_0_0_1.Args[1]
+ v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+ if v_1_0_0_1_0.Op != OpRsh32x64 {
+ break
+ }
+ if v_1_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_1_0.Args[1]
+ if n != v_1_0_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+ if v_1_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_1_0_1.AuxInt != 31 {
+ break
+ }
+ v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+ if v_1_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_1_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 31 && kbar == 32-k) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpAnd32, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst32, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 31 && kbar == 32 - k
+ // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd32 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ if v_1_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpRsh32x64 {
+ break
+ }
+ if v_1_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ if n != v_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 31 {
+ break
+ }
+ v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+ if v_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_0_1.AuxInt
+ if n != v_1_0_0.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 31 && kbar == 32-k) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpAnd32, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst32, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 31 && kbar == 32 - k
+ // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh32x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd32 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ if n != v_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpRsh32Ux64 {
+ break
+ }
+ if v_0_0_0_1.Type != t {
+ break
+ }
+ _ = v_0_0_0_1.Args[1]
+ v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+ if v_0_0_0_1_0.Op != OpRsh32x64 {
+ break
+ }
+ if v_0_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_1_0.Args[1]
+ if n != v_0_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+ if v_0_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_1_0_1.AuxInt != 31 {
+ break
+ }
+ v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+ if v_0_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_1_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 31 && kbar == 32-k) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpAnd32, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst32, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst32, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ return false
+}
+// rewriteValuegeneric_OpEq32_90 applies the trailing group of Eq32 rewrite
+// rules: the commuted form of the sign-extension round-trip test
+// (Lsh32x64 (Rsh32x64 (Add32 ...) k) k compared against n, reduced to a
+// low-bit mask check) and the (Eq32 (Sub32 x y) 0) => (Eq32 x y)
+// simplification in both argument orders. It returns true if v was rewritten.
+// NOTE(review): this file appears machine-generated from rewrite rules
+// (presumably gen/generic.rules) — confirm and regenerate rather than
+// hand-editing.
+func rewriteValuegeneric_OpEq32_90(v *Value) bool {
+	b := v.Block
+	typ := &b.Func.Config.Types
+	// match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+	// cond: k > 0 && k < 31 && kbar == 32 - k
+	// result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+	for {
+		n := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpLsh32x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh32x64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		v_0_0_0 := v_0_0.Args[0]
+		if v_0_0_0.Op != OpAdd32 {
+			break
+		}
+		t := v_0_0_0.Type
+		_ = v_0_0_0.Args[1]
+		v_0_0_0_0 := v_0_0_0.Args[0]
+		if v_0_0_0_0.Op != OpRsh32Ux64 {
+			break
+		}
+		if v_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_0.Args[1]
+		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+		if v_0_0_0_0_0.Op != OpRsh32x64 {
+			break
+		}
+		if v_0_0_0_0_0.Type != t {
+			break
+		}
+		_ = v_0_0_0_0_0.Args[1]
+		if n != v_0_0_0_0_0.Args[0] {
+			break
+		}
+		v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+		if v_0_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_0_0_0_0_1.AuxInt != 31 {
+			break
+		}
+		v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+		if v_0_0_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		kbar := v_0_0_0_0_1.AuxInt
+		if n != v_0_0_0.Args[1] {
+			break
+		}
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_0_1.Type != typ.UInt64 {
+			break
+		}
+		k := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		if v_0_1.Type != typ.UInt64 {
+			break
+		}
+		if v_0_1.AuxInt != k {
+			break
+		}
+		if !(k > 0 && k < 31 && kbar == 32-k) {
+			break
+		}
+		v.reset(OpEq32)
+		v0 := b.NewValue0(v.Pos, OpAnd32, t)
+		v0.AddArg(n)
+		v1 := b.NewValue0(v.Pos, OpConst32, t)
+		v1.AuxInt = int64(1<<uint(k) - 1)
+		v0.AddArg(v1)
+		v.AddArg(v0)
+		v2 := b.NewValue0(v.Pos, OpConst32, t)
+		v2.AuxInt = 0
+		v.AddArg(v2)
+		return true
+	}
+	// match: (Eq32 s:(Sub32 x y) (Const32 [0]))
+	// cond: s.Uses == 1
+	// result: (Eq32 x y)
+	for {
+		_ = v.Args[1]
+		s := v.Args[0]
+		if s.Op != OpSub32 {
+			break
+		}
+		y := s.Args[1]
+		x := s.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst32 {
+			break
+		}
+		if v_1.AuxInt != 0 {
+			break
+		}
+		if !(s.Uses == 1) {
+			break
+		}
+		v.reset(OpEq32)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Eq32 (Const32 [0]) s:(Sub32 x y))
+	// cond: s.Uses == 1
+	// result: (Eq32 x y)
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst32 {
+			break
+		}
+		if v_0.AuxInt != 0 {
+			break
+		}
+		s := v.Args[1]
+		if s.Op != OpSub32 {
+			break
+		}
+		y := s.Args[1]
+		x := s.Args[0]
+		if !(s.Uses == 1) {
+			break
+		}
+		v.reset(OpEq32)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	return false
+}
+// rewriteValuegeneric_OpEq32F_0 constant-folds float32 equality:
+// (Eq32F (Const32F [c]) (Const32F [d])) => (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))]),
+// matched in both argument orders. The AuxInt payloads are decoded back to
+// float32 via auxTo32F before comparing, so the fold follows float32 (not
+// bit-pattern) equality semantics. Returns true if v was rewritten.
+// NOTE(review): machine-generated matcher — regenerate rather than hand-edit.
+func rewriteValuegeneric_OpEq32F_0(v *Value) bool {
+	// match: (Eq32F (Const32F [c]) (Const32F [d]))
+	// cond:
+	// result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst32F {
+			break
+		}
+		c := v_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst32F {
+			break
+		}
+		d := v_1.AuxInt
+		v.reset(OpConstBool)
+		v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
+		return true
+	}
+	// match: (Eq32F (Const32F [d]) (Const32F [c]))
+	// cond:
+	// result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst32F {
+			break
+		}
+		d := v_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst32F {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpConstBool)
+		v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
+		return true
+	}
+	return false
+}
+// rewriteValuegeneric_OpEq64_0 applies the first group of Eq64 rewrite rules:
+//   - reflexivity: (Eq64 x x) => true;
+//   - constant/addend normalization: (Eq64 c (Add64 d x)) => (Eq64 c-d x),
+//     in all four argument orders;
+//   - constant folding: (Eq64 c d) => b2i(c == d), both orders;
+//   - the unsigned-divisibility strength reduction: an expanded
+//     x == c * (x / c) pattern (Hmul64u/Rsh64Ux64 magic-number division) is
+//     replaced by a rotate-and-compare divisibility test, gated on the
+//     magic constants matching umagic(64,c) and udivisibleOK(64,c).
+// Returns true if v was rewritten.
+// NOTE(review): machine-generated matcher — regenerate rather than hand-edit.
+func rewriteValuegeneric_OpEq64_0(v *Value) bool {
+	b := v.Block
+	typ := &b.Func.Config.Types
+	// match: (Eq64 x x)
+	// cond:
+	// result: (ConstBool [1])
+	for {
+		x := v.Args[1]
+		if x != v.Args[0] {
+			break
+		}
+		v.reset(OpConstBool)
+		v.AuxInt = 1
+		return true
+	}
+	// match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
+	// cond:
+	// result: (Eq64 (Const64 <t> [c-d]) x)
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst64 {
+			break
+		}
+		t := v_0.Type
+		c := v_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpAdd64 {
+			break
+		}
+		x := v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpConst64 {
+			break
+		}
+		if v_1_0.Type != t {
+			break
+		}
+		d := v_1_0.AuxInt
+		v.reset(OpEq64)
+		v0 := b.NewValue0(v.Pos, OpConst64, t)
+		v0.AuxInt = c - d
+		v.AddArg(v0)
+		v.AddArg(x)
+		return true
+	}
+	// match: (Eq64 (Const64 <t> [c]) (Add64 x (Const64 <t> [d])))
+	// cond:
+	// result: (Eq64 (Const64 <t> [c-d]) x)
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst64 {
+			break
+		}
+		t := v_0.Type
+		c := v_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpAdd64 {
+			break
+		}
+		_ = v_1.Args[1]
+		x := v_1.Args[0]
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		if v_1_1.Type != t {
+			break
+		}
+		d := v_1_1.AuxInt
+		v.reset(OpEq64)
+		v0 := b.NewValue0(v.Pos, OpConst64, t)
+		v0.AuxInt = c - d
+		v.AddArg(v0)
+		v.AddArg(x)
+		return true
+	}
+	// match: (Eq64 (Add64 (Const64 <t> [d]) x) (Const64 <t> [c]))
+	// cond:
+	// result: (Eq64 (Const64 <t> [c-d]) x)
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpAdd64 {
+			break
+		}
+		x := v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpConst64 {
+			break
+		}
+		t := v_0_0.Type
+		d := v_0_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst64 {
+			break
+		}
+		if v_1.Type != t {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpEq64)
+		v0 := b.NewValue0(v.Pos, OpConst64, t)
+		v0.AuxInt = c - d
+		v.AddArg(v0)
+		v.AddArg(x)
+		return true
+	}
+	// match: (Eq64 (Add64 x (Const64 <t> [d])) (Const64 <t> [c]))
+	// cond:
+	// result: (Eq64 (Const64 <t> [c-d]) x)
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpAdd64 {
+			break
+		}
+		_ = v_0.Args[1]
+		x := v_0.Args[0]
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		t := v_0_1.Type
+		d := v_0_1.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst64 {
+			break
+		}
+		if v_1.Type != t {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpEq64)
+		v0 := b.NewValue0(v.Pos, OpConst64, t)
+		v0.AuxInt = c - d
+		v.AddArg(v0)
+		v.AddArg(x)
+		return true
+	}
+	// match: (Eq64 (Const64 [c]) (Const64 [d]))
+	// cond:
+	// result: (ConstBool [b2i(c == d)])
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst64 {
+			break
+		}
+		c := v_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst64 {
+			break
+		}
+		d := v_1.AuxInt
+		v.reset(OpConstBool)
+		v.AuxInt = b2i(c == d)
+		return true
+	}
+	// match: (Eq64 (Const64 [d]) (Const64 [c]))
+	// cond:
+	// result: (ConstBool [b2i(c == d)])
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst64 {
+			break
+		}
+		d := v_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst64 {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpConstBool)
+		v.AuxInt = b2i(c == d)
+		return true
+	}
+	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s]))))
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		_ = v.Args[1]
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpMul64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpConst64 {
+			break
+		}
+		c := v_1_0.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_1_1.Args[1]
+		mul := v_1_1.Args[0]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		mul_0 := mul.Args[0]
+		if mul_0.Op != OpConst64 {
+			break
+		}
+		m := mul_0.AuxInt
+		if x != mul.Args[1] {
+			break
+		}
+		v_1_1_1 := v_1_1.Args[1]
+		if v_1_1_1.Op != OpConst64 {
+			break
+		}
+		s := v_1_1_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s]))))
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		_ = v.Args[1]
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpMul64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpConst64 {
+			break
+		}
+		c := v_1_0.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_1_1.Args[1]
+		mul := v_1_1.Args[0]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		if x != mul.Args[0] {
+			break
+		}
+		mul_1 := mul.Args[1]
+		if mul_1.Op != OpConst64 {
+			break
+		}
+		m := mul_1.AuxInt
+		v_1_1_1 := v_1_1.Args[1]
+		if v_1_1_1.Op != OpConst64 {
+			break
+		}
+		s := v_1_1_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) (Const64 [c])))
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		_ = v.Args[1]
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpMul64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_1_0.Args[1]
+		mul := v_1_0.Args[0]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		mul_0 := mul.Args[0]
+		if mul_0.Op != OpConst64 {
+			break
+		}
+		m := mul_0.AuxInt
+		if x != mul.Args[1] {
+			break
+		}
+		v_1_0_1 := v_1_0.Args[1]
+		if v_1_0_1.Op != OpConst64 {
+			break
+		}
+		s := v_1_0_1.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		c := v_1_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	return false
+}
+func rewriteValuegeneric_OpEq64_10(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s])) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s])) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ mul := v_0_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s]))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ mul := v_1_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s])) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ mul := v_1_0.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_0.Args[1]
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_0_1 := mul_0.Args[1]
+ if mul_0_1.Op != OpConst64 {
+ break
+ }
+ if mul_0_1.AuxInt != 1 {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s]))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ mul := v_0_1.Args[0]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = mul_1.Args[1]
+ if x != mul_1.Args[0] {
+ break
+ }
+ mul_1_1 := mul_1.Args[1]
+ if mul_1_1.Op != OpConst64 {
+ break
+ }
+ if mul_1_1.AuxInt != 1 {
+ break
+ }
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ return false
+}
+// rewriteValuegeneric_OpEq64_20 applies the next batch of Eq64 rewrite rules.
+// Each rule below recognizes one operand ordering of an unsigned
+// magic-number division-by-constant expansion compared back against x
+// (i.e. the pattern x == c*(x/c) after Div64u has been expanded via
+// Hmul64u/Rsh64Ux64, optionally through Avg64u for the large-magic case)
+// and replaces it with a direct divisibility test of the form
+// (Leq64U (RotateLeft64 (Mul64 ...)) (Const64 [max])).
+// The exact pattern, side condition, and replacement of every rule are
+// spelled out in the match:/cond:/result: comments preceding each loop.
+// NOTE(review): this file appears to be machine-generated from the generic
+// rewrite rules — change the rules source and regenerate rather than
+// editing this function by hand.
+// It reports whether v was rewritten.
+func rewriteValuegeneric_OpEq64_20(v *Value) bool {
+	b := v.Block
+	typ := &b.Func.Config.Types
+	// match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s]))) x)
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		x := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpMul64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpConst64 {
+			break
+		}
+		c := v_0_0.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_0_1.Args[1]
+		mul := v_0_1.Args[0]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		mul_0 := mul.Args[0]
+		if mul_0.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = mul_0.Args[1]
+		if x != mul_0.Args[0] {
+			break
+		}
+		mul_0_1 := mul_0.Args[1]
+		if mul_0_1.Op != OpConst64 {
+			break
+		}
+		if mul_0_1.AuxInt != 1 {
+			break
+		}
+		mul_1 := mul.Args[1]
+		if mul_1.Op != OpConst64 {
+			break
+		}
+		m := mul_1.AuxInt
+		v_0_1_1 := v_0_1.Args[1]
+		if v_0_1_1.Op != OpConst64 {
+			break
+		}
+		s := v_0_1_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) (Const64 [c])) x)
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		x := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpMul64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		mul := v_0_0.Args[0]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		mul_0 := mul.Args[0]
+		if mul_0.Op != OpConst64 {
+			break
+		}
+		m := mul_0.AuxInt
+		mul_1 := mul.Args[1]
+		if mul_1.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = mul_1.Args[1]
+		if x != mul_1.Args[0] {
+			break
+		}
+		mul_1_1 := mul_1.Args[1]
+		if mul_1_1.Op != OpConst64 {
+			break
+		}
+		if mul_1_1.AuxInt != 1 {
+			break
+		}
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		s := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		c := v_0_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s])) (Const64 [c])) x)
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		x := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpMul64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		mul := v_0_0.Args[0]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		mul_0 := mul.Args[0]
+		if mul_0.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = mul_0.Args[1]
+		if x != mul_0.Args[0] {
+			break
+		}
+		mul_0_1 := mul_0.Args[1]
+		if mul_0_1.Op != OpConst64 {
+			break
+		}
+		if mul_0_1.AuxInt != 1 {
+			break
+		}
+		mul_1 := mul.Args[1]
+		if mul_1.Op != OpConst64 {
+			break
+		}
+		m := mul_1.AuxInt
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		s := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		c := v_0_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s]))))
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		_ = v.Args[1]
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpMul64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpConst64 {
+			break
+		}
+		c := v_1_0.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_1_1.Args[1]
+		v_1_1_0 := v_1_1.Args[0]
+		if v_1_1_0.Op != OpAvg64u {
+			break
+		}
+		_ = v_1_1_0.Args[1]
+		if x != v_1_1_0.Args[0] {
+			break
+		}
+		mul := v_1_1_0.Args[1]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		mul_0 := mul.Args[0]
+		if mul_0.Op != OpConst64 {
+			break
+		}
+		m := mul_0.AuxInt
+		if x != mul.Args[1] {
+			break
+		}
+		v_1_1_1 := v_1_1.Args[1]
+		if v_1_1_1.Op != OpConst64 {
+			break
+		}
+		s := v_1_1_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s]))))
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		_ = v.Args[1]
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpMul64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpConst64 {
+			break
+		}
+		c := v_1_0.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_1_1.Args[1]
+		v_1_1_0 := v_1_1.Args[0]
+		if v_1_1_0.Op != OpAvg64u {
+			break
+		}
+		_ = v_1_1_0.Args[1]
+		if x != v_1_1_0.Args[0] {
+			break
+		}
+		mul := v_1_1_0.Args[1]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		if x != mul.Args[0] {
+			break
+		}
+		mul_1 := mul.Args[1]
+		if mul_1.Op != OpConst64 {
+			break
+		}
+		m := mul_1.AuxInt
+		v_1_1_1 := v_1_1.Args[1]
+		if v_1_1_1.Op != OpConst64 {
+			break
+		}
+		s := v_1_1_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// match: (Eq64 x (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) (Const64 [c])))
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		_ = v.Args[1]
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpMul64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_1_0.Args[1]
+		v_1_0_0 := v_1_0.Args[0]
+		if v_1_0_0.Op != OpAvg64u {
+			break
+		}
+		_ = v_1_0_0.Args[1]
+		if x != v_1_0_0.Args[0] {
+			break
+		}
+		mul := v_1_0_0.Args[1]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		mul_0 := mul.Args[0]
+		if mul_0.Op != OpConst64 {
+			break
+		}
+		m := mul_0.AuxInt
+		if x != mul.Args[1] {
+			break
+		}
+		v_1_0_1 := v_1_0.Args[1]
+		if v_1_0_1.Op != OpConst64 {
+			break
+		}
+		s := v_1_0_1.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		c := v_1_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// match: (Eq64 x (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s])) (Const64 [c])))
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		_ = v.Args[1]
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpMul64 {
+			break
+		}
+		_ = v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_1_0.Args[1]
+		v_1_0_0 := v_1_0.Args[0]
+		if v_1_0_0.Op != OpAvg64u {
+			break
+		}
+		_ = v_1_0_0.Args[1]
+		if x != v_1_0_0.Args[0] {
+			break
+		}
+		mul := v_1_0_0.Args[1]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		if x != mul.Args[0] {
+			break
+		}
+		mul_1 := mul.Args[1]
+		if mul_1.Op != OpConst64 {
+			break
+		}
+		m := mul_1.AuxInt
+		v_1_0_1 := v_1_0.Args[1]
+		if v_1_0_1.Op != OpConst64 {
+			break
+		}
+		s := v_1_0_1.AuxInt
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst64 {
+			break
+		}
+		c := v_1_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s]))) x)
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		x := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpMul64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpConst64 {
+			break
+		}
+		c := v_0_0.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_0_1.Args[1]
+		v_0_1_0 := v_0_1.Args[0]
+		if v_0_1_0.Op != OpAvg64u {
+			break
+		}
+		_ = v_0_1_0.Args[1]
+		if x != v_0_1_0.Args[0] {
+			break
+		}
+		mul := v_0_1_0.Args[1]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		mul_0 := mul.Args[0]
+		if mul_0.Op != OpConst64 {
+			break
+		}
+		m := mul_0.AuxInt
+		if x != mul.Args[1] {
+			break
+		}
+		v_0_1_1 := v_0_1.Args[1]
+		if v_0_1_1.Op != OpConst64 {
+			break
+		}
+		s := v_0_1_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s]))) x)
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		x := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpMul64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpConst64 {
+			break
+		}
+		c := v_0_0.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_0_1.Args[1]
+		v_0_1_0 := v_0_1.Args[0]
+		if v_0_1_0.Op != OpAvg64u {
+			break
+		}
+		_ = v_0_1_0.Args[1]
+		if x != v_0_1_0.Args[0] {
+			break
+		}
+		mul := v_0_1_0.Args[1]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		if x != mul.Args[0] {
+			break
+		}
+		mul_1 := mul.Args[1]
+		if mul_1.Op != OpConst64 {
+			break
+		}
+		m := mul_1.AuxInt
+		v_0_1_1 := v_0_1.Args[1]
+		if v_0_1_1.Op != OpConst64 {
+			break
+		}
+		s := v_0_1_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// match: (Eq64 (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) (Const64 [c])) x)
+	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+	// result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+	for {
+		x := v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpMul64 {
+			break
+		}
+		_ = v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpRsh64Ux64 {
+			break
+		}
+		_ = v_0_0.Args[1]
+		v_0_0_0 := v_0_0.Args[0]
+		if v_0_0_0.Op != OpAvg64u {
+			break
+		}
+		_ = v_0_0_0.Args[1]
+		if x != v_0_0_0.Args[0] {
+			break
+		}
+		mul := v_0_0_0.Args[1]
+		if mul.Op != OpHmul64u {
+			break
+		}
+		_ = mul.Args[1]
+		mul_0 := mul.Args[0]
+		if mul_0.Op != OpConst64 {
+			break
+		}
+		m := mul_0.AuxInt
+		if x != mul.Args[1] {
+			break
+		}
+		v_0_0_1 := v_0_0.Args[1]
+		if v_0_0_1.Op != OpConst64 {
+			break
+		}
+		s := v_0_0_1.AuxInt
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst64 {
+			break
+		}
+		c := v_0_1.AuxInt
+		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+			break
+		}
+		v.reset(OpLeq64U)
+		v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v2.AuxInt = int64(udivisible(64, c).m)
+		v1.AddArg(v2)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v3.AuxInt = int64(64 - udivisible(64, c).k)
+		v0.AddArg(v3)
+		v.AddArg(v0)
+		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+		v4.AuxInt = int64(udivisible(64, c).max)
+		v.AddArg(v4)
+		return true
+	}
+	// No rule in this batch matched; the caller's dispatch chain tries
+	// rewriteValuegeneric_OpEq64_30 next.
+	return false
+}
+ // rewriteValuegeneric_OpEq64_30 is machine-generated by rulegen from the
+ // generic rewrite rules — do not hand-edit; change gen/generic.rules and
+ // regenerate instead.
+ //
+ // Each "match" arm below recognizes one commuted variant of an Eq64 that
+ // compares x against the strength-reduced form of a constant division
+ // (the umagic/smagic multiply-and-shift expansion produced earlier for
+ // x/c*c) and, when the cond holds (correct magic constants m and s,
+ // mul used exactly once, x not a constant, and the *divisibleOK check),
+ // replaces it with the pattern given in "result": a Leq64U over a
+ // RotateLeft64/Mul64 (and Add64 for the signed arms) built from the
+ // udivisible/sdivisible tables — presumably the standard divisibility-by-
+ // constant transform (Granlund–Montgomery / Hacker's Delight); confirm
+ // against the rule comments in gen/generic.rules.
+ //
+ // The "v.Block.Func.pass.name != \"opt\"" guard defers this rewrite past
+ // the early opt pass so the division expansion it matches has already
+ // been generated. Returns true iff v was rewritten in place.
+ func rewriteValuegeneric_OpEq64_30(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq64 (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s])) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAvg64u {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ if x != v_0_0_0.Args[0] {
+ break
+ }
+ mul := v_0_0_0.Args[1]
+ if mul.Op != OpHmul64u {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2.AuxInt = int64(udivisible(64, c).m)
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(64 - udivisible(64, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(udivisible(64, c).max)
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpSub64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
+ break
+ }
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 x (Const64 [m])) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpSub64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
+ break
+ }
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSub64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 mul:(Hmul64 x (Const64 [m])) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSub64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpSub64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 x (Const64 [m])) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpSub64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 mul:(Hmul64 x (Const64 [m])) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpSub64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_1_1_0_0.Args[1]
+ mul := v_1_1_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ if x != v_1_1_0_0.Args[1] {
+ break
+ }
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
+ break
+ }
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ return false
+ }
+func rewriteValuegeneric_OpEq64_40(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 x (Const64 [m])) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpSub64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_1_1_0_0.Args[1]
+ mul := v_1_1_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ if x != v_1_1_0_0.Args[1] {
+ break
+ }
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
+ break
+ }
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 (Const64 [m]) x)) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpSub64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_1_1_0_0.Args[1]
+ if x != v_1_1_0_0.Args[0] {
+ break
+ }
+ mul := v_1_1_0_0.Args[1]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
+ break
+ }
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 x (Const64 [m]))) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst64 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpSub64 {
+ break
+ }
+ _ = v_1_1.Args[1]
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ v_1_1_0_0 := v_1_1_0.Args[0]
+ if v_1_1_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_1_1_0_0.Args[1]
+ if x != v_1_1_0_0.Args[0] {
+ break
+ }
+ mul := v_1_1_0_0.Args[1]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_1_1.Args[1]
+ if x != v_1_1_1.Args[0] {
+ break
+ }
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSub64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ mul := v_1_0_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ if x != v_1_0_0_0.Args[1] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 x (Const64 [m])) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSub64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ mul := v_1_0_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ if x != v_1_0_0_0.Args[1] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 (Const64 [m]) x)) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSub64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ if x != v_1_0_0_0.Args[0] {
+ break
+ }
+ mul := v_1_0_0_0.Args[1]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 x (Const64 [m]))) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpSub64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ if x != v_1_0_0_0.Args[0] {
+ break
+ }
+ mul := v_1_0_0_0.Args[1]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ if x != v_1_0_1.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpSub64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_0_1_0_0.Args[1]
+ mul := v_0_1_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ if x != v_0_1_0_0.Args[1] {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 x (Const64 [m])) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpSub64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_0_1_0_0.Args[1]
+ mul := v_0_1_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ if x != v_0_1_0_0.Args[1] {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 (Const64 [m]) x)) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpSub64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_0_1_0_0.Args[1]
+ if x != v_0_1_0_0.Args[0] {
+ break
+ }
+ mul := v_0_1_0_0.Args[1]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq64_50(v *Value) bool {
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 x (Const64 [m]))) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst64 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpSub64 {
+ break
+ }
+ _ = v_0_1.Args[1]
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ v_0_1_0_0 := v_0_1_0.Args[0]
+ if v_0_1_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_0_1_0_0.Args[1]
+ if x != v_0_1_0_0.Args[0] {
+ break
+ }
+ mul := v_0_1_0_0.Args[1]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ if x != v_0_1_1.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 63 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ mul := v_0_0_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ if x != v_0_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 x (Const64 [m])) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ mul := v_0_0_0_0.Args[0]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ if x != v_0_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 (Const64 [m]) x)) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ if x != v_0_0_0_0.Args[0] {
+ break
+ }
+ mul := v_0_0_0_0.Args[1]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst64 {
+ break
+ }
+ m := mul_0.AuxInt
+ if x != mul.Args[1] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 x (Const64 [m]))) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+ // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpSub64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpAdd64 {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ if x != v_0_0_0_0.Args[0] {
+ break
+ }
+ mul := v_0_0_0_0.Args[1]
+ if mul.Op != OpHmul64 {
+ break
+ }
+ _ = mul.Args[1]
+ if x != mul.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst64 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ if x != v_0_0_1.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 63 {
+ break
+ }
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+ break
+ }
+ v.reset(OpLeq64U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+ v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3.AuxInt = int64(sdivisible(64, c).m)
+ v2.AddArg(v3)
+ v2.AddArg(x)
+ v1.AddArg(v2)
+ v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4.AuxInt = int64(sdivisible(64, c).a)
+ v1.AddArg(v4)
+ v0.AddArg(v1)
+ v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5.AuxInt = int64(64 - sdivisible(64, c).k)
+ v0.AddArg(v5)
+ v.AddArg(v0)
+ v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v6.AuxInt = int64(sdivisible(64, c).max)
+ v.AddArg(v6)
+ return true
+ }
+ // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ if n != v_1_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_1_0_0_1.Type != t {
+ break
+ }
+ _ = v_1_0_0_1.Args[1]
+ v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+ if v_1_0_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_1_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_1_0.Args[1]
+ if n != v_1_0_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+ if v_1_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_1_0_1.AuxInt != 63 {
+ break
+ }
+ v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+ if v_1_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_1_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ _ = v.Args[1]
+ n := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_1_0.Args[1]
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_1_0_0.Type
+ _ = v_1_0_0.Args[1]
+ v_1_0_0_0 := v_1_0_0.Args[0]
+ if v_1_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_1_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0.Args[1]
+ v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+ if v_1_0_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_1_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_1_0_0_0_0.Args[1]
+ if n != v_1_0_0_0_0.Args[0] {
+ break
+ }
+ v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+ if v_1_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_0_0_0_0_1.AuxInt != 63 {
+ break
+ }
+ v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+ if v_1_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_1_0_0_0_1.AuxInt
+ if n != v_1_0_0.Args[1] {
+ break
+ }
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_1_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1.Type != typ.UInt64 {
+ break
+ }
+ if v_1_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ if n != v_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_0_0_0_1.Type != t {
+ break
+ }
+ _ = v_0_0_0_1.Args[1]
+ v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+ if v_0_0_0_1_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_0_0_0_1_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_1_0.Args[1]
+ if n != v_0_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+ if v_0_0_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_1_0_1.AuxInt != 63 {
+ break
+ }
+ v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+ if v_0_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_1_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_1_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+ // cond: k > 0 && k < 63 && kbar == 64 - k
+ // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+ for {
+ n := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpLsh64x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0_0.Args[1]
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpAdd64 {
+ break
+ }
+ t := v_0_0_0.Type
+ _ = v_0_0_0.Args[1]
+ v_0_0_0_0 := v_0_0_0.Args[0]
+ if v_0_0_0_0.Op != OpRsh64Ux64 {
+ break
+ }
+ if v_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0.Args[1]
+ v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+ if v_0_0_0_0_0.Op != OpRsh64x64 {
+ break
+ }
+ if v_0_0_0_0_0.Type != t {
+ break
+ }
+ _ = v_0_0_0_0_0.Args[1]
+ if n != v_0_0_0_0_0.Args[0] {
+ break
+ }
+ v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+ if v_0_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_0_0_0_0_1.AuxInt != 63 {
+ break
+ }
+ v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+ if v_0_0_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ kbar := v_0_0_0_0_1.AuxInt
+ if n != v_0_0_0.Args[1] {
+ break
+ }
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1.Type != typ.UInt64 {
+ break
+ }
+ k := v_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1.Type != typ.UInt64 {
+ break
+ }
+ if v_0_1.AuxInt != k {
+ break
+ }
+ if !(k > 0 && k < 63 && kbar == 64-k) {
+ break
+ }
+ v.reset(OpEq64)
+ v0 := b.NewValue0(v.Pos, OpAnd64, t)
+ v0.AddArg(n)
+ v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1.AuxInt = int64(1<<uint(k) - 1)
+ v0.AddArg(v1)
+ v.AddArg(v0)
+ v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2.AuxInt = 0
+ v.AddArg(v2)
+ return true
+ }
+ // match: (Eq64 s:(Sub64 x y) (Const64 [0]))
+ // cond: s.Uses == 1
+ // result: (Eq64 x y)
+ for {
+ _ = v.Args[1]
+ s := v.Args[0]
+ if s.Op != OpSub64 {
+ break
+ }
+ y := s.Args[1]
+ x := s.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ if v_1.AuxInt != 0 {
+ break
+ }
+ if !(s.Uses == 1) {
+ break
+ }
+ v.reset(OpEq64)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ return false
+}
+func rewriteValuegeneric_OpEq64_60(v *Value) bool { // generated rule matcher (DO NOT EDIT by hand); continuation of the Eq64 rule chain
+	// match: (Eq64 (Const64 [0]) s:(Sub64 x y))
+	// cond: s.Uses == 1
+	// result: (Eq64 x y)
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst64 {
+			break
+		}
+		if v_0.AuxInt != 0 { // rule applies only to comparison against zero
+			break
+		}
+		s := v.Args[1]
+		if s.Op != OpSub64 {
+			break
+		}
+		y := s.Args[1]
+		x := s.Args[0]
+		if !(s.Uses == 1) { // only fold when the Sub64 has no other users, so it becomes dead after the rewrite
+			break
+		}
+		v.reset(OpEq64) // x-y == 0  =>  x == y
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	return false // no rule in this chunk fired; caller tries the next chunk
+}
+func rewriteValuegeneric_OpEq64F_0(v *Value) bool { // generated rule matcher (DO NOT EDIT by hand): constant-folds float64 equality
+	// match: (Eq64F (Const64F [c]) (Const64F [d]))
+	// cond:
+	// result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst64F {
+			break
+		}
+		c := v_0.AuxInt // float64 constant stored as its bit pattern in AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst64F {
+			break
+		}
+		d := v_1.AuxInt
+		v.reset(OpConstBool)
+		v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d)) // compare as float64, not raw bits (IEEE semantics, e.g. NaN != NaN)
+		return true
+	}
+	// match: (Eq64F (Const64F [d]) (Const64F [c]))
+	// cond:
+	// result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst64F {
+			break
+		}
+		d := v_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst64F {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpConstBool)
+		v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d))
+		return true
+	}
+	return false // not a constant comparison; leave unchanged
+}
+func rewriteValuegeneric_OpEq8_0(v *Value) bool { // generated rule matcher (DO NOT EDIT by hand): first chunk of Eq8 simplifications
+	b := v.Block
+	config := b.Func.Config
+	typ := &b.Func.Config.Types
+	// match: (Eq8 x x)
+	// cond:
+	// result: (ConstBool [1])
+	for {
+		x := v.Args[1]
+		if x != v.Args[0] { // same *Value on both sides => trivially equal
+			break
+		}
+		v.reset(OpConstBool)
+		v.AuxInt = 1
+		return true
+	}
+	// match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
+	// cond:
+	// result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst8 {
+			break
+		}
+		t := v_0.Type
+		c := v_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpAdd8 {
+			break
+		}
+		x := v_1.Args[1]
+		v_1_0 := v_1.Args[0]
+		if v_1_0.Op != OpConst8 {
+			break
+		}
+		if v_1_0.Type != t { // both constants must have the same type for the fold to be valid
+			break
+		}
+		d := v_1_0.AuxInt
+		v.reset(OpEq8)
+		v0 := b.NewValue0(v.Pos, OpConst8, t)
+		v0.AuxInt = int64(int8(c - d)) // truncate to 8 bits, then sign-extend back into AuxInt's canonical form
+		v.AddArg(v0)
+		v.AddArg(x)
+		return true
+	}
+	// match: (Eq8 (Const8 <t> [c]) (Add8 x (Const8 <t> [d])))
+	// cond:
+	// result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst8 {
+			break
+		}
+		t := v_0.Type
+		c := v_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpAdd8 {
+			break
+		}
+		_ = v_1.Args[1]
+		x := v_1.Args[0]
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst8 {
+			break
+		}
+		if v_1_1.Type != t {
+			break
+		}
+		d := v_1_1.AuxInt
+		v.reset(OpEq8)
+		v0 := b.NewValue0(v.Pos, OpConst8, t)
+		v0.AuxInt = int64(int8(c - d))
+		v.AddArg(v0)
+		v.AddArg(x)
+		return true
+	}
+	// match: (Eq8 (Add8 (Const8 <t> [d]) x) (Const8 <t> [c]))
+	// cond:
+	// result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpAdd8 {
+			break
+		}
+		x := v_0.Args[1]
+		v_0_0 := v_0.Args[0]
+		if v_0_0.Op != OpConst8 {
+			break
+		}
+		t := v_0_0.Type
+		d := v_0_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst8 {
+			break
+		}
+		if v_1.Type != t {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpEq8)
+		v0 := b.NewValue0(v.Pos, OpConst8, t)
+		v0.AuxInt = int64(int8(c - d))
+		v.AddArg(v0)
+		v.AddArg(x)
+		return true
+	}
+	// match: (Eq8 (Add8 x (Const8 <t> [d])) (Const8 <t> [c]))
+	// cond:
+	// result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpAdd8 {
+			break
+		}
+		_ = v_0.Args[1]
+		x := v_0.Args[0]
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst8 {
+			break
+		}
+		t := v_0_1.Type
+		d := v_0_1.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst8 {
+			break
+		}
+		if v_1.Type != t {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpEq8)
+		v0 := b.NewValue0(v.Pos, OpConst8, t)
+		v0.AuxInt = int64(int8(c - d))
+		v.AddArg(v0)
+		v.AddArg(x)
+		return true
+	}
+	// match: (Eq8 (Const8 [c]) (Const8 [d]))
+	// cond:
+	// result: (ConstBool [b2i(c == d)])
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst8 {
+			break
+		}
+		c := v_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst8 {
+			break
+		}
+		d := v_1.AuxInt
+		v.reset(OpConstBool)
+		v.AuxInt = b2i(c == d)
+		return true
+	}
+	// match: (Eq8 (Const8 [d]) (Const8 [c]))
+	// cond:
+	// result: (ConstBool [b2i(c == d)])
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst8 {
+			break
+		}
+		d := v_0.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst8 {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpConstBool)
+		v.AuxInt = b2i(c == d)
+		return true
+	}
+	// match: (Eq8 (Mod8u x (Const8 [c])) (Const8 [0]))
+	// cond: x.Op != OpConst8 && udivisibleOK(8,c) && !hasSmallRotate(config)
+	// result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xff])) (Const32 <typ.UInt32> [0]))
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpMod8u {
+			break
+		}
+		_ = v_0.Args[1]
+		x := v_0.Args[0]
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst8 {
+			break
+		}
+		c := v_0_1.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst8 {
+			break
+		}
+		if v_1.AuxInt != 0 {
+			break
+		}
+		if !(x.Op != OpConst8 && udivisibleOK(8, c) && !hasSmallRotate(config)) { // !hasSmallRotate: presumably targets with 8-bit rotates use the rotate-based rules instead — NOTE(review) confirm against generic.rules
+			break
+		}
+		v.reset(OpEq32) // widen to a 32-bit mod test; later Eq32 rules can then apply the magic-number divisibility rewrite
+		v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
+		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+		v2.AuxInt = c & 0xff // mask to the unsigned 8-bit value before using it as a 32-bit constant
+		v0.AddArg(v2)
+		v.AddArg(v0)
+		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+		v3.AuxInt = 0
+		v.AddArg(v3)
+		return true
+	}
+	// match: (Eq8 (Const8 [0]) (Mod8u x (Const8 [c])))
+	// cond: x.Op != OpConst8 && udivisibleOK(8,c) && !hasSmallRotate(config)
+	// result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xff])) (Const32 <typ.UInt32> [0]))
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpConst8 {
+			break
+		}
+		if v_0.AuxInt != 0 {
+			break
+		}
+		v_1 := v.Args[1]
+		if v_1.Op != OpMod8u {
+			break
+		}
+		_ = v_1.Args[1]
+		x := v_1.Args[0]
+		v_1_1 := v_1.Args[1]
+		if v_1_1.Op != OpConst8 {
+			break
+		}
+		c := v_1_1.AuxInt
+		if !(x.Op != OpConst8 && udivisibleOK(8, c) && !hasSmallRotate(config)) {
+			break
+		}
+		v.reset(OpEq32)
+		v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
+		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+		v2.AuxInt = c & 0xff
+		v0.AddArg(v2)
+		v.AddArg(v0)
+		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+		v3.AuxInt = 0
+		v.AddArg(v3)
+		return true
+	}
+	// match: (Eq8 (Mod8 x (Const8 [c])) (Const8 [0]))
+	// cond: x.Op != OpConst8 && sdivisibleOK(8,c) && !hasSmallRotate(config)
+	// result: (Eq32 (Mod32 <typ.Int32> (SignExt8to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpMod8 {
+			break
+		}
+		_ = v_0.Args[1]
+		x := v_0.Args[0]
+		v_0_1 := v_0.Args[1]
+		if v_0_1.Op != OpConst8 {
+			break
+		}
+		c := v_0_1.AuxInt
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst8 {
+			break
+		}
+		if v_1.AuxInt != 0 {
+			break
+		}
+		if !(x.Op != OpConst8 && sdivisibleOK(8, c) && !hasSmallRotate(config)) {
+			break
+		}
+		v.reset(OpEq32) // signed variant: sign-extend and use the constant as-is (no masking)
+		v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
+		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
+		v1.AddArg(x)
+		v0.AddArg(v1)
+		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+		v2.AuxInt = c
+		v0.AddArg(v2)
+		v.AddArg(v0)
+		v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+		v3.AuxInt = 0
+		v.AddArg(v3)
+		return true
+	}
+	return false // no rule in this chunk fired; caller tries rewriteValuegeneric_OpEq8_10
+}
+func rewriteValuegeneric_OpEq8_10(v *Value) bool {
+ b := v.Block
+ config := b.Func.Config
+ typ := &b.Func.Config.Types
+ // match: (Eq8 (Const8 [0]) (Mod8 x (Const8 [c])))
+ // cond: x.Op != OpConst8 && sdivisibleOK(8,c) && !hasSmallRotate(config)
+ // result: (Eq32 (Mod32 <typ.Int32> (SignExt8to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpConst8 {
+ break
+ }
+ if v_0.AuxInt != 0 {
+ break
+ }
+ v_1 := v.Args[1]
+ if v_1.Op != OpMod8 {
+ break
+ }
+ _ = v_1.Args[1]
+ x := v_1.Args[0]
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst8 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(x.Op != OpConst8 && sdivisibleOK(8, c) && !hasSmallRotate(config)) {
+ break
+ }
+ v.reset(OpEq32)
+ v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
+ v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+ v2.AuxInt = c
+ v0.AddArg(v2)
+ v.AddArg(v0)
+ v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+ v3.AuxInt = 0
+ v.AddArg(v3)
+ return true
+ }
+ // match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul8 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst8 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc32to8 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt8to32 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ break
+ }
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul8 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpConst8 {
+ break
+ }
+ c := v_1_0.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpTrunc32to8 {
+ break
+ }
+ v_1_1_0 := v_1_1.Args[0]
+ if v_1_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_1_0.Args[1]
+ mul := v_1_1_0.Args[0]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt8to32 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_1_0_1 := v_1_1_0.Args[1]
+ if v_1_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ break
+ }
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq8 x (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) (Const8 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul8 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc32to8 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt8to32 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst8 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ break
+ }
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq8 x (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s]))) (Const8 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ for {
+ _ = v.Args[1]
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpMul8 {
+ break
+ }
+ _ = v_1.Args[1]
+ v_1_0 := v_1.Args[0]
+ if v_1_0.Op != OpTrunc32to8 {
+ break
+ }
+ v_1_0_0 := v_1_0.Args[0]
+ if v_1_0_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_1_0_0.Args[1]
+ mul := v_1_0_0.Args[0]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt8to32 {
+ break
+ }
+ if x != mul_0.Args[0] {
+ break
+ }
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
+ break
+ }
+ m := mul_1.AuxInt
+ v_1_0_0_1 := v_1_0_0.Args[1]
+ if v_1_0_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_1_0_0_1.AuxInt
+ v_1_1 := v_1.Args[1]
+ if v_1_1.Op != OpConst8 {
+ break
+ }
+ c := v_1_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ break
+ }
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq8 (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul8 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst8 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc32to8 {
+ break
+ }
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul32 {
+ break
+ }
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
+ break
+ }
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt8to32 {
+ break
+ }
+ if x != mul_1.Args[0] {
+ break
+ }
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ break
+ }
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
+ return true
+ }
+ // match: (Eq8 (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ for {
+ x := v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpMul8 {
+ break
+ }
+ _ = v_0.Args[1]
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpConst8 {
+ break
+ }
+ c := v_0_0.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpTrunc32to8 {
+ break
+ }
+ v_0_1_0 := v_0_1.Args[0]
+ if v_0_1_0.Op != OpRsh32Ux64 {
+ break
+ }
+ _ = v_0_1_0.Args[1]
+ mul := v_0_1_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- x := v_0.Args[1]
- v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpConst8 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt8to32 {
break
}
- t := v_0_0.Type
- d := v_0_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ if x != mul_0.Args[0] {
break
}
- if v_1.Type != t {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- c := v_1.AuxInt
- v.reset(OpEq8)
- v0 := b.NewValue0(v.Pos, OpConst8, t)
- v0.AuxInt = int64(int8(c - d))
+ m := mul_1.AuxInt
+ v_0_1_0_1 := v_0_1_0.Args[1]
+ if v_0_1_0_1.Op != OpConst64 {
+ break
+ }
+ s := v_0_1_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ break
+ }
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq8 (Add8 x (Const8 <t> [d])) (Const8 <t> [c]))
- // cond:
- // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+ // match: (Eq8 (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) (Const8 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpAdd8 {
+ if v_0.Op != OpMul8 {
break
}
_ = v_0.Args[1]
- x := v_0.Args[0]
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst8 {
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc32to8 {
break
}
- t := v_0_1.Type
- d := v_0_1.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32Ux64 {
break
}
- if v_1.Type != t {
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- c := v_1.AuxInt
- v.reset(OpEq8)
- v0 := b.NewValue0(v.Pos, OpConst8, t)
- v0.AuxInt = int64(int8(c - d))
- v.AddArg(v0)
- v.AddArg(x)
- return true
- }
- // match: (Eq8 (Const8 [c]) (Const8 [d]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpConst32 {
break
}
- c := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ m := mul_0.AuxInt
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpZeroExt8to32 {
break
}
- d := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
- return true
- }
- // match: (Eq8 (Const8 [d]) (Const8 [c]))
- // cond:
- // result: (ConstBool [b2i(c == d)])
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ if x != mul_1.Args[0] {
break
}
- d := v_0.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- c := v_1.AuxInt
- v.reset(OpConstBool)
- v.AuxInt = b2i(c == d)
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst8 {
+ break
+ }
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ break
+ }
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
+ v.AddArg(v0)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq8 (Mod8u x (Const8 [c])) (Const8 [0]))
- // cond: x.Op != OpConst8 && udivisibleOK(8,c) && !hasSmallRotate(config)
- // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xff])) (Const32 <typ.UInt32> [0]))
+ // match: (Eq8 (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s]))) (Const8 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
for {
- _ = v.Args[1]
+ x := v.Args[1]
v_0 := v.Args[0]
- if v_0.Op != OpMod8u {
+ if v_0.Op != OpMul8 {
break
}
_ = v_0.Args[1]
- x := v_0.Args[0]
- v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpConst8 {
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpTrunc32to8 {
break
}
- c := v_0_1.AuxInt
- v_1 := v.Args[1]
- if v_1.Op != OpConst8 {
+ v_0_0_0 := v_0_0.Args[0]
+ if v_0_0_0.Op != OpRsh32Ux64 {
break
}
- if v_1.AuxInt != 0 {
+ _ = v_0_0_0.Args[1]
+ mul := v_0_0_0.Args[0]
+ if mul.Op != OpMul32 {
break
}
- if !(x.Op != OpConst8 && udivisibleOK(8, c) && !hasSmallRotate(config)) {
+ _ = mul.Args[1]
+ mul_0 := mul.Args[0]
+ if mul_0.Op != OpZeroExt8to32 {
break
}
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
- v1.AddArg(x)
- v0.AddArg(v1)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = c & 0xff
- v0.AddArg(v2)
- v.AddArg(v0)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = 0
- v.AddArg(v3)
- return true
- }
- // match: (Eq8 (Const8 [0]) (Mod8u x (Const8 [c])))
- // cond: x.Op != OpConst8 && udivisibleOK(8,c) && !hasSmallRotate(config)
- // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xff])) (Const32 <typ.UInt32> [0]))
- for {
- _ = v.Args[1]
- v_0 := v.Args[0]
- if v_0.Op != OpConst8 {
+ if x != mul_0.Args[0] {
break
}
- if v_0.AuxInt != 0 {
+ mul_1 := mul.Args[1]
+ if mul_1.Op != OpConst32 {
break
}
- v_1 := v.Args[1]
- if v_1.Op != OpMod8u {
+ m := mul_1.AuxInt
+ v_0_0_0_1 := v_0_0_0.Args[1]
+ if v_0_0_0_1.Op != OpConst64 {
break
}
- _ = v_1.Args[1]
- x := v_1.Args[0]
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpConst8 {
+ s := v_0_0_0_1.AuxInt
+ v_0_1 := v_0.Args[1]
+ if v_0_1.Op != OpConst8 {
break
}
- c := v_1_1.AuxInt
- if !(x.Op != OpConst8 && udivisibleOK(8, c) && !hasSmallRotate(config)) {
+ c := v_0_1.AuxInt
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
break
}
- v.reset(OpEq32)
- v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
- v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
+ v.reset(OpLeq8U)
+ v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+ v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1.AddArg(v2)
v1.AddArg(x)
v0.AddArg(v1)
- v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v2.AuxInt = c & 0xff
- v0.AddArg(v2)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(8 - udivisible(8, c).k)
+ v0.AddArg(v3)
v.AddArg(v0)
- v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
- v3.AuxInt = 0
- v.AddArg(v3)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(udivisible(8, c).max))
+ v.AddArg(v4)
return true
}
- // match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s])))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
- // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ // match: (Eq8 x (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
}
c := v_1_0.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpTrunc32to8 {
+ if v_1_1.Op != OpSub8 {
break
}
+ _ = v_1_1.Args[1]
v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpRsh32Ux64 {
+ if v_1_1_0.Op != OpRsh32x64 {
break
}
_ = v_1_1_0.Args[1]
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpZeroExt8to32 {
+ if mul_1.Op != OpSignExt8to32 {
break
}
if x != mul_1.Args[0] {
break
}
s := v_1_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_1_1.Args[1]
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpSignExt8to32 {
+ break
+ }
+ if x != v_1_1_1_0.Args[0] {
+ break
+ }
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
break
}
v.reset(OpLeq8U)
v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
- v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
- v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(int8(sdivisible(8, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(sdivisible(8, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v3.AuxInt = int64(8 - udivisible(8, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v5.AuxInt = int64(8 - sdivisible(8, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v4.AuxInt = int64(int8(udivisible(8, c).max))
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v6.AuxInt = int64(int8(sdivisible(8, c).max))
+ v.AddArg(v6)
return true
}
return false
}
-func rewriteValuegeneric_OpEq8_10(v *Value) bool {
+func rewriteValuegeneric_OpEq8_20(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
- // match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s])))))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
- // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ // match: (Eq8 x (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (SignExt8to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31])))))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
}
c := v_1_0.AuxInt
v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpTrunc32to8 {
+ if v_1_1.Op != OpSub8 {
break
}
+ _ = v_1_1.Args[1]
v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpRsh32Ux64 {
+ if v_1_1_0.Op != OpRsh32x64 {
break
}
_ = v_1_1_0.Args[1]
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpZeroExt8to32 {
+ if mul_0.Op != OpSignExt8to32 {
break
}
if x != mul_0.Args[0] {
break
}
s := v_1_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ v_1_1_1 := v_1_1.Args[1]
+ if v_1_1_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_1_1.Args[1]
+ v_1_1_1_0 := v_1_1_1.Args[0]
+ if v_1_1_1_0.Op != OpSignExt8to32 {
+ break
+ }
+ if x != v_1_1_1_0.Args[0] {
+ break
+ }
+ v_1_1_1_1 := v_1_1_1.Args[1]
+ if v_1_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
break
}
v.reset(OpLeq8U)
v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
- v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
- v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(int8(sdivisible(8, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(sdivisible(8, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v3.AuxInt = int64(8 - udivisible(8, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v5.AuxInt = int64(8 - sdivisible(8, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v4.AuxInt = int64(int8(udivisible(8, c).max))
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v6.AuxInt = int64(int8(sdivisible(8, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq8 x (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) (Const8 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
- // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ // match: (Eq8 x (Mul8 (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) (Const8 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpTrunc32to8 {
+ if v_1_0.Op != OpSub8 {
break
}
+ _ = v_1_0.Args[1]
v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpRsh32Ux64 {
+ if v_1_0_0.Op != OpRsh32x64 {
break
}
_ = v_1_0_0.Args[1]
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpZeroExt8to32 {
+ if mul_1.Op != OpSignExt8to32 {
break
}
if x != mul_1.Args[0] {
break
}
s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ v_1_0_1_0 := v_1_0_1.Args[0]
+ if v_1_0_1_0.Op != OpSignExt8to32 {
+ break
+ }
+ if x != v_1_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 31 {
+ break
+ }
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst8 {
break
}
c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
break
}
v.reset(OpLeq8U)
v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
- v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
- v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(int8(sdivisible(8, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(sdivisible(8, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v3.AuxInt = int64(8 - udivisible(8, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v5.AuxInt = int64(8 - sdivisible(8, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v4.AuxInt = int64(int8(udivisible(8, c).max))
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v6.AuxInt = int64(int8(sdivisible(8, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq8 x (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s]))) (Const8 [c])))
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
- // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ // match: (Eq8 x (Mul8 (Sub8 (Rsh32x64 mul:(Mul32 (SignExt8to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) (Const8 [c])))
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
for {
_ = v.Args[1]
x := v.Args[0]
}
_ = v_1.Args[1]
v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpTrunc32to8 {
+ if v_1_0.Op != OpSub8 {
break
}
+ _ = v_1_0.Args[1]
v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpRsh32Ux64 {
+ if v_1_0_0.Op != OpRsh32x64 {
break
}
_ = v_1_0_0.Args[1]
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpZeroExt8to32 {
+ if mul_0.Op != OpSignExt8to32 {
break
}
if x != mul_0.Args[0] {
break
}
s := v_1_0_0_1.AuxInt
+ v_1_0_1 := v_1_0.Args[1]
+ if v_1_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_1_0_1.Args[1]
+ v_1_0_1_0 := v_1_0_1.Args[0]
+ if v_1_0_1_0.Op != OpSignExt8to32 {
+ break
+ }
+ if x != v_1_0_1_0.Args[0] {
+ break
+ }
+ v_1_0_1_1 := v_1_0_1.Args[1]
+ if v_1_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_1_0_1_1.AuxInt != 31 {
+ break
+ }
v_1_1 := v_1.Args[1]
if v_1_1.Op != OpConst8 {
break
}
c := v_1_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
break
}
v.reset(OpLeq8U)
v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
- v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
- v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(int8(sdivisible(8, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(sdivisible(8, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v3.AuxInt = int64(8 - udivisible(8, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v5.AuxInt = int64(8 - sdivisible(8, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v4.AuxInt = int64(int8(udivisible(8, c).max))
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v6.AuxInt = int64(int8(sdivisible(8, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq8 (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s])))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
- // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ // match: (Eq8 (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
}
c := v_0_0.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpTrunc32to8 {
+ if v_0_1.Op != OpSub8 {
break
}
+ _ = v_0_1.Args[1]
v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpRsh32Ux64 {
+ if v_0_1_0.Op != OpRsh32x64 {
break
}
_ = v_0_1_0.Args[1]
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpZeroExt8to32 {
+ if mul_1.Op != OpSignExt8to32 {
break
}
if x != mul_1.Args[0] {
break
}
s := v_0_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ v_0_1_1_0 := v_0_1_1.Args[0]
+ if v_0_1_1_0.Op != OpSignExt8to32 {
+ break
+ }
+ if x != v_0_1_1_0.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
break
}
v.reset(OpLeq8U)
v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
- v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
- v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(int8(sdivisible(8, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(sdivisible(8, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v3.AuxInt = int64(8 - udivisible(8, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v5.AuxInt = int64(8 - sdivisible(8, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v4.AuxInt = int64(int8(udivisible(8, c).max))
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v6.AuxInt = int64(int8(sdivisible(8, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq8 (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s])))) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
- // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ // match: (Eq8 (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (SignExt8to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31])))) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
}
c := v_0_0.AuxInt
v_0_1 := v_0.Args[1]
- if v_0_1.Op != OpTrunc32to8 {
+ if v_0_1.Op != OpSub8 {
break
}
+ _ = v_0_1.Args[1]
v_0_1_0 := v_0_1.Args[0]
- if v_0_1_0.Op != OpRsh32Ux64 {
+ if v_0_1_0.Op != OpRsh32x64 {
break
}
_ = v_0_1_0.Args[1]
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpZeroExt8to32 {
+ if mul_0.Op != OpSignExt8to32 {
break
}
if x != mul_0.Args[0] {
break
}
s := v_0_1_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ v_0_1_1 := v_0_1.Args[1]
+ if v_0_1_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_1_1.Args[1]
+ v_0_1_1_0 := v_0_1_1.Args[0]
+ if v_0_1_1_0.Op != OpSignExt8to32 {
+ break
+ }
+ if x != v_0_1_1_0.Args[0] {
+ break
+ }
+ v_0_1_1_1 := v_0_1_1.Args[1]
+ if v_0_1_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_1_1_1.AuxInt != 31 {
+ break
+ }
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
break
}
v.reset(OpLeq8U)
v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
- v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
- v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(int8(sdivisible(8, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(sdivisible(8, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v3.AuxInt = int64(8 - udivisible(8, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v5.AuxInt = int64(8 - sdivisible(8, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v4.AuxInt = int64(int8(udivisible(8, c).max))
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v6.AuxInt = int64(int8(sdivisible(8, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq8 (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) (Const8 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
- // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ // match: (Eq8 (Mul8 (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) (Const8 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpTrunc32to8 {
+ if v_0_0.Op != OpSub8 {
break
}
+ _ = v_0_0.Args[1]
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpRsh32Ux64 {
+ if v_0_0_0.Op != OpRsh32x64 {
break
}
_ = v_0_0_0.Args[1]
}
m := mul_0.AuxInt
mul_1 := mul.Args[1]
- if mul_1.Op != OpZeroExt8to32 {
+ if mul_1.Op != OpSignExt8to32 {
break
}
if x != mul_1.Args[0] {
break
}
s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ v_0_0_1_0 := v_0_0_1.Args[0]
+ if v_0_0_1_0.Op != OpSignExt8to32 {
+ break
+ }
+ if x != v_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 31 {
+ break
+ }
v_0_1 := v_0.Args[1]
if v_0_1.Op != OpConst8 {
break
}
c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
break
}
v.reset(OpLeq8U)
v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
- v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
- v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(int8(sdivisible(8, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(sdivisible(8, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v3.AuxInt = int64(8 - udivisible(8, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v5.AuxInt = int64(8 - sdivisible(8, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v4.AuxInt = int64(int8(udivisible(8, c).max))
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v6.AuxInt = int64(int8(sdivisible(8, c).max))
+ v.AddArg(v6)
return true
}
- // match: (Eq8 (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s]))) (Const8 [c])) x)
- // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
- // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+ // match: (Eq8 (Mul8 (Sub8 (Rsh32x64 mul:(Mul32 (SignExt8to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) (Const8 [c])) x)
+ // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+ // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
for {
x := v.Args[1]
v_0 := v.Args[0]
}
_ = v_0.Args[1]
v_0_0 := v_0.Args[0]
- if v_0_0.Op != OpTrunc32to8 {
+ if v_0_0.Op != OpSub8 {
break
}
+ _ = v_0_0.Args[1]
v_0_0_0 := v_0_0.Args[0]
- if v_0_0_0.Op != OpRsh32Ux64 {
+ if v_0_0_0.Op != OpRsh32x64 {
break
}
_ = v_0_0_0.Args[1]
}
_ = mul.Args[1]
mul_0 := mul.Args[0]
- if mul_0.Op != OpZeroExt8to32 {
+ if mul_0.Op != OpSignExt8to32 {
break
}
if x != mul_0.Args[0] {
break
}
s := v_0_0_0_1.AuxInt
+ v_0_0_1 := v_0_0.Args[1]
+ if v_0_0_1.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0_0_1.Args[1]
+ v_0_0_1_0 := v_0_0_1.Args[0]
+ if v_0_0_1_0.Op != OpSignExt8to32 {
+ break
+ }
+ if x != v_0_0_1_0.Args[0] {
+ break
+ }
+ v_0_0_1_1 := v_0_0_1.Args[1]
+ if v_0_0_1_1.Op != OpConst64 {
+ break
+ }
+ if v_0_0_1_1.AuxInt != 31 {
+ break
+ }
v_0_1 := v_0.Args[1]
if v_0_1.Op != OpConst8 {
break
}
c := v_0_1.AuxInt
- if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+ if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
break
}
v.reset(OpLeq8U)
v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
- v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
- v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v2.AuxInt = int64(int8(udivisible(8, c).m))
+ v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+ v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+ v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v3.AuxInt = int64(int8(sdivisible(8, c).m))
+ v2.AddArg(v3)
+ v2.AddArg(x)
v1.AddArg(v2)
- v1.AddArg(x)
+ v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v4.AuxInt = int64(int8(sdivisible(8, c).a))
+ v1.AddArg(v4)
v0.AddArg(v1)
- v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v3.AuxInt = int64(8 - udivisible(8, c).k)
- v0.AddArg(v3)
+ v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v5.AuxInt = int64(8 - sdivisible(8, c).k)
+ v0.AddArg(v5)
v.AddArg(v0)
- v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
- v4.AuxInt = int64(int8(udivisible(8, c).max))
- v.AddArg(v4)
+ v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+ v6.AuxInt = int64(int8(sdivisible(8, c).max))
+ v.AddArg(v6)
return true
}
// match: (Eq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
}
return false
}
-func rewriteValuegeneric_OpEq8_20(v *Value) bool {
+func rewriteValuegeneric_OpEq8_30(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
// match: (Eq8 (Lsh8x64 (Rsh8x64 (Add8 <t> (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
return true
}
// match: (Mod8u <t> x (Const8 [c]))
- // cond: x.Op != OpConst8 && c > 0 && umagicOK(8,c)
+ // cond: x.Op != OpConst8 && c > 0 && umagicOK(8, c)
// result: (Sub8 x (Mul8 <t> (Div8u <t> x (Const8 <t> [c])) (Const8 <t> [c])))
for {
t := v.Type