Cypherpunks repositories - gostls13.git/commitdiff
cmd/compile: add signed divisibility rules
author Brian Kessler <brian.m.kessler@gmail.com>
Fri, 5 Apr 2019 20:05:07 +0000 (14:05 -0600)
committer Brad Fitzpatrick <bradfitz@golang.org>
Tue, 30 Apr 2019 22:02:07 +0000 (22:02 +0000)
"Division by invariant integers using multiplication" paper
by Granlund and Montgomery contains a method for directly computing
divisibility (x%c == 0 for c constant) by means of the modular inverse.
The method is further elaborated in "Hacker's Delight" by Warren Section 10-17

This general rule can compute divisibility with one multiplication, an add,
and a compare for odd divisors, plus an additional rotate for even divisors.
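
As an illustration, the following is a minimal sketch (an editor's example, not
the compiler's generated code) of the signed check for one even divisor.  It
computes the same k, m, a and max constants that the new sdivisible helper
derives for 32-bit operands; for an odd divisor k is 0 and the rotate
disappears.

package main

import (
	"fmt"
	"math/bits"
)

func main() {
	const c = 6                        // divisor; c = d0 * 2^k with d0 odd
	k := uint(bits.TrailingZeros32(c)) // k = 1 for c = 6
	d0 := uint32(c) >> k               // odd part of the divisor, d0 = 3

	// Multiplicative inverse of d0 mod 2^32 via Newton's method; each
	// iteration doubles the number of correct bits (3 -> 6 -> 12 -> 24 -> 48).
	m := d0
	for i := 0; i < 4; i++ {
		m *= 2 - m*d0
	}

	a := (uint32(1<<31-1) / d0) &^ (1<<k - 1) // additive constant
	max := (2 * a) >> k                       // comparison bound

	for _, x := range []int32{-12, -7, -6, 0, 5, 6, 18, 19} {
		// One multiply, one add, one rotate (needed only because k > 0), one compare.
		q := uint32(x)*m + a
		divisible := bits.RotateLeft32(q, -int(k)) <= max
		fmt.Println(x, divisible, divisible == (x%c == 0)) // last column is always true
	}
}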

To apply the divisibility rule, we must take into account that the first
optimization pass "opt" already rewrites x%c as x-((x/c)*c) and expands (x/c)
for c constant.  This complicates the matching, as we want to match only in
the cases where the result of (x/c) is not also needed.  So, we must match
on the expanded form of (x/c) in the expression x == c*(x/c) in the
"late opt" pass, after common subexpression elimination.

Note that if an intermediate opt pass is introduced in the future, we could
simplify these rules by delaying the magic division rewrite to "late opt"
and matching directly on (x/c) in the intermediate pass.
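
For a hedged, source-level illustration of that constraint (the function names
below are invented for this example), the new rules can fire for the first
function but not for the second: there the quotient is also returned, so after
CSE the multiply inside the expanded (x/c) has a second use and the
mul.Uses == 1 condition in the rules does not hold.

package main

import "fmt"

//go:noinline
func remainderOnly(n int32) bool {
	// The quotient n/19 is not otherwise needed, so this is a candidate for
	// the new divisibility rewrite (multiply, add, rotate, compare).
	return n%19 == 0
}

//go:noinline
func remainderAndQuotient(n int32) (bool, int32) {
	// The quotient is returned as well, so the expanded division is shared
	// and the divisibility rewrite is not expected to fire here.
	return n%19 == 0, n / 19
}

func main() {
	for _, n := range []int32{-38, -1, 0, 19, 20, 57} {
		ok := remainderOnly(n)
		ok2, q := remainderAndQuotient(n)
		fmt.Println(n, ok, ok2, q)
	}
}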

On amd64, the divisibility check is 30-45% faster.

name                     old time/op  new time/op  delta
DivisiblePow2constI64-4  0.83ns ± 1%  0.82ns ± 0%     ~     (p=0.079 n=5+4)
DivisibleconstI64-4      2.68ns ± 1%  1.87ns ± 0%  -30.33%  (p=0.000 n=5+4)
DivisibleWDivconstI64-4  2.69ns ± 1%  2.71ns ± 3%     ~     (p=1.000 n=5+5)
DivisiblePow2constI32-4  1.15ns ± 1%  1.15ns ± 0%     ~     (p=0.238 n=5+4)
DivisibleconstI32-4      2.24ns ± 1%  1.20ns ± 0%  -46.48%  (p=0.016 n=5+4)
DivisibleWDivconstI32-4  2.27ns ± 1%  2.27ns ± 1%     ~     (p=0.683 n=5+5)
DivisiblePow2constI16-4  0.81ns ± 1%  0.82ns ± 1%     ~     (p=0.135 n=5+5)
DivisibleconstI16-4      2.11ns ± 2%  1.20ns ± 1%  -42.99%  (p=0.008 n=5+5)
DivisibleWDivconstI16-4  2.23ns ± 0%  2.27ns ± 2%   +1.79%  (p=0.029 n=4+4)
DivisiblePow2constI8-4   0.81ns ± 1%  0.81ns ± 1%     ~     (p=0.286 n=5+5)
DivisibleconstI8-4       2.13ns ± 3%  1.19ns ± 1%  -43.84%  (p=0.008 n=5+5)
DivisibleWDivconstI8-4   2.23ns ± 1%  2.25ns ± 1%     ~     (p=0.183 n=5+5)

Fixes #30282
Fixes #15806

Change-Id: Id20d78263a4fdfe0509229ae4dfa2fede83fc1d0
Reviewed-on: https://go-review.googlesource.com/c/go/+/173998
Run-TryBot: Brian Kessler <brian.m.kessler@gmail.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
src/cmd/compile/internal/gc/testdata/arith_test.go
src/cmd/compile/internal/ssa/gen/generic.rules
src/cmd/compile/internal/ssa/magic.go
src/cmd/compile/internal/ssa/magic_test.go
src/cmd/compile/internal/ssa/rewritegeneric.go
test/codegen/arithmetic.go

index 9821095f97514692bc16cb58a2896f0818162263..158fedc28efec5fb67260f644b1693f03102539f 100644 (file)
@@ -1283,24 +1283,65 @@ func div19_uint64(n uint64) bool {
        return n%19 == 0
 }
 
+//go:noinline
+func div6_int8(n int8) bool {
+       return n%6 == 0
+}
+
+//go:noinline
+func div6_int16(n int16) bool {
+       return n%6 == 0
+}
+
+//go:noinline
+func div6_int32(n int32) bool {
+       return n%6 == 0
+}
+
+//go:noinline
+func div6_int64(n int64) bool {
+       return n%6 == 0
+}
+
+//go:noinline
+func div19_int8(n int8) bool {
+       return n%19 == 0
+}
+
+//go:noinline
+func div19_int16(n int16) bool {
+       return n%19 == 0
+}
+
+//go:noinline
+func div19_int32(n int32) bool {
+       return n%19 == 0
+}
+
+//go:noinline
+func div19_int64(n int64) bool {
+       return n%19 == 0
+}
+
 // testDivisibility confirms that rewrite rules x%c ==0 for c constant are correct.
 func testDivisibility(t *testing.T) {
+       // unsigned tests
        // test an even and an odd divisor
-       var six, nineteen uint64 = 6, 19
+       var sixU, nineteenU uint64 = 6, 19
        // test all inputs for uint8, uint16
        for i := uint64(0); i <= math.MaxUint16; i++ {
                if i <= math.MaxUint8 {
-                       if want, got := uint8(i)%uint8(six) == 0, div6_uint8(uint8(i)); got != want {
+                       if want, got := uint8(i)%uint8(sixU) == 0, div6_uint8(uint8(i)); got != want {
                                t.Errorf("div6_uint8(%d) = %v want %v", i, got, want)
                        }
-                       if want, got := uint8(i)%uint8(nineteen) == 0, div19_uint8(uint8(i)); got != want {
+                       if want, got := uint8(i)%uint8(nineteenU) == 0, div19_uint8(uint8(i)); got != want {
                                t.Errorf("div6_uint19(%d) = %v want %v", i, got, want)
                        }
                }
-               if want, got := uint16(i)%uint16(six) == 0, div6_uint16(uint16(i)); got != want {
+               if want, got := uint16(i)%uint16(sixU) == 0, div6_uint16(uint16(i)); got != want {
                        t.Errorf("div6_uint16(%d) = %v want %v", i, got, want)
                }
-               if want, got := uint16(i)%uint16(nineteen) == 0, div19_uint16(uint16(i)); got != want {
+               if want, got := uint16(i)%uint16(nineteenU) == 0, div19_uint16(uint16(i)); got != want {
                        t.Errorf("div19_uint16(%d) = %v want %v", i, got, want)
                }
        }
@@ -1308,35 +1349,106 @@ func testDivisibility(t *testing.T) {
        // spot check inputs for uint32 and uint64
        xu := []uint64{
                0, 1, 2, 3, 4, 5,
-               six, 2 * six, 3 * six, 5 * six, 12345 * six,
-               six + 1, 2*six - 5, 3*six + 3, 5*six + 4, 12345*six - 2,
-               nineteen, 2 * nineteen, 3 * nineteen, 5 * nineteen, 12345 * nineteen,
-               nineteen + 1, 2*nineteen - 5, 3*nineteen + 3, 5*nineteen + 4, 12345*nineteen - 2,
+               sixU, 2 * sixU, 3 * sixU, 5 * sixU, 12345 * sixU,
+               sixU + 1, 2*sixU - 5, 3*sixU + 3, 5*sixU + 4, 12345*sixU - 2,
+               nineteenU, 2 * nineteenU, 3 * nineteenU, 5 * nineteenU, 12345 * nineteenU,
+               nineteenU + 1, 2*nineteenU - 5, 3*nineteenU + 3, 5*nineteenU + 4, 12345*nineteenU - 2,
                maxU32, maxU32 - 1, maxU32 - 2, maxU32 - 3, maxU32 - 4,
-               maxU32, maxU32 - 5, maxU32 - 6, maxU32 - 7, maxU32 - 8,
-               maxU32, maxU32 - 9, maxU32 - 10, maxU32 - 11, maxU32 - 12,
-               maxU32, maxU32 - 13, maxU32 - 14, maxU32 - 15, maxU32 - 16,
-               maxU32, maxU32 - 17, maxU32 - 18, maxU32 - 19, maxU32 - 20,
+               maxU32 - 5, maxU32 - 6, maxU32 - 7, maxU32 - 8,
+               maxU32 - 9, maxU32 - 10, maxU32 - 11, maxU32 - 12,
+               maxU32 - 13, maxU32 - 14, maxU32 - 15, maxU32 - 16,
+               maxU32 - 17, maxU32 - 18, maxU32 - 19, maxU32 - 20,
                maxU64, maxU64 - 1, maxU64 - 2, maxU64 - 3, maxU64 - 4,
-               maxU64, maxU64 - 5, maxU64 - 6, maxU64 - 7, maxU64 - 8,
-               maxU64, maxU64 - 9, maxU64 - 10, maxU64 - 11, maxU64 - 12,
-               maxU64, maxU64 - 13, maxU64 - 14, maxU64 - 15, maxU64 - 16,
-               maxU64, maxU64 - 17, maxU64 - 18, maxU64 - 19, maxU64 - 20,
+               maxU64 - 5, maxU64 - 6, maxU64 - 7, maxU64 - 8,
+               maxU64 - 9, maxU64 - 10, maxU64 - 11, maxU64 - 12,
+               maxU64 - 13, maxU64 - 14, maxU64 - 15, maxU64 - 16,
+               maxU64 - 17, maxU64 - 18, maxU64 - 19, maxU64 - 20,
        }
        for _, x := range xu {
                if x <= maxU32 {
-                       if want, got := uint32(x)%uint32(six) == 0, div6_uint32(uint32(x)); got != want {
+                       if want, got := uint32(x)%uint32(sixU) == 0, div6_uint32(uint32(x)); got != want {
                                t.Errorf("div6_uint32(%d) = %v want %v", x, got, want)
                        }
-                       if want, got := uint32(x)%uint32(nineteen) == 0, div19_uint32(uint32(x)); got != want {
+                       if want, got := uint32(x)%uint32(nineteenU) == 0, div19_uint32(uint32(x)); got != want {
                                t.Errorf("div19_uint32(%d) = %v want %v", x, got, want)
                        }
                }
-               if want, got := x%six == 0, div6_uint64(x); got != want {
+               if want, got := x%sixU == 0, div6_uint64(x); got != want {
                        t.Errorf("div6_uint64(%d) = %v want %v", x, got, want)
                }
-               if want, got := x%nineteen == 0, div19_uint64(x); got != want {
+               if want, got := x%nineteenU == 0, div19_uint64(x); got != want {
                        t.Errorf("div19_uint64(%d) = %v want %v", x, got, want)
                }
        }
+
+       // signed tests
+       // test an even and an odd divisor
+       var sixS, nineteenS int64 = 6, 19
+       // test all inputs for int8, int16
+       for i := int64(math.MinInt16); i <= math.MaxInt16; i++ {
+               if math.MinInt8 <= i && i <= math.MaxInt8 {
+                       if want, got := int8(i)%int8(sixS) == 0, div6_int8(int8(i)); got != want {
+                               t.Errorf("div6_int8(%d) = %v want %v", i, got, want)
+                       }
+                       if want, got := int8(i)%int8(nineteenS) == 0, div19_int8(int8(i)); got != want {
+                       t.Errorf("div19_int8(%d) = %v want %v", i, got, want)
+                       }
+               }
+               if want, got := int16(i)%int16(sixS) == 0, div6_int16(int16(i)); got != want {
+                       t.Errorf("div6_int16(%d) = %v want %v", i, got, want)
+               }
+               if want, got := int16(i)%int16(nineteenS) == 0, div19_int16(int16(i)); got != want {
+                       t.Errorf("div19_int16(%d) = %v want %v", i, got, want)
+               }
+       }
+       var minI32, maxI32, minI64, maxI64 int64 = math.MinInt32, math.MaxInt32, math.MinInt64, math.MaxInt64
+       // spot check inputs for int32 and int64
+       xs := []int64{
+               0, 1, 2, 3, 4, 5,
+               -1, -2, -3, -4, -5,
+               sixS, 2 * sixS, 3 * sixS, 5 * sixS, 12345 * sixS,
+               sixS + 1, 2*sixS - 5, 3*sixS + 3, 5*sixS + 4, 12345*sixS - 2,
+               -sixS, -2 * sixS, -3 * sixS, -5 * sixS, -12345 * sixS,
+               -sixS + 1, -2*sixS - 5, -3*sixS + 3, -5*sixS + 4, -12345*sixS - 2,
+               nineteenS, 2 * nineteenS, 3 * nineteenS, 5 * nineteenS, 12345 * nineteenS,
+               nineteenS + 1, 2*nineteenS - 5, 3*nineteenS + 3, 5*nineteenS + 4, 12345*nineteenS - 2,
+               -nineteenS, -2 * nineteenS, -3 * nineteenS, -5 * nineteenS, -12345 * nineteenS,
+               -nineteenS + 1, -2*nineteenS - 5, -3*nineteenS + 3, -5*nineteenS + 4, -12345*nineteenS - 2,
+               minI32, minI32 + 1, minI32 + 2, minI32 + 3, minI32 + 4,
+               minI32 + 5, minI32 + 6, minI32 + 7, minI32 + 8,
+               minI32 + 9, minI32 + 10, minI32 + 11, minI32 + 12,
+               minI32 + 13, minI32 + 14, minI32 + 15, minI32 + 16,
+               minI32 + 17, minI32 + 18, minI32 + 19, minI32 + 20,
+               maxI32, maxI32 - 1, maxI32 - 2, maxI32 - 3, maxI32 - 4,
+               maxI32 - 5, maxI32 - 6, maxI32 - 7, maxI32 - 8,
+               maxI32 - 9, maxI32 - 10, maxI32 - 11, maxI32 - 12,
+               maxI32 - 13, maxI32 - 14, maxI32 - 15, maxI32 - 16,
+               maxI32 - 17, maxI32 - 18, maxI32 - 19, maxI32 - 20,
+               minI64, minI64 + 1, minI64 + 2, minI64 + 3, minI64 + 4,
+               minI64 + 5, minI64 + 6, minI64 + 7, minI64 + 8,
+               minI64 + 9, minI64 + 10, minI64 + 11, minI64 + 12,
+               minI64 + 13, minI64 + 14, minI64 + 15, minI64 + 16,
+               minI64 + 17, minI64 + 18, minI64 + 19, minI64 + 20,
+               maxI64, maxI64 - 1, maxI64 - 2, maxI64 - 3, maxI64 - 4,
+               maxI64 - 5, maxI64 - 6, maxI64 - 7, maxI64 - 8,
+               maxI64 - 9, maxI64 - 10, maxI64 - 11, maxI64 - 12,
+               maxI64 - 13, maxI64 - 14, maxI64 - 15, maxI64 - 16,
+               maxI64 - 17, maxI64 - 18, maxI64 - 19, maxI64 - 20,
+       }
+       for _, x := range xs {
+               if minI32 <= x && x <= maxI32 {
+                       if want, got := int32(x)%int32(sixS) == 0, div6_int32(int32(x)); got != want {
+                               t.Errorf("div6_int32(%d) = %v want %v", x, got, want)
+                       }
+                       if want, got := int32(x)%int32(nineteenS) == 0, div19_int32(int32(x)); got != want {
+                               t.Errorf("div19_int32(%d) = %v want %v", x, got, want)
+                       }
+               }
+               if want, got := x%sixS == 0, div6_int64(x); got != want {
+                       t.Errorf("div6_int64(%d) = %v want %v", x, got, want)
+               }
+               if want, got := x%nineteenS == 0, div19_int64(x); got != want {
+                       t.Errorf("div19_int64(%d) = %v want %v", x, got, want)
+               }
+       }
 }
index 27ce28aa59cc572b9c10c7adf61350033ff51498..454eb498c6c37b66cfde101f56979935638c06e4 100644 (file)
        (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xff])) (Const32 <typ.UInt32> [0]))
 (Eq16 (Mod16u x (Const16  [c])) (Const16 [0])) && x.Op != OpConst16 && udivisibleOK(16,c) && !hasSmallRotate(config) ->
        (Eq32 (Mod32u <typ.UInt32> (ZeroExt16to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xffff])) (Const32 <typ.UInt32> [0]))
+(Eq8 (Mod8 x (Const8  [c])) (Const8 [0])) && x.Op != OpConst8 && sdivisibleOK(8,c) && !hasSmallRotate(config) ->
+       (Eq32 (Mod32 <typ.Int32> (SignExt8to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
+(Eq16 (Mod16 x (Const16  [c])) (Const16 [0])) && x.Op != OpConst16 && sdivisibleOK(16,c) && !hasSmallRotate(config) ->
+       (Eq32 (Mod32 <typ.Int32> (SignExt16to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
 
 // Divisibility checks x%c == 0 convert to multiply and rotate.
 // Note, x%c == 0 is rewritten as x == c*(x/c) during the opt pass
 // Note that if there were an intermediate opt pass, this rule could be applied
 // directly on the Div op and magic division rewrites could be delayed to late opt.
 
+// Unsigned divisibility checks convert to multiply and rotate.
 (Eq8 x (Mul8 (Const8 [c])
   (Trunc32to8
     (Rsh32Ux64
                        (Const64 <typ.UInt64> [int64(udivisible(64,c).max)])
                )
 
+// Signed divisibility checks convert to multiply, add and rotate.
+(Eq8 x (Mul8 (Const8 [c])
+  (Sub8
+    (Rsh32x64
+      mul:(Mul32
+        (Const32 [m])
+        (SignExt8to32 x))
+      (Const64 [s]))
+    (Rsh32x64
+      (SignExt8to32 x)
+      (Const64 [31])))
+       )
+)
+  && v.Block.Func.pass.name != "opt" && mul.Uses == 1
+  && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s
+       && x.Op != OpConst8 && sdivisibleOK(8,c)
+ -> (Leq8U
+                       (RotateLeft8 <typ.UInt8>
+                               (Add8 <typ.UInt8>
+                                       (Mul8 <typ.UInt8>
+                                               (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))])
+                                               x)
+                                       (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))])
+                               )
+                               (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)])
+                       )
+                       (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))])
+               )
+
+(Eq16 x (Mul16 (Const16 [c])
+  (Sub16
+    (Rsh32x64
+      mul:(Mul32
+        (Const32 [m])
+        (SignExt16to32 x))
+      (Const64 [s]))
+    (Rsh32x64
+      (SignExt16to32 x)
+      (Const64 [31])))
+       )
+)
+  && v.Block.Func.pass.name != "opt" && mul.Uses == 1
+  && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s
+       && x.Op != OpConst16 && sdivisibleOK(16,c)
+ -> (Leq16U
+                       (RotateLeft16 <typ.UInt16>
+                               (Add16 <typ.UInt16>
+                                       (Mul16 <typ.UInt16>
+                                               (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))])
+                                               x)
+                                       (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))])
+                               )
+                               (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)])
+                       )
+                       (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))])
+               )
+
+(Eq32 x (Mul32 (Const32 [c])
+  (Sub32
+    (Rsh64x64
+      mul:(Mul64
+        (Const64 [m])
+        (SignExt32to64 x))
+      (Const64 [s]))
+    (Rsh64x64
+      (SignExt32to64 x)
+      (Const64 [63])))
+       )
+)
+  && v.Block.Func.pass.name != "opt" && mul.Uses == 1
+  && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s
+       && x.Op != OpConst32 && sdivisibleOK(32,c)
+ -> (Leq32U
+                       (RotateLeft32 <typ.UInt32>
+                               (Add32 <typ.UInt32>
+                                       (Mul32 <typ.UInt32>
+                                               (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))])
+                                               x)
+                                       (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))])
+                               )
+                               (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)])
+                       )
+                       (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))])
+               )
+
+(Eq32 x (Mul32 (Const32 [c])
+  (Sub32
+    (Rsh32x64
+      mul:(Hmul32
+        (Const32 [m])
+        x)
+      (Const64 [s]))
+    (Rsh32x64
+      x
+      (Const64 [31])))
+       )
+)
+  && v.Block.Func.pass.name != "opt" && mul.Uses == 1
+  && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1
+       && x.Op != OpConst32 && sdivisibleOK(32,c)
+ -> (Leq32U
+                       (RotateLeft32 <typ.UInt32>
+                               (Add32 <typ.UInt32>
+                                       (Mul32 <typ.UInt32>
+                                               (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))])
+                                               x)
+                                       (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))])
+                               )
+                               (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)])
+                       )
+                       (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))])
+               )
+
+(Eq32 x (Mul32 (Const32 [c])
+  (Sub32
+    (Rsh32x64
+      (Add32
+        mul:(Hmul32
+          (Const32 [m])
+          x)
+        x)
+      (Const64 [s]))
+    (Rsh32x64
+      x
+      (Const64 [31])))
+       )
+)
+  && v.Block.Func.pass.name != "opt" && mul.Uses == 1
+  && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s
+       && x.Op != OpConst32 && sdivisibleOK(32,c)
+ -> (Leq32U
+                       (RotateLeft32 <typ.UInt32>
+                               (Add32 <typ.UInt32>
+                                       (Mul32 <typ.UInt32>
+                                               (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))])
+                                               x)
+                                       (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))])
+                               )
+                               (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)])
+                       )
+                       (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))])
+               )
+
+(Eq64 x (Mul64 (Const64 [c])
+  (Sub64
+    (Rsh64x64
+      mul:(Hmul64
+        (Const64 [m])
+        x)
+      (Const64 [s]))
+    (Rsh64x64
+      x
+      (Const64 [63])))
+       )
+)
+  && v.Block.Func.pass.name != "opt" && mul.Uses == 1
+  && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1
+       && x.Op != OpConst64 && sdivisibleOK(64,c)
+ -> (Leq64U
+                       (RotateLeft64 <typ.UInt64>
+                               (Add64 <typ.UInt64>
+                                       (Mul64 <typ.UInt64>
+                                               (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)])
+                                               x)
+                                       (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)])
+                               )
+                               (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)])
+                       )
+                       (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)])
+               )
+
+(Eq64 x (Mul64 (Const64 [c])
+  (Sub64
+    (Rsh64x64
+      (Add64
+        mul:(Hmul64
+          (Const64 [m])
+          x)
+        x)
+      (Const64 [s]))
+    (Rsh64x64
+      x
+      (Const64 [63])))
+       )
+)
+  && v.Block.Func.pass.name != "opt" && mul.Uses == 1
+  && m == int64(smagic(64,c).m) && s == smagic(64,c).s
+       && x.Op != OpConst64 && sdivisibleOK(64,c)
+ -> (Leq64U
+                       (RotateLeft64 <typ.UInt64>
+                               (Add64 <typ.UInt64>
+                                       (Mul64 <typ.UInt64>
+                                               (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)])
+                                               x)
+                                       (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)])
+                               )
+                               (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)])
+                       )
+                       (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)])
+               )
+
 // Divisibility check for signed integers for power of two constant are simple mask.
-// However, we must match against the rewritten n%c == 0 -> n - c*(n/c) == 0 -> n == c *(n/c)
+// However, we must match against the rewritten n%c == 0 -> n - c*(n/c) == 0 -> n == c*(n/c)
 // where n/c contains fixup code to handle signed n.
 (Eq8 n (Lsh8x64
   (Rsh8x64
index b1c4eb1222de0a2fa6460b051d3da51233685664..e0c627184bbcd3b11ea8df3fe7585eaa9a6b4bdb 100644 (file)
@@ -195,7 +195,7 @@ func smagic(n uint, c int64) smagicData {
 // by using the modular inverse with respect to the word size 2^n.
 //
 // Given c, compute m such that (c * m) mod 2^n == 1
-// Then if c divides x (x%c ==0), the quotient is given by q = x/c == x*cinv mod 2^n
+// Then if c divides x (x%c ==0), the quotient is given by q = x/c == x*m mod 2^n
 //
 // x can range from 0, c, 2c, 3c, ... ⎣(2^n - 1)/c⎦ * c the maximum multiple
 // Thus, x*m mod 2^n is 0, 1, 2, 3, ... ⎣(2^n - 1)/c⎦
@@ -285,3 +285,97 @@ func udivisible(n uint, c int64) udivisibleData {
                max: max,
        }
 }
+
+// For signed integers, a similar method follows.
+//
+// Given c > 1 and odd, compute m such that (c * m) mod 2^n == 1
+// Then if c divides x (x%c ==0), the quotient is given by q = x/c == x*m mod 2^n
+//
+// x can range from ⎡-2^(n-1)/c⎤ * c, ... -c, 0, c, ...  ⎣(2^(n-1) - 1)/c⎦ * c
+// Thus, x*m mod 2^n is ⎡-2^(n-1)/c⎤, ... -2, -1, 0, 1, 2, ... ⎣(2^(n-1) - 1)/c⎦
+//
+// So, x is a multiple of c if and only if:
+// ⎡-2^(n-1)/c⎤ <= x*m mod 2^n <= ⎣(2^(n-1) - 1)/c⎦
+//
+// Since c > 1 and odd, this can be simplified by
+// ⎡-2^(n-1)/c⎤ == ⎡(-2^(n-1) + 1)/c⎤ == -⎣(2^(n-1) - 1)/c⎦
+//
+// -⎣(2^(n-1) - 1)/c⎦ <= x*m mod 2^n <= ⎣(2^(n-1) - 1)/c⎦
+//
+// To extend this to even integers, consider c = d0 * 2^k where d0 is odd.
+// We can test whether x is divisible by both d0 and 2^k.
+//
+// Let m be such that (d0 * m) mod 2^n == 1.
+// Let q = x*m mod 2^n. Then c divides x if:
+//
+// -⎣(2^(n-1) - 1)/d0⎦ <= q <= ⎣(2^(n-1) - 1)/d0⎦ and q ends in at least k 0-bits
+//
+// To transform this to a single comparison, we use the following theorem (ZRS in Hacker's Delight).
+//
+// For a >= 0 the following conditions are equivalent:
+// 1) -a <= x <= a and x ends in at least k 0-bits
+// 2) RotRight(x+a', k) <= ⎣2a'/2^k⎦
+//
+// Where a' = a & -2^k (a with its right k bits set to zero)
+//
+// To see that 1 & 2 are equivalent, note that -a <= x <= a is equivalent to
+// -a' <= x <= a' if and only if x ends in at least k 0-bits.  Adding a' to each side gives
+// 0 <= x + a' <= 2a' and x + a' ends in at least k 0-bits if and only if x does since a' has
+// k 0-bits by definition.  We can use theorem ZRU above with x -> x + a' and a -> 2a' giving 1) == 2).
+//
+// Let m be such that (d0 * m) mod 2^n == 1.
+// Let q = x*m mod 2^n.
+// Let a' = ⎣(2^(n-1) - 1)/d0⎦ & -2^k
+//
+// Then the divisibility test is:
+//
+// RotRight(q+a', k) <= ⎣2a'/2^k⎦
+//
+// Note that the calculation is performed using unsigned integers.
+// Since a' can have n-1 bits, 2a' may have n bits and there is no risk of overflow.
+
+// sdivisibleOK reports whether we should strength reduce an n-bit divisibility check by c.
+func sdivisibleOK(n uint, c int64) bool {
+       if c < 0 {
+               // Doesn't work for negative c.
+               return false
+       }
+       // Doesn't work for 0.
+       // Don't use it for powers of 2.
+       return c&(c-1) != 0
+}
+
+type sdivisibleData struct {
+       k   int64  // trailingZeros(c)
+       m   uint64 // m * (c>>k) mod 2^n == 1 multiplicative inverse of odd portion modulo 2^n
+       a   uint64 // ⎣(2^(n-1) - 1)/ (c>>k)⎦ & -(1<<k) additive constant
+       max uint64 // ⎣(2 a) / (1<<k)⎦ max value for divisibility
+}
+
+func sdivisible(n uint, c int64) sdivisibleData {
+       d := uint64(c)
+       k := bits.TrailingZeros64(d)
+       d0 := d >> uint(k) // the odd portion of the divisor
+
+       mask := ^uint64(0) >> (64 - n)
+
+       // Calculate the multiplicative inverse via Newton's method.
+       // Quadratic convergence doubles the number of correct bits per iteration.
+       m := d0            // initial guess correct to 3-bits d0*d0 mod 8 == 1
+       m = m * (2 - m*d0) // 6-bits
+       m = m * (2 - m*d0) // 12-bits
+       m = m * (2 - m*d0) // 24-bits
+       m = m * (2 - m*d0) // 48-bits
+       m = m * (2 - m*d0) // 96-bits >= 64-bits
+       m = m & mask
+
+       a := ((mask >> 1) / d0) & -(1 << uint(k))
+       max := (2 * a) >> uint(k)
+
+       return sdivisibleData{
+               k:   int64(k),
+               m:   m,
+               a:   a,
+               max: max,
+       }
+}
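
As a hedged aside (an editor's example, not part of the commit), the
rotate-and-compare equivalence quoted from Hacker's Delight in the comments
above can be brute-forced for 8-bit values; the sketch below checks that
conditions 1) and 2) agree for every a, k and x.

package main

import (
	"fmt"
	"math/bits"
)

func main() {
	const n = 8
	mismatches := 0
	for k := uint(0); k < n; k++ {
		for a := int64(0); a < 1<<(n-1); a++ {
			ap := uint8(a) &^ (1<<k - 1)        // a' = a with its low k bits cleared
			max := uint8((2 * uint16(ap)) >> k) // ⎣2a'/2^k⎦
			for x := int64(-(1 << (n - 1))); x < 1<<(n-1); x++ {
				// Condition 1: -a <= x <= a and x ends in at least k 0-bits.
				cond1 := -a <= x && x <= a && uint(bits.TrailingZeros8(uint8(x))) >= k
				// Condition 2: RotRight(x+a', k) <= ⎣2a'/2^k⎦ in 8-bit unsigned arithmetic.
				cond2 := bits.RotateLeft8(uint8(x)+ap, -int(k)) <= max
				if cond1 != cond2 {
					mismatches++
				}
			}
		}
	}
	fmt.Println("mismatches:", mismatches) // expect 0
}
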
index 1d15baf5372fac7011cf5ddfd898f75dd6699576..7c6009dea6c83b9e0daeedf30b13a89b36d84844 100644 (file)
@@ -184,7 +184,7 @@ func TestMagicSigned(t *testing.T) {
                                -c - 1, -c, -c + 1, c - 1, c, c + 1,
                                -2*c - 1, -2 * c, -2*c + 1, 2*c - 1, 2 * c, 2*c + 1,
                                -mul - 1, -mul, -mul + 1, mul - 1, mul, mul + 1,
-                               int64(1)<<n - 1, -int64(1)<<n + 1,
+                               int64(1)<<(n-1) - 1, -int64(1) << (n - 1),
                        } {
                                X := new(big.Int).SetInt64(x)
                                if X.Cmp(Min) < 0 || X.Cmp(Max) > 0 {
@@ -303,3 +303,108 @@ func TestDivisibleUnsigned(t *testing.T) {
                }
        }
 }
+
+func testDivisibleExhaustive(t *testing.T, n uint) {
+       minI := -int64(1) << (n - 1)
+       maxI := int64(1) << (n - 1)
+       for c := int64(1); c < maxI; c++ {
+               if !sdivisibleOK(n, int64(c)) {
+                       continue
+               }
+               k := sdivisible(n, int64(c)).k
+               m := sdivisible(n, int64(c)).m
+               a := sdivisible(n, int64(c)).a
+               max := sdivisible(n, int64(c)).max
+               mask := ^uint64(0) >> (64 - n)
+               for i := minI; i < maxI; i++ {
+                       want := i%c == 0
+                       mul := (uint64(i)*m + a) & mask
+                       rot := (mul>>uint(k) | mul<<(n-uint(k))) & mask
+                       got := rot <= max
+                       if want != got {
+                               t.Errorf("signed divisible wrong for %d %% %d == 0: got %v, want %v (k=%d,m=%d,a=%d,max=%d)\n", i, c, got, want, k, m, a, max)
+                       }
+               }
+       }
+}
+
+func TestDivisibleExhaustive8(t *testing.T) {
+       testDivisibleExhaustive(t, 8)
+}
+
+func TestDivisibleExhaustive16(t *testing.T) {
+       if testing.Short() {
+               t.Skip("slow test; skipping")
+       }
+       testDivisibleExhaustive(t, 16)
+}
+
+func TestDivisibleSigned(t *testing.T) {
+       One := new(big.Int).SetInt64(1)
+       for _, n := range [...]uint{8, 16, 32, 64} {
+               TwoNMinusOne := new(big.Int).Lsh(One, n-1)
+               Max := new(big.Int).Sub(TwoNMinusOne, One)
+               Min := new(big.Int).Neg(TwoNMinusOne)
+               for _, c := range [...]int64{
+                       3,
+                       5,
+                       6,
+                       7,
+                       9,
+                       10,
+                       11,
+                       12,
+                       13,
+                       14,
+                       15,
+                       17,
+                       1<<7 - 1,
+                       1<<7 + 1,
+                       1<<15 - 1,
+                       1<<15 + 1,
+                       1<<31 - 1,
+                       1<<31 + 1,
+                       1<<63 - 1,
+               } {
+                       if c>>(n-1) != 0 {
+                               continue // not appropriate for the given n.
+                       }
+                       if !sdivisibleOK(n, int64(c)) {
+                               t.Errorf("expected n=%d c=%d to pass\n", n, c)
+                       }
+                       k := sdivisible(n, int64(c)).k
+                       m := sdivisible(n, int64(c)).m
+                       a := sdivisible(n, int64(c)).a
+                       max := sdivisible(n, int64(c)).max
+                       mask := ^uint64(0) >> (64 - n)
+
+                       C := new(big.Int).SetInt64(c)
+
+                       // Find largest multiple of c.
+                       Mul := new(big.Int).Div(Max, C)
+                       Mul.Mul(Mul, C)
+                       mul := Mul.Int64()
+
+                       // Try some input values, mostly around multiples of c.
+                       for _, x := range [...]int64{
+                               -1, 1,
+                               -c - 1, -c, -c + 1, c - 1, c, c + 1,
+                               -2*c - 1, -2 * c, -2*c + 1, 2*c - 1, 2 * c, 2*c + 1,
+                               -mul - 1, -mul, -mul + 1, mul - 1, mul, mul + 1,
+                               int64(1)<<(n-1) - 1, -int64(1) << (n - 1),
+                       } {
+                               X := new(big.Int).SetInt64(x)
+                               if X.Cmp(Min) < 0 || X.Cmp(Max) > 0 {
+                                       continue
+                               }
+                               want := x%c == 0
+                               mul := (uint64(x)*m + a) & mask
+                               rot := (mul>>uint(k) | mul<<(n-uint(k))) & mask
+                               got := rot <= max
+                               if want != got {
+                                       t.Errorf("signed divisible wrong for %d %% %d == 0: got %v, want %v (k=%d,m=%d,a=%d,max=%d)\n", x, c, got, want, k, m, a, max)
+                               }
+                       }
+               }
+       }
+}
index cefa12d4333d0a2b2ae94fa51e18e3c4cd2952ac..7117f77d2481ff1ecce2515579fd428f7242af52 100644 (file)
@@ -98,17 +98,17 @@ func rewriteValuegeneric(v *Value) bool {
        case OpDiv8u:
                return rewriteValuegeneric_OpDiv8u_0(v)
        case OpEq16:
-               return rewriteValuegeneric_OpEq16_0(v) || rewriteValuegeneric_OpEq16_10(v) || rewriteValuegeneric_OpEq16_20(v) || rewriteValuegeneric_OpEq16_30(v) || rewriteValuegeneric_OpEq16_40(v)
+               return rewriteValuegeneric_OpEq16_0(v) || rewriteValuegeneric_OpEq16_10(v) || rewriteValuegeneric_OpEq16_20(v) || rewriteValuegeneric_OpEq16_30(v) || rewriteValuegeneric_OpEq16_40(v) || rewriteValuegeneric_OpEq16_50(v)
        case OpEq32:
-               return rewriteValuegeneric_OpEq32_0(v) || rewriteValuegeneric_OpEq32_10(v) || rewriteValuegeneric_OpEq32_20(v) || rewriteValuegeneric_OpEq32_30(v) || rewriteValuegeneric_OpEq32_40(v) || rewriteValuegeneric_OpEq32_50(v) || rewriteValuegeneric_OpEq32_60(v)
+               return rewriteValuegeneric_OpEq32_0(v) || rewriteValuegeneric_OpEq32_10(v) || rewriteValuegeneric_OpEq32_20(v) || rewriteValuegeneric_OpEq32_30(v) || rewriteValuegeneric_OpEq32_40(v) || rewriteValuegeneric_OpEq32_50(v) || rewriteValuegeneric_OpEq32_60(v) || rewriteValuegeneric_OpEq32_70(v) || rewriteValuegeneric_OpEq32_80(v) || rewriteValuegeneric_OpEq32_90(v)
        case OpEq32F:
                return rewriteValuegeneric_OpEq32F_0(v)
        case OpEq64:
-               return rewriteValuegeneric_OpEq64_0(v) || rewriteValuegeneric_OpEq64_10(v) || rewriteValuegeneric_OpEq64_20(v) || rewriteValuegeneric_OpEq64_30(v)
+               return rewriteValuegeneric_OpEq64_0(v) || rewriteValuegeneric_OpEq64_10(v) || rewriteValuegeneric_OpEq64_20(v) || rewriteValuegeneric_OpEq64_30(v) || rewriteValuegeneric_OpEq64_40(v) || rewriteValuegeneric_OpEq64_50(v) || rewriteValuegeneric_OpEq64_60(v)
        case OpEq64F:
                return rewriteValuegeneric_OpEq64F_0(v)
        case OpEq8:
-               return rewriteValuegeneric_OpEq8_0(v) || rewriteValuegeneric_OpEq8_10(v) || rewriteValuegeneric_OpEq8_20(v)
+               return rewriteValuegeneric_OpEq8_0(v) || rewriteValuegeneric_OpEq8_10(v) || rewriteValuegeneric_OpEq8_20(v) || rewriteValuegeneric_OpEq8_30(v)
        case OpEqB:
                return rewriteValuegeneric_OpEqB_0(v)
        case OpEqInter:
@@ -9041,6 +9041,92 @@ func rewriteValuegeneric_OpEq16_0(v *Value) bool {
                v.AddArg(v3)
                return true
        }
+       // match: (Eq16 (Mod16 x (Const16 [c])) (Const16 [0]))
+       // cond: x.Op != OpConst16 && sdivisibleOK(16,c) && !hasSmallRotate(config)
+       // result: (Eq32 (Mod32 <typ.Int32> (SignExt16to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMod16 {
+                       break
+               }
+               _ = v_0.Args[1]
+               x := v_0.Args[0]
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst16 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst16 {
+                       break
+               }
+               if v_1.AuxInt != 0 {
+                       break
+               }
+               if !(x.Op != OpConst16 && sdivisibleOK(16, c) && !hasSmallRotate(config)) {
+                       break
+               }
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
+               v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+               v2.AuxInt = c
+               v0.AddArg(v2)
+               v.AddArg(v0)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+               v3.AuxInt = 0
+               v.AddArg(v3)
+               return true
+       }
+       return false
+}
+func rewriteValuegeneric_OpEq16_10(v *Value) bool {
+       b := v.Block
+       config := b.Func.Config
+       typ := &b.Func.Config.Types
+       // match: (Eq16 (Const16 [0]) (Mod16 x (Const16 [c])))
+       // cond: x.Op != OpConst16 && sdivisibleOK(16,c) && !hasSmallRotate(config)
+       // result: (Eq32 (Mod32 <typ.Int32> (SignExt16to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst16 {
+                       break
+               }
+               if v_0.AuxInt != 0 {
+                       break
+               }
+               v_1 := v.Args[1]
+               if v_1.Op != OpMod16 {
+                       break
+               }
+               _ = v_1.Args[1]
+               x := v_1.Args[0]
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst16 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(x.Op != OpConst16 && sdivisibleOK(16, c) && !hasSmallRotate(config)) {
+                       break
+               }
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
+               v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+               v2.AuxInt = c
+               v0.AddArg(v2)
+               v.AddArg(v0)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+               v3.AuxInt = 0
+               v.AddArg(v3)
+               return true
+       }
        // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s])))))
        // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
        // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
@@ -9108,11 +9194,6 @@ func rewriteValuegeneric_OpEq16_0(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       return false
-}
-func rewriteValuegeneric_OpEq16_10(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
        // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (ZeroExt16to64 x) (Const64 [m])) (Const64 [s])))))
        // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
        // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
@@ -9645,6 +9726,11 @@ func rewriteValuegeneric_OpEq16_10(v *Value) bool {
                v.AddArg(v4)
                return true
        }
+       return false
+}
+func rewriteValuegeneric_OpEq16_20(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
        // match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s])))))
        // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
        // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
@@ -9779,11 +9865,6 @@ func rewriteValuegeneric_OpEq16_10(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       return false
-}
-func rewriteValuegeneric_OpEq16_20(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
        // match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s]))) (Const16 [c])))
        // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
        // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
@@ -10352,6 +10433,11 @@ func rewriteValuegeneric_OpEq16_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
+       return false
+}
+func rewriteValuegeneric_OpEq16_30(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
        // match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s]))) (Const16 [c])))
        // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
        // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
@@ -10509,11 +10595,6 @@ func rewriteValuegeneric_OpEq16_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       return false
-}
-func rewriteValuegeneric_OpEq16_30(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
        // match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s])))) x)
        // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
        // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
@@ -11202,6 +11283,11 @@ func rewriteValuegeneric_OpEq16_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
+       return false
+}
+func rewriteValuegeneric_OpEq16_40(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
        // match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s])))) x)
        // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
        // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
@@ -11382,11 +11468,6 @@ func rewriteValuegeneric_OpEq16_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       return false
-}
-func rewriteValuegeneric_OpEq16_40(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
        // match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s]))) (Const16 [c])) x)
        // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
        // result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
@@ -11477,703 +11558,682 @@ func rewriteValuegeneric_OpEq16_40(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
-       // cond: k > 0 && k < 15 && kbar == 16 - k
-       // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+       // match: (Eq16 x (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+       // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
        for {
                _ = v.Args[1]
-               n := v.Args[0]
+               x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpLsh16x64 {
+               if v_1.Op != OpMul16 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh16x64 {
-                       break
-               }
-               _ = v_1_0.Args[1]
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpAdd16 {
-                       break
-               }
-               t := v_1_0_0.Type
-               _ = v_1_0_0.Args[1]
-               if n != v_1_0_0.Args[0] {
-                       break
-               }
-               v_1_0_0_1 := v_1_0_0.Args[1]
-               if v_1_0_0_1.Op != OpRsh16Ux64 {
-                       break
-               }
-               if v_1_0_0_1.Type != t {
-                       break
-               }
-               _ = v_1_0_0_1.Args[1]
-               v_1_0_0_1_0 := v_1_0_0_1.Args[0]
-               if v_1_0_0_1_0.Op != OpRsh16x64 {
+               if v_1_0.Op != OpConst16 {
                        break
                }
-               if v_1_0_0_1_0.Type != t {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpSub16 {
                        break
                }
-               _ = v_1_0_0_1_0.Args[1]
-               if n != v_1_0_0_1_0.Args[0] {
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh32x64 {
                        break
                }
-               v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
-               if v_1_0_0_1_0_1.Op != OpConst64 {
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
+               if mul.Op != OpMul32 {
                        break
                }
-               if v_1_0_0_1_0_1.Type != typ.UInt64 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
                        break
                }
-               if v_1_0_0_1_0_1.AuxInt != 15 {
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpSignExt16to32 {
                        break
                }
-               v_1_0_0_1_1 := v_1_0_0_1.Args[1]
-               if v_1_0_0_1_1.Op != OpConst64 {
+               if x != mul_1.Args[0] {
                        break
                }
-               if v_1_0_0_1_1.Type != typ.UInt64 {
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               kbar := v_1_0_0_1_1.AuxInt
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh32x64 {
                        break
                }
-               if v_1_0_1.Type != typ.UInt64 {
+               _ = v_1_1_1.Args[1]
+               v_1_1_1_0 := v_1_1_1.Args[0]
+               if v_1_1_1_0.Op != OpSignExt16to32 {
                        break
                }
-               k := v_1_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               if x != v_1_1_1_0.Args[0] {
                        break
                }
-               if v_1_1.Type != typ.UInt64 {
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
                        break
                }
-               if v_1_1.AuxInt != k {
+               if v_1_1_1_1.AuxInt != 31 {
                        break
                }
-               if !(k > 0 && k < 15 && kbar == 16-k) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
                        break
                }
-               v.reset(OpEq16)
-               v0 := b.NewValue0(v.Pos, OpAnd16, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst16, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
+               v.reset(OpLeq16U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+               v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+               v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+               v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v3.AuxInt = int64(int16(sdivisible(16, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v4.AuxInt = int64(int16(sdivisible(16, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v5.AuxInt = int64(16 - sdivisible(16, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst16, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
+               v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v6.AuxInt = int64(int16(sdivisible(16, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
-       // cond: k > 0 && k < 15 && kbar == 16 - k
-       // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+       // match: (Eq16 x (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+       // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
        for {
                _ = v.Args[1]
-               n := v.Args[0]
+               x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpLsh16x64 {
+               if v_1.Op != OpMul16 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh16x64 {
-                       break
-               }
-               _ = v_1_0.Args[1]
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpAdd16 {
-                       break
-               }
-               t := v_1_0_0.Type
-               _ = v_1_0_0.Args[1]
-               v_1_0_0_0 := v_1_0_0.Args[0]
-               if v_1_0_0_0.Op != OpRsh16Ux64 {
-                       break
-               }
-               if v_1_0_0_0.Type != t {
-                       break
-               }
-               _ = v_1_0_0_0.Args[1]
-               v_1_0_0_0_0 := v_1_0_0_0.Args[0]
-               if v_1_0_0_0_0.Op != OpRsh16x64 {
-                       break
-               }
-               if v_1_0_0_0_0.Type != t {
+               if v_1_0.Op != OpConst16 {
                        break
                }
-               _ = v_1_0_0_0_0.Args[1]
-               if n != v_1_0_0_0_0.Args[0] {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpSub16 {
                        break
                }
-               v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
-               if v_1_0_0_0_0_1.Op != OpConst64 {
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh32x64 {
                        break
                }
-               if v_1_0_0_0_0_1.Type != typ.UInt64 {
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
+               if mul.Op != OpMul32 {
                        break
                }
-               if v_1_0_0_0_0_1.AuxInt != 15 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpSignExt16to32 {
                        break
                }
-               v_1_0_0_0_1 := v_1_0_0_0.Args[1]
-               if v_1_0_0_0_1.Op != OpConst64 {
+               if x != mul_0.Args[0] {
                        break
                }
-               if v_1_0_0_0_1.Type != typ.UInt64 {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
                        break
                }
-               kbar := v_1_0_0_0_1.AuxInt
-               if n != v_1_0_0.Args[1] {
+               m := mul_1.AuxInt
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh32x64 {
                        break
                }
-               if v_1_0_1.Type != typ.UInt64 {
+               _ = v_1_1_1.Args[1]
+               v_1_1_1_0 := v_1_1_1.Args[0]
+               if v_1_1_1_0.Op != OpSignExt16to32 {
                        break
                }
-               k := v_1_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               if x != v_1_1_1_0.Args[0] {
                        break
                }
-               if v_1_1.Type != typ.UInt64 {
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
                        break
                }
-               if v_1_1.AuxInt != k {
+               if v_1_1_1_1.AuxInt != 31 {
                        break
                }
-               if !(k > 0 && k < 15 && kbar == 16-k) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
                        break
                }
-               v.reset(OpEq16)
-               v0 := b.NewValue0(v.Pos, OpAnd16, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst16, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
+               v.reset(OpLeq16U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+               v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+               v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+               v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v3.AuxInt = int64(int16(sdivisible(16, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v4.AuxInt = int64(int16(sdivisible(16, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v5.AuxInt = int64(16 - sdivisible(16, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst16, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
+               v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v6.AuxInt = int64(int16(sdivisible(16, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
-       // cond: k > 0 && k < 15 && kbar == 16 - k
-       // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+       // match: (Eq16 x (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+       // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
        for {
-               n := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpLsh16x64 {
-                       break
-               }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh16x64 {
-                       break
-               }
-               _ = v_0_0.Args[1]
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpAdd16 {
-                       break
-               }
-               t := v_0_0_0.Type
-               _ = v_0_0_0.Args[1]
-               if n != v_0_0_0.Args[0] {
-                       break
-               }
-               v_0_0_0_1 := v_0_0_0.Args[1]
-               if v_0_0_0_1.Op != OpRsh16Ux64 {
-                       break
-               }
-               if v_0_0_0_1.Type != t {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul16 {
                        break
                }
-               _ = v_0_0_0_1.Args[1]
-               v_0_0_0_1_0 := v_0_0_0_1.Args[0]
-               if v_0_0_0_1_0.Op != OpRsh16x64 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpSub16 {
                        break
                }
-               if v_0_0_0_1_0.Type != t {
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh32x64 {
                        break
                }
-               _ = v_0_0_0_1_0.Args[1]
-               if n != v_0_0_0_1_0.Args[0] {
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
+               if mul.Op != OpMul32 {
                        break
                }
-               v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
-               if v_0_0_0_1_0_1.Op != OpConst64 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
                        break
                }
-               if v_0_0_0_1_0_1.Type != typ.UInt64 {
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpSignExt16to32 {
                        break
                }
-               if v_0_0_0_1_0_1.AuxInt != 15 {
+               if x != mul_1.Args[0] {
                        break
                }
-               v_0_0_0_1_1 := v_0_0_0_1.Args[1]
-               if v_0_0_0_1_1.Op != OpConst64 {
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
                        break
                }
-               if v_0_0_0_1_1.Type != typ.UInt64 {
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh32x64 {
                        break
                }
-               kbar := v_0_0_0_1_1.AuxInt
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               _ = v_1_0_1.Args[1]
+               v_1_0_1_0 := v_1_0_1.Args[0]
+               if v_1_0_1_0.Op != OpSignExt16to32 {
                        break
                }
-               if v_0_0_1.Type != typ.UInt64 {
+               if x != v_1_0_1_0.Args[0] {
                        break
                }
-               k := v_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
                        break
                }
-               if v_0_1.Type != typ.UInt64 {
+               if v_1_0_1_1.AuxInt != 31 {
                        break
                }
-               if v_0_1.AuxInt != k {
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst16 {
                        break
                }
-               if !(k > 0 && k < 15 && kbar == 16-k) {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
                        break
                }
-               v.reset(OpEq16)
-               v0 := b.NewValue0(v.Pos, OpAnd16, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst16, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
+               v.reset(OpLeq16U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+               v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+               v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+               v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v3.AuxInt = int64(int16(sdivisible(16, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v4.AuxInt = int64(int16(sdivisible(16, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v5.AuxInt = int64(16 - sdivisible(16, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst16, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
+               v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v6.AuxInt = int64(int16(sdivisible(16, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
-       // cond: k > 0 && k < 15 && kbar == 16 - k
-       // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
+       // match: (Eq16 x (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+       // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
        for {
-               n := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpLsh16x64 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul16 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh16x64 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpSub16 {
                        break
                }
-               _ = v_0_0.Args[1]
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpAdd16 {
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh32x64 {
                        break
                }
-               t := v_0_0_0.Type
-               _ = v_0_0_0.Args[1]
-               v_0_0_0_0 := v_0_0_0.Args[0]
-               if v_0_0_0_0.Op != OpRsh16Ux64 {
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
+               if mul.Op != OpMul32 {
                        break
                }
-               if v_0_0_0_0.Type != t {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpSignExt16to32 {
                        break
                }
-               _ = v_0_0_0_0.Args[1]
-               v_0_0_0_0_0 := v_0_0_0_0.Args[0]
-               if v_0_0_0_0_0.Op != OpRsh16x64 {
+               if x != mul_0.Args[0] {
                        break
                }
-               if v_0_0_0_0_0.Type != t {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
                        break
                }
-               _ = v_0_0_0_0_0.Args[1]
-               if n != v_0_0_0_0_0.Args[0] {
+               m := mul_1.AuxInt
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
                        break
                }
-               v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
-               if v_0_0_0_0_0_1.Op != OpConst64 {
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh32x64 {
                        break
                }
-               if v_0_0_0_0_0_1.Type != typ.UInt64 {
+               _ = v_1_0_1.Args[1]
+               v_1_0_1_0 := v_1_0_1.Args[0]
+               if v_1_0_1_0.Op != OpSignExt16to32 {
                        break
                }
-               if v_0_0_0_0_0_1.AuxInt != 15 {
+               if x != v_1_0_1_0.Args[0] {
                        break
                }
-               v_0_0_0_0_1 := v_0_0_0_0.Args[1]
-               if v_0_0_0_0_1.Op != OpConst64 {
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
                        break
                }
-               if v_0_0_0_0_1.Type != typ.UInt64 {
+               if v_1_0_1_1.AuxInt != 31 {
                        break
                }
-               kbar := v_0_0_0_0_1.AuxInt
-               if n != v_0_0_0.Args[1] {
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst16 {
                        break
                }
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
                        break
                }
-               if v_0_0_1.Type != typ.UInt64 {
+               v.reset(OpLeq16U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+               v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+               v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+               v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v3.AuxInt = int64(int16(sdivisible(16, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v4.AuxInt = int64(int16(sdivisible(16, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v5.AuxInt = int64(16 - sdivisible(16, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v6.AuxInt = int64(int16(sdivisible(16, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq16 (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+       // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul16 {
                        break
                }
-               k := v_0_0_1.AuxInt
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst16 {
+                       break
+               }
+               c := v_0_0.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
+               if v_0_1.Op != OpSub16 {
                        break
                }
-               if v_0_1.Type != typ.UInt64 {
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh32x64 {
                        break
                }
-               if v_0_1.AuxInt != k {
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpMul32 {
                        break
                }
-               if !(k > 0 && k < 15 && kbar == 16-k) {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
                        break
                }
-               v.reset(OpEq16)
-               v0 := b.NewValue0(v.Pos, OpAnd16, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst16, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
-               v0.AddArg(v1)
-               v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst16, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
-               return true
-       }
-       // match: (Eq16 s:(Sub16 x y) (Const16 [0]))
-       // cond: s.Uses == 1
-       // result: (Eq16 x y)
-       for {
-               _ = v.Args[1]
-               s := v.Args[0]
-               if s.Op != OpSub16 {
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpSignExt16to32 {
                        break
                }
-               y := s.Args[1]
-               x := s.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst16 {
+               if x != mul_1.Args[0] {
                        break
                }
-               if v_1.AuxInt != 0 {
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
                        break
                }
-               if !(s.Uses == 1) {
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh32x64 {
                        break
                }
-               v.reset(OpEq16)
-               v.AddArg(x)
-               v.AddArg(y)
-               return true
-       }
-       // match: (Eq16 (Const16 [0]) s:(Sub16 x y))
-       // cond: s.Uses == 1
-       // result: (Eq16 x y)
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst16 {
+               _ = v_0_1_1.Args[1]
+               v_0_1_1_0 := v_0_1_1.Args[0]
+               if v_0_1_1_0.Op != OpSignExt16to32 {
                        break
                }
-               if v_0.AuxInt != 0 {
+               if x != v_0_1_1_0.Args[0] {
                        break
                }
-               s := v.Args[1]
-               if s.Op != OpSub16 {
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
                        break
                }
-               y := s.Args[1]
-               x := s.Args[0]
-               if !(s.Uses == 1) {
+               if v_0_1_1_1.AuxInt != 31 {
                        break
                }
-               v.reset(OpEq16)
-               v.AddArg(x)
-               v.AddArg(y)
-               return true
-       }
-       return false
-}
-func rewriteValuegeneric_OpEq32_0(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
-       // match: (Eq32 x x)
-       // cond:
-       // result: (ConstBool [1])
-       for {
-               x := v.Args[1]
-               if x != v.Args[0] {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
                        break
                }
-               v.reset(OpConstBool)
-               v.AuxInt = 1
+               v.reset(OpLeq16U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+               v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+               v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+               v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v3.AuxInt = int64(int16(sdivisible(16, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v4.AuxInt = int64(int16(sdivisible(16, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v5.AuxInt = int64(16 - sdivisible(16, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v6.AuxInt = int64(int16(sdivisible(16, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
-       // cond:
-       // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+       // match: (Eq16 (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+       // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
        for {
-               _ = v.Args[1]
+               x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpConst32 {
+               if v_0.Op != OpMul16 {
                        break
                }
-               t := v_0.Type
-               c := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpAdd32 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst16 {
                        break
                }
-               x := v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpSub16 {
                        break
                }
-               if v_1_0.Type != t {
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh32x64 {
                        break
                }
-               d := v_1_0.AuxInt
-               v.reset(OpEq32)
-               v0 := b.NewValue0(v.Pos, OpConst32, t)
-               v0.AuxInt = int64(int32(c - d))
-               v.AddArg(v0)
-               v.AddArg(x)
-               return true
-       }
-       // match: (Eq32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d])))
-       // cond:
-       // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst32 {
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpMul32 {
                        break
                }
-               t := v_0.Type
-               c := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpAdd32 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpSignExt16to32 {
                        break
                }
-               _ = v_1.Args[1]
-               x := v_1.Args[0]
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               if x != mul_0.Args[0] {
                        break
                }
-               if v_1_1.Type != t {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
                        break
                }
-               d := v_1_1.AuxInt
-               v.reset(OpEq32)
-               v0 := b.NewValue0(v.Pos, OpConst32, t)
-               v0.AuxInt = int64(int32(c - d))
-               v.AddArg(v0)
-               v.AddArg(x)
-               return true
-       }
-       // match: (Eq32 (Add32 (Const32 <t> [d]) x) (Const32 <t> [c]))
-       // cond:
-       // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpAdd32 {
+               m := mul_1.AuxInt
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
                        break
                }
-               x := v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh32x64 {
                        break
                }
-               t := v_0_0.Type
-               d := v_0_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst32 {
+               _ = v_0_1_1.Args[1]
+               v_0_1_1_0 := v_0_1_1.Args[0]
+               if v_0_1_1_0.Op != OpSignExt16to32 {
                        break
                }
-               if v_1.Type != t {
+               if x != v_0_1_1_0.Args[0] {
                        break
                }
-               c := v_1.AuxInt
-               v.reset(OpEq32)
-               v0 := b.NewValue0(v.Pos, OpConst32, t)
-               v0.AuxInt = int64(int32(c - d))
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
+                       break
+               }
+               v.reset(OpLeq16U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+               v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+               v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+               v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v3.AuxInt = int64(int16(sdivisible(16, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v4.AuxInt = int64(int16(sdivisible(16, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v5.AuxInt = int64(16 - sdivisible(16, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v.AddArg(x)
+               v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v6.AuxInt = int64(int16(sdivisible(16, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq32 (Add32 x (Const32 <t> [d])) (Const32 <t> [c]))
-       // cond:
-       // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+       // match: (Eq16 (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+       // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
        for {
-               _ = v.Args[1]
+               x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpAdd32 {
+               if v_0.Op != OpMul16 {
                        break
                }
                _ = v_0.Args[1]
-               x := v_0.Args[0]
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
-                       break
-               }
-               t := v_0_1.Type
-               d := v_0_1.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst32 {
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub16 {
                        break
                }
-               if v_1.Type != t {
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh32x64 {
                        break
                }
-               c := v_1.AuxInt
-               v.reset(OpEq32)
-               v0 := b.NewValue0(v.Pos, OpConst32, t)
-               v0.AuxInt = int64(int32(c - d))
-               v.AddArg(v0)
-               v.AddArg(x)
-               return true
-       }
-       // match: (Eq32 (Const32 [c]) (Const32 [d]))
-       // cond:
-       // result: (ConstBool [b2i(c == d)])
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst32 {
+               _ = v_0_0_0.Args[1]
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpMul32 {
                        break
                }
-               c := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst32 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
                        break
                }
-               d := v_1.AuxInt
-               v.reset(OpConstBool)
-               v.AuxInt = b2i(c == d)
-               return true
-       }
-       // match: (Eq32 (Const32 [d]) (Const32 [c]))
-       // cond:
-       // result: (ConstBool [b2i(c == d)])
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst32 {
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpSignExt16to32 {
                        break
                }
-               d := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst32 {
+               if x != mul_1.Args[0] {
                        break
                }
-               c := v_1.AuxInt
-               v.reset(OpConstBool)
-               v.AuxInt = b2i(c == d)
-               return true
-       }
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
-       for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
                        break
                }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh32x64 {
                        break
                }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh32Ux64 {
+               _ = v_0_0_1.Args[1]
+               v_0_0_1_0 := v_0_0_1.Args[0]
+               if v_0_0_1_0.Op != OpSignExt16to32 {
                        break
                }
-               _ = v_1_1.Args[1]
-               mul := v_1_1.Args[0]
-               if mul.Op != OpHmul32u {
+               if x != v_0_0_1_0.Args[0] {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst32 {
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
                        break
                }
-               m := mul_0.AuxInt
-               if x != mul.Args[1] {
+               if v_0_0_1_1.AuxInt != 31 {
                        break
                }
-               v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst16 {
                        break
                }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
+               v.reset(OpLeq16U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+               v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+               v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+               v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v3.AuxInt = int64(int16(sdivisible(16, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v4.AuxInt = int64(int16(sdivisible(16, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v5.AuxInt = int64(16 - sdivisible(16, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v6.AuxInt = int64(int16(sdivisible(16, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
-       for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
+       return false
+}
+func rewriteValuegeneric_OpEq16_50(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq16 (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
+       // result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul16 {
                        break
                }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub16 {
                        break
                }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh32Ux64 {
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh32x64 {
                        break
                }
-               _ = v_1_1.Args[1]
-               mul := v_1_1.Args[0]
-               if mul.Op != OpHmul32u {
+               _ = v_0_0_0.Args[1]
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpMul32 {
                        break
                }
                _ = mul.Args[1]
-               if x != mul.Args[0] {
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpSignExt16to32 {
+                       break
+               }
+               if x != mul_0.Args[0] {
                        break
                }
                mul_1 := mul.Args[1]
@@ -12181,703 +12241,693 @@ func rewriteValuegeneric_OpEq32_0(v *Value) bool {
                        break
                }
                m := mul_1.AuxInt
-               v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh32x64 {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
+               _ = v_0_0_1.Args[1]
+               v_0_0_1_0 := v_0_0_1.Args[0]
+               if v_0_0_1_0.Op != OpSignExt16to32 {
+                       break
+               }
+               if x != v_0_0_1_0.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 31 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst16 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
+                       break
+               }
+               v.reset(OpLeq16U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
+               v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
+               v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
+               v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v3.AuxInt = int64(int16(sdivisible(16, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v4.AuxInt = int64(int16(sdivisible(16, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v5.AuxInt = int64(16 - sdivisible(16, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
+               v6.AuxInt = int64(int16(sdivisible(16, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+       // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+       // cond: k > 0 && k < 15 && kbar == 16 - k
+       // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
        for {
                _ = v.Args[1]
-               x := v.Args[0]
+               n := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
+               if v_1.Op != OpLsh16x64 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh32Ux64 {
+               if v_1_0.Op != OpRsh16x64 {
                        break
                }
                _ = v_1_0.Args[1]
-               mul := v_1_0.Args[0]
-               if mul.Op != OpHmul32u {
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpAdd16 {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst32 {
+               t := v_1_0_0.Type
+               _ = v_1_0_0.Args[1]
+               if n != v_1_0_0.Args[0] {
                        break
                }
-               m := mul_0.AuxInt
-               if x != mul.Args[1] {
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpRsh16Ux64 {
                        break
                }
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               if v_1_0_0_1.Type != t {
                        break
                }
-               s := v_1_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               _ = v_1_0_0_1.Args[1]
+               v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+               if v_1_0_0_1_0.Op != OpRsh16x64 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               if v_1_0_0_1_0.Type != t {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
-               v1.AddArg(v2)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
-               v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
-               return true
-       }
-       return false
-}
-func rewriteValuegeneric_OpEq32_10(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
-       // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s])) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
-       for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
+               _ = v_1_0_0_1_0.Args[1]
+               if n != v_1_0_0_1_0.Args[0] {
                        break
                }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh32Ux64 {
+               v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+               if v_1_0_0_1_0_1.Op != OpConst64 {
                        break
                }
-               _ = v_1_0.Args[1]
-               mul := v_1_0.Args[0]
-               if mul.Op != OpHmul32u {
+               if v_1_0_0_1_0_1.Type != typ.UInt64 {
                        break
                }
-               _ = mul.Args[1]
-               if x != mul.Args[0] {
+               if v_1_0_0_1_0_1.AuxInt != 15 {
                        break
                }
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst32 {
+               v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+               if v_1_0_0_1_1.Op != OpConst64 {
                        break
                }
-               m := mul_1.AuxInt
+               if v_1_0_0_1_1.Type != typ.UInt64 {
+                       break
+               }
+               kbar := v_1_0_0_1_1.AuxInt
                v_1_0_1 := v_1_0.Args[1]
                if v_1_0_1.Op != OpConst64 {
                        break
                }
-               s := v_1_0_1.AuxInt
+               if v_1_0_1.Type != typ.UInt64 {
+                       break
+               }
+               k := v_1_0_1.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               if v_1_1.Op != OpConst64 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               if v_1_1.Type != typ.UInt64 {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
-               v1.AddArg(v2)
-               v1.AddArg(x)
+               if v_1_1.AuxInt != k {
+                       break
+               }
+               if !(k > 0 && k < 15 && kbar == 16-k) {
+                       break
+               }
+               v.reset(OpEq16)
+               v0 := b.NewValue0(v.Pos, OpAnd16, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst16, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
+               v2 := b.NewValue0(v.Pos, OpConst16, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
                return true
        }
-       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+       // match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+       // cond: k > 0 && k < 15 && kbar == 16 - k
+       // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               n := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpLsh16x64 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh16x64 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh32Ux64 {
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpAdd16 {
                        break
                }
-               _ = v_0_1.Args[1]
-               mul := v_0_1.Args[0]
-               if mul.Op != OpHmul32u {
+               t := v_1_0_0.Type
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpRsh16Ux64 {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst32 {
+               if v_1_0_0_0.Type != t {
                        break
                }
-               m := mul_0.AuxInt
-               if x != mul.Args[1] {
+               _ = v_1_0_0_0.Args[1]
+               v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+               if v_1_0_0_0_0.Op != OpRsh16x64 {
                        break
                }
-               v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               if v_1_0_0_0_0.Type != t {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               _ = v_1_0_0_0_0.Args[1]
+               if n != v_1_0_0_0_0.Args[0] {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
-               v1.AddArg(v2)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
-               v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
-               return true
-       }
-       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
-       for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+               if v_1_0_0_0_0_1.Op != OpConst64 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
+               if v_1_0_0_0_0_1.Type != typ.UInt64 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh32Ux64 {
+               if v_1_0_0_0_0_1.AuxInt != 15 {
                        break
                }
-               _ = v_0_1.Args[1]
-               mul := v_0_1.Args[0]
-               if mul.Op != OpHmul32u {
+               v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+               if v_1_0_0_0_1.Op != OpConst64 {
                        break
                }
-               _ = mul.Args[1]
-               if x != mul.Args[0] {
+               if v_1_0_0_0_1.Type != typ.UInt64 {
                        break
                }
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst32 {
+               kbar := v_1_0_0_0_1.AuxInt
+               if n != v_1_0_0.Args[1] {
                        break
                }
-               m := mul_1.AuxInt
-               v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               if v_1_0_1.Type != typ.UInt64 {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
-               v1.AddArg(v2)
-               v1.AddArg(x)
+               k := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_1_1.AuxInt != k {
+                       break
+               }
+               if !(k > 0 && k < 15 && kbar == 16-k) {
+                       break
+               }
+               v.reset(OpEq16)
+               v0 := b.NewValue0(v.Pos, OpAnd16, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst16, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
+               v2 := b.NewValue0(v.Pos, OpConst16, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
                return true
        }
-       // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+       // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+       // cond: k > 0 && k < 15 && kbar == 16 - k
+       // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
        for {
-               x := v.Args[1]
+               n := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               if v_0.Op != OpLsh16x64 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh32Ux64 {
+               if v_0_0.Op != OpRsh16x64 {
                        break
                }
                _ = v_0_0.Args[1]
-               mul := v_0_0.Args[0]
-               if mul.Op != OpHmul32u {
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpAdd16 {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst32 {
+               t := v_0_0_0.Type
+               _ = v_0_0_0.Args[1]
+               if n != v_0_0_0.Args[0] {
                        break
                }
-               m := mul_0.AuxInt
-               if x != mul.Args[1] {
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpRsh16Ux64 {
                        break
                }
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               if v_0_0_0_1.Type != t {
                        break
                }
-               s := v_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
+               _ = v_0_0_0_1.Args[1]
+               v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+               if v_0_0_0_1_0.Op != OpRsh16x64 {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               if v_0_0_0_1_0.Type != t {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
-               v1.AddArg(v2)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
-               v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
-               return true
-       }
-       // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s])) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
-       for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v_0_0_0_1_0.Args[1]
+               if n != v_0_0_0_1_0.Args[0] {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh32Ux64 {
+               v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+               if v_0_0_0_1_0_1.Op != OpConst64 {
                        break
                }
-               _ = v_0_0.Args[1]
-               mul := v_0_0.Args[0]
-               if mul.Op != OpHmul32u {
+               if v_0_0_0_1_0_1.Type != typ.UInt64 {
                        break
                }
-               _ = mul.Args[1]
-               if x != mul.Args[0] {
+               if v_0_0_0_1_0_1.AuxInt != 15 {
                        break
                }
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst32 {
+               v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+               if v_0_0_0_1_1.Op != OpConst64 {
                        break
                }
-               m := mul_1.AuxInt
+               if v_0_0_0_1_1.Type != typ.UInt64 {
+                       break
+               }
+               kbar := v_0_0_0_1_1.AuxInt
                v_0_0_1 := v_0_0.Args[1]
                if v_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_0_1.AuxInt
+               if v_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               k := v_0_0_1.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
+               if v_0_1.Op != OpConst64 {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               if v_0_1.Type != typ.UInt64 {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
-               v1.AddArg(v2)
-               v1.AddArg(x)
+               if v_0_1.AuxInt != k {
+                       break
+               }
+               if !(k > 0 && k < 15 && kbar == 16-k) {
+                       break
+               }
+               v.reset(OpEq16)
+               v0 := b.NewValue0(v.Pos, OpAnd16, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst16, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
+               v2 := b.NewValue0(v.Pos, OpConst16, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
                return true
        }
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+       // match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+       // cond: k > 0 && k < 15 && kbar == 16 - k
+       // result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
+               n := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpLsh16x64 {
                        break
                }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh16x64 {
                        break
                }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh32Ux64 {
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpAdd16 {
                        break
                }
-               _ = v_1_1.Args[1]
-               mul := v_1_1.Args[0]
-               if mul.Op != OpHmul32u {
+               t := v_0_0_0.Type
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpRsh16Ux64 {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst32 {
+               if v_0_0_0_0.Type != t {
                        break
                }
-               if mul_0.Type != typ.UInt32 {
+               _ = v_0_0_0_0.Args[1]
+               v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+               if v_0_0_0_0_0.Op != OpRsh16x64 {
                        break
                }
-               m := mul_0.AuxInt
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh32Ux64 {
+               if v_0_0_0_0_0.Type != t {
                        break
                }
-               _ = mul_1.Args[1]
-               if x != mul_1.Args[0] {
+               _ = v_0_0_0_0_0.Args[1]
+               if n != v_0_0_0_0_0.Args[0] {
                        break
                }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
-                       break
+               v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+               if v_0_0_0_0_0_1.Op != OpConst64 {
+                       break
                }
-               if mul_1_1.AuxInt != 1 {
+               if v_0_0_0_0_0_1.Type != typ.UInt64 {
                        break
                }
-               v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
+               if v_0_0_0_0_0_1.AuxInt != 15 {
                        break
                }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+               if v_0_0_0_0_1.Op != OpConst64 {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
-               v1.AddArg(v2)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
-               v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
-               return true
-       }
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
-       for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
+               if v_0_0_0_0_1.Type != typ.UInt64 {
                        break
                }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
+               kbar := v_0_0_0_0_1.AuxInt
+               if n != v_0_0_0.Args[1] {
                        break
                }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh32Ux64 {
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
                        break
                }
-               _ = v_1_1.Args[1]
-               mul := v_1_1.Args[0]
-               if mul.Op != OpHmul32u {
+               if v_0_0_1.Type != typ.UInt64 {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh32Ux64 {
+               k := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
                        break
                }
-               _ = mul_0.Args[1]
-               if x != mul_0.Args[0] {
+               if v_0_1.Type != typ.UInt64 {
                        break
                }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
+               if v_0_1.AuxInt != k {
                        break
                }
-               if mul_0_1.AuxInt != 1 {
+               if !(k > 0 && k < 15 && kbar == 16-k) {
                        break
                }
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst32 {
+               v.reset(OpEq16)
+               v0 := b.NewValue0(v.Pos, OpAnd16, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst16, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpConst16, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
+               return true
+       }
+       // match: (Eq16 s:(Sub16 x y) (Const16 [0]))
+       // cond: s.Uses == 1
+       // result: (Eq16 x y)
+       for {
+               _ = v.Args[1]
+               s := v.Args[0]
+               if s.Op != OpSub16 {
                        break
                }
-               if mul_1.Type != typ.UInt32 {
+               y := s.Args[1]
+               x := s.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst16 {
                        break
                }
-               m := mul_1.AuxInt
-               v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
+               if v_1.AuxInt != 0 {
                        break
                }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               if !(s.Uses == 1) {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
-               v1.AddArg(v2)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
-               v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
+               v.reset(OpEq16)
+               v.AddArg(x)
+               v.AddArg(y)
                return true
        }
-       // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+       // match: (Eq16 (Const16 [0]) s:(Sub16 x y))
+       // cond: s.Uses == 1
+       // result: (Eq16 x y)
        for {
                _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst16 {
                        break
                }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh32Ux64 {
+               if v_0.AuxInt != 0 {
                        break
                }
-               _ = v_1_0.Args[1]
-               mul := v_1_0.Args[0]
-               if mul.Op != OpHmul32u {
+               s := v.Args[1]
+               if s.Op != OpSub16 {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst32 {
+               y := s.Args[1]
+               x := s.Args[0]
+               if !(s.Uses == 1) {
                        break
                }
-               if mul_0.Type != typ.UInt32 {
+               v.reset(OpEq16)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+       return false
+}
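
The Eq16 rules above recognize the sign-adjusted shift sequence that the earlier opt pass produced for n/(1<<k), wrapped in the x == c*(x/c) comparison, and replace it with a mask test on the low k bits. A minimal sketch of the effect at the source level; this is illustrative only, not part of the generated rewrite code, and the constant 8 and the helper name are assumptions:

package main

import "fmt"

// isMultipleOf8 reports whether a signed 16-bit value is divisible by 8.
// After the Eq16 power-of-two rules fire, n%8 == 0 compiles to a test of
// the low three bits (n&7 == 0), which holds for negative n as well,
// since two's-complement multiples of 8 always have zero low bits.
func isMultipleOf8(n int16) bool {
	return n%8 == 0
}

func main() {
	fmt.Println(isMultipleOf8(-24), isMultipleOf8(10)) // true false
}
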
+func rewriteValuegeneric_OpEq32_0(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq32 x x)
+       // cond:
+       // result: (ConstBool [1])
+       for {
+               x := v.Args[1]
+               if x != v.Args[0] {
                        break
                }
-               m := mul_0.AuxInt
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh32Ux64 {
+               v.reset(OpConstBool)
+               v.AuxInt = 1
+               return true
+       }
+       // match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
+       // cond:
+       // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst32 {
                        break
                }
-               _ = mul_1.Args[1]
-               if x != mul_1.Args[0] {
+               t := v_0.Type
+               c := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpAdd32 {
                        break
                }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
+               x := v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               if mul_1_1.AuxInt != 1 {
+               if v_1_0.Type != t {
                        break
                }
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               d := v_1_0.AuxInt
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpConst32, t)
+               v0.AuxInt = int64(int32(c - d))
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+       // match: (Eq32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d])))
+       // cond:
+       // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst32 {
                        break
                }
-               s := v_1_0_1.AuxInt
+               t := v_0.Type
+               c := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpAdd32 {
+                       break
+               }
+               _ = v_1.Args[1]
+               x := v_1.Args[0]
                v_1_1 := v_1.Args[1]
                if v_1_1.Op != OpConst32 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               if v_1_1.Type != t {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
-               v1.AddArg(v2)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
+               d := v_1_1.AuxInt
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpConst32, t)
+               v0.AuxInt = int64(int32(c - d))
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
+               v.AddArg(x)
                return true
        }
-       // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s])) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
-       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
+       // match: (Eq32 (Add32 (Const32 <t> [d]) x) (Const32 <t> [c]))
+       // cond:
+       // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
        for {
                _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
+               v_0 := v.Args[0]
+               if v_0.Op != OpAdd32 {
                        break
                }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh32Ux64 {
+               x := v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               _ = v_1_0.Args[1]
-               mul := v_1_0.Args[0]
-               if mul.Op != OpHmul32u {
-                       break
+               t := v_0_0.Type
+               d := v_0_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst32 {
+                       break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh32Ux64 {
+               if v_1.Type != t {
                        break
                }
-               _ = mul_0.Args[1]
-               if x != mul_0.Args[0] {
+               c := v_1.AuxInt
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpConst32, t)
+               v0.AuxInt = int64(int32(c - d))
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+       // match: (Eq32 (Add32 x (Const32 <t> [d])) (Const32 <t> [c]))
+       // cond:
+       // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpAdd32 {
                        break
                }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
+               _ = v_0.Args[1]
+               x := v_0.Args[0]
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
                        break
                }
-               if mul_0_1.AuxInt != 1 {
+               t := v_0_1.Type
+               d := v_0_1.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst32 {
                        break
                }
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst32 {
+               if v_1.Type != t {
                        break
                }
-               if mul_1.Type != typ.UInt32 {
+               c := v_1.AuxInt
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpConst32, t)
+               v0.AuxInt = int64(int32(c - d))
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+       // match: (Eq32 (Const32 [c]) (Const32 [d]))
+       // cond:
+       // result: (ConstBool [b2i(c == d)])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst32 {
                        break
                }
-               m := mul_1.AuxInt
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               c := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst32 {
                        break
                }
-               s := v_1_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               d := v_1.AuxInt
+               v.reset(OpConstBool)
+               v.AuxInt = b2i(c == d)
+               return true
+       }
+       // match: (Eq32 (Const32 [d]) (Const32 [c]))
+       // cond:
+       // result: (ConstBool [b2i(c == d)])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst32 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               d := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst32 {
                        break
                }
-               v.reset(OpLeq32U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = int64(int32(udivisible(32, c).m))
-               v1.AddArg(v2)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = int64(32 - udivisible(32, c).k)
-               v0.AddArg(v3)
-               v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v4.AuxInt = int64(int32(udivisible(32, c).max))
-               v.AddArg(v4)
+               c := v_1.AuxInt
+               v.reset(OpConstBool)
+               v.AuxInt = b2i(c == d)
                return true
        }
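
The constant-folding rules just above move a constant addend across the comparison so that later passes see a bare variable compared against a single constant. Roughly, at the source level (the example values are illustrative):

package fold

// x+3 == 10 is rewritten toward 7 == x by the rule
// (Eq32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d]))) -> (Eq32 (Const32 <t> [c-d]) x).
func eq10(x int32) bool {
	return x+3 == 10
}
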
-       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh32Ux64 {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh32Ux64 {
                        break
                }
-               _ = v_0_1.Args[1]
-               mul := v_0_1.Args[0]
+               _ = v_1_1.Args[1]
+               mul := v_1_1.Args[0]
                if mul.Op != OpHmul32u {
                        break
                }
@@ -12886,31 +12936,16 @@ func rewriteValuegeneric_OpEq32_10(v *Value) bool {
                if mul_0.Op != OpConst32 {
                        break
                }
-               if mul_0.Type != typ.UInt32 {
-                       break
-               }
                m := mul_0.AuxInt
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh32Ux64 {
-                       break
-               }
-               _ = mul_1.Args[1]
-               if x != mul_1.Args[0] {
-                       break
-               }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
-                       break
-               }
-               if mul_1_1.AuxInt != 1 {
+               if x != mul.Args[1] {
                        break
                }
-               v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -12930,65 +12965,46 @@ func rewriteValuegeneric_OpEq32_10(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       return false
-}
-func rewriteValuegeneric_OpEq32_20(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
-       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh32Ux64 {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh32Ux64 {
                        break
                }
-               _ = v_0_1.Args[1]
-               mul := v_0_1.Args[0]
+               _ = v_1_1.Args[1]
+               mul := v_1_1.Args[0]
                if mul.Op != OpHmul32u {
                        break
                }
                _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh32Ux64 {
-                       break
-               }
-               _ = mul_0.Args[1]
-               if x != mul_0.Args[0] {
-                       break
-               }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
-                       break
-               }
-               if mul_0_1.AuxInt != 1 {
+               if x != mul.Args[0] {
                        break
                }
                mul_1 := mul.Args[1]
                if mul_1.Op != OpConst32 {
                        break
                }
-               if mul_1.Type != typ.UInt32 {
-                       break
-               }
                m := mul_1.AuxInt
-               v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13008,22 +13024,23 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh32Ux64 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh32Ux64 {
                        break
                }
-               _ = v_0_0.Args[1]
-               mul := v_0_0.Args[0]
+               _ = v_1_0.Args[1]
+               mul := v_1_0.Args[0]
                if mul.Op != OpHmul32u {
                        break
                }
@@ -13032,36 +13049,21 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                if mul_0.Op != OpConst32 {
                        break
                }
-               if mul_0.Type != typ.UInt32 {
-                       break
-               }
                m := mul_0.AuxInt
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh32Ux64 {
-                       break
-               }
-               _ = mul_1.Args[1]
-               if x != mul_1.Args[0] {
+               if x != mul.Args[1] {
                        break
                }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
                        break
                }
-               if mul_1_1.AuxInt != 1 {
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
                        break
                }
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
-                       break
-               }
-               s := v_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
-                       break
-               }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13081,60 +13083,51 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s])) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       return false
+}
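
The Eq32 rules just above (the Mul32/Hmul32u patterns) match the magic-number expansion of x/c left behind by the opt pass and replace the x == c*(x/c) comparison with the direct unsigned divisibility test: multiply by the modular inverse of the odd part of c, rotate right by the number of trailing zero bits of c, and compare against (2^32-1)/c. A hand-written sketch of the same test for c = 6; the constants and helper name are illustrative and computed by hand rather than by udivisible:

package main

import (
	"fmt"
	"math/bits"
)

// divisibleBy6 mirrors the shape the rewrite emits:
// RotateLeft32(x*m, 32-k) <= max, i.e. rotate right by k and compare.
func divisibleBy6(x uint32) bool {
	const m = 0xaaaaaaab        // multiplicative inverse of 3 (the odd part of 6) mod 2^32
	const k = 1                 // trailing zero bits of 6
	const max = (1<<32 - 1) / 6 // 715827882
	return bits.RotateLeft32(x*m, -k) <= max
}

func main() {
	for _, x := range []uint32{0, 6, 7, 12, 4294967292} {
		fmt.Println(x, divisibleBy6(x), x%6 == 0) // the two results agree
	}
}
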
+func rewriteValuegeneric_OpEq32_10(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s])) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh32Ux64 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh32Ux64 {
                        break
                }
-               _ = v_0_0.Args[1]
-               mul := v_0_0.Args[0]
+               _ = v_1_0.Args[1]
+               mul := v_1_0.Args[0]
                if mul.Op != OpHmul32u {
                        break
                }
                _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh32Ux64 {
-                       break
-               }
-               _ = mul_0.Args[1]
-               if x != mul_0.Args[0] {
-                       break
-               }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
-                       break
-               }
-               if mul_0_1.AuxInt != 1 {
+               if x != mul.Args[0] {
                        break
                }
                mul_1 := mul.Args[1]
                if mul_1.Op != OpConst32 {
                        break
                }
-               if mul_1.Type != typ.UInt32 {
-                       break
-               }
                m := mul_1.AuxInt
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13154,36 +13147,27 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
-                       break
-               }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh32Ux64 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               _ = v_1_1.Args[1]
-               v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpAvg32u {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               _ = v_1_1_0.Args[1]
-               if x != v_1_1_0.Args[0] {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh32Ux64 {
                        break
                }
-               mul := v_1_1_0.Args[1]
+               _ = v_0_1.Args[1]
+               mul := v_0_1.Args[0]
                if mul.Op != OpHmul32u {
                        break
                }
@@ -13196,12 +13180,12 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                if x != mul.Args[1] {
                        break
                }
-               v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
                        break
                }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13221,36 +13205,27 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
-                       break
-               }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh32Ux64 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               _ = v_1_1.Args[1]
-               v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpAvg32u {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               _ = v_1_1_0.Args[1]
-               if x != v_1_1_0.Args[0] {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh32Ux64 {
                        break
                }
-               mul := v_1_1_0.Args[1]
+               _ = v_0_1.Args[1]
+               mul := v_0_1.Args[0]
                if mul.Op != OpHmul32u {
                        break
                }
@@ -13263,12 +13238,12 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                        break
                }
                m := mul_1.AuxInt
-               v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
                        break
                }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13288,31 +13263,22 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh32Ux64 {
-                       break
-               }
-               _ = v_1_0.Args[1]
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpAvg32u {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               _ = v_1_0_0.Args[1]
-               if x != v_1_0_0.Args[0] {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh32Ux64 {
                        break
                }
-               mul := v_1_0_0.Args[1]
+               _ = v_0_0.Args[1]
+               mul := v_0_0.Args[0]
                if mul.Op != OpHmul32u {
                        break
                }
@@ -13325,17 +13291,17 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                if x != mul.Args[1] {
                        break
                }
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_1_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13355,31 +13321,22 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s])) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s])) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh32Ux64 {
-                       break
-               }
-               _ = v_1_0.Args[1]
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpAvg32u {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               _ = v_1_0_0.Args[1]
-               if x != v_1_0_0.Args[0] {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh32Ux64 {
                        break
                }
-               mul := v_1_0_0.Args[1]
+               _ = v_0_0.Args[1]
+               mul := v_0_0.Args[0]
                if mul.Op != OpHmul32u {
                        break
                }
@@ -13392,17 +13349,17 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                        break
                }
                m := mul_1.AuxInt
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_1_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13422,35 +13379,28 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
-                       break
-               }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh32Ux64 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               _ = v_0_1.Args[1]
-               v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpAvg32u {
-                       break
-               }
-               _ = v_0_1_0.Args[1]
-               if x != v_0_1_0.Args[0] {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh32Ux64 {
                        break
                }
-               mul := v_0_1_0.Args[1]
+               _ = v_1_1.Args[1]
+               mul := v_1_1.Args[0]
                if mul.Op != OpHmul32u {
                        break
                }
@@ -13459,16 +13409,31 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                if mul_0.Op != OpConst32 {
                        break
                }
+               if mul_0.Type != typ.UInt32 {
+                       break
+               }
                m := mul_0.AuxInt
-               if x != mul.Args[1] {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpRsh32Ux64 {
                        break
                }
-               v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               _ = mul_1.Args[1]
+               if x != mul_1.Args[0] {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
+                       break
+               }
+               if mul_1_1.AuxInt != 1 {
+                       break
+               }
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
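
All of the rewritten Eq32 rules in this file emit the same divisibility test on the result side: Leq32U (RotateLeft32 (Mul32 (Const32 [udivisible(32,c).m]) x) (Const32 [32-udivisible(32,c).k])) (Const32 [udivisible(32,c).max]). As a reference point, here is a minimal standalone Go sketch of that check, one multiply, one rotate and one compare; the identifier names are illustrative only and are not the ones used in ssa/magic.go.

	// Sketch of the unsigned divisibility test produced by the Eq32 rules.
	// For a constant c = d<<k with d odd, x%c == 0 iff
	//
	//      rotr32(x*inv(d), k) <= (2^32-1)/c
	//
	// (Granlund-Montgomery; "Hacker's Delight" section 10-17).
	package main

	import (
		"fmt"
		"math/bits"
	)

	// udivisible32 reports whether x%c == 0 for a constant c > 0 without
	// performing a division.
	func udivisible32(x, c uint32) bool {
		k := bits.TrailingZeros32(c) // c = d<<k with d odd
		d := c >> uint(k)

		// Multiplicative inverse of d modulo 2^32 by Newton iteration:
		// d*d == 1 (mod 8), and each step doubles the number of good bits.
		m := d
		for i := 0; i < 4; i++ {
			m *= 2 - d*m
		}

		max := ^uint32(0) / c // (2^32-1)/c
		// Rotate right by k == RotateLeft32 by 32-k, as in the rules above.
		return bits.RotateLeft32(x*m, -k) <= max
	}

	func main() {
		for _, c := range []uint32{6, 19} {
			for x := uint32(0); x < 1000; x++ {
				if udivisible32(x, c) != (x%c == 0) {
					fmt.Println("mismatch:", x, c)
				}
			}
		}
		fmt.Println("ok")
	}

The matched (left-hand) sides below are the already-expanded magic-division forms of c*(x/c), so the rules fire only after the "opt" pass, once it is known that x/c itself is not needed elsewhere.
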
@@ -13488,53 +13453,61 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh32Ux64 {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh32Ux64 {
                        break
                }
-               _ = v_0_1.Args[1]
-               v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpAvg32u {
+               _ = v_1_1.Args[1]
+               mul := v_1_1.Args[0]
+               if mul.Op != OpHmul32u {
                        break
                }
-               _ = v_0_1_0.Args[1]
-               if x != v_0_1_0.Args[0] {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpRsh32Ux64 {
                        break
                }
-               mul := v_0_1_0.Args[1]
-               if mul.Op != OpHmul32u {
+               _ = mul_0.Args[1]
+               if x != mul_0.Args[0] {
                        break
                }
-               _ = mul.Args[1]
-               if x != mul.Args[0] {
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
+                       break
+               }
+               if mul_0_1.AuxInt != 1 {
                        break
                }
                mul_1 := mul.Args[1]
                if mul_1.Op != OpConst32 {
                        break
                }
+               if mul_1.Type != typ.UInt32 {
+                       break
+               }
                m := mul_1.AuxInt
-               v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13554,30 +13527,23 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
-                       break
-               }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh32Ux64 {
-                       break
-               }
-               _ = v_0_0.Args[1]
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpAvg32u {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0_0_0.Args[1]
-               if x != v_0_0_0.Args[0] {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh32Ux64 {
                        break
                }
-               mul := v_0_0_0.Args[1]
+               _ = v_1_0.Args[1]
+               mul := v_1_0.Args[0]
                if mul.Op != OpHmul32u {
                        break
                }
@@ -13586,21 +13552,36 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                if mul_0.Op != OpConst32 {
                        break
                }
+               if mul_0.Type != typ.UInt32 {
+                       break
+               }
                m := mul_0.AuxInt
-               if x != mul.Args[1] {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpRsh32Ux64 {
                        break
                }
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               _ = mul_1.Args[1]
+               if x != mul_1.Args[0] {
                        break
                }
-               s := v_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               if mul_1_1.AuxInt != 1 {
+                       break
+               }
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13620,58 +13601,61 @@ func rewriteValuegeneric_OpEq32_20(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       return false
-}
-func rewriteValuegeneric_OpEq32_30(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
-       // match: (Eq32 (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s])) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s])) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh32Ux64 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh32Ux64 {
                        break
                }
-               _ = v_0_0.Args[1]
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpAvg32u {
+               _ = v_1_0.Args[1]
+               mul := v_1_0.Args[0]
+               if mul.Op != OpHmul32u {
                        break
                }
-               _ = v_0_0_0.Args[1]
-               if x != v_0_0_0.Args[0] {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpRsh32Ux64 {
                        break
                }
-               mul := v_0_0_0.Args[1]
-               if mul.Op != OpHmul32u {
+               _ = mul_0.Args[1]
+               if x != mul_0.Args[0] {
                        break
                }
-               _ = mul.Args[1]
-               if x != mul.Args[0] {
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
+                       break
+               }
+               if mul_0_1.AuxInt != 1 {
                        break
                }
                mul_1 := mul.Args[1]
                if mul_1.Op != OpConst32 {
                        break
                }
+               if mul_1.Type != typ.UInt32 {
+                       break
+               }
                m := mul_1.AuxInt
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13691,54 +13675,60 @@ func rewriteValuegeneric_OpEq32_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s])))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpTrunc64to32 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpRsh64Ux64 {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh32Ux64 {
                        break
                }
-               _ = v_1_1_0.Args[1]
-               mul := v_1_1_0.Args[0]
-               if mul.Op != OpMul64 {
+               _ = v_0_1.Args[1]
+               mul := v_0_1.Args[0]
+               if mul.Op != OpHmul32u {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst64 {
+               if mul_0.Op != OpConst32 {
+                       break
+               }
+               if mul_0.Type != typ.UInt32 {
                        break
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpZeroExt32to64 {
+               if mul_1.Op != OpRsh32Ux64 {
                        break
                }
+               _ = mul_1.Args[1]
                if x != mul_1.Args[0] {
                        break
                }
-               v_1_1_0_1 := v_1_1_0.Args[1]
-               if v_1_1_0_1.Op != OpConst64 {
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
                        break
                }
-               s := v_1_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               if mul_1_1.AuxInt != 1 {
+                       break
+               }
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13758,54 +13748,65 @@ func rewriteValuegeneric_OpEq32_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s])))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       return false
+}
+func rewriteValuegeneric_OpEq32_20(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpTrunc64to32 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpRsh64Ux64 {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh32Ux64 {
                        break
                }
-               _ = v_1_1_0.Args[1]
-               mul := v_1_1_0.Args[0]
-               if mul.Op != OpMul64 {
+               _ = v_0_1.Args[1]
+               mul := v_0_1.Args[0]
+               if mul.Op != OpHmul32u {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpZeroExt32to64 {
+               if mul_0.Op != OpRsh32Ux64 {
                        break
                }
+               _ = mul_0.Args[1]
                if x != mul_0.Args[0] {
                        break
                }
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
+                       break
+               }
+               if mul_0_1.AuxInt != 1 {
+                       break
+               }
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               if mul_1.Op != OpConst32 {
+                       break
+               }
+               if mul_1.Type != typ.UInt32 {
                        break
                }
                m := mul_1.AuxInt
-               v_1_1_0_1 := v_1_1_0.Args[1]
-               if v_1_1_0_1.Op != OpConst64 {
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
                        break
                }
-               s := v_1_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13825,54 +13826,60 @@ func rewriteValuegeneric_OpEq32_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 <typ.UInt32> [m]) (Rsh32Ux64 x (Const64 [1]))) (Const64 [s])) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpTrunc64to32 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpRsh64Ux64 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh32Ux64 {
                        break
                }
-               _ = v_1_0_0.Args[1]
-               mul := v_1_0_0.Args[0]
-               if mul.Op != OpMul64 {
+               _ = v_0_0.Args[1]
+               mul := v_0_0.Args[0]
+               if mul.Op != OpHmul32u {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst64 {
+               if mul_0.Op != OpConst32 {
+                       break
+               }
+               if mul_0.Type != typ.UInt32 {
                        break
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpZeroExt32to64 {
+               if mul_1.Op != OpRsh32Ux64 {
                        break
                }
+               _ = mul_1.Args[1]
                if x != mul_1.Args[0] {
                        break
                }
-               v_1_0_0_1 := v_1_0_0.Args[1]
-               if v_1_0_0_1.Op != OpConst64 {
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
                        break
                }
-               s := v_1_0_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               if mul_1_1.AuxInt != 1 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13892,54 +13899,60 @@ func rewriteValuegeneric_OpEq32_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s]))) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Rsh32Ux64 mul:(Hmul32u (Rsh32Ux64 x (Const64 [1])) (Const32 <typ.UInt32> [m])) (Const64 [s])) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32,c).m+1)/2)) && s == umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpTrunc64to32 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpRsh64Ux64 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh32Ux64 {
                        break
                }
-               _ = v_1_0_0.Args[1]
-               mul := v_1_0_0.Args[0]
-               if mul.Op != OpMul64 {
+               _ = v_0_0.Args[1]
+               mul := v_0_0.Args[0]
+               if mul.Op != OpHmul32u {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpZeroExt32to64 {
+               if mul_0.Op != OpRsh32Ux64 {
                        break
                }
+               _ = mul_0.Args[1]
                if x != mul_0.Args[0] {
                        break
                }
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
+                       break
+               }
+               if mul_0_1.AuxInt != 1 {
+                       break
+               }
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               if mul_1.Op != OpConst32 {
+                       break
+               }
+               if mul_1.Type != typ.UInt32 {
                        break
                }
                m := mul_1.AuxInt
-               v_1_0_0_1 := v_1_0_0.Args[1]
-               if v_1_0_0_1.Op != OpConst64 {
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_1_0_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+(umagic(32, c).m+1)/2)) && s == umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -13959,53 +13972,54 @@ func rewriteValuegeneric_OpEq32_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s])))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpTrunc64to32 {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh32Ux64 {
                        break
                }
-               v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpRsh64Ux64 {
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpAvg32u {
                        break
                }
-               _ = v_0_1_0.Args[1]
-               mul := v_0_1_0.Args[0]
-               if mul.Op != OpMul64 {
+               _ = v_1_1_0.Args[1]
+               if x != v_1_1_0.Args[0] {
+                       break
+               }
+               mul := v_1_1_0.Args[1]
+               if mul.Op != OpHmul32u {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst64 {
+               if mul_0.Op != OpConst32 {
                        break
                }
                m := mul_0.AuxInt
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpZeroExt32to64 {
-                       break
-               }
-               if x != mul_1.Args[0] {
+               if x != mul.Args[1] {
                        break
                }
-               v_0_1_0_1 := v_0_1_0.Args[1]
-               if v_0_1_0_1.Op != OpConst64 {
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
                        break
                }
-               s := v_0_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14025,53 +14039,54 @@ func rewriteValuegeneric_OpEq32_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s])))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
-                       break
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
+                       break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpTrunc64to32 {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh32Ux64 {
                        break
                }
-               v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpRsh64Ux64 {
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpAvg32u {
                        break
                }
-               _ = v_0_1_0.Args[1]
-               mul := v_0_1_0.Args[0]
-               if mul.Op != OpMul64 {
+               _ = v_1_1_0.Args[1]
+               if x != v_1_1_0.Args[0] {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpZeroExt32to64 {
+               mul := v_1_1_0.Args[1]
+               if mul.Op != OpHmul32u {
                        break
                }
-               if x != mul_0.Args[0] {
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
                        break
                }
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               if mul_1.Op != OpConst32 {
                        break
                }
                m := mul_1.AuxInt
-               v_0_1_0_1 := v_0_1_0.Args[1]
-               if v_0_1_0_1.Op != OpConst64 {
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
                        break
                }
-               s := v_0_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14091,53 +14106,54 @@ func rewriteValuegeneric_OpEq32_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpTrunc64to32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh32Ux64 {
                        break
                }
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpRsh64Ux64 {
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpAvg32u {
                        break
                }
-               _ = v_0_0_0.Args[1]
-               mul := v_0_0_0.Args[0]
-               if mul.Op != OpMul64 {
+               _ = v_1_0_0.Args[1]
+               if x != v_1_0_0.Args[0] {
+                       break
+               }
+               mul := v_1_0_0.Args[1]
+               if mul.Op != OpHmul32u {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst64 {
+               if mul_0.Op != OpConst32 {
                        break
                }
                m := mul_0.AuxInt
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpZeroExt32to64 {
-                       break
-               }
-               if x != mul_1.Args[0] {
+               if x != mul.Args[1] {
                        break
                }
-               v_0_0_0_1 := v_0_0_0.Args[1]
-               if v_0_0_0_1.Op != OpConst64 {
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14157,53 +14173,54 @@ func rewriteValuegeneric_OpEq32_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s]))) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s])) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpTrunc64to32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh32Ux64 {
                        break
                }
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpRsh64Ux64 {
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpAvg32u {
                        break
                }
-               _ = v_0_0_0.Args[1]
-               mul := v_0_0_0.Args[0]
-               if mul.Op != OpMul64 {
+               _ = v_1_0_0.Args[1]
+               if x != v_1_0_0.Args[0] {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpZeroExt32to64 {
+               mul := v_1_0_0.Args[1]
+               if mul.Op != OpHmul32u {
                        break
                }
-               if x != mul_0.Args[0] {
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
                        break
                }
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               if mul_1.Op != OpConst32 {
                        break
                }
                m := mul_1.AuxInt
-               v_0_0_0_1 := v_0_0_0.Args[1]
-               if v_0_0_0_1.Op != OpConst64 {
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14223,66 +14240,53 @@ func rewriteValuegeneric_OpEq32_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s])))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpTrunc64to32 {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh32Ux64 {
                        break
                }
-               v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpRsh64Ux64 {
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpAvg32u {
                        break
                }
-               _ = v_1_1_0.Args[1]
-               mul := v_1_1_0.Args[0]
-               if mul.Op != OpMul64 {
+               _ = v_0_1_0.Args[1]
+               if x != v_0_1_0.Args[0] {
+                       break
+               }
+               mul := v_0_1_0.Args[1]
+               if mul.Op != OpHmul32u {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst64 {
+               if mul_0.Op != OpConst32 {
                        break
                }
                m := mul_0.AuxInt
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = mul_1.Args[1]
-               mul_1_0 := mul_1.Args[0]
-               if mul_1_0.Op != OpZeroExt32to64 {
-                       break
-               }
-               if x != mul_1_0.Args[0] {
-                       break
-               }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
-                       break
-               }
-               if mul_1_1.AuxInt != 1 {
+               if x != mul.Args[1] {
                        break
                }
-               v_1_1_0_1 := v_1_1_0.Args[1]
-               if v_1_1_0_1.Op != OpConst64 {
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
                        break
                }
-               s := v_1_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14302,71 +14306,53 @@ func rewriteValuegeneric_OpEq32_30(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       return false
-}
-func rewriteValuegeneric_OpEq32_40(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s])))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpTrunc64to32 {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh32Ux64 {
                        break
                }
-               v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpRsh64Ux64 {
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpAvg32u {
                        break
                }
-               _ = v_1_1_0.Args[1]
-               mul := v_1_1_0.Args[0]
-               if mul.Op != OpMul64 {
+               _ = v_0_1_0.Args[1]
+               if x != v_0_1_0.Args[0] {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh64Ux64 {
+               mul := v_0_1_0.Args[1]
+               if mul.Op != OpHmul32u {
                        break
                }
-               _ = mul_0.Args[1]
-               mul_0_0 := mul_0.Args[0]
-               if mul_0_0.Op != OpZeroExt32to64 {
-                       break
-               }
-               if x != mul_0_0.Args[0] {
-                       break
-               }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
-                       break
-               }
-               if mul_0_1.AuxInt != 1 {
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
                        break
                }
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               if mul_1.Op != OpConst32 {
                        break
                }
                m := mul_1.AuxInt
-               v_1_1_0_1 := v_1_1_0.Args[1]
-               if v_1_1_0_1.Op != OpConst64 {
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
                        break
                }
-               s := v_1_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14386,66 +14372,53 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u (Const32 [m]) x)) (Const64 [s])) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpTrunc64to32 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh32Ux64 {
                        break
                }
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpRsh64Ux64 {
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpAvg32u {
                        break
                }
-               _ = v_1_0_0.Args[1]
-               mul := v_1_0_0.Args[0]
-               if mul.Op != OpMul64 {
+               _ = v_0_0_0.Args[1]
+               if x != v_0_0_0.Args[0] {
+                       break
+               }
+               mul := v_0_0_0.Args[1]
+               if mul.Op != OpHmul32u {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst64 {
+               if mul_0.Op != OpConst32 {
                        break
                }
                m := mul_0.AuxInt
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = mul_1.Args[1]
-               mul_1_0 := mul_1.Args[0]
-               if mul_1_0.Op != OpZeroExt32to64 {
-                       break
-               }
-               if x != mul_1_0.Args[0] {
-                       break
-               }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
-                       break
-               }
-               if mul_1_1.AuxInt != 1 {
+               if x != mul.Args[1] {
                        break
                }
-               v_1_0_0_1 := v_1_0_0.Args[1]
-               if v_1_0_0_1.Op != OpConst64 {
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_1_0_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14465,66 +14438,58 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s]))) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       return false
+}
+func rewriteValuegeneric_OpEq32_30(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq32 (Mul32 (Rsh32Ux64 (Avg32u x mul:(Hmul32u x (Const32 [m]))) (Const64 [s])) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32,c).m)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpTrunc64to32 {
-                       break
-               }
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = v_1_0_0.Args[1]
-               mul := v_1_0_0.Args[0]
-               if mul.Op != OpMul64 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh64Ux64 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh32Ux64 {
                        break
                }
-               _ = mul_0.Args[1]
-               mul_0_0 := mul_0.Args[0]
-               if mul_0_0.Op != OpZeroExt32to64 {
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpAvg32u {
                        break
                }
-               if x != mul_0_0.Args[0] {
+               _ = v_0_0_0.Args[1]
+               if x != v_0_0_0.Args[0] {
                        break
                }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
+               mul := v_0_0_0.Args[1]
+               if mul.Op != OpHmul32u {
                        break
                }
-               if mul_0_1.AuxInt != 1 {
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
                        break
                }
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               if mul_1.Op != OpConst32 {
                        break
                }
                m := mul_1.AuxInt
-               v_1_0_0_1 := v_1_0_0.Args[1]
-               if v_1_0_0_1.Op != OpConst64 {
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_1_0_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(umagic(32, c).m)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14544,31 +14509,32 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                v.AddArg(v4)
                return true
        }
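A minimal sketch, not part of the generated matcher, of what the Leq32U/RotateLeft32 result above computes, assuming c = 6 with the constants worked out by hand (udivisible(32,6) gives k = 1, m = 0xAAAAAAAB, max = 0x2AAAAAAA):

        // n%6 == 0: multiply by the inverse of the odd part of 6, rotate the
        // low (even) bit to the top, then a single unsigned compare.
        // Needs "math/bits".
        func divisibleBy6(n uint32) bool {
                const m = 0xAAAAAAAB   // multiplicative inverse of 6>>1 = 3 (mod 1<<32)
                const k = 1            // trailing zero bits of 6
                const max = 0x2AAAAAAA // (1<<32 - 1) / 6
                return bits.RotateLeft32(n*m, 32-k) <= max
        }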
-       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s])))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpTrunc64to32 {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpTrunc64to32 {
                        break
                }
-               v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpRsh64Ux64 {
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               _ = v_0_1_0.Args[1]
-               mul := v_0_1_0.Args[0]
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
                if mul.Op != OpMul64 {
                        break
                }
@@ -14579,30 +14545,18 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = mul_1.Args[1]
-               mul_1_0 := mul_1.Args[0]
-               if mul_1_0.Op != OpZeroExt32to64 {
-                       break
-               }
-               if x != mul_1_0.Args[0] {
-                       break
-               }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
+               if mul_1.Op != OpZeroExt32to64 {
                        break
                }
-               if mul_1_1.AuxInt != 1 {
+               if x != mul_1.Args[0] {
                        break
                }
-               v_0_1_0_1 := v_0_1_0.Args[1]
-               if v_0_1_0_1.Op != OpConst64 {
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_1_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14622,65 +14576,54 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                v.AddArg(v4)
                return true
        }
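The Mul64/Rsh64Ux64 subtree matched here is the shape x/c takes after the earlier magic-division rewrite on targets with 64-bit registers when umagic(32,c).m is even; the rule recognizes that expansion inside x == c*(x/c). As a hand-worked example, assuming c = 5 (umagic(32,5).s = 3, umagic(32,5).m = 0x9999999A), the matcher expects m = 1<<31 + 0x9999999A/2 = 0xCCCCCCCD and s = 32+3-1 = 34:

        // x/5 in the expanded form the matcher above looks for.
        func div5(x uint32) uint32 {
                return uint32((uint64(x) * 0xCCCCCCCD) >> 34)
        }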
-       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s])))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpTrunc64to32 {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpTrunc64to32 {
                        break
                }
-               v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpRsh64Ux64 {
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               _ = v_0_1_0.Args[1]
-               mul := v_0_1_0.Args[0]
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
                if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh64Ux64 {
+               if mul_0.Op != OpZeroExt32to64 {
                        break
                }
-               _ = mul_0.Args[1]
-               mul_0_0 := mul_0.Args[0]
-               if mul_0_0.Op != OpZeroExt32to64 {
+               if x != mul_0.Args[0] {
                        break
                }
-               if x != mul_0_0.Args[0] {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
                        break
                }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
+               m := mul_1.AuxInt
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               if mul_0_1.AuxInt != 1 {
-                       break
-               }
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
-                       break
-               }
-               m := mul_1.AuxInt
-               v_0_1_0_1 := v_0_1_0.Args[1]
-               if v_0_1_0_1.Op != OpConst64 {
-                       break
-               }
-               s := v_0_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_1_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14700,26 +14643,27 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpTrunc64to32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpTrunc64to32 {
                        break
                }
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpRsh64Ux64 {
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64Ux64 {
                        break
                }
-               _ = v_0_0_0.Args[1]
-               mul := v_0_0_0.Args[0]
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
                if mul.Op != OpMul64 {
                        break
                }
@@ -14730,35 +14674,23 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = mul_1.Args[1]
-               mul_1_0 := mul_1.Args[0]
-               if mul_1_0.Op != OpZeroExt32to64 {
-                       break
-               }
-               if x != mul_1_0.Args[0] {
-                       break
-               }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
+               if mul_1.Op != OpZeroExt32to64 {
                        break
                }
-               if mul_1_1.AuxInt != 1 {
+               if x != mul_1.Args[0] {
                        break
                }
-               v_0_0_0_1 := v_0_0_0.Args[1]
-               if v_0_0_0_1.Op != OpConst64 {
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
+               s := v_1_0_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14778,47 +14710,36 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s]))) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpTrunc64to32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpTrunc64to32 {
                        break
                }
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpRsh64Ux64 {
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64Ux64 {
                        break
                }
-               _ = v_0_0_0.Args[1]
-               mul := v_0_0_0.Args[0]
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
                if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = mul_0.Args[1]
-               mul_0_0 := mul_0.Args[0]
-               if mul_0_0.Op != OpZeroExt32to64 {
-                       break
-               }
-               if x != mul_0_0.Args[0] {
-                       break
-               }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
+               if mul_0.Op != OpZeroExt32to64 {
                        break
                }
-               if mul_0_1.AuxInt != 1 {
+               if x != mul_0.Args[0] {
                        break
                }
                mul_1 := mul.Args[1]
@@ -14826,17 +14747,17 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                        break
                }
                m := mul_1.AuxInt
-               v_0_0_0_1 := v_0_0_0.Args[1]
-               if v_0_0_0_1.Op != OpConst64 {
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
+               s := v_1_0_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14856,56 +14777,31 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s])))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
-                       break
-               }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpTrunc64to32 {
-                       break
-               }
-               v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = v_1_1_0.Args[1]
-               v_1_1_0_0 := v_1_1_0.Args[0]
-               if v_1_1_0_0.Op != OpAvg64u {
-                       break
-               }
-               _ = v_1_1_0_0.Args[1]
-               v_1_1_0_0_0 := v_1_1_0_0.Args[0]
-               if v_1_1_0_0_0.Op != OpLsh64x64 {
-                       break
-               }
-               _ = v_1_1_0_0_0.Args[1]
-               v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
-               if v_1_1_0_0_0_0.Op != OpZeroExt32to64 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               if x != v_1_1_0_0_0_0.Args[0] {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
-               if v_1_1_0_0_0_1.Op != OpConst64 {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpTrunc64to32 {
                        break
                }
-               if v_1_1_0_0_0_1.AuxInt != 32 {
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               mul := v_1_1_0_0.Args[1]
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
                if mul.Op != OpMul64 {
                        break
                }
@@ -14922,12 +14818,12 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                if x != mul_1.Args[0] {
                        break
                }
-               v_1_1_0_1 := v_1_1_0.Args[1]
-               if v_1_1_0_1.Op != OpConst64 {
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
                        break
                }
-               s := v_1_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_0_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -14947,56 +14843,31 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s])))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst32 {
-                       break
-               }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpTrunc64to32 {
-                       break
-               }
-               v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = v_1_1_0.Args[1]
-               v_1_1_0_0 := v_1_1_0.Args[0]
-               if v_1_1_0_0.Op != OpAvg64u {
-                       break
-               }
-               _ = v_1_1_0_0.Args[1]
-               v_1_1_0_0_0 := v_1_1_0_0.Args[0]
-               if v_1_1_0_0_0.Op != OpLsh64x64 {
-                       break
-               }
-               _ = v_1_1_0_0_0.Args[1]
-               v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
-               if v_1_1_0_0_0_0.Op != OpZeroExt32to64 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               if x != v_1_1_0_0_0_0.Args[0] {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
-               if v_1_1_0_0_0_1.Op != OpConst64 {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpTrunc64to32 {
                        break
                }
-               if v_1_1_0_0_0_1.AuxInt != 32 {
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               mul := v_1_1_0_0.Args[1]
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
                if mul.Op != OpMul64 {
                        break
                }
@@ -15013,12 +14884,12 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                        break
                }
                m := mul_1.AuxInt
-               v_1_1_0_1 := v_1_1_0.Args[1]
-               if v_1_1_0_1.Op != OpConst64 {
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
                        break
                }
-               s := v_1_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_0_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -15038,51 +14909,26 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x)) (Const64 [s]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpTrunc64to32 {
-                       break
-               }
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = v_1_0_0.Args[1]
-               v_1_0_0_0 := v_1_0_0.Args[0]
-               if v_1_0_0_0.Op != OpAvg64u {
-                       break
-               }
-               _ = v_1_0_0_0.Args[1]
-               v_1_0_0_0_0 := v_1_0_0_0.Args[0]
-               if v_1_0_0_0_0.Op != OpLsh64x64 {
-                       break
-               }
-               _ = v_1_0_0_0_0.Args[1]
-               v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
-               if v_1_0_0_0_0_0.Op != OpZeroExt32to64 {
-                       break
-               }
-               if x != v_1_0_0_0_0_0.Args[0] {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
-               if v_1_0_0_0_0_1.Op != OpConst64 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpTrunc64to32 {
                        break
                }
-               if v_1_0_0_0_0_1.AuxInt != 32 {
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64Ux64 {
                        break
                }
-               mul := v_1_0_0_0.Args[1]
+               _ = v_0_0_0.Args[1]
+               mul := v_0_0_0.Args[0]
                if mul.Op != OpMul64 {
                        break
                }
@@ -15099,17 +14945,17 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                if x != mul_1.Args[0] {
                        break
                }
-               v_1_0_0_1 := v_1_0_0.Args[1]
-               if v_1_0_0_1.Op != OpConst64 {
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_1_0_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               s := v_0_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -15129,56 +14975,26 @@ func rewriteValuegeneric_OpEq32_40(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       return false
-}
-func rewriteValuegeneric_OpEq32_50(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
-       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s]))) (Const32 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (ZeroExt32to64 x) (Const64 [m])) (Const64 [s]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32,c).m/2) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul32 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpTrunc64to32 {
-                       break
-               }
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = v_1_0_0.Args[1]
-               v_1_0_0_0 := v_1_0_0.Args[0]
-               if v_1_0_0_0.Op != OpAvg64u {
-                       break
-               }
-               _ = v_1_0_0_0.Args[1]
-               v_1_0_0_0_0 := v_1_0_0_0.Args[0]
-               if v_1_0_0_0_0.Op != OpLsh64x64 {
-                       break
-               }
-               _ = v_1_0_0_0_0.Args[1]
-               v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
-               if v_1_0_0_0_0_0.Op != OpZeroExt32to64 {
-                       break
-               }
-               if x != v_1_0_0_0_0_0.Args[0] {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
-               if v_1_0_0_0_0_1.Op != OpConst64 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpTrunc64to32 {
                        break
                }
-               if v_1_0_0_0_0_1.AuxInt != 32 {
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64Ux64 {
                        break
                }
-               mul := v_1_0_0_0.Args[1]
+               _ = v_0_0_0.Args[1]
+               mul := v_0_0_0.Args[0]
                if mul.Op != OpMul64 {
                        break
                }
@@ -15195,17 +15011,17 @@ func rewriteValuegeneric_OpEq32_50(v *Value) bool {
                        break
                }
                m := mul_1.AuxInt
-               v_1_0_0_1 := v_1_0_0.Args[1]
-               if v_1_0_0_1.Op != OpConst64 {
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_1_0_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst32 {
+               s := v_0_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+umagic(32, c).m/2) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -15225,55 +15041,32 @@ func rewriteValuegeneric_OpEq32_50(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s])))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
-                       break
-               }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
-                       break
-               }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpTrunc64to32 {
-                       break
-               }
-               v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = v_0_1_0.Args[1]
-               v_0_1_0_0 := v_0_1_0.Args[0]
-               if v_0_1_0_0.Op != OpAvg64u {
-                       break
-               }
-               _ = v_0_1_0_0.Args[1]
-               v_0_1_0_0_0 := v_0_1_0_0.Args[0]
-               if v_0_1_0_0_0.Op != OpLsh64x64 {
-                       break
-               }
-               _ = v_0_1_0_0_0.Args[1]
-               v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
-               if v_0_1_0_0_0_0.Op != OpZeroExt32to64 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               if x != v_0_1_0_0_0_0.Args[0] {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
-               if v_0_1_0_0_0_1.Op != OpConst64 {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpTrunc64to32 {
                        break
                }
-               if v_0_1_0_0_0_1.AuxInt != 32 {
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               mul := v_0_1_0_0.Args[1]
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
                if mul.Op != OpMul64 {
                        break
                }
@@ -15284,18 +15077,30 @@ func rewriteValuegeneric_OpEq32_50(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpZeroExt32to64 {
+               if mul_1.Op != OpRsh64Ux64 {
                        break
                }
-               if x != mul_1.Args[0] {
+               _ = mul_1.Args[1]
+               mul_1_0 := mul_1.Args[0]
+               if mul_1_0.Op != OpZeroExt32to64 {
                        break
                }
-               v_0_1_0_1 := v_0_1_0.Args[1]
-               if v_0_1_0_1.Op != OpConst64 {
+               if x != mul_1_0.Args[0] {
                        break
                }
-               s := v_0_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
+                       break
+               }
+               if mul_1_1.AuxInt != 1 {
+                       break
+               }
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -15315,64 +15120,58 @@ func rewriteValuegeneric_OpEq32_50(v *Value) bool {
                v.AddArg(v4)
                return true
        }
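This variant matches the x/c expansion used when c is even and umagic(32,c).m is odd: x is shifted right one bit first, the 33-bit magic constant is halved rounding up, and the shift count drops by one. As a hand-worked example, assuming c = 14 (umagic(32,14).s = 4, umagic(32,14).m = 0x24924925), the matcher expects m = 1<<31 + (0x24924925+1)/2 = 0x92492493 and s = 32+4-2 = 34:

        // x/14 in the pre-shifted form the matcher above looks for.
        func div14(x uint32) uint32 {
                return uint32(((uint64(x) >> 1) * 0x92492493) >> 34)
        }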
-       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s])))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       return false
+}
+func rewriteValuegeneric_OpEq32_40(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst32 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpTrunc64to32 {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpTrunc64to32 {
                        break
                }
-               v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpRsh64Ux64 {
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               _ = v_0_1_0.Args[1]
-               v_0_1_0_0 := v_0_1_0.Args[0]
-               if v_0_1_0_0.Op != OpAvg64u {
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
-               _ = v_0_1_0_0.Args[1]
-               v_0_1_0_0_0 := v_0_1_0_0.Args[0]
-               if v_0_1_0_0_0.Op != OpLsh64x64 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpRsh64Ux64 {
                        break
                }
-               _ = v_0_1_0_0_0.Args[1]
-               v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
-               if v_0_1_0_0_0_0.Op != OpZeroExt32to64 {
+               _ = mul_0.Args[1]
+               mul_0_0 := mul_0.Args[0]
+               if mul_0_0.Op != OpZeroExt32to64 {
                        break
                }
-               if x != v_0_1_0_0_0_0.Args[0] {
+               if x != mul_0_0.Args[0] {
                        break
                }
-               v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
-               if v_0_1_0_0_0_1.Op != OpConst64 {
-                       break
-               }
-               if v_0_1_0_0_0_1.AuxInt != 32 {
-                       break
-               }
-               mul := v_0_1_0_0.Args[1]
-               if mul.Op != OpMul64 {
-                       break
-               }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpZeroExt32to64 {
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
                        break
                }
-               if x != mul_0.Args[0] {
+               if mul_0_1.AuxInt != 1 {
                        break
                }
                mul_1 := mul.Args[1]
@@ -15380,12 +15179,12 @@ func rewriteValuegeneric_OpEq32_50(v *Value) bool {
                        break
                }
                m := mul_1.AuxInt
-               v_0_1_0_1 := v_0_1_0.Args[1]
-               if v_0_1_0_1.Op != OpConst64 {
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               s := v_1_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -15405,50 +15204,27 @@ func rewriteValuegeneric_OpEq32_50(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
-                       break
-               }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpTrunc64to32 {
-                       break
-               }
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = v_0_0_0.Args[1]
-               v_0_0_0_0 := v_0_0_0.Args[0]
-               if v_0_0_0_0.Op != OpAvg64u {
-                       break
-               }
-               _ = v_0_0_0_0.Args[1]
-               v_0_0_0_0_0 := v_0_0_0_0.Args[0]
-               if v_0_0_0_0_0.Op != OpLsh64x64 {
-                       break
-               }
-               _ = v_0_0_0_0_0.Args[1]
-               v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
-               if v_0_0_0_0_0_0.Op != OpZeroExt32to64 {
-                       break
-               }
-               if x != v_0_0_0_0_0_0.Args[0] {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
-               if v_0_0_0_0_0_1.Op != OpConst64 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpTrunc64to32 {
                        break
                }
-               if v_0_0_0_0_0_1.AuxInt != 32 {
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64Ux64 {
                        break
                }
-               mul := v_0_0_0_0.Args[1]
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
                if mul.Op != OpMul64 {
                        break
                }
@@ -15459,23 +15235,35 @@ func rewriteValuegeneric_OpEq32_50(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpZeroExt32to64 {
+               if mul_1.Op != OpRsh64Ux64 {
                        break
                }
-               if x != mul_1.Args[0] {
+               _ = mul_1.Args[1]
+               mul_1_0 := mul_1.Args[0]
+               if mul_1_0.Op != OpZeroExt32to64 {
                        break
                }
-               v_0_0_0_1 := v_0_0_0.Args[1]
-               if v_0_0_0_1.Op != OpConst64 {
+               if x != mul_1_0.Args[0] {
                        break
                }
-               s := v_0_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               if mul_1_1.AuxInt != 1 {
+                       break
+               }
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -15495,59 +15283,48 @@ func rewriteValuegeneric_OpEq32_50(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s]))) (Const32 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
        // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul32 {
-                       break
-               }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpTrunc64to32 {
-                       break
-               }
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = v_0_0_0.Args[1]
-               v_0_0_0_0 := v_0_0_0.Args[0]
-               if v_0_0_0_0.Op != OpAvg64u {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               _ = v_0_0_0_0.Args[1]
-               v_0_0_0_0_0 := v_0_0_0_0.Args[0]
-               if v_0_0_0_0_0.Op != OpLsh64x64 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpTrunc64to32 {
                        break
                }
-               _ = v_0_0_0_0_0.Args[1]
-               v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
-               if v_0_0_0_0_0_0.Op != OpZeroExt32to64 {
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64Ux64 {
                        break
                }
-               if x != v_0_0_0_0_0_0.Args[0] {
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
-               v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
-               if v_0_0_0_0_0_1.Op != OpConst64 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpRsh64Ux64 {
                        break
                }
-               if v_0_0_0_0_0_1.AuxInt != 32 {
+               _ = mul_0.Args[1]
+               mul_0_0 := mul_0.Args[0]
+               if mul_0_0.Op != OpZeroExt32to64 {
                        break
                }
-               mul := v_0_0_0_0.Args[1]
-               if mul.Op != OpMul64 {
+               if x != mul_0_0.Args[0] {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpZeroExt32to64 {
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
                        break
                }
-               if x != mul_0.Args[0] {
+               if mul_0_1.AuxInt != 1 {
                        break
                }
                mul_1 := mul.Args[1]
@@ -15555,17 +15332,17 @@ func rewriteValuegeneric_OpEq32_50(v *Value) bool {
                        break
                }
                m := mul_1.AuxInt
-               v_0_0_0_1 := v_0_0_0.Args[1]
-               if v_0_0_0_1.Op != OpConst64 {
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst32 {
+               s := v_1_0_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
                v.reset(OpLeq32U)
@@ -15585,684 +15362,546 @@ func rewriteValuegeneric_OpEq32_50(v *Value) bool {
                v.AddArg(v4)
                return true
        }
-       // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
-       // cond: k > 0 && k < 31 && kbar == 32 - k
-       // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               n := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpLsh32x64 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh32x64 {
-                       break
-               }
-               _ = v_1_0.Args[1]
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpAdd32 {
-                       break
-               }
-               t := v_1_0_0.Type
-               _ = v_1_0_0.Args[1]
-               if n != v_1_0_0.Args[0] {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               v_1_0_0_1 := v_1_0_0.Args[1]
-               if v_1_0_0_1.Op != OpRsh32Ux64 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               if v_1_0_0_1.Type != t {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpTrunc64to32 {
                        break
                }
-               _ = v_1_0_0_1.Args[1]
-               v_1_0_0_1_0 := v_1_0_0_1.Args[0]
-               if v_1_0_0_1_0.Op != OpRsh32x64 {
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               if v_1_0_0_1_0.Type != t {
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
-               _ = v_1_0_0_1_0.Args[1]
-               if n != v_1_0_0_1_0.Args[0] {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
                        break
                }
-               v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
-               if v_1_0_0_1_0_1.Op != OpConst64 {
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpRsh64Ux64 {
                        break
                }
-               if v_1_0_0_1_0_1.Type != typ.UInt64 {
+               _ = mul_1.Args[1]
+               mul_1_0 := mul_1.Args[0]
+               if mul_1_0.Op != OpZeroExt32to64 {
                        break
                }
-               if v_1_0_0_1_0_1.AuxInt != 31 {
+               if x != mul_1_0.Args[0] {
                        break
                }
-               v_1_0_0_1_1 := v_1_0_0_1.Args[1]
-               if v_1_0_0_1_1.Op != OpConst64 {
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
                        break
                }
-               if v_1_0_0_1_1.Type != typ.UInt64 {
+               if mul_1_1.AuxInt != 1 {
                        break
                }
-               kbar := v_1_0_0_1_1.AuxInt
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
                        break
                }
-               if v_1_0_1.Type != typ.UInt64 {
+               s := v_0_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
-               k := v_1_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
-                       break
-               }
-               if v_1_1.Type != typ.UInt64 {
-                       break
-               }
-               if v_1_1.AuxInt != k {
-                       break
-               }
-               if !(k > 0 && k < 31 && kbar == 32-k) {
-                       break
-               }
-               v.reset(OpEq32)
-               v0 := b.NewValue0(v.Pos, OpAnd32, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst32, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
                v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
+               v0.AddArg(v3)
                v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst32, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
+               v.AddArg(v4)
                return true
        }
-       // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
-       // cond: k > 0 && k < 31 && kbar == 32 - k
-       // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               n := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpLsh32x64 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh32x64 {
-                       break
-               }
-               _ = v_1_0.Args[1]
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpAdd32 {
-                       break
-               }
-               t := v_1_0_0.Type
-               _ = v_1_0_0.Args[1]
-               v_1_0_0_0 := v_1_0_0.Args[0]
-               if v_1_0_0_0.Op != OpRsh32Ux64 {
-                       break
-               }
-               if v_1_0_0_0.Type != t {
-                       break
-               }
-               _ = v_1_0_0_0.Args[1]
-               v_1_0_0_0_0 := v_1_0_0_0.Args[0]
-               if v_1_0_0_0_0.Op != OpRsh32x64 {
-                       break
-               }
-               if v_1_0_0_0_0.Type != t {
-                       break
-               }
-               _ = v_1_0_0_0_0.Args[1]
-               if n != v_1_0_0_0_0.Args[0] {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
-               if v_1_0_0_0_0_1.Op != OpConst64 {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               if v_1_0_0_0_0_1.Type != typ.UInt64 {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpTrunc64to32 {
                        break
                }
-               if v_1_0_0_0_0_1.AuxInt != 31 {
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               v_1_0_0_0_1 := v_1_0_0_0.Args[1]
-               if v_1_0_0_0_1.Op != OpConst64 {
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
-               if v_1_0_0_0_1.Type != typ.UInt64 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpRsh64Ux64 {
                        break
                }
-               kbar := v_1_0_0_0_1.AuxInt
-               if n != v_1_0_0.Args[1] {
+               _ = mul_0.Args[1]
+               mul_0_0 := mul_0.Args[0]
+               if mul_0_0.Op != OpZeroExt32to64 {
                        break
                }
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               if x != mul_0_0.Args[0] {
                        break
                }
-               if v_1_0_1.Type != typ.UInt64 {
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
                        break
                }
-               k := v_1_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               if mul_0_1.AuxInt != 1 {
                        break
                }
-               if v_1_1.Type != typ.UInt64 {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
                        break
                }
-               if v_1_1.AuxInt != k {
+               m := mul_1.AuxInt
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
                        break
                }
-               if !(k > 0 && k < 31 && kbar == 32-k) {
+               s := v_0_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpEq32)
-               v0 := b.NewValue0(v.Pos, OpAnd32, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst32, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
                v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
+               v0.AddArg(v3)
                v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst32, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
+               v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
-       // cond: k > 0 && k < 31 && kbar == 32 - k
-       // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1]))) (Const64 [s]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               n := v.Args[1]
+               x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpLsh32x64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh32x64 {
+               if v_0_0.Op != OpTrunc64to32 {
                        break
                }
-               _ = v_0_0.Args[1]
                v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpAdd32 {
+               if v_0_0_0.Op != OpRsh64Ux64 {
                        break
                }
-               t := v_0_0_0.Type
                _ = v_0_0_0.Args[1]
-               if n != v_0_0_0.Args[0] {
-                       break
-               }
-               v_0_0_0_1 := v_0_0_0.Args[1]
-               if v_0_0_0_1.Op != OpRsh32Ux64 {
-                       break
-               }
-               if v_0_0_0_1.Type != t {
-                       break
-               }
-               _ = v_0_0_0_1.Args[1]
-               v_0_0_0_1_0 := v_0_0_0_1.Args[0]
-               if v_0_0_0_1_0.Op != OpRsh32x64 {
-                       break
-               }
-               if v_0_0_0_1_0.Type != t {
-                       break
-               }
-               _ = v_0_0_0_1_0.Args[1]
-               if n != v_0_0_0_1_0.Args[0] {
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
-               v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
-               if v_0_0_0_1_0_1.Op != OpConst64 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
                        break
                }
-               if v_0_0_0_1_0_1.Type != typ.UInt64 {
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpRsh64Ux64 {
                        break
                }
-               if v_0_0_0_1_0_1.AuxInt != 31 {
+               _ = mul_1.Args[1]
+               mul_1_0 := mul_1.Args[0]
+               if mul_1_0.Op != OpZeroExt32to64 {
                        break
                }
-               v_0_0_0_1_1 := v_0_0_0_1.Args[1]
-               if v_0_0_0_1_1.Op != OpConst64 {
+               if x != mul_1_0.Args[0] {
                        break
                }
-               if v_0_0_0_1_1.Type != typ.UInt64 {
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
                        break
                }
-               kbar := v_0_0_0_1_1.AuxInt
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               if mul_1_1.AuxInt != 1 {
                        break
                }
-               if v_0_0_1.Type != typ.UInt64 {
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
                        break
                }
-               k := v_0_0_1.AuxInt
+               s := v_0_0_0_1.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
-                       break
-               }
-               if v_0_1.Type != typ.UInt64 {
-                       break
-               }
-               if v_0_1.AuxInt != k {
+               if v_0_1.Op != OpConst32 {
                        break
                }
-               if !(k > 0 && k < 31 && kbar == 32-k) {
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpEq32)
-               v0 := b.NewValue0(v.Pos, OpAnd32, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst32, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
                v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
+               v0.AddArg(v3)
                v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst32, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
+               v.AddArg(v4)
                return true
        }
-       // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
-       // cond: k > 0 && k < 31 && kbar == 32 - k
-       // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 mul:(Mul64 (Rsh64Ux64 (ZeroExt32to64 x) (Const64 [1])) (Const64 [m])) (Const64 [s]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32,c).m+1)/2) && s == 32+umagic(32,c).s-2 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               n := v.Args[1]
+               x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpLsh32x64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh32x64 {
+               if v_0_0.Op != OpTrunc64to32 {
                        break
                }
-               _ = v_0_0.Args[1]
                v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpAdd32 {
+               if v_0_0_0.Op != OpRsh64Ux64 {
                        break
                }
-               t := v_0_0_0.Type
                _ = v_0_0_0.Args[1]
-               v_0_0_0_0 := v_0_0_0.Args[0]
-               if v_0_0_0_0.Op != OpRsh32Ux64 {
-                       break
-               }
-               if v_0_0_0_0.Type != t {
-                       break
-               }
-               _ = v_0_0_0_0.Args[1]
-               v_0_0_0_0_0 := v_0_0_0_0.Args[0]
-               if v_0_0_0_0_0.Op != OpRsh32x64 {
-                       break
-               }
-               if v_0_0_0_0_0.Type != t {
-                       break
-               }
-               _ = v_0_0_0_0_0.Args[1]
-               if n != v_0_0_0_0_0.Args[0] {
-                       break
-               }
-               v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
-               if v_0_0_0_0_0_1.Op != OpConst64 {
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
-               if v_0_0_0_0_0_1.Type != typ.UInt64 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpRsh64Ux64 {
                        break
                }
-               if v_0_0_0_0_0_1.AuxInt != 31 {
+               _ = mul_0.Args[1]
+               mul_0_0 := mul_0.Args[0]
+               if mul_0_0.Op != OpZeroExt32to64 {
                        break
                }
-               v_0_0_0_0_1 := v_0_0_0_0.Args[1]
-               if v_0_0_0_0_1.Op != OpConst64 {
+               if x != mul_0_0.Args[0] {
                        break
                }
-               if v_0_0_0_0_1.Type != typ.UInt64 {
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
                        break
                }
-               kbar := v_0_0_0_0_1.AuxInt
-               if n != v_0_0_0.Args[1] {
+               if mul_0_1.AuxInt != 1 {
                        break
                }
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
                        break
                }
-               if v_0_0_1.Type != typ.UInt64 {
+               m := mul_1.AuxInt
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
                        break
                }
-               k := v_0_0_1.AuxInt
+               s := v_0_0_0_1.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
-                       break
-               }
-               if v_0_1.Type != typ.UInt64 {
-                       break
-               }
-               if v_0_1.AuxInt != k {
+               if v_0_1.Op != OpConst32 {
                        break
                }
-               if !(k > 0 && k < 31 && kbar == 32-k) {
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<31+(umagic(32, c).m+1)/2) && s == 32+umagic(32, c).s-2 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpEq32)
-               v0 := b.NewValue0(v.Pos, OpAnd32, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst32, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
                v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
+               v0.AddArg(v3)
                v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst32, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
+               v.AddArg(v4)
                return true
        }
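The four rule bodies above differ only in which order the commuted Mul32 and Mul64 operands are matched; all of them recognize the expanded form of x/c used for an even 32-bit divisor on 64-bit registers, where x is first shifted right by one, multiplied by 1<<31+(umagic(32,c).m+1)/2, and shifted by 32+umagic(32,c).s-2. As a worked example of what the replacement then computes for c = 6 (odd part d = 3, k = 1): the inverse of 3 modulo 2^32 is 0xAAAAAAAB and the bound is (2^32-1)/6 = 0x2AAAAAAA. For x = 18, x*0xAAAAAAAB mod 2^32 = 6, and rotating right by one gives 3 <= 0x2AAAAAAA, so 18 is reported divisible. For x = 20 the product is 0x5555555C and the rotation gives 0x2AAAAAAE > 0x2AAAAAAA, so 20 is not. For x = 9, a multiple of 3 but not of 6, the product is 3; it is odd, so the rotation moves a one into the top bit and the comparison fails.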
-       // match: (Eq32 s:(Sub32 x y) (Const32 [0]))
-       // cond: s.Uses == 1
-       // result: (Eq32 x y)
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
-               s := v.Args[0]
-               if s.Op != OpSub32 {
-                       break
-               }
-               y := s.Args[1]
-               x := s.Args[0]
+               x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpConst32 {
+               if v_1.Op != OpMul32 {
                        break
                }
-               if v_1.AuxInt != 0 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               if !(s.Uses == 1) {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpTrunc64to32 {
                        break
                }
-               v.reset(OpEq32)
-               v.AddArg(x)
-               v.AddArg(y)
-               return true
-       }
-       return false
-}
-func rewriteValuegeneric_OpEq32_60(v *Value) bool {
-       // match: (Eq32 (Const32 [0]) s:(Sub32 x y))
-       // cond: s.Uses == 1
-       // result: (Eq32 x y)
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst32 {
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               if v_0.AuxInt != 0 {
+               _ = v_1_1_0.Args[1]
+               v_1_1_0_0 := v_1_1_0.Args[0]
+               if v_1_1_0_0.Op != OpAvg64u {
                        break
                }
-               s := v.Args[1]
-               if s.Op != OpSub32 {
+               _ = v_1_1_0_0.Args[1]
+               v_1_1_0_0_0 := v_1_1_0_0.Args[0]
+               if v_1_1_0_0_0.Op != OpLsh64x64 {
                        break
                }
-               y := s.Args[1]
-               x := s.Args[0]
-               if !(s.Uses == 1) {
+               _ = v_1_1_0_0_0.Args[1]
+               v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
+               if v_1_1_0_0_0_0.Op != OpZeroExt32to64 {
                        break
                }
-               v.reset(OpEq32)
-               v.AddArg(x)
-               v.AddArg(y)
-               return true
-       }
-       return false
-}
-func rewriteValuegeneric_OpEq32F_0(v *Value) bool {
-       // match: (Eq32F (Const32F [c]) (Const32F [d]))
-       // cond:
-       // result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst32F {
+               if x != v_1_1_0_0_0_0.Args[0] {
                        break
                }
-               c := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst32F {
+               v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
+               if v_1_1_0_0_0_1.Op != OpConst64 {
                        break
                }
-               d := v_1.AuxInt
-               v.reset(OpConstBool)
-               v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
-               return true
-       }
-       // match: (Eq32F (Const32F [d]) (Const32F [c]))
-       // cond:
-       // result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst32F {
+               if v_1_1_0_0_0_1.AuxInt != 32 {
                        break
                }
-               d := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst32F {
+               mul := v_1_1_0_0.Args[1]
+               if mul.Op != OpMul64 {
                        break
                }
-               c := v_1.AuxInt
-               v.reset(OpConstBool)
-               v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
-               return true
-       }
-       return false
-}
-func rewriteValuegeneric_OpEq64_0(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
-       // match: (Eq64 x x)
-       // cond:
-       // result: (ConstBool [1])
-       for {
-               x := v.Args[1]
-               if x != v.Args[0] {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
                        break
                }
-               v.reset(OpConstBool)
-               v.AuxInt = 1
-               return true
-       }
-       // match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
-       // cond:
-       // result: (Eq64 (Const64 <t> [c-d]) x)
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst64 {
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpZeroExt32to64 {
                        break
                }
-               t := v_0.Type
-               c := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpAdd64 {
+               if x != mul_1.Args[0] {
                        break
                }
-               x := v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst64 {
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               if v_1_0.Type != t {
+               s := v_1_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
-               d := v_1_0.AuxInt
-               v.reset(OpEq64)
-               v0 := b.NewValue0(v.Pos, OpConst64, t)
-               v0.AuxInt = c - d
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
+               v0.AddArg(v3)
                v.AddArg(v0)
-               v.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
+               v.AddArg(v4)
                return true
        }
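The Avg64u shape matched here is the other expansion of unsigned 32-bit division by a constant: when the magic multiplier M = 2^32 + m needs 33 bits, x/c is computed as (Avg64u(x<<32, m*x)) >> (32+s-1), where Avg64u is (a+b)/2 evaluated without 64-bit overflow. The sketch below illustrates that expansion under stated assumptions: div32u and avg64u are illustrative helpers, and the constants come from the simple round-up rule rather than the compiler's umagic, which may choose a smaller shift.

package main

import (
	"fmt"
	"math/bits"
)

// avg64u is (a+b)/2 computed without overflowing, the semantics of the
// SSA Avg64u op matched by the rules above.
func avg64u(a, b uint64) uint64 {
	return (a & b) + (a^b)>>1
}

// div32u mirrors the expanded form of x/c recognized above:
// Trunc64to32(Rsh64Ux64(Avg64u(x<<32, m*x), 32+s-1)).
// Constants use M = ceil(2^(32+s)/c) with s = ceil(log2 c), which is a
// valid but not necessarily minimal choice. Assumes 1 <= c < 1<<31 so
// 2^(32+s) fits in a uint64.
func div32u(x, c uint32) uint32 {
	s := uint(bits.Len32(c - 1))
	M := (uint64(1)<<(32+s) + uint64(c) - 1) / uint64(c) // 2^32 <= M < 2^33
	m := M - 1<<32                                       // low 32 bits of M
	hi := avg64u(uint64(x)<<32, m*uint64(x))             // floor(x*M / 2), overflow-free
	return uint32(hi >> (32 + s - 1))
}

func main() {
	for _, x := range []uint32{0, 1, 18, 19, 1000, 1<<31 + 3, 1<<32 - 1} {
		fmt.Println(x, x/19, div32u(x, 19))
	}
}

The rules in this group pick the constants m and s back out of that expression tree, check them against umagic(32,c), and replace the whole equality with the same rotate-and-compare form as before.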
-       // match: (Eq64 (Const64 <t> [c]) (Add64 x (Const64 <t> [d])))
-       // cond:
-       // result: (Eq64 (Const64 <t> [c-d]) x)
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst64 {
-                       break
-               }
-               t := v_0.Type
-               c := v_0.AuxInt
+               x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpAdd64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
-               x := v_1.Args[0]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
+                       break
+               }
+               c := v_1_0.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               if v_1_1.Op != OpTrunc64to32 {
                        break
                }
-               if v_1_1.Type != t {
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               d := v_1_1.AuxInt
-               v.reset(OpEq64)
-               v0 := b.NewValue0(v.Pos, OpConst64, t)
-               v0.AuxInt = c - d
-               v.AddArg(v0)
-               v.AddArg(x)
-               return true
-       }
-       // match: (Eq64 (Add64 (Const64 <t> [d]) x) (Const64 <t> [c]))
-       // cond:
-       // result: (Eq64 (Const64 <t> [c-d]) x)
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpAdd64 {
+               _ = v_1_1_0.Args[1]
+               v_1_1_0_0 := v_1_1_0.Args[0]
+               if v_1_1_0_0.Op != OpAvg64u {
                        break
                }
-               x := v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst64 {
+               _ = v_1_1_0_0.Args[1]
+               v_1_1_0_0_0 := v_1_1_0_0.Args[0]
+               if v_1_1_0_0_0.Op != OpLsh64x64 {
                        break
                }
-               t := v_0_0.Type
-               d := v_0_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst64 {
+               _ = v_1_1_0_0_0.Args[1]
+               v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
+               if v_1_1_0_0_0_0.Op != OpZeroExt32to64 {
                        break
                }
-               if v_1.Type != t {
+               if x != v_1_1_0_0_0_0.Args[0] {
                        break
                }
-               c := v_1.AuxInt
-               v.reset(OpEq64)
-               v0 := b.NewValue0(v.Pos, OpConst64, t)
-               v0.AuxInt = c - d
-               v.AddArg(v0)
-               v.AddArg(x)
-               return true
-       }
-       // match: (Eq64 (Add64 x (Const64 <t> [d])) (Const64 <t> [c]))
-       // cond:
-       // result: (Eq64 (Const64 <t> [c-d]) x)
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpAdd64 {
+               v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
+               if v_1_1_0_0_0_1.Op != OpConst64 {
                        break
                }
-               _ = v_0.Args[1]
-               x := v_0.Args[0]
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
+               if v_1_1_0_0_0_1.AuxInt != 32 {
                        break
                }
-               t := v_0_1.Type
-               d := v_0_1.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst64 {
+               mul := v_1_1_0_0.Args[1]
+               if mul.Op != OpMul64 {
                        break
                }
-               if v_1.Type != t {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpZeroExt32to64 {
                        break
                }
-               c := v_1.AuxInt
-               v.reset(OpEq64)
-               v0 := b.NewValue0(v.Pos, OpConst64, t)
-               v0.AuxInt = c - d
-               v.AddArg(v0)
-               v.AddArg(x)
-               return true
-       }
-       // match: (Eq64 (Const64 [c]) (Const64 [d]))
-       // cond:
-       // result: (ConstBool [b2i(c == d)])
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst64 {
+               if x != mul_0.Args[0] {
                        break
                }
-               c := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst64 {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
                        break
                }
-               d := v_1.AuxInt
-               v.reset(OpConstBool)
-               v.AuxInt = b2i(c == d)
-               return true
-       }
-       // match: (Eq64 (Const64 [d]) (Const64 [c]))
-       // cond:
-       // result: (ConstBool [b2i(c == d)])
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst64 {
+               m := mul_1.AuxInt
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               d := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst64 {
+               s := v_1_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
-               c := v_1.AuxInt
-               v.reset(OpConstBool)
-               v.AuxInt = b2i(c == d)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
+               v.AddArg(v4)
                return true
        }
-       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst64 {
+               if v_1_0.Op != OpTrunc64to32 {
                        break
                }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh64Ux64 {
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64Ux64 {
                        break
                }
-               _ = v_1_1.Args[1]
-               mul := v_1_1.Args[0]
-               if mul.Op != OpHmul64u {
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpAvg64u {
+                       break
+               }
+               _ = v_1_0_0_0.Args[1]
+               v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+               if v_1_0_0_0_0.Op != OpLsh64x64 {
+                       break
+               }
+               _ = v_1_0_0_0_0.Args[1]
+               v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
+               if v_1_0_0_0_0_0.Op != OpZeroExt32to64 {
+                       break
+               }
+               if x != v_1_0_0_0_0_0.Args[0] {
+                       break
+               }
+               v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+               if v_1_0_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_0_0_0_1.AuxInt != 32 {
+                       break
+               }
+               mul := v_1_0_0_0.Args[1]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
@@ -16271,238 +15910,189 @@ func rewriteValuegeneric_OpEq64_0(v *Value) bool {
                        break
                }
                m := mul_0.AuxInt
-               if x != mul.Args[1] {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpZeroExt32to64 {
                        break
                }
-               v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
+               if x != mul_1.Args[0] {
                        break
                }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               s := v_1_0_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
                v1.AddArg(v2)
                v1.AddArg(x)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
                v0.AddArg(v3)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
                v.AddArg(v4)
                return true
        }
-       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       return false
+}
+func rewriteValuegeneric_OpEq32_50(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq32 x (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst64 {
+               if v_1_0.Op != OpTrunc64to32 {
                        break
                }
-               c := v_1_0.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh64Ux64 {
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64Ux64 {
                        break
                }
-               _ = v_1_1.Args[1]
-               mul := v_1_1.Args[0]
-               if mul.Op != OpHmul64u {
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpAvg64u {
                        break
                }
-               _ = mul.Args[1]
-               if x != mul.Args[0] {
+               _ = v_1_0_0_0.Args[1]
+               v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+               if v_1_0_0_0_0.Op != OpLsh64x64 {
                        break
                }
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               _ = v_1_0_0_0_0.Args[1]
+               v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
+               if v_1_0_0_0_0_0.Op != OpZeroExt32to64 {
                        break
                }
-               m := mul_1.AuxInt
-               v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
-                       break
-               }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if x != v_1_0_0_0_0_0.Args[0] {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
-               v1.AddArg(v2)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
-               v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
-               return true
-       }
-       // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) (Const64 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
-       for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
+               v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+               if v_1_0_0_0_0_1.Op != OpConst64 {
                        break
                }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh64Ux64 {
+               if v_1_0_0_0_0_1.AuxInt != 32 {
                        break
                }
-               _ = v_1_0.Args[1]
-               mul := v_1_0.Args[0]
-               if mul.Op != OpHmul64u {
+               mul := v_1_0_0_0.Args[1]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst64 {
+               if mul_0.Op != OpZeroExt32to64 {
                        break
                }
-               m := mul_0.AuxInt
-               if x != mul.Args[1] {
+               if x != mul_0.Args[0] {
                        break
                }
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
                        break
                }
-               s := v_1_0_1.AuxInt
+               m := mul_1.AuxInt
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               if v_1_1.Op != OpConst32 {
                        break
                }
                c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
                v1.AddArg(v2)
                v1.AddArg(x)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
                v0.AddArg(v3)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
                v.AddArg(v4)
                return true
        }
-       return false
-}
-func rewriteValuegeneric_OpEq64_10(v *Value) bool {
-       b := v.Block
-       typ := &b.Func.Config.Types
-       // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s])) (Const64 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
-               x := v.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
-                       break
-               }
-               _ = v_1.Args[1]
-               v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh64Ux64 {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
                        break
                }
-               _ = v_1_0.Args[1]
-               mul := v_1_0.Args[0]
-               if mul.Op != OpHmul64u {
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
                        break
                }
-               _ = mul.Args[1]
-               if x != mul.Args[0] {
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpTrunc64to32 {
                        break
                }
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               m := mul_1.AuxInt
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               _ = v_0_1_0.Args[1]
+               v_0_1_0_0 := v_0_1_0.Args[0]
+               if v_0_1_0_0.Op != OpAvg64u {
                        break
                }
-               s := v_1_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               _ = v_0_1_0_0.Args[1]
+               v_0_1_0_0_0 := v_0_1_0_0.Args[0]
+               if v_0_1_0_0_0.Op != OpLsh64x64 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               _ = v_0_1_0_0_0.Args[1]
+               v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
+               if v_0_1_0_0_0_0.Op != OpZeroExt32to64 {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
-               v1.AddArg(v2)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
-               v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
-               return true
-       }
-       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
-       for {
-               x := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if x != v_0_1_0_0_0_0.Args[0] {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst64 {
+               v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
+               if v_0_1_0_0_0_1.Op != OpConst64 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh64Ux64 {
+               if v_0_1_0_0_0_1.AuxInt != 32 {
                        break
                }
-               _ = v_0_1.Args[1]
-               mul := v_0_1.Args[0]
-               if mul.Op != OpHmul64u {
+               mul := v_0_1_0_0.Args[1]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
@@ -16511,109 +16101,173 @@ func rewriteValuegeneric_OpEq64_10(v *Value) bool {
                        break
                }
                m := mul_0.AuxInt
-               if x != mul.Args[1] {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpZeroExt32to64 {
                        break
                }
-               v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               if x != mul_1.Args[0] {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               s := v_0_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
                v1.AddArg(v2)
                v1.AddArg(x)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
                v0.AddArg(v3)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
                v.AddArg(v4)
                return true
        }
-       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Const32 [c]) (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst64 {
+               if v_0_0.Op != OpConst32 {
                        break
                }
                c := v_0_0.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh64Ux64 {
+               if v_0_1.Op != OpTrunc64to32 {
                        break
                }
-               _ = v_0_1.Args[1]
-               mul := v_0_1.Args[0]
-               if mul.Op != OpHmul64u {
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64Ux64 {
                        break
                }
-               _ = mul.Args[1]
-               if x != mul.Args[0] {
+               _ = v_0_1_0.Args[1]
+               v_0_1_0_0 := v_0_1_0.Args[0]
+               if v_0_1_0_0.Op != OpAvg64u {
                        break
                }
-               mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               _ = v_0_1_0_0.Args[1]
+               v_0_1_0_0_0 := v_0_1_0_0.Args[0]
+               if v_0_1_0_0_0.Op != OpLsh64x64 {
                        break
                }
-               m := mul_1.AuxInt
-               v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               _ = v_0_1_0_0_0.Args[1]
+               v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
+               if v_0_1_0_0_0_0.Op != OpZeroExt32to64 {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if x != v_0_1_0_0_0_0.Args[0] {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
-               v1.AddArg(v2)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
-               v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
+               if v_0_1_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_0_0_0_1.AuxInt != 32 {
+                       break
+               }
+               mul := v_0_1_0_0.Args[1]
+               if mul.Op != OpMul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpZeroExt32to64 {
+                       break
+               }
+               if x != mul_0.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
+               v.AddArg(v4)
                return true
        }
-       // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) (Const64 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (Const64 [m]) (ZeroExt32to64 x))) (Const64 [s]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh64Ux64 {
+               if v_0_0.Op != OpTrunc64to32 {
                        break
                }
-               _ = v_0_0.Args[1]
-               mul := v_0_0.Args[0]
-               if mul.Op != OpHmul64u {
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpAvg64u {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+               if v_0_0_0_0_0.Op != OpLsh64x64 {
+                       break
+               }
+               _ = v_0_0_0_0_0.Args[1]
+               v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
+               if v_0_0_0_0_0_0.Op != OpZeroExt32to64 {
+                       break
+               }
+               if x != v_0_0_0_0_0_0.Args[0] {
+                       break
+               }
+               v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+               if v_0_0_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_0_0_0_1.AuxInt != 32 {
+                       break
+               }
+               mul := v_0_0_0_0.Args[1]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
@@ -16622,60 +16276,96 @@ func rewriteValuegeneric_OpEq64_10(v *Value) bool {
                        break
                }
                m := mul_0.AuxInt
-               if x != mul.Args[1] {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpZeroExt32to64 {
                        break
                }
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               if x != mul_1.Args[0] {
                        break
                }
-               s := v_0_0_1.AuxInt
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
+               if v_0_1.Op != OpConst32 {
                        break
                }
                c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
                v1.AddArg(v2)
                v1.AddArg(x)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
                v0.AddArg(v3)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
                v.AddArg(v4)
                return true
        }
-       // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s])) (Const64 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Trunc64to32 (Rsh64Ux64 (Avg64u (Lsh64x64 (ZeroExt32to64 x) (Const64 [32])) mul:(Mul64 (ZeroExt32to64 x) (Const64 [m]))) (Const64 [s]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32,c).m) && s == 32+umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh64Ux64 {
+               if v_0_0.Op != OpTrunc64to32 {
                        break
                }
-               _ = v_0_0.Args[1]
-               mul := v_0_0.Args[0]
-               if mul.Op != OpHmul64u {
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpAvg64u {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+               if v_0_0_0_0_0.Op != OpLsh64x64 {
+                       break
+               }
+               _ = v_0_0_0_0_0.Args[1]
+               v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
+               if v_0_0_0_0_0_0.Op != OpZeroExt32to64 {
+                       break
+               }
+               if x != v_0_0_0_0_0_0.Args[0] {
+                       break
+               }
+               v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+               if v_0_0_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_0_0_0_1.AuxInt != 32 {
+                       break
+               }
+               mul := v_0_0_0_0.Args[1]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
-               if x != mul.Args[0] {
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpZeroExt32to64 {
+                       break
+               }
+               if x != mul_0.Args[0] {
                        break
                }
                mul_1 := mul.Args[1]
@@ -16683,59 +16373,64 @@ func rewriteValuegeneric_OpEq64_10(v *Value) bool {
                        break
                }
                m := mul_1.AuxInt
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
                        break
                }
-               s := v_0_0_1.AuxInt
+               s := v_0_0_0_1.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
+               if v_0_1.Op != OpConst32 {
                        break
                }
                c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(32, c).m) && s == 32+umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = int64(int32(udivisible(32, c).m))
                v1.AddArg(v2)
                v1.AddArg(x)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(32 - udivisible(32, c).k)
                v0.AddArg(v3)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(udivisible(32, c).max))
                v.AddArg(v4)
                return true
        }
-       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst64 {
+               if v_1_0.Op != OpConst32 {
                        break
                }
                c := v_1_0.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh64Ux64 {
+               if v_1_1.Op != OpSub32 {
                        break
                }
                _ = v_1_1.Args[1]
-               mul := v_1_1.Args[0]
-               if mul.Op != OpHmul64u {
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
@@ -16745,134 +16440,176 @@ func rewriteValuegeneric_OpEq64_10(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh64Ux64 {
+               if mul_1.Op != OpSignExt32to64 {
                        break
                }
-               _ = mul_1.Args[1]
                if x != mul_1.Args[0] {
                        break
                }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               if mul_1_1.AuxInt != 1 {
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh64x64 {
                        break
                }
-               v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
+               _ = v_1_1_1.Args[1]
+               v_1_1_1_0 := v_1_1_1.Args[0]
+               if v_1_1_1_0.Op != OpSignExt32to64 {
                        break
                }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if x != v_1_1_1_0.Args[0] {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (SignExt32to64 x) (Const64 [m])) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst64 {
+               if v_1_0.Op != OpConst32 {
                        break
                }
                c := v_1_0.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh64Ux64 {
+               if v_1_1.Op != OpSub32 {
                        break
                }
                _ = v_1_1.Args[1]
-               mul := v_1_1.Args[0]
-               if mul.Op != OpHmul64u {
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh64Ux64 {
+               if mul_0.Op != OpSignExt32to64 {
                        break
                }
-               _ = mul_0.Args[1]
                if x != mul_0.Args[0] {
                        break
                }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
-                       break
-               }
-               if mul_0_1.AuxInt != 1 {
-                       break
-               }
                mul_1 := mul.Args[1]
                if mul_1.Op != OpConst64 {
                        break
                }
                m := mul_1.AuxInt
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
                v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
+               if v_1_1_1.Op != OpRsh64x64 {
                        break
                }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               _ = v_1_1_1.Args[1]
+               v_1_1_1_0 := v_1_1_1.Args[0]
+               if v_1_1_1_0.Op != OpSignExt32to64 {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               if x != v_1_1_1_0.Args[0] {
+                       break
+               }
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) (Const64 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 x (Mul32 (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh64Ux64 {
+               if v_1_0.Op != OpSub32 {
                        break
                }
                _ = v_1_0.Args[1]
-               mul := v_1_0.Args[0]
-               if mul.Op != OpHmul64u {
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
@@ -16882,143 +16619,185 @@ func rewriteValuegeneric_OpEq64_10(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh64Ux64 {
+               if mul_1.Op != OpSignExt32to64 {
                        break
                }
-               _ = mul_1.Args[1]
                if x != mul_1.Args[0] {
                        break
                }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
                        break
                }
-               if mul_1_1.AuxInt != 1 {
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh64x64 {
                        break
                }
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               _ = v_1_0_1.Args[1]
+               v_1_0_1_0 := v_1_0_1.Args[0]
+               if v_1_0_1_0.Op != OpSignExt32to64 {
+                       break
+               }
+               if x != v_1_0_1_0.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 63 {
                        break
                }
-               s := v_1_0_1.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               if v_1_1.Op != OpConst32 {
                        break
                }
                c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s])) (Const64 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 x (Mul32 (Sub32 (Rsh64x64 mul:(Mul64 (SignExt32to64 x) (Const64 [m])) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh64Ux64 {
+               if v_1_0.Op != OpSub32 {
                        break
                }
                _ = v_1_0.Args[1]
-               mul := v_1_0.Args[0]
-               if mul.Op != OpHmul64u {
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh64Ux64 {
+               if mul_0.Op != OpSignExt32to64 {
                        break
                }
-               _ = mul_0.Args[1]
                if x != mul_0.Args[0] {
                        break
                }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
-                       break
-               }
-               if mul_0_1.AuxInt != 1 {
-                       break
-               }
                mul_1 := mul.Args[1]
                if mul_1.Op != OpConst64 {
                        break
                }
                m := mul_1.AuxInt
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
                v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               if v_1_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               v_1_0_1_0 := v_1_0_1.Args[0]
+               if v_1_0_1_0.Op != OpSignExt32to64 {
+                       break
+               }
+               if x != v_1_0_1_0.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 63 {
                        break
                }
-               s := v_1_0_1.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               if v_1_1.Op != OpConst32 {
                        break
                }
                c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst64 {
+               if v_0_0.Op != OpConst32 {
                        break
                }
                c := v_0_0.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh64Ux64 {
+               if v_0_1.Op != OpSub32 {
                        break
                }
                _ = v_0_1.Args[1]
-               mul := v_0_1.Args[0]
-               if mul.Op != OpHmul64u {
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
@@ -17028,88 +16807,101 @@ func rewriteValuegeneric_OpEq64_10(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh64Ux64 {
+               if mul_1.Op != OpSignExt32to64 {
                        break
                }
-               _ = mul_1.Args[1]
                if x != mul_1.Args[0] {
                        break
                }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
                        break
                }
-               if mul_1_1.AuxInt != 1 {
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh64x64 {
                        break
                }
-               v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               _ = v_0_1_1.Args[1]
+               v_0_1_1_0 := v_0_1_1.Args[0]
+               if v_0_1_1_0.Op != OpSignExt32to64 {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if x != v_0_1_1_0.Args[0] {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
        return false
 }
-func rewriteValuegeneric_OpEq64_20(v *Value) bool {
+func rewriteValuegeneric_OpEq32_60(v *Value) bool {
        b := v.Block
        typ := &b.Func.Config.Types
-       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh64x64 mul:(Mul64 (SignExt32to64 x) (Const64 [m])) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst64 {
+               if v_0_0.Op != OpConst32 {
                        break
                }
                c := v_0_0.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh64Ux64 {
+               if v_0_1.Op != OpSub32 {
                        break
                }
                _ = v_0_1.Args[1]
-               mul := v_0_1.Args[0]
-               if mul.Op != OpHmul64u {
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64x64 {
                        break
                }
-               _ = mul.Args[1]
-               mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh64Ux64 {
-                       break
-               }
-               _ = mul_0.Args[1]
-               if x != mul_0.Args[0] {
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpSignExt32to64 {
                        break
                }
-               if mul_0_1.AuxInt != 1 {
+               if x != mul_0.Args[0] {
                        break
                }
                mul_1 := mul.Args[1]
@@ -17117,48 +16909,77 @@ func rewriteValuegeneric_OpEq64_20(v *Value) bool {
                        break
                }
                m := mul_1.AuxInt
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
                v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               if v_0_1_1.Op != OpRsh64x64 {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               _ = v_0_1_1.Args[1]
+               v_0_1_1_0 := v_0_1_1.Args[0]
+               if v_0_1_1_0.Op != OpSignExt32to64 {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               if x != v_0_1_1_0.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) (Const64 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Sub32 (Rsh64x64 mul:(Mul64 (Const64 [m]) (SignExt32to64 x)) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh64Ux64 {
+               if v_0_0.Op != OpSub32 {
                        break
                }
                _ = v_0_0.Args[1]
-               mul := v_0_0.Args[0]
-               if mul.Op != OpHmul64u {
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
@@ -17168,218 +16989,269 @@ func rewriteValuegeneric_OpEq64_20(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpRsh64Ux64 {
+               if mul_1.Op != OpSignExt32to64 {
                        break
                }
-               _ = mul_1.Args[1]
                if x != mul_1.Args[0] {
                        break
                }
-               mul_1_1 := mul_1.Args[1]
-               if mul_1_1.Op != OpConst64 {
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
                        break
                }
-               if mul_1_1.AuxInt != 1 {
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh64x64 {
                        break
                }
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               _ = v_0_0_1.Args[1]
+               v_0_0_1_0 := v_0_0_1.Args[0]
+               if v_0_0_1_0.Op != OpSignExt32to64 {
+                       break
+               }
+               if x != v_0_0_1_0.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 63 {
                        break
                }
-               s := v_0_0_1.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
+               if v_0_1.Op != OpConst32 {
                        break
                }
                c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
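
Each of these Eq32 cases recognizes a permutation of the same expanded form of x%c == 0 and rewrites it to one result shape: multiply by the modular inverse of the odd part of c, add an offset that recenters the quotient range at zero, rotate right by the number of trailing zero bits of c (written as RotateLeft32 by 32-k), and compare unsigned against the count of representable multiples. The sketch below is a minimal, hand-worked illustration of that check for the odd divisor 7; the constants are derived by hand for this example only (the compiler computes the general values via sdivisible in magic.go), and the function and demo names are illustrative, not part of the change.

	package main

	import (
		"fmt"
		"math/bits"
	)

	// divisibleBy7 mirrors the rewritten shape:
	// Leq32U(RotateLeft32(Add32(Mul32(m, x), a), 32-k), max).
	// Constants are hand-derived for divisor 7 for illustration.
	func divisibleBy7(x int32) bool {
		const (
			m   = 0xB6DB6DB7 // multiplicative inverse of 7 mod 2^32: 7*m == 1 (mod 2^32)
			a   = 306783378  // floor((2^31-1)/7): shifts the quotients of multiples into [0, 2*a]
			k   = 0          // trailing zero bits of the divisor; 7 is odd, so the rotate is a no-op
			max = 613566756  // 2*a: largest value x*m+a can take when x is a multiple of 7
		)
		return bits.RotateLeft32(uint32(x)*m+a, 32-k) <= max
	}

	func main() {
		for _, x := range []int32{-21, -1, 0, 14, 15, 700} {
			fmt.Println(x, divisibleBy7(x), x%7 == 0) // the two results agree for every x
		}
	}

For an even divisor the same shape applies, except k is nonzero and the rotate moves the divisor's power-of-two factor into the comparison; that is why the emitted code keeps the RotateLeft32 even though it degenerates to the identity for odd c.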
-       // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s])) (Const64 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Sub32 (Rsh64x64 mul:(Mul64 (SignExt32to64 x) (Const64 [m])) (Const64 [s])) (Rsh64x64 (SignExt32to64 x) (Const64 [63]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32,c).m) && s == 32+smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh64Ux64 {
+               if v_0_0.Op != OpSub32 {
                        break
                }
                _ = v_0_0.Args[1]
-               mul := v_0_0.Args[0]
-               if mul.Op != OpHmul64u {
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpMul64 {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpRsh64Ux64 {
+               if mul_0.Op != OpSignExt32to64 {
                        break
                }
-               _ = mul_0.Args[1]
                if x != mul_0.Args[0] {
                        break
                }
-               mul_0_1 := mul_0.Args[1]
-               if mul_0_1.Op != OpConst64 {
-                       break
-               }
-               if mul_0_1.AuxInt != 1 {
-                       break
-               }
                mul_1 := mul.Args[1]
                if mul_1.Op != OpConst64 {
                        break
                }
                m := mul_1.AuxInt
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
                v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               if v_0_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               v_0_0_1_0 := v_0_0_1.Args[0]
+               if v_0_0_1_0.Op != OpSignExt32to64 {
+                       break
+               }
+               if x != v_0_0_1_0.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 63 {
                        break
                }
-               s := v_0_0_1.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
+               if v_0_1.Op != OpConst32 {
                        break
                }
                c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(32, c).m) && s == 32+smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst64 {
+               if v_1_0.Op != OpConst32 {
                        break
                }
                c := v_1_0.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh64Ux64 {
+               if v_1_1.Op != OpSub32 {
                        break
                }
                _ = v_1_1.Args[1]
                v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpAvg64u {
+               if v_1_1_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_1_1_0.Args[1]
-               if x != v_1_1_0.Args[0] {
-                       break
-               }
-               mul := v_1_1_0.Args[1]
-               if mul.Op != OpHmul64u {
+               mul := v_1_1_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst64 {
+               if mul_0.Op != OpConst32 {
                        break
                }
                m := mul_0.AuxInt
                if x != mul.Args[1] {
                        break
                }
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
                v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
+               if v_1_1_1.Op != OpRsh32x64 {
                        break
                }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s]))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 x (Const32 [m])) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst64 {
+               if v_1_0.Op != OpConst32 {
                        break
                }
                c := v_1_0.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpRsh64Ux64 {
+               if v_1_1.Op != OpSub32 {
                        break
                }
                _ = v_1_1.Args[1]
                v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpAvg64u {
+               if v_1_1_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_1_1_0.Args[1]
-               if x != v_1_1_0.Args[0] {
-                       break
-               }
-               mul := v_1_1_0.Args[1]
-               if mul.Op != OpHmul64u {
+               mul := v_1_1_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
                _ = mul.Args[1]
@@ -17387,128 +17259,162 @@ func rewriteValuegeneric_OpEq64_20(v *Value) bool {
                        break
                }
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               if mul_1.Op != OpConst32 {
                        break
                }
                m := mul_1.AuxInt
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
                v_1_1_1 := v_1_1.Args[1]
-               if v_1_1_1.Op != OpConst64 {
+               if v_1_1_1.Op != OpRsh32x64 {
                        break
                }
-               s := v_1_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
-               v1.AddArg(v2)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
-               v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 x (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) (Const64 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh64Ux64 {
+               if v_1_0.Op != OpSub32 {
                        break
                }
                _ = v_1_0.Args[1]
                v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpAvg64u {
+               if v_1_0_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_1_0_0.Args[1]
-               if x != v_1_0_0.Args[0] {
-                       break
-               }
-               mul := v_1_0_0.Args[1]
-               if mul.Op != OpHmul64u {
+               mul := v_1_0_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst64 {
+               if mul_0.Op != OpConst32 {
                        break
                }
                m := mul_0.AuxInt
                if x != mul.Args[1] {
                        break
                }
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
                v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               if v_1_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 31 {
                        break
                }
-               s := v_1_0_1.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               if v_1_1.Op != OpConst32 {
                        break
                }
                c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 x (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s])) (Const64 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 mul:(Hmul32 x (Const32 [m])) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpMul64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh64Ux64 {
+               if v_1_0.Op != OpSub32 {
                        break
                }
                _ = v_1_0.Args[1]
                v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpAvg64u {
+               if v_1_0_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_1_0_0.Args[1]
-               if x != v_1_0_0.Args[0] {
-                       break
-               }
-               mul := v_1_0_0.Args[1]
-               if mul.Op != OpHmul64u {
+               mul := v_1_0_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
                _ = mul.Args[1]
@@ -17516,136 +17422,170 @@ func rewriteValuegeneric_OpEq64_20(v *Value) bool {
                        break
                }
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               if mul_1.Op != OpConst32 {
                        break
                }
                m := mul_1.AuxInt
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
                v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               if v_1_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 31 {
                        break
                }
-               s := v_1_0_1.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               if v_1_1.Op != OpConst32 {
                        break
                }
                c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst64 {
+               if v_0_0.Op != OpConst32 {
                        break
                }
                c := v_0_0.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh64Ux64 {
+               if v_0_1.Op != OpSub32 {
                        break
                }
                _ = v_0_1.Args[1]
                v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpAvg64u {
+               if v_0_1_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_0_1_0.Args[1]
-               if x != v_0_1_0.Args[0] {
-                       break
-               }
-               mul := v_0_1_0.Args[1]
-               if mul.Op != OpHmul64u {
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst64 {
+               if mul_0.Op != OpConst32 {
                        break
                }
                m := mul_0.AuxInt
                if x != mul.Args[1] {
                        break
                }
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
                v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               if v_0_1_1.Op != OpRsh32x64 {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s]))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 mul:(Hmul32 x (Const32 [m])) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst64 {
+               if v_0_0.Op != OpConst32 {
                        break
                }
                c := v_0_0.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpRsh64Ux64 {
+               if v_0_1.Op != OpSub32 {
                        break
                }
                _ = v_0_1.Args[1]
                v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpAvg64u {
+               if v_0_1_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_0_1_0.Args[1]
-               if x != v_0_1_0.Args[0] {
-                       break
-               }
-               mul := v_0_1_0.Args[1]
-               if mul.Op != OpHmul64u {
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
                _ = mul.Args[1]
@@ -17653,131 +17593,165 @@ func rewriteValuegeneric_OpEq64_20(v *Value) bool {
                        break
                }
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               if mul_1.Op != OpConst32 {
                        break
                }
                m := mul_1.AuxInt
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
                v_0_1_1 := v_0_1.Args[1]
-               if v_0_1_1.Op != OpConst64 {
+               if v_0_1_1.Op != OpRsh32x64 {
                        break
                }
-               s := v_0_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) (Const64 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 mul:(Hmul32 (Const32 [m]) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh64Ux64 {
+               if v_0_0.Op != OpSub32 {
                        break
                }
                _ = v_0_0.Args[1]
                v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpAvg64u {
+               if v_0_0_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_0_0_0.Args[1]
-               if x != v_0_0_0.Args[0] {
-                       break
-               }
-               mul := v_0_0_0.Args[1]
-               if mul.Op != OpHmul64u {
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpConst64 {
+               if mul_0.Op != OpConst32 {
                        break
                }
                m := mul_0.AuxInt
                if x != mul.Args[1] {
                        break
                }
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
                v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               if v_0_0_1.Op != OpRsh32x64 {
                        break
                }
-               s := v_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
                        break
                }
-               c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               if v_0_0_1_1.AuxInt != 31 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
        return false
 }
-func rewriteValuegeneric_OpEq64_30(v *Value) bool {
+func rewriteValuegeneric_OpEq32_70(v *Value) bool {
        b := v.Block
        typ := &b.Func.Config.Types
-       // match: (Eq64 (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s])) (Const64 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
-       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 mul:(Hmul32 x (Const32 [m])) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m/2)) && s == smagic(32,c).s-1 && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMul64 {
+               if v_0.Op != OpMul32 {
                        break
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh64Ux64 {
+               if v_0_0.Op != OpSub32 {
                        break
                }
                _ = v_0_0.Args[1]
                v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpAvg64u {
+               if v_0_0_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_0_0_0.Args[1]
-               if x != v_0_0_0.Args[0] {
-                       break
-               }
-               mul := v_0_0_0.Args[1]
-               if mul.Op != OpHmul64u {
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
                _ = mul.Args[1]
@@ -17785,776 +17759,7264 @@ func rewriteValuegeneric_OpEq64_30(v *Value) bool {
                        break
                }
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpConst64 {
+               if mul_1.Op != OpConst32 {
                        break
                }
                m := mul_1.AuxInt
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
                v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               if v_0_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 31 {
                        break
                }
-               s := v_0_0_1.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
+               if v_0_1.Op != OpConst32 {
                        break
                }
                c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m/2)) && s == smagic(32, c).s-1 && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpLeq64U)
-               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
-               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
-               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v2.AuxInt = int64(udivisible(64, c).m)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v3.AuxInt = int64(64 - udivisible(64, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-               v4.AuxInt = int64(udivisible(64, c).max)
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
-       // cond: k > 0 && k < 63 && kbar == 64 - k
-       // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
-               n := v.Args[0]
+               x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpLsh64x64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh64x64 {
-                       break
-               }
-               _ = v_1_0.Args[1]
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpAdd64 {
-                       break
-               }
-               t := v_1_0_0.Type
-               _ = v_1_0_0.Args[1]
-               if n != v_1_0_0.Args[0] {
-                       break
-               }
-               v_1_0_0_1 := v_1_0_0.Args[1]
-               if v_1_0_0_1.Op != OpRsh64Ux64 {
-                       break
-               }
-               if v_1_0_0_1.Type != t {
-                       break
-               }
-               _ = v_1_0_0_1.Args[1]
-               v_1_0_0_1_0 := v_1_0_0_1.Args[0]
-               if v_1_0_0_1_0.Op != OpRsh64x64 {
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               if v_1_0_0_1_0.Type != t {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpSub32 {
                        break
                }
-               _ = v_1_0_0_1_0.Args[1]
-               if n != v_1_0_0_1_0.Args[0] {
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh32x64 {
                        break
                }
-               v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
-               if v_1_0_0_1_0_1.Op != OpConst64 {
+               _ = v_1_1_0.Args[1]
+               v_1_1_0_0 := v_1_1_0.Args[0]
+               if v_1_1_0_0.Op != OpAdd32 {
                        break
                }
-               if v_1_0_0_1_0_1.Type != typ.UInt64 {
+               _ = v_1_1_0_0.Args[1]
+               mul := v_1_1_0_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
-               if v_1_0_0_1_0_1.AuxInt != 63 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
                        break
                }
-               v_1_0_0_1_1 := v_1_0_0_1.Args[1]
-               if v_1_0_0_1_1.Op != OpConst64 {
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
                        break
                }
-               if v_1_0_0_1_1.Type != typ.UInt64 {
+               if x != v_1_1_0_0.Args[1] {
                        break
                }
-               kbar := v_1_0_0_1_1.AuxInt
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               if v_1_0_1.Type != typ.UInt64 {
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh32x64 {
                        break
                }
-               k := v_1_0_1.AuxInt
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
                        break
                }
-               if v_1_1.Type != typ.UInt64 {
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
                        break
                }
-               if v_1_1.AuxInt != k {
+               if v_1_1_1_1.AuxInt != 31 {
                        break
                }
-               if !(k > 0 && k < 63 && kbar == 64-k) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpEq64)
-               v0 := b.NewValue0(v.Pos, OpAnd64, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst64, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst64, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
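The Hmul32/Add32/Rsh32x64/Sub32 tree matched by these Eq32 rules is the already-expanded magic-number form of the signed division x/c, so the whole pattern is the x == c*(x/c) shape that x%c == 0 takes once the earlier rewrites have run. A minimal standalone sketch of what that matched input computes, assuming m and s are the smagic(32, c) constants and using hypothetical helper names (hmul32, divisibleViaDiv) purely for illustration, not the compiler's own code:

        // hmul32 returns the high 32 bits of the full 64-bit signed product,
        // the value the SSA op Hmul32 produces.
        func hmul32(a, b int32) int32 { return int32((int64(a) * int64(b)) >> 32) }

        // divisibleViaDiv evaluates the matched pattern directly:
        // x == c * (((hi(m*x) + x) >> s) - (x >> 31)).
        // The "+ x" term corresponds to the Add32 in the matched tree, and the
        // final subtraction of x>>31 rounds the quotient toward zero for negative x.
        func divisibleViaDiv(x, c, m int32, s uint) bool {
                q := ((hmul32(m, x) + x) >> s) - (x >> 31)
                return x == c*q
        }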
-       // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
-       // cond: k > 0 && k < 63 && kbar == 64 - k
-       // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 x (Const32 [m])) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
-               n := v.Args[0]
+               x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpLsh64x64 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpRsh64x64 {
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               _ = v_1_0.Args[1]
-               v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpAdd64 {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpSub32 {
                        break
                }
-               t := v_1_0_0.Type
-               _ = v_1_0_0.Args[1]
-               v_1_0_0_0 := v_1_0_0.Args[0]
-               if v_1_0_0_0.Op != OpRsh64Ux64 {
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh32x64 {
                        break
                }
-               if v_1_0_0_0.Type != t {
+               _ = v_1_1_0.Args[1]
+               v_1_1_0_0 := v_1_1_0.Args[0]
+               if v_1_1_0_0.Op != OpAdd32 {
                        break
                }
-               _ = v_1_0_0_0.Args[1]
-               v_1_0_0_0_0 := v_1_0_0_0.Args[0]
-               if v_1_0_0_0_0.Op != OpRsh64x64 {
+               _ = v_1_1_0_0.Args[1]
+               mul := v_1_1_0_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
-               if v_1_0_0_0_0.Type != t {
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
                        break
                }
-               _ = v_1_0_0_0_0.Args[1]
-               if n != v_1_0_0_0_0.Args[0] {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
                        break
                }
-               v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
-               if v_1_0_0_0_0_1.Op != OpConst64 {
+               m := mul_1.AuxInt
+               if x != v_1_1_0_0.Args[1] {
                        break
                }
-               if v_1_0_0_0_0_1.Type != typ.UInt64 {
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               if v_1_0_0_0_0_1.AuxInt != 63 {
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh32x64 {
                        break
                }
-               v_1_0_0_0_1 := v_1_0_0_0.Args[1]
-               if v_1_0_0_0_1.Op != OpConst64 {
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
                        break
                }
-               if v_1_0_0_0_1.Type != typ.UInt64 {
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
                        break
                }
-               kbar := v_1_0_0_0_1.AuxInt
-               if n != v_1_0_0.Args[1] {
+               if v_1_1_1_1.AuxInt != 31 {
                        break
                }
-               v_1_0_1 := v_1_0.Args[1]
-               if v_1_0_1.Op != OpConst64 {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               if v_1_0_1.Type != typ.UInt64 {
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 (Const32 [m]) x)) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               k := v_1_0_1.AuxInt
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
+                       break
+               }
+               c := v_1_0.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst64 {
+               if v_1_1.Op != OpSub32 {
                        break
                }
-               if v_1_1.Type != typ.UInt64 {
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh32x64 {
                        break
                }
-               if v_1_1.AuxInt != k {
+               _ = v_1_1_0.Args[1]
+               v_1_1_0_0 := v_1_1_0.Args[0]
+               if v_1_1_0_0.Op != OpAdd32 {
                        break
                }
-               if !(k > 0 && k < 63 && kbar == 64-k) {
+               _ = v_1_1_0_0.Args[1]
+               if x != v_1_1_0_0.Args[0] {
                        break
                }
-               v.reset(OpEq64)
-               v0 := b.NewValue0(v.Pos, OpAnd64, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst64, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
-               v0.AddArg(v1)
-               v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst64, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
-               return true
-       }
-       // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
-       // cond: k > 0 && k < 63 && kbar == 64 - k
-       // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
-       for {
-               n := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpLsh64x64 {
-                       break
-               }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh64x64 {
+               mul := v_1_1_0_0.Args[1]
+               if mul.Op != OpHmul32 {
                        break
                }
-               _ = v_0_0.Args[1]
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpAdd64 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
                        break
                }
-               t := v_0_0_0.Type
-               _ = v_0_0_0.Args[1]
-               if n != v_0_0_0.Args[0] {
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
                        break
                }
-               v_0_0_0_1 := v_0_0_0.Args[1]
-               if v_0_0_0_1.Op != OpRsh64Ux64 {
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               if v_0_0_0_1.Type != t {
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh32x64 {
                        break
                }
-               _ = v_0_0_0_1.Args[1]
-               v_0_0_0_1_0 := v_0_0_0_1.Args[0]
-               if v_0_0_0_1_0.Op != OpRsh64x64 {
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
                        break
                }
-               if v_0_0_0_1_0.Type != t {
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
                        break
                }
-               _ = v_0_0_0_1_0.Args[1]
-               if n != v_0_0_0_1_0.Args[0] {
+               if v_1_1_1_1.AuxInt != 31 {
                        break
                }
-               v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
-               if v_0_0_0_1_0_1.Op != OpConst64 {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               if v_0_0_0_1_0_1.Type != typ.UInt64 {
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq32 x (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 x (Const32 [m]))) (Const64 [s])) (Rsh32x64 x (Const64 [31])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               if v_0_0_0_1_0_1.AuxInt != 63 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst32 {
                        break
                }
-               v_0_0_0_1_1 := v_0_0_0_1.Args[1]
-               if v_0_0_0_1_1.Op != OpConst64 {
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpSub32 {
                        break
                }
-               if v_0_0_0_1_1.Type != typ.UInt64 {
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh32x64 {
                        break
                }
-               kbar := v_0_0_0_1_1.AuxInt
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               _ = v_1_1_0.Args[1]
+               v_1_1_0_0 := v_1_1_0.Args[0]
+               if v_1_1_0_0.Op != OpAdd32 {
                        break
                }
-               if v_0_0_1.Type != typ.UInt64 {
+               _ = v_1_1_0_0.Args[1]
+               if x != v_1_1_0_0.Args[0] {
                        break
                }
-               k := v_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
+               mul := v_1_1_0_0.Args[1]
+               if mul.Op != OpHmul32 {
                        break
                }
-               if v_0_1.Type != typ.UInt64 {
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
                        break
                }
-               if v_0_1.AuxInt != k {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
                        break
                }
-               if !(k > 0 && k < 63 && kbar == 64-k) {
+               m := mul_1.AuxInt
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
                        break
                }
-               v.reset(OpEq64)
-               v0 := b.NewValue0(v.Pos, OpAnd64, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst64, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
-               v0.AddArg(v1)
-               v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst64, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
-               return true
-       }
-       // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
-       // cond: k > 0 && k < 63 && kbar == 64 - k
-       // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
-       for {
-               n := v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpLsh64x64 {
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh32x64 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpRsh64x64 {
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
                        break
                }
-               _ = v_0_0.Args[1]
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpAdd64 {
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
                        break
                }
-               t := v_0_0_0.Type
-               _ = v_0_0_0.Args[1]
-               v_0_0_0_0 := v_0_0_0.Args[0]
-               if v_0_0_0_0.Op != OpRsh64Ux64 {
+               if v_1_1_1_1.AuxInt != 31 {
                        break
                }
-               if v_0_0_0_0.Type != t {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               _ = v_0_0_0_0.Args[1]
-               v_0_0_0_0_0 := v_0_0_0_0.Args[0]
-               if v_0_0_0_0_0.Op != OpRsh64x64 {
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               if v_0_0_0_0_0.Type != t {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpSub32 {
                        break
                }
-               _ = v_0_0_0_0_0.Args[1]
-               if n != v_0_0_0_0_0.Args[0] {
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh32x64 {
                        break
                }
-               v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
-               if v_0_0_0_0_0_1.Op != OpConst64 {
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpAdd32 {
                        break
                }
-               if v_0_0_0_0_0_1.Type != typ.UInt64 {
+               _ = v_1_0_0_0.Args[1]
+               mul := v_1_0_0_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
-               if v_0_0_0_0_0_1.AuxInt != 63 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
                        break
                }
-               v_0_0_0_0_1 := v_0_0_0_0.Args[1]
-               if v_0_0_0_0_1.Op != OpConst64 {
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
                        break
                }
-               if v_0_0_0_0_1.Type != typ.UInt64 {
+               if x != v_1_0_0_0.Args[1] {
                        break
                }
-               kbar := v_0_0_0_0_1.AuxInt
-               if n != v_0_0_0.Args[1] {
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
                        break
                }
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpConst64 {
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh32x64 {
                        break
                }
-               if v_0_0_1.Type != typ.UInt64 {
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
                        break
                }
-               k := v_0_0_1.AuxInt
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst64 {
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
                        break
                }
-               if v_0_1.Type != typ.UInt64 {
+               if v_1_0_1_1.AuxInt != 31 {
                        break
                }
-               if v_0_1.AuxInt != k {
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
                        break
                }
-               if !(k > 0 && k < 63 && kbar == 64-k) {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               v.reset(OpEq64)
-               v0 := b.NewValue0(v.Pos, OpAnd64, t)
-               v0.AddArg(n)
-               v1 := b.NewValue0(v.Pos, OpConst64, t)
-               v1.AuxInt = int64(1<<uint(k) - 1)
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v2 := b.NewValue0(v.Pos, OpConst64, t)
-               v2.AuxInt = 0
-               v.AddArg(v2)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq64 s:(Sub64 x y) (Const64 [0]))
-       // cond: s.Uses == 1
-       // result: (Eq64 x y)
+       // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 x (Const32 [m])) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
-               s := v.Args[0]
-               if s.Op != OpSub64 {
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul32 {
                        break
                }
-               y := s.Args[1]
-               x := s.Args[0]
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst64 {
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpSub32 {
                        break
                }
-               if v_1.AuxInt != 0 {
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh32x64 {
                        break
                }
-               if !(s.Uses == 1) {
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpAdd32 {
                        break
                }
-               v.reset(OpEq64)
-               v.AddArg(x)
-               v.AddArg(y)
-               return true
-       }
-       // match: (Eq64 (Const64 [0]) s:(Sub64 x y))
-       // cond: s.Uses == 1
-       // result: (Eq64 x y)
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst64 {
+               _ = v_1_0_0_0.Args[1]
+               mul := v_1_0_0_0.Args[0]
+               if mul.Op != OpHmul32 {
                        break
                }
-               if v_0.AuxInt != 0 {
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
                        break
                }
-               s := v.Args[1]
-               if s.Op != OpSub64 {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
                        break
                }
-               y := s.Args[1]
-               x := s.Args[0]
-               if !(s.Uses == 1) {
+               m := mul_1.AuxInt
+               if x != v_1_0_0_0.Args[1] {
                        break
                }
-               v.reset(OpEq64)
-               v.AddArg(x)
-               v.AddArg(y)
-               return true
-       }
-       return false
-}
-func rewriteValuegeneric_OpEq64F_0(v *Value) bool {
-       // match: (Eq64F (Const64F [c]) (Const64F [d]))
-       // cond:
-       // result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst64F {
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
                        break
                }
-               c := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst64F {
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh32x64 {
                        break
                }
-               d := v_1.AuxInt
-               v.reset(OpConstBool)
-               v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d))
-               return true
-       }
-       // match: (Eq64F (Const64F [d]) (Const64F [c]))
-       // cond:
-       // result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst64F {
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
                        break
                }
-               d := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst64F {
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
                        break
                }
-               c := v_1.AuxInt
-               v.reset(OpConstBool)
-               v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d))
-               return true
-       }
-       return false
-}
-func rewriteValuegeneric_OpEq8_0(v *Value) bool {
-       b := v.Block
-       config := b.Func.Config
-       typ := &b.Func.Config.Types
-       // match: (Eq8 x x)
-       // cond:
-       // result: (ConstBool [1])
-       for {
-               x := v.Args[1]
-               if x != v.Args[0] {
+               if v_1_0_1_1.AuxInt != 31 {
                        break
                }
-               v.reset(OpConstBool)
-               v.AuxInt = 1
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
-       // cond:
-       // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+       // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 (Const32 [m]) x)) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst8 {
-                       break
-               }
-               t := v_0.Type
-               c := v_0.AuxInt
+               x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpAdd8 {
+               if v_1.Op != OpMul32 {
                        break
                }
-               x := v_1.Args[1]
+               _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpConst8 {
+               if v_1_0.Op != OpSub32 {
                        break
                }
-               if v_1_0.Type != t {
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh32x64 {
                        break
                }
-               d := v_1_0.AuxInt
-               v.reset(OpEq8)
-               v0 := b.NewValue0(v.Pos, OpConst8, t)
-               v0.AuxInt = int64(int8(c - d))
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpAdd32 {
+                       break
+               }
+               _ = v_1_0_0_0.Args[1]
+               if x != v_1_0_0_0.Args[0] {
+                       break
+               }
+               mul := v_1_0_0_0.Args[1]
+               if mul.Op != OpHmul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 31 {
+                       break
+               }
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst32 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v.AddArg(x)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq8 (Const8 <t> [c]) (Add8 x (Const8 <t> [d])))
-       // cond:
-       // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+       // match: (Eq32 x (Mul32 (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 x (Const32 [m]))) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
                _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst8 {
-                       break
-               }
-               t := v_0.Type
-               c := v_0.AuxInt
+               x := v.Args[0]
                v_1 := v.Args[1]
-               if v_1.Op != OpAdd8 {
+               if v_1.Op != OpMul32 {
                        break
                }
                _ = v_1.Args[1]
-               x := v_1.Args[0]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpSub32 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpAdd32 {
+                       break
+               }
+               _ = v_1_0_0_0.Args[1]
+               if x != v_1_0_0_0.Args[0] {
+                       break
+               }
+               mul := v_1_0_0_0.Args[1]
+               if mul.Op != OpHmul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 31 {
+                       break
+               }
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst8 {
+               if v_1_1.Op != OpConst32 {
                        break
                }
-               if v_1_1.Type != t {
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
                        break
                }
-               d := v_1_1.AuxInt
-               v.reset(OpEq8)
-               v0 := b.NewValue0(v.Pos, OpConst8, t)
-               v0.AuxInt = int64(int8(c - d))
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v.AddArg(x)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq8 (Add8 (Const8 <t> [d]) x) (Const8 <t> [c]))
-       // cond:
-       // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+       // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
        for {
-               _ = v.Args[1]
+               x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpAdd8 {
+               if v_0.Op != OpMul32 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpSub32 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               v_0_1_0_0 := v_0_1_0.Args[0]
+               if v_0_1_0_0.Op != OpAdd32 {
+                       break
+               }
+               _ = v_0_1_0_0.Args[1]
+               mul := v_0_1_0_0.Args[0]
+               if mul.Op != OpHmul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               if x != v_0_1_0_0.Args[1] {
+                       break
+               }
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       return false
+}
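On the result side, each of the Eq32 rules above replaces the division-based test x == c*(x/c) with a multiply-add-rotate-compare built from the sdivisible(32, c) constants. A minimal sketch of the check the emitted Leq32U/RotateLeft32 tree performs, assuming m, a, k and max are the fields computed by sdivisible in magic.go (not reproduced here) and using a hypothetical helper name for illustration only:

        // divisibleViaMulRot mirrors the rewritten form
        // (Leq32U (RotateLeft32 (Add32 (Mul32 m x) a) [32-k]) max).
        // Rotating left by 32-k is the same as rotating right by k; when k is 0,
        // as for odd divisors, the rotate is a no-op.
        func divisibleViaMulRot(x int32, m, a, max uint32, k uint) bool {
                u := uint32(x)*m + a  // Mul32 then Add32, in wrapping uint32 arithmetic
                r := u<<(32-k) | u>>k // RotateLeft32 by 32-k
                return r <= max       // Leq32U against sdivisible(32,c).max
        }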
+func rewriteValuegeneric_OpEq32_80(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 x (Const32 [m])) x) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpSub32 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               v_0_1_0_0 := v_0_1_0.Args[0]
+               if v_0_1_0_0.Op != OpAdd32 {
+                       break
+               }
+               _ = v_0_1_0_0.Args[1]
+               mul := v_0_1_0_0.Args[0]
+               if mul.Op != OpHmul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
+                       break
+               }
+               m := mul_1.AuxInt
+               if x != v_0_1_0_0.Args[1] {
+                       break
+               }
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 (Const32 [m]) x)) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpSub32 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               v_0_1_0_0 := v_0_1_0.Args[0]
+               if v_0_1_0_0.Op != OpAdd32 {
+                       break
+               }
+               _ = v_0_1_0_0.Args[1]
+               if x != v_0_1_0_0.Args[0] {
+                       break
+               }
+               mul := v_0_1_0_0.Args[1]
+               if mul.Op != OpHmul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq32 (Mul32 (Const32 [c]) (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 x (Const32 [m]))) (Const64 [s])) (Rsh32x64 x (Const64 [31])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst32 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpSub32 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               v_0_1_0_0 := v_0_1_0.Args[0]
+               if v_0_1_0_0.Op != OpAdd32 {
+                       break
+               }
+               _ = v_0_1_0_0.Args[1]
+               if x != v_0_1_0_0.Args[0] {
+                       break
+               }
+               mul := v_0_1_0_0.Args[1]
+               if mul.Op != OpHmul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 (Const32 [m]) x) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub32 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpAdd32 {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               mul := v_0_0_0_0.Args[0]
+               if mul.Op != OpHmul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               if x != v_0_0_0_0.Args[1] {
+                       break
+               }
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 31 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 (Add32 mul:(Hmul32 x (Const32 [m])) x) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub32 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpAdd32 {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               mul := v_0_0_0_0.Args[0]
+               if mul.Op != OpHmul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
+                       break
+               }
+               m := mul_1.AuxInt
+               if x != v_0_0_0_0.Args[1] {
+                       break
+               }
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 31 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 (Const32 [m]) x)) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub32 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpAdd32 {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               if x != v_0_0_0_0.Args[0] {
+                       break
+               }
+               mul := v_0_0_0_0.Args[1]
+               if mul.Op != OpHmul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 31 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq32 (Mul32 (Sub32 (Rsh32x64 (Add32 x mul:(Hmul32 x (Const32 [m]))) (Const64 [s])) (Rsh32x64 x (Const64 [31]))) (Const32 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32,c).m)) && s == smagic(32,c).s && x.Op != OpConst32 && sdivisibleOK(32,c)
+       // result: (Leq32U (RotateLeft32 <typ.UInt32> (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).a))]) ) (Const32 <typ.UInt32> [int64(32-sdivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(sdivisible(32,c).max))]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul32 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub32 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpAdd32 {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               if x != v_0_0_0_0.Args[0] {
+                       break
+               }
+               mul := v_0_0_0_0.Args[1]
+               if mul.Op != OpHmul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 31 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst32 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(smagic(32, c).m)) && s == smagic(32, c).s && x.Op != OpConst32 && sdivisibleOK(32, c)) {
+                       break
+               }
+               v.reset(OpLeq32U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpAdd32, typ.UInt32)
+               v2 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = int64(int32(sdivisible(32, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v4.AuxInt = int64(int32(sdivisible(32, c).a))
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v5.AuxInt = int64(32 - sdivisible(32, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v6.AuxInt = int64(int32(sdivisible(32, c).max))
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+       // cond: k > 0 && k < 31 && kbar == 32 - k
+       // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+       for {
+               _ = v.Args[1]
+               n := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpLsh32x64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpAdd32 {
+                       break
+               }
+               t := v_1_0_0.Type
+               _ = v_1_0_0.Args[1]
+               if n != v_1_0_0.Args[0] {
+                       break
+               }
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpRsh32Ux64 {
+                       break
+               }
+               if v_1_0_0_1.Type != t {
+                       break
+               }
+               _ = v_1_0_0_1.Args[1]
+               v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+               if v_1_0_0_1_0.Op != OpRsh32x64 {
+                       break
+               }
+               if v_1_0_0_1_0.Type != t {
+                       break
+               }
+               _ = v_1_0_0_1_0.Args[1]
+               if n != v_1_0_0_1_0.Args[0] {
+                       break
+               }
+               v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+               if v_1_0_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_0_1_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_1_0_0_1_0_1.AuxInt != 31 {
+                       break
+               }
+               v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+               if v_1_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_0_1_1.Type != typ.UInt64 {
+                       break
+               }
+               kbar := v_1_0_0_1_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1.Type != typ.UInt64 {
+                       break
+               }
+               k := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_1_1.AuxInt != k {
+                       break
+               }
+               if !(k > 0 && k < 31 && kbar == 32-k) {
+                       break
+               }
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpAnd32, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst32, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpConst32, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
+               return true
+       }
+       // match: (Eq32 n (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+       // cond: k > 0 && k < 31 && kbar == 32 - k
+       // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+       for {
+               _ = v.Args[1]
+               n := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpLsh32x64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpAdd32 {
+                       break
+               }
+               t := v_1_0_0.Type
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpRsh32Ux64 {
+                       break
+               }
+               if v_1_0_0_0.Type != t {
+                       break
+               }
+               _ = v_1_0_0_0.Args[1]
+               v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+               if v_1_0_0_0_0.Op != OpRsh32x64 {
+                       break
+               }
+               if v_1_0_0_0_0.Type != t {
+                       break
+               }
+               _ = v_1_0_0_0_0.Args[1]
+               if n != v_1_0_0_0_0.Args[0] {
+                       break
+               }
+               v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+               if v_1_0_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_0_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_1_0_0_0_0_1.AuxInt != 31 {
+                       break
+               }
+               v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+               if v_1_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               kbar := v_1_0_0_0_1.AuxInt
+               if n != v_1_0_0.Args[1] {
+                       break
+               }
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1.Type != typ.UInt64 {
+                       break
+               }
+               k := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_1_1.AuxInt != k {
+                       break
+               }
+               if !(k > 0 && k < 31 && kbar == 32-k) {
+                       break
+               }
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpAnd32, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst32, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpConst32, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
+               return true
+       }
+       // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+       // cond: k > 0 && k < 31 && kbar == 32 - k
+       // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+       for {
+               n := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpLsh32x64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpAdd32 {
+                       break
+               }
+               t := v_0_0_0.Type
+               _ = v_0_0_0.Args[1]
+               if n != v_0_0_0.Args[0] {
+                       break
+               }
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpRsh32Ux64 {
+                       break
+               }
+               if v_0_0_0_1.Type != t {
+                       break
+               }
+               _ = v_0_0_0_1.Args[1]
+               v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+               if v_0_0_0_1_0.Op != OpRsh32x64 {
+                       break
+               }
+               if v_0_0_0_1_0.Type != t {
+                       break
+               }
+               _ = v_0_0_0_1_0.Args[1]
+               if n != v_0_0_0_1_0.Args[0] {
+                       break
+               }
+               v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+               if v_0_0_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_0_1_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_0_0_0_1_0_1.AuxInt != 31 {
+                       break
+               }
+               v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+               if v_0_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_0_1_1.Type != typ.UInt64 {
+                       break
+               }
+               kbar := v_0_0_0_1_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               k := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_0_1.AuxInt != k {
+                       break
+               }
+               if !(k > 0 && k < 31 && kbar == 32-k) {
+                       break
+               }
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpAnd32, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst32, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpConst32, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
+               return true
+       }
+       return false
+}
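
The Eq32 rules above all rewrite the expanded signed x == c*(x/c) comparison into the same shape: multiply x by sdivisible(32,c).m, add sdivisible(32,c).a, rotate right by sdivisible(32,c).k (via RotateLeft32 by 32-k), and compare unsigned against sdivisible(32,c).max. The following is a minimal standalone sketch, not part of this change and not the compiler's magic.go derivation: it derives constants of that shape from first principles for a single divisor and brute-force checks the identity, scaled down to 16 bits so the exhaustive check is instant.

package main

import "fmt"

// Sketch only: derive m, a, k, max for a signed x%c == 0 test and verify
// that uint(x*m + a) rotated right by k, compared against max, agrees with
// the obvious test for every 16-bit signed value.
func main() {
	const n = 16       // word size in bits (the rules above use 32)
	const c = int32(6) // divisor under test; c = d0 << k with d0 odd

	// Split c into its odd part d0 and power-of-two part 2^k.
	d0, k := uint32(c), uint(0)
	for d0&1 == 0 {
		d0 >>= 1
		k++
	}

	// m: multiplicative inverse of d0 modulo 2^n (exists because d0 is odd).
	var m uint32
	for i := uint32(1); i < 1<<n; i += 2 {
		if (i*d0)&(1<<n-1) == 1 {
			m = i
			break
		}
	}

	// Quotients q for which q*c is representable as a signed n-bit value.
	qmax := (1<<(n-1) - 1) / c         // largest multiple of c is qmax*c
	qmin := -(int32(1) << (n - 1)) / c // smallest is qmin*c; truncation equals ceil here since c > 0
	a := uint32(-qmin) << k & (1<<n - 1)
	max := uint32(qmax - qmin)

	// Exhaustive check against the obvious test.
	for x := -1 << (n - 1); x < 1<<(n-1); x++ {
		word := uint32(x) & (1<<n - 1)        // x as an n-bit word
		t := (word*m + a) & (1<<n - 1)        // x*m + a (mod 2^n)
		rot := (t>>k | t<<(n-k)) & (1<<n - 1) // rotate right by k
		if (rot <= max) != (x%int(c) == 0) {
			fmt.Println("mismatch at", x)
			return
		}
	}
	fmt.Println("signed divisibility check verified for all int16 values, c =", c)
}

In the compiler itself the analogous constants come from sdivisible() in magic.go at rule-application time; the sketch only illustrates why the multiply-add-rotate-compare shape is equivalent to the remainder test.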
+func rewriteValuegeneric_OpEq32_90(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq32 (Lsh32x64 (Rsh32x64 (Add32 <t> (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+       // cond: k > 0 && k < 31 && kbar == 32 - k
+       // result: (Eq32 (And32 <t> n (Const32 <t> [int64(1<<uint(k)-1)])) (Const32 <t> [0]))
+       for {
+               n := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpLsh32x64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpAdd32 {
+                       break
+               }
+               t := v_0_0_0.Type
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpRsh32Ux64 {
+                       break
+               }
+               if v_0_0_0_0.Type != t {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+               if v_0_0_0_0_0.Op != OpRsh32x64 {
+                       break
+               }
+               if v_0_0_0_0_0.Type != t {
+                       break
+               }
+               _ = v_0_0_0_0_0.Args[1]
+               if n != v_0_0_0_0_0.Args[0] {
+                       break
+               }
+               v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+               if v_0_0_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_0_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_0_0_0_0_0_1.AuxInt != 31 {
+                       break
+               }
+               v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+               if v_0_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               kbar := v_0_0_0_0_1.AuxInt
+               if n != v_0_0_0.Args[1] {
+                       break
+               }
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               k := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_0_1.AuxInt != k {
+                       break
+               }
+               if !(k > 0 && k < 31 && kbar == 32-k) {
+                       break
+               }
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpAnd32, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst32, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpConst32, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
+               return true
+       }
+       // match: (Eq32 s:(Sub32 x y) (Const32 [0]))
+       // cond: s.Uses == 1
+       // result: (Eq32 x y)
+       for {
+               _ = v.Args[1]
+               s := v.Args[0]
+               if s.Op != OpSub32 {
+                       break
+               }
+               y := s.Args[1]
+               x := s.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst32 {
+                       break
+               }
+               if v_1.AuxInt != 0 {
+                       break
+               }
+               if !(s.Uses == 1) {
+                       break
+               }
+               v.reset(OpEq32)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+       // match: (Eq32 (Const32 [0]) s:(Sub32 x y))
+       // cond: s.Uses == 1
+       // result: (Eq32 x y)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst32 {
+                       break
+               }
+               if v_0.AuxInt != 0 {
+                       break
+               }
+               s := v.Args[1]
+               if s.Op != OpSub32 {
+                       break
+               }
+               y := s.Args[1]
+               x := s.Args[0]
+               if !(s.Uses == 1) {
+                       break
+               }
+               v.reset(OpEq32)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+       return false
+}
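
The remaining Eq32 rules above match the comparison of n against the Lsh32x64/Rsh32x64/Add32 chain with a kbar == 32-k shift (the usual strength-reduced form of a signed modulus by 1<<k) and replace it with a plain And32 mask test. A small brute-force sketch of that equivalence, again an illustration rather than anything from this change, scaled down to 16 bits:

package main

import "fmt"

// Sketch only: for every int16 value and every k in (0, 15), check that
// "n equals n rounded toward zero to a multiple of 1<<k" (the matched chain)
// agrees with "the low k bits of n are zero" (the rewrite result).
func main() {
	const w = 16
	for k := uint(1); k < w-1; k++ {
		kbar := w - k
		for n := -1 << (w - 1); n < 1<<(w-1); n++ {
			x := int16(n)
			bias := int16(uint16(x>>(w-1)) >> kbar) // 0 for x >= 0, (1<<k)-1 for x < 0
			expanded := (x+bias)>>k<<k == x         // matched pattern: n == ((n + bias) >> k) << k
			masked := x&(1<<k-1) == 0               // rewrite result: n & (2^k - 1) == 0
			if expanded != masked {
				fmt.Println("mismatch", x, k)
				return
			}
		}
	}
	fmt.Println("power-of-two remainder rewrite verified for all int16 values")
}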
+func rewriteValuegeneric_OpEq32F_0(v *Value) bool {
+       // match: (Eq32F (Const32F [c]) (Const32F [d]))
+       // cond:
+       // result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst32F {
+                       break
+               }
+               c := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst32F {
+                       break
+               }
+               d := v_1.AuxInt
+               v.reset(OpConstBool)
+               v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
+               return true
+       }
+       // match: (Eq32F (Const32F [d]) (Const32F [c]))
+       // cond:
+       // result: (ConstBool [b2i(auxTo32F(c) == auxTo32F(d))])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst32F {
+                       break
+               }
+               d := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst32F {
+                       break
+               }
+               c := v_1.AuxInt
+               v.reset(OpConstBool)
+               v.AuxInt = b2i(auxTo32F(c) == auxTo32F(d))
+               return true
+       }
+       return false
+}
+func rewriteValuegeneric_OpEq64_0(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq64 x x)
+       // cond:
+       // result: (ConstBool [1])
+       for {
+               x := v.Args[1]
+               if x != v.Args[0] {
+                       break
+               }
+               v.reset(OpConstBool)
+               v.AuxInt = 1
+               return true
+       }
+       // match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
+       // cond:
+       // result: (Eq64 (Const64 <t> [c-d]) x)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst64 {
+                       break
+               }
+               t := v_0.Type
+               c := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpAdd64 {
+                       break
+               }
+               x := v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               if v_1_0.Type != t {
+                       break
+               }
+               d := v_1_0.AuxInt
+               v.reset(OpEq64)
+               v0 := b.NewValue0(v.Pos, OpConst64, t)
+               v0.AuxInt = c - d
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+       // match: (Eq64 (Const64 <t> [c]) (Add64 x (Const64 <t> [d])))
+       // cond:
+       // result: (Eq64 (Const64 <t> [c-d]) x)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst64 {
+                       break
+               }
+               t := v_0.Type
+               c := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpAdd64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               x := v_1.Args[0]
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1.Type != t {
+                       break
+               }
+               d := v_1_1.AuxInt
+               v.reset(OpEq64)
+               v0 := b.NewValue0(v.Pos, OpConst64, t)
+               v0.AuxInt = c - d
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+       // match: (Eq64 (Add64 (Const64 <t> [d]) x) (Const64 <t> [c]))
+       // cond:
+       // result: (Eq64 (Const64 <t> [c-d]) x)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpAdd64 {
+                       break
+               }
+               x := v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               t := v_0_0.Type
+               d := v_0_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst64 {
+                       break
+               }
+               if v_1.Type != t {
+                       break
+               }
+               c := v_1.AuxInt
+               v.reset(OpEq64)
+               v0 := b.NewValue0(v.Pos, OpConst64, t)
+               v0.AuxInt = c - d
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+       // match: (Eq64 (Add64 x (Const64 <t> [d])) (Const64 <t> [c]))
+       // cond:
+       // result: (Eq64 (Const64 <t> [c-d]) x)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               x := v_0.Args[0]
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               t := v_0_1.Type
+               d := v_0_1.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst64 {
+                       break
+               }
+               if v_1.Type != t {
+                       break
+               }
+               c := v_1.AuxInt
+               v.reset(OpEq64)
+               v0 := b.NewValue0(v.Pos, OpConst64, t)
+               v0.AuxInt = c - d
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+       // match: (Eq64 (Const64 [c]) (Const64 [d]))
+       // cond:
+       // result: (ConstBool [b2i(c == d)])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst64 {
+                       break
+               }
+               d := v_1.AuxInt
+               v.reset(OpConstBool)
+               v.AuxInt = b2i(c == d)
+               return true
+       }
+       // match: (Eq64 (Const64 [d]) (Const64 [c]))
+       // cond:
+       // result: (ConstBool [b2i(c == d)])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst64 {
+                       break
+               }
+               d := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1.AuxInt
+               v.reset(OpConstBool)
+               v.AuxInt = b2i(c == d)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               mul := v_1_1.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               mul := v_1_1.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               mul := v_1_0.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       return false
+}
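
The Eq64 rules above handle the unsigned case: the expanded x == c*(x/c) built from Hmul64u is rewritten to a multiply by udivisible(64,c).m, a rotate right by udivisible(64,c).k, and an unsigned comparison against udivisible(64,c).max, with no add term. A minimal standalone sketch of that identity (again not magic.go, constants derived from first principles), scaled down to 16 bits:

package main

import "fmt"

// Sketch only: with c = d0<<k (d0 odd), m the inverse of d0 modulo 2^n and
// max = (2^n - 1)/c, an unsigned x satisfies x%c == 0 exactly when rotating
// x*m right by k leaves a value <= max. Verified exhaustively for n = 16.
func main() {
	const n = 16        // word size in bits (the rules above use 64)
	const c = uint32(6) // divisor under test

	d0, k := c, uint(0)
	for d0&1 == 0 {
		d0 >>= 1
		k++
	}

	var m uint32 // multiplicative inverse of d0 modulo 2^n
	for i := uint32(1); i < 1<<n; i += 2 {
		if (i*d0)&(1<<n-1) == 1 {
			m = i
			break
		}
	}
	max := (uint32(1)<<n - 1) / c

	for x := uint32(0); x < 1<<n; x++ {
		t := (x * m) & (1<<n - 1)
		rot := (t>>k | t<<(n-k)) & (1<<n - 1) // rotate right by k
		if (rot <= max) != (x%c == 0) {
			fmt.Println("mismatch at", x)
			return
		}
	}
	fmt.Println("unsigned divisibility check verified for all uint16 values, c =", c)
}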
+func rewriteValuegeneric_OpEq64_10(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s])) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               mul := v_1_0.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               mul := v_0_1.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               mul := v_0_1.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) x) (Const64 [s])) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               mul := v_0_0.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u x (Const64 [m])) (Const64 [s])) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64,c).m/2) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               mul := v_0_0.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+umagic(64, c).m/2) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               mul := v_1_1.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = mul_1.Args[1]
+               if x != mul_1.Args[0] {
+                       break
+               }
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
+                       break
+               }
+               if mul_1_1.AuxInt != 1 {
+                       break
+               }
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               mul := v_1_1.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = mul_0.Args[1]
+               if x != mul_0.Args[0] {
+                       break
+               }
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
+                       break
+               }
+               if mul_0_1.AuxInt != 1 {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               mul := v_1_0.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = mul_1.Args[1]
+               if x != mul_1.Args[0] {
+                       break
+               }
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
+                       break
+               }
+               if mul_1_1.AuxInt != 1 {
+                       break
+               }
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s])) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               mul := v_1_0.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = mul_0.Args[1]
+               if x != mul_0.Args[0] {
+                       break
+               }
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
+                       break
+               }
+               if mul_0_1.AuxInt != 1 {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               mul := v_0_1.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = mul_1.Args[1]
+               if x != mul_1.Args[0] {
+                       break
+               }
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
+                       break
+               }
+               if mul_1_1.AuxInt != 1 {
+                       break
+               }
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       return false
+}
+func rewriteValuegeneric_OpEq64_20(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               mul := v_0_1.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = mul_0.Args[1]
+               if x != mul_0.Args[0] {
+                       break
+               }
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
+                       break
+               }
+               if mul_0_1.AuxInt != 1 {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Const64 [m]) (Rsh64Ux64 x (Const64 [1]))) (Const64 [s])) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               mul := v_0_0.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = mul_1.Args[1]
+               if x != mul_1.Args[0] {
+                       break
+               }
+               mul_1_1 := mul_1.Args[1]
+               if mul_1_1.Op != OpConst64 {
+                       break
+               }
+               if mul_1_1.AuxInt != 1 {
+                       break
+               }
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Rsh64Ux64 mul:(Hmul64u (Rsh64Ux64 x (Const64 [1])) (Const64 [m])) (Const64 [s])) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64,c).m+1)/2) && s == umagic(64,c).s-2 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               mul := v_0_0.Args[0]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = mul_0.Args[1]
+               if x != mul_0.Args[0] {
+                       break
+               }
+               mul_0_1 := mul_0.Args[1]
+               if mul_0_1.Op != OpConst64 {
+                       break
+               }
+               if mul_0_1.AuxInt != 1 {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<63+(umagic(64, c).m+1)/2) && s == umagic(64, c).s-2 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpAvg64u {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               if x != v_1_1_0.Args[0] {
+                       break
+               }
+               mul := v_1_1_0.Args[1]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s]))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpAvg64u {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               if x != v_1_1_0.Args[0] {
+                       break
+               }
+               mul := v_1_1_0.Args[1]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpAvg64u {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               if x != v_1_0_0.Args[0] {
+                       break
+               }
+               mul := v_1_0_0.Args[1]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s])) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpAvg64u {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               if x != v_1_0_0.Args[0] {
+                       break
+               }
+               mul := v_1_0_0.Args[1]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpAvg64u {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               if x != v_0_1_0.Args[0] {
+                       break
+               }
+               mul := v_0_1_0.Args[1]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Const64 [c]) (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s]))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpAvg64u {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               if x != v_0_1_0.Args[0] {
+                       break
+               }
+               mul := v_0_1_0.Args[1]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u (Const64 [m]) x)) (Const64 [s])) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpAvg64u {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               if x != v_0_0_0.Args[0] {
+                       break
+               }
+               mul := v_0_0_0.Args[1]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
+       return false
+}
+func rewriteValuegeneric_OpEq64_30(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq64 (Mul64 (Rsh64Ux64 (Avg64u x mul:(Hmul64u x (Const64 [m]))) (Const64 [s])) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64,c).m) && s == umagic(64,c).s-1 && x.Op != OpConst64 && udivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(udivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(64-udivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(udivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpAvg64u {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               if x != v_0_0_0.Args[0] {
+                       break
+               }
+               mul := v_0_0_0.Args[1]
+               if mul.Op != OpHmul64u {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(64, c).m) && s == umagic(64, c).s-1 && x.Op != OpConst64 && udivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v2.AuxInt = int64(udivisible(64, c).m)
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(64 - udivisible(64, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(udivisible(64, c).max)
+               v.AddArg(v4)
+               return true
+       }
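+	// Editorial sketch (not emitted by rulegen): the signed Eq64 rules below match
+	// the re-expanded x == c*(x/c) for the smagic division and rewrite it to the
+	// sdivisible check
+	//   (Leq64U (RotateLeft64 (Add64 (Mul64 (Const64 [m]) x) (Const64 [a])) (Const64 [64-k])) (Const64 [max]))
+	// i.e. x%c == 0  iff  rotateRight64(x*m + a, k) <= max (unsigned compare), with
+	// m, a, k and max taken from sdivisible(64, c); the added constant a shifts the
+	// negative inputs so that a single unsigned comparison covers both signs.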
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
+                       break
+               }
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 x (Const64 [m])) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
+                       break
+               }
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 mul:(Hmul64 x (Const64 [m])) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 mul:(Hmul64 x (Const64 [m])) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 mul:(Hmul64 (Const64 [m]) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 mul:(Hmul64 x (Const64 [m])) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m/2) && s == smagic(64,c).s-1 && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m/2) && s == smagic(64, c).s-1 && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               v_1_1_0_0 := v_1_1_0.Args[0]
+               if v_1_1_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_1_1_0_0.Args[1]
+               mul := v_1_1_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               if x != v_1_1_0_0.Args[1] {
+                       break
+               }
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
+                       break
+               }
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       return false
+}
+func rewriteValuegeneric_OpEq64_40(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 x (Const64 [m])) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               v_1_1_0_0 := v_1_1_0.Args[0]
+               if v_1_1_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_1_1_0_0.Args[1]
+               mul := v_1_1_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               if x != v_1_1_0_0.Args[1] {
+                       break
+               }
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
+                       break
+               }
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 (Const64 [m]) x)) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               v_1_1_0_0 := v_1_1_0.Args[0]
+               if v_1_1_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_1_1_0_0.Args[1]
+               if x != v_1_1_0_0.Args[0] {
+                       break
+               }
+               mul := v_1_1_0_0.Args[1]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
+                       break
+               }
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 x (Const64 [m]))) (Const64 [s])) (Rsh64x64 x (Const64 [63])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst64 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_1.Args[1]
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               v_1_1_0_0 := v_1_1_0.Args[0]
+               if v_1_1_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_1_1_0_0.Args[1]
+               if x != v_1_1_0_0.Args[0] {
+                       break
+               }
+               mul := v_1_1_0_0.Args[1]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_1_1.Args[1]
+               if x != v_1_1_1.Args[0] {
+                       break
+               }
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_1_0_0_0.Args[1]
+               mul := v_1_0_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               if x != v_1_0_0_0.Args[1] {
+                       break
+               }
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 x (Const64 [m])) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_1_0_0_0.Args[1]
+               mul := v_1_0_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               if x != v_1_0_0_0.Args[1] {
+                       break
+               }
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 (Const64 [m]) x)) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_1_0_0_0.Args[1]
+               if x != v_1_0_0_0.Args[0] {
+                       break
+               }
+               mul := v_1_0_0_0.Args[1]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 x (Mul64 (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 x (Const64 [m]))) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_1_0_0_0.Args[1]
+               if x != v_1_0_0_0.Args[0] {
+                       break
+               }
+               mul := v_1_0_0_0.Args[1]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               if x != v_1_0_1.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               v_0_1_0_0 := v_0_1_0.Args[0]
+               if v_0_1_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_0_1_0_0.Args[1]
+               mul := v_0_1_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               if x != v_0_1_0_0.Args[1] {
+                       break
+               }
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 x (Const64 [m])) x) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               v_0_1_0_0 := v_0_1_0.Args[0]
+               if v_0_1_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_0_1_0_0.Args[1]
+               mul := v_0_1_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               if x != v_0_1_0_0.Args[1] {
+                       break
+               }
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 (Const64 [m]) x)) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               v_0_1_0_0 := v_0_1_0.Args[0]
+               if v_0_1_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_0_1_0_0.Args[1]
+               if x != v_0_1_0_0.Args[0] {
+                       break
+               }
+               mul := v_0_1_0_0.Args[1]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       return false
+}
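+
+// Note (illustrative annotation, not emitted by rulegen): the Eq64 blocks above
+// and below match c*(x/c) == x with x/c already expanded into its signed
+// magic-division form, i.e. Sub64(Rsh64x64(Add64(Hmul64 x m) x) s, Rsh64x64 x 63)
+// with m == smagic(64,c).m and s == smagic(64,c).s, and only when the Hmul64 has
+// a single use and we are past the first "opt" pass. A minimal sketch of the
+// replacement, writing m', a, k and max for sdivisible(64,c).m, .a, .k and .max:
+//
+//	x%c == 0  <=>  bits.RotateLeft64(uint64(x)*m'+a, 64-int(k)) <= max   // unsigned compare
+//
+// i.e. one multiply, one add, one rotate and one unsigned compare, mirroring the
+// (Leq64U (RotateLeft64 (Add64 (Mul64 ...) ...) ...) ...) result pattern.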
+func rewriteValuegeneric_OpEq64_50(v *Value) bool {
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (Eq64 (Mul64 (Const64 [c]) (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 x (Const64 [m]))) (Const64 [s])) (Rsh64x64 x (Const64 [63])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst64 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_1.Args[1]
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               v_0_1_0_0 := v_0_1_0.Args[0]
+               if v_0_1_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_0_1_0_0.Args[1]
+               if x != v_0_1_0_0.Args[0] {
+                       break
+               }
+               mul := v_0_1_0_0.Args[1]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               if x != v_0_1_1.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 63 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 (Const64 [m]) x) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               mul := v_0_0_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               if x != v_0_0_0_0.Args[1] {
+                       break
+               }
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 (Add64 mul:(Hmul64 x (Const64 [m])) x) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               mul := v_0_0_0_0.Args[0]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               if x != v_0_0_0_0.Args[1] {
+                       break
+               }
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 (Const64 [m]) x)) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               if x != v_0_0_0_0.Args[0] {
+                       break
+               }
+               mul := v_0_0_0_0.Args[1]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst64 {
+                       break
+               }
+               m := mul_0.AuxInt
+               if x != mul.Args[1] {
+                       break
+               }
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 (Mul64 (Sub64 (Rsh64x64 (Add64 x mul:(Hmul64 x (Const64 [m]))) (Const64 [s])) (Rsh64x64 x (Const64 [63]))) (Const64 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64,c).m) && s == smagic(64,c).s && x.Op != OpConst64 && sdivisibleOK(64,c)
+       // result: (Leq64U (RotateLeft64 <typ.UInt64> (Add64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(sdivisible(64,c).m)]) x) (Const64 <typ.UInt64> [int64(sdivisible(64,c).a)]) ) (Const64 <typ.UInt64> [int64(64-sdivisible(64,c).k)]) ) (Const64 <typ.UInt64> [int64(sdivisible(64,c).max)]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpSub64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpAdd64 {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               if x != v_0_0_0_0.Args[0] {
+                       break
+               }
+               mul := v_0_0_0_0.Args[1]
+               if mul.Op != OpHmul64 {
+                       break
+               }
+               _ = mul.Args[1]
+               if x != mul.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst64 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               if x != v_0_0_1.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 63 {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(64, c).m) && s == smagic(64, c).s && x.Op != OpConst64 && sdivisibleOK(64, c)) {
+                       break
+               }
+               v.reset(OpLeq64U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft64, typ.UInt64)
+               v1 := b.NewValue0(v.Pos, OpAdd64, typ.UInt64)
+               v2 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
+               v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v3.AuxInt = int64(sdivisible(64, c).m)
+               v2.AddArg(v3)
+               v2.AddArg(x)
+               v1.AddArg(v2)
+               v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v4.AuxInt = int64(sdivisible(64, c).a)
+               v1.AddArg(v4)
+               v0.AddArg(v1)
+               v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v5.AuxInt = int64(64 - sdivisible(64, c).k)
+               v0.AddArg(v5)
+               v.AddArg(v0)
+               v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+               v6.AuxInt = int64(sdivisible(64, c).max)
+               v.AddArg(v6)
+               return true
+       }
+       // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+       // cond: k > 0 && k < 63 && kbar == 64 - k
+       // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+       for {
+               _ = v.Args[1]
+               n := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpLsh64x64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpAdd64 {
+                       break
+               }
+               t := v_1_0_0.Type
+               _ = v_1_0_0.Args[1]
+               if n != v_1_0_0.Args[0] {
+                       break
+               }
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               if v_1_0_0_1.Type != t {
+                       break
+               }
+               _ = v_1_0_0_1.Args[1]
+               v_1_0_0_1_0 := v_1_0_0_1.Args[0]
+               if v_1_0_0_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               if v_1_0_0_1_0.Type != t {
+                       break
+               }
+               _ = v_1_0_0_1_0.Args[1]
+               if n != v_1_0_0_1_0.Args[0] {
+                       break
+               }
+               v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
+               if v_1_0_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_0_1_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_1_0_0_1_0_1.AuxInt != 63 {
+                       break
+               }
+               v_1_0_0_1_1 := v_1_0_0_1.Args[1]
+               if v_1_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_0_1_1.Type != typ.UInt64 {
+                       break
+               }
+               kbar := v_1_0_0_1_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1.Type != typ.UInt64 {
+                       break
+               }
+               k := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_1_1.AuxInt != k {
+                       break
+               }
+               if !(k > 0 && k < 63 && kbar == 64-k) {
+                       break
+               }
+               v.reset(OpEq64)
+               v0 := b.NewValue0(v.Pos, OpAnd64, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst64, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpConst64, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
+               return true
+       }
+       // match: (Eq64 n (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
+       // cond: k > 0 && k < 63 && kbar == 64 - k
+       // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+       for {
+               _ = v.Args[1]
+               n := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpLsh64x64 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_1_0.Args[1]
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpAdd64 {
+                       break
+               }
+               t := v_1_0_0.Type
+               _ = v_1_0_0.Args[1]
+               v_1_0_0_0 := v_1_0_0.Args[0]
+               if v_1_0_0_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               if v_1_0_0_0.Type != t {
+                       break
+               }
+               _ = v_1_0_0_0.Args[1]
+               v_1_0_0_0_0 := v_1_0_0_0.Args[0]
+               if v_1_0_0_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               if v_1_0_0_0_0.Type != t {
+                       break
+               }
+               _ = v_1_0_0_0_0.Args[1]
+               if n != v_1_0_0_0_0.Args[0] {
+                       break
+               }
+               v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
+               if v_1_0_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_0_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_1_0_0_0_0_1.AuxInt != 63 {
+                       break
+               }
+               v_1_0_0_0_1 := v_1_0_0_0.Args[1]
+               if v_1_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               kbar := v_1_0_0_0_1.AuxInt
+               if n != v_1_0_0.Args[1] {
+                       break
+               }
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1.Type != typ.UInt64 {
+                       break
+               }
+               k := v_1_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_1_1.AuxInt != k {
+                       break
+               }
+               if !(k > 0 && k < 63 && kbar == 64-k) {
+                       break
+               }
+               v.reset(OpEq64)
+               v0 := b.NewValue0(v.Pos, OpAnd64, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst64, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpConst64, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
+               return true
+       }
+       // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+       // cond: k > 0 && k < 63 && kbar == 64 - k
+       // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+       for {
+               n := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpLsh64x64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpAdd64 {
+                       break
+               }
+               t := v_0_0_0.Type
+               _ = v_0_0_0.Args[1]
+               if n != v_0_0_0.Args[0] {
+                       break
+               }
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpRsh64Ux64 {
+                       break
+               }
+               if v_0_0_0_1.Type != t {
+                       break
+               }
+               _ = v_0_0_0_1.Args[1]
+               v_0_0_0_1_0 := v_0_0_0_1.Args[0]
+               if v_0_0_0_1_0.Op != OpRsh64x64 {
+                       break
+               }
+               if v_0_0_0_1_0.Type != t {
+                       break
+               }
+               _ = v_0_0_0_1_0.Args[1]
+               if n != v_0_0_0_1_0.Args[0] {
+                       break
+               }
+               v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
+               if v_0_0_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_0_1_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_0_0_0_1_0_1.AuxInt != 63 {
+                       break
+               }
+               v_0_0_0_1_1 := v_0_0_0_1.Args[1]
+               if v_0_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_0_1_1.Type != typ.UInt64 {
+                       break
+               }
+               kbar := v_0_0_0_1_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               k := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_0_1.AuxInt != k {
+                       break
+               }
+               if !(k > 0 && k < 63 && kbar == 64-k) {
+                       break
+               }
+               v.reset(OpEq64)
+               v0 := b.NewValue0(v.Pos, OpAnd64, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst64, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpConst64, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
+               return true
+       }
+       // match: (Eq64 (Lsh64x64 (Rsh64x64 (Add64 <t> (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
+       // cond: k > 0 && k < 63 && kbar == 64 - k
+       // result: (Eq64 (And64 <t> n (Const64 <t> [int64(1<<uint(k)-1)])) (Const64 <t> [0]))
+       for {
+               n := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpLsh64x64 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpAdd64 {
+                       break
+               }
+               t := v_0_0_0.Type
+               _ = v_0_0_0.Args[1]
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpRsh64Ux64 {
+                       break
+               }
+               if v_0_0_0_0.Type != t {
+                       break
+               }
+               _ = v_0_0_0_0.Args[1]
+               v_0_0_0_0_0 := v_0_0_0_0.Args[0]
+               if v_0_0_0_0_0.Op != OpRsh64x64 {
+                       break
+               }
+               if v_0_0_0_0_0.Type != t {
+                       break
+               }
+               _ = v_0_0_0_0_0.Args[1]
+               if n != v_0_0_0_0_0.Args[0] {
+                       break
+               }
+               v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
+               if v_0_0_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_0_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_0_0_0_0_0_1.AuxInt != 63 {
+                       break
+               }
+               v_0_0_0_0_1 := v_0_0_0_0.Args[1]
+               if v_0_0_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               kbar := v_0_0_0_0_1.AuxInt
+               if n != v_0_0_0.Args[1] {
+                       break
+               }
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1.Type != typ.UInt64 {
+                       break
+               }
+               k := v_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1.Type != typ.UInt64 {
+                       break
+               }
+               if v_0_1.AuxInt != k {
+                       break
+               }
+               if !(k > 0 && k < 63 && kbar == 64-k) {
+                       break
+               }
+               v.reset(OpEq64)
+               v0 := b.NewValue0(v.Pos, OpAnd64, t)
+               v0.AddArg(n)
+               v1 := b.NewValue0(v.Pos, OpConst64, t)
+               v1.AuxInt = int64(1<<uint(k) - 1)
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpConst64, t)
+               v2.AuxInt = 0
+               v.AddArg(v2)
+               return true
+       }
+       // match: (Eq64 s:(Sub64 x y) (Const64 [0]))
+       // cond: s.Uses == 1
+       // result: (Eq64 x y)
+       for {
+               _ = v.Args[1]
+               s := v.Args[0]
+               if s.Op != OpSub64 {
+                       break
+               }
+               y := s.Args[1]
+               x := s.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst64 {
+                       break
+               }
+               if v_1.AuxInt != 0 {
+                       break
+               }
+               if !(s.Uses == 1) {
+                       break
+               }
+               v.reset(OpEq64)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+       return false
+}
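+
+// Note (illustrative annotation, not emitted by rulegen): the four
+// Lsh64x64/Rsh64x64 blocks above recognize the expanded form of n % (1<<k) == 0
+// for signed n (add the sign-derived bias, shift right then left by k, compare
+// with n) and reduce it to a mask test. A minimal sketch for k = 3:
+//
+//	n%8 == 0  <=>  n&7 == 0   // e.g. -16&7 == 0, while -13&7 == 3
+//
+// which is exactly the (Eq64 (And64 n (Const64 [1<<k-1])) (Const64 [0]))
+// result pattern.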
+func rewriteValuegeneric_OpEq64_60(v *Value) bool {
+       // match: (Eq64 (Const64 [0]) s:(Sub64 x y))
+       // cond: s.Uses == 1
+       // result: (Eq64 x y)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst64 {
+                       break
+               }
+               if v_0.AuxInt != 0 {
+                       break
+               }
+               s := v.Args[1]
+               if s.Op != OpSub64 {
+                       break
+               }
+               y := s.Args[1]
+               x := s.Args[0]
+               if !(s.Uses == 1) {
+                       break
+               }
+               v.reset(OpEq64)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+       return false
+}
+func rewriteValuegeneric_OpEq64F_0(v *Value) bool {
+       // match: (Eq64F (Const64F [c]) (Const64F [d]))
+       // cond:
+       // result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst64F {
+                       break
+               }
+               c := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst64F {
+                       break
+               }
+               d := v_1.AuxInt
+               v.reset(OpConstBool)
+               v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d))
+               return true
+       }
+       // match: (Eq64F (Const64F [d]) (Const64F [c]))
+       // cond:
+       // result: (ConstBool [b2i(auxTo64F(c) == auxTo64F(d))])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst64F {
+                       break
+               }
+               d := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst64F {
+                       break
+               }
+               c := v_1.AuxInt
+               v.reset(OpConstBool)
+               v.AuxInt = b2i(auxTo64F(c) == auxTo64F(d))
+               return true
+       }
+       return false
+}
+func rewriteValuegeneric_OpEq8_0(v *Value) bool {
+       b := v.Block
+       config := b.Func.Config
+       typ := &b.Func.Config.Types
+       // match: (Eq8 x x)
+       // cond:
+       // result: (ConstBool [1])
+       for {
+               x := v.Args[1]
+               if x != v.Args[0] {
+                       break
+               }
+               v.reset(OpConstBool)
+               v.AuxInt = 1
+               return true
+       }
+       // match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
+       // cond:
+       // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst8 {
+                       break
+               }
+               t := v_0.Type
+               c := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpAdd8 {
+                       break
+               }
+               x := v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst8 {
+                       break
+               }
+               if v_1_0.Type != t {
+                       break
+               }
+               d := v_1_0.AuxInt
+               v.reset(OpEq8)
+               v0 := b.NewValue0(v.Pos, OpConst8, t)
+               v0.AuxInt = int64(int8(c - d))
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+       // match: (Eq8 (Const8 <t> [c]) (Add8 x (Const8 <t> [d])))
+       // cond:
+       // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst8 {
+                       break
+               }
+               t := v_0.Type
+               c := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpAdd8 {
+                       break
+               }
+               _ = v_1.Args[1]
+               x := v_1.Args[0]
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst8 {
+                       break
+               }
+               if v_1_1.Type != t {
+                       break
+               }
+               d := v_1_1.AuxInt
+               v.reset(OpEq8)
+               v0 := b.NewValue0(v.Pos, OpConst8, t)
+               v0.AuxInt = int64(int8(c - d))
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+       // match: (Eq8 (Add8 (Const8 <t> [d]) x) (Const8 <t> [c]))
+       // cond:
+       // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpAdd8 {
+                       break
+               }
+               x := v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst8 {
+                       break
+               }
+               t := v_0_0.Type
+               d := v_0_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst8 {
+                       break
+               }
+               if v_1.Type != t {
+                       break
+               }
+               c := v_1.AuxInt
+               v.reset(OpEq8)
+               v0 := b.NewValue0(v.Pos, OpConst8, t)
+               v0.AuxInt = int64(int8(c - d))
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+       // match: (Eq8 (Add8 x (Const8 <t> [d])) (Const8 <t> [c]))
+       // cond:
+       // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpAdd8 {
+                       break
+               }
+               _ = v_0.Args[1]
+               x := v_0.Args[0]
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst8 {
+                       break
+               }
+               t := v_0_1.Type
+               d := v_0_1.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst8 {
+                       break
+               }
+               if v_1.Type != t {
+                       break
+               }
+               c := v_1.AuxInt
+               v.reset(OpEq8)
+               v0 := b.NewValue0(v.Pos, OpConst8, t)
+               v0.AuxInt = int64(int8(c - d))
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+       // match: (Eq8 (Const8 [c]) (Const8 [d]))
+       // cond:
+       // result: (ConstBool [b2i(c == d)])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst8 {
+                       break
+               }
+               c := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst8 {
+                       break
+               }
+               d := v_1.AuxInt
+               v.reset(OpConstBool)
+               v.AuxInt = b2i(c == d)
+               return true
+       }
+       // match: (Eq8 (Const8 [d]) (Const8 [c]))
+       // cond:
+       // result: (ConstBool [b2i(c == d)])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst8 {
+                       break
+               }
+               d := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst8 {
+                       break
+               }
+               c := v_1.AuxInt
+               v.reset(OpConstBool)
+               v.AuxInt = b2i(c == d)
+               return true
+       }
+       // match: (Eq8 (Mod8u x (Const8 [c])) (Const8 [0]))
+       // cond: x.Op != OpConst8 && udivisibleOK(8,c) && !hasSmallRotate(config)
+       // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xff])) (Const32 <typ.UInt32> [0]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMod8u {
+                       break
+               }
+               _ = v_0.Args[1]
+               x := v_0.Args[0]
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst8 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst8 {
+                       break
+               }
+               if v_1.AuxInt != 0 {
+                       break
+               }
+               if !(x.Op != OpConst8 && udivisibleOK(8, c) && !hasSmallRotate(config)) {
+                       break
+               }
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = c & 0xff
+               v0.AddArg(v2)
+               v.AddArg(v0)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = 0
+               v.AddArg(v3)
+               return true
+       }
+       // match: (Eq8 (Const8 [0]) (Mod8u x (Const8 [c])))
+       // cond: x.Op != OpConst8 && udivisibleOK(8,c) && !hasSmallRotate(config)
+       // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xff])) (Const32 <typ.UInt32> [0]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst8 {
+                       break
+               }
+               if v_0.AuxInt != 0 {
+                       break
+               }
+               v_1 := v.Args[1]
+               if v_1.Op != OpMod8u {
+                       break
+               }
+               _ = v_1.Args[1]
+               x := v_1.Args[0]
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst8 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(x.Op != OpConst8 && udivisibleOK(8, c) && !hasSmallRotate(config)) {
+                       break
+               }
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v2.AuxInt = c & 0xff
+               v0.AddArg(v2)
+               v.AddArg(v0)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
+               v3.AuxInt = 0
+               v.AddArg(v3)
+               return true
+       }
+       // match: (Eq8 (Mod8 x (Const8 [c])) (Const8 [0]))
+       // cond: x.Op != OpConst8 && sdivisibleOK(8,c) && !hasSmallRotate(config)
+       // result: (Eq32 (Mod32 <typ.Int32> (SignExt8to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMod8 {
+                       break
+               }
+               _ = v_0.Args[1]
+               x := v_0.Args[0]
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst8 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpConst8 {
+                       break
+               }
+               if v_1.AuxInt != 0 {
+                       break
+               }
+               if !(x.Op != OpConst8 && sdivisibleOK(8, c) && !hasSmallRotate(config)) {
+                       break
+               }
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
+               v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+               v2.AuxInt = c
+               v0.AddArg(v2)
+               v.AddArg(v0)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+               v3.AuxInt = 0
+               v.AddArg(v3)
+               return true
+       }
+       return false
+}
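+
+// Note (illustrative annotation, not emitted by rulegen): the Mod8u/Mod8 blocks
+// above fire only when !hasSmallRotate(config), i.e. on targets without rotate
+// instructions for sub-32-bit types. Instead of applying the divisibility trick
+// at 8 bits, they widen the check to 32 bits, roughly (for some constant c):
+//
+//	uint8 x:  x%c == 0  ->  uint32(x)%uint32(c&0xff) == 0   // ZeroExt8to32 + Mod32u
+//	int8  x:  x%c == 0  ->  int32(x)%int32(c) == 0          // SignExt8to32 + Mod32
+//
+// presumably leaving the actual magic-number rewrite to the 32-bit Eq32 rules.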
+func rewriteValuegeneric_OpEq8_10(v *Value) bool {
+       b := v.Block
+       config := b.Func.Config
+       typ := &b.Func.Config.Types
+       // match: (Eq8 (Const8 [0]) (Mod8 x (Const8 [c])))
+       // cond: x.Op != OpConst8 && sdivisibleOK(8,c) && !hasSmallRotate(config)
+       // result: (Eq32 (Mod32 <typ.Int32> (SignExt8to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpConst8 {
+                       break
+               }
+               if v_0.AuxInt != 0 {
+                       break
+               }
+               v_1 := v.Args[1]
+               if v_1.Op != OpMod8 {
+                       break
+               }
+               _ = v_1.Args[1]
+               x := v_1.Args[0]
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst8 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(x.Op != OpConst8 && sdivisibleOK(8, c) && !hasSmallRotate(config)) {
+                       break
+               }
+               v.reset(OpEq32)
+               v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
+               v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+               v2.AuxInt = c
+               v0.AddArg(v2)
+               v.AddArg(v0)
+               v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
+               v3.AuxInt = 0
+               v.AddArg(v3)
+               return true
+       }
+       // match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul8 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst8 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpTrunc32to8 {
+                       break
+               }
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh32Ux64 {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
+               if mul.Op != OpMul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
+                       break
+               }
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpZeroExt8to32 {
+                       break
+               }
+               if x != mul_1.Args[0] {
+                       break
+               }
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+                       break
+               }
+               v.reset(OpLeq8U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(8 - udivisible(8, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(udivisible(8, c).max))
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul8 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpConst8 {
+                       break
+               }
+               c := v_1_0.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpTrunc32to8 {
+                       break
+               }
+               v_1_1_0 := v_1_1.Args[0]
+               if v_1_1_0.Op != OpRsh32Ux64 {
+                       break
+               }
+               _ = v_1_1_0.Args[1]
+               mul := v_1_1_0.Args[0]
+               if mul.Op != OpMul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpZeroExt8to32 {
+                       break
+               }
+               if x != mul_0.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_1_0_1 := v_1_1_0.Args[1]
+               if v_1_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+                       break
+               }
+               v.reset(OpLeq8U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(8 - udivisible(8, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(udivisible(8, c).max))
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq8 x (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) (Const8 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul8 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpTrunc32to8 {
+                       break
+               }
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh32Ux64 {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
+               if mul.Op != OpMul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
+                       break
+               }
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpZeroExt8to32 {
+                       break
+               }
+               if x != mul_1.Args[0] {
+                       break
+               }
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst8 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+                       break
+               }
+               v.reset(OpLeq8U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(8 - udivisible(8, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(udivisible(8, c).max))
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq8 x (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s]))) (Const8 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpMul8 {
+                       break
+               }
+               _ = v_1.Args[1]
+               v_1_0 := v_1.Args[0]
+               if v_1_0.Op != OpTrunc32to8 {
+                       break
+               }
+               v_1_0_0 := v_1_0.Args[0]
+               if v_1_0_0.Op != OpRsh32Ux64 {
+                       break
+               }
+               _ = v_1_0_0.Args[1]
+               mul := v_1_0_0.Args[0]
+               if mul.Op != OpMul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpZeroExt8to32 {
+                       break
+               }
+               if x != mul_0.Args[0] {
+                       break
+               }
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
+                       break
+               }
+               m := mul_1.AuxInt
+               v_1_0_0_1 := v_1_0_0.Args[1]
+               if v_1_0_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_1_0_0_1.AuxInt
+               v_1_1 := v_1.Args[1]
+               if v_1_1.Op != OpConst8 {
+                       break
+               }
+               c := v_1_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+                       break
+               }
+               v.reset(OpLeq8U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(8 - udivisible(8, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(udivisible(8, c).max))
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq8 (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul8 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst8 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpTrunc32to8 {
+                       break
+               }
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh32Ux64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpMul32 {
+                       break
+               }
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
+                       break
+               }
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpZeroExt8to32 {
+                       break
+               }
+               if x != mul_1.Args[0] {
+                       break
+               }
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+                       break
+               }
+               v.reset(OpLeq8U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(8 - udivisible(8, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(udivisible(8, c).max))
+               v.AddArg(v4)
+               return true
+       }
+       // match: (Eq8 (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       for {
+               x := v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpMul8 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpConst8 {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpTrunc32to8 {
+                       break
+               }
+               v_0_1_0 := v_0_1.Args[0]
+               if v_0_1_0.Op != OpRsh32Ux64 {
+                       break
+               }
+               _ = v_0_1_0.Args[1]
+               mul := v_0_1_0.Args[0]
+               if mul.Op != OpMul32 {
                        break
                }
-               x := v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpConst8 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpZeroExt8to32 {
                        break
                }
-               t := v_0_0.Type
-               d := v_0_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst8 {
+               if x != mul_0.Args[0] {
                        break
                }
-               if v_1.Type != t {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
                        break
                }
-               c := v_1.AuxInt
-               v.reset(OpEq8)
-               v0 := b.NewValue0(v.Pos, OpConst8, t)
-               v0.AuxInt = int64(int8(c - d))
+               m := mul_1.AuxInt
+               v_0_1_0_1 := v_0_1_0.Args[1]
+               if v_0_1_0_1.Op != OpConst64 {
+                       break
+               }
+               s := v_0_1_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+                       break
+               }
+               v.reset(OpLeq8U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(8 - udivisible(8, c).k)
+               v0.AddArg(v3)
                v.AddArg(v0)
-               v.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(udivisible(8, c).max))
+               v.AddArg(v4)
                return true
        }
-       // match: (Eq8 (Add8 x (Const8 <t> [d])) (Const8 <t> [c]))
-       // cond:
-       // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x)
+       // match: (Eq8 (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) (Const8 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
        for {
-               _ = v.Args[1]
+               x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpAdd8 {
+               if v_0.Op != OpMul8 {
                        break
                }
                _ = v_0.Args[1]
-               x := v_0.Args[0]
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst8 {
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpTrunc32to8 {
                        break
                }
-               t := v_0_1.Type
-               d := v_0_1.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst8 {
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh32Ux64 {
                        break
                }
-               if v_1.Type != t {
+               _ = v_0_0_0.Args[1]
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpMul32 {
                        break
                }
-               c := v_1.AuxInt
-               v.reset(OpEq8)
-               v0 := b.NewValue0(v.Pos, OpConst8, t)
-               v0.AuxInt = int64(int8(c - d))
-               v.AddArg(v0)
-               v.AddArg(x)
-               return true
-       }
-       // match: (Eq8 (Const8 [c]) (Const8 [d]))
-       // cond:
-       // result: (ConstBool [b2i(c == d)])
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst8 {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpConst32 {
                        break
                }
-               c := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst8 {
+               m := mul_0.AuxInt
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpZeroExt8to32 {
                        break
                }
-               d := v_1.AuxInt
-               v.reset(OpConstBool)
-               v.AuxInt = b2i(c == d)
-               return true
-       }
-       // match: (Eq8 (Const8 [d]) (Const8 [c]))
-       // cond:
-       // result: (ConstBool [b2i(c == d)])
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst8 {
+               if x != mul_1.Args[0] {
                        break
                }
-               d := v_0.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst8 {
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
                        break
                }
-               c := v_1.AuxInt
-               v.reset(OpConstBool)
-               v.AuxInt = b2i(c == d)
+               s := v_0_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst8 {
+                       break
+               }
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+                       break
+               }
+               v.reset(OpLeq8U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1.AddArg(v2)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(8 - udivisible(8, c).k)
+               v0.AddArg(v3)
+               v.AddArg(v0)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(udivisible(8, c).max))
+               v.AddArg(v4)
                return true
        }
-       // match: (Eq8 (Mod8u x (Const8 [c])) (Const8 [0]))
-       // cond: x.Op != OpConst8 && udivisibleOK(8,c) && !hasSmallRotate(config)
-       // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xff])) (Const32 <typ.UInt32> [0]))
+       // match: (Eq8 (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s]))) (Const8 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
        for {
-               _ = v.Args[1]
+               x := v.Args[1]
                v_0 := v.Args[0]
-               if v_0.Op != OpMod8u {
+               if v_0.Op != OpMul8 {
                        break
                }
                _ = v_0.Args[1]
-               x := v_0.Args[0]
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpConst8 {
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpTrunc32to8 {
                        break
                }
-               c := v_0_1.AuxInt
-               v_1 := v.Args[1]
-               if v_1.Op != OpConst8 {
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpRsh32Ux64 {
                        break
                }
-               if v_1.AuxInt != 0 {
+               _ = v_0_0_0.Args[1]
+               mul := v_0_0_0.Args[0]
+               if mul.Op != OpMul32 {
                        break
                }
-               if !(x.Op != OpConst8 && udivisibleOK(8, c) && !hasSmallRotate(config)) {
+               _ = mul.Args[1]
+               mul_0 := mul.Args[0]
+               if mul_0.Op != OpZeroExt8to32 {
                        break
                }
-               v.reset(OpEq32)
-               v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
-               v1.AddArg(x)
-               v0.AddArg(v1)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = c & 0xff
-               v0.AddArg(v2)
-               v.AddArg(v0)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = 0
-               v.AddArg(v3)
-               return true
-       }
-       // match: (Eq8 (Const8 [0]) (Mod8u x (Const8 [c])))
-       // cond: x.Op != OpConst8 && udivisibleOK(8,c) && !hasSmallRotate(config)
-       // result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt8to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xff])) (Const32 <typ.UInt32> [0]))
-       for {
-               _ = v.Args[1]
-               v_0 := v.Args[0]
-               if v_0.Op != OpConst8 {
+               if x != mul_0.Args[0] {
                        break
                }
-               if v_0.AuxInt != 0 {
+               mul_1 := mul.Args[1]
+               if mul_1.Op != OpConst32 {
                        break
                }
-               v_1 := v.Args[1]
-               if v_1.Op != OpMod8u {
+               m := mul_1.AuxInt
+               v_0_0_0_1 := v_0_0_0.Args[1]
+               if v_0_0_0_1.Op != OpConst64 {
                        break
                }
-               _ = v_1.Args[1]
-               x := v_1.Args[0]
-               v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpConst8 {
+               s := v_0_0_0_1.AuxInt
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpConst8 {
                        break
                }
-               c := v_1_1.AuxInt
-               if !(x.Op != OpConst8 && udivisibleOK(8, c) && !hasSmallRotate(config)) {
+               c := v_0_1.AuxInt
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
                        break
                }
-               v.reset(OpEq32)
-               v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
-               v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
+               v.reset(OpLeq8U)
+               v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
+               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1.AddArg(v2)
                v1.AddArg(x)
                v0.AddArg(v1)
-               v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v2.AuxInt = c & 0xff
-               v0.AddArg(v2)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(8 - udivisible(8, c).k)
+               v0.AddArg(v3)
                v.AddArg(v0)
-               v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
-               v3.AuxInt = 0
-               v.AddArg(v3)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(udivisible(8, c).max))
+               v.AddArg(v4)
                return true
        }
-       // match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s])))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
-       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       // match: (Eq8 x (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
@@ -18569,11 +25031,12 @@ func rewriteValuegeneric_OpEq8_0(v *Value) bool {
                }
                c := v_1_0.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpTrunc32to8 {
+               if v_1_1.Op != OpSub8 {
                        break
                }
+               _ = v_1_1.Args[1]
                v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpRsh32Ux64 {
+               if v_1_1_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_1_1_0.Args[1]
@@ -18588,7 +25051,7 @@ func rewriteValuegeneric_OpEq8_0(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpZeroExt8to32 {
+               if mul_1.Op != OpSignExt8to32 {
                        break
                }
                if x != mul_1.Args[0] {
@@ -18599,34 +25062,58 @@ func rewriteValuegeneric_OpEq8_0(v *Value) bool {
                        break
                }
                s := v_1_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_1_1_1.Args[1]
+               v_1_1_1_0 := v_1_1_1.Args[0]
+               if v_1_1_1_0.Op != OpSignExt8to32 {
+                       break
+               }
+               if x != v_1_1_1_0.Args[0] {
+                       break
+               }
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
                        break
                }
                v.reset(OpLeq8U)
                v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
-               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
-               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(int8(sdivisible(8, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(sdivisible(8, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v3.AuxInt = int64(8 - udivisible(8, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v5.AuxInt = int64(8 - sdivisible(8, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v4.AuxInt = int64(int8(udivisible(8, c).max))
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v6.AuxInt = int64(int8(sdivisible(8, c).max))
+               v.AddArg(v6)
                return true
        }
        return false
 }
-func rewriteValuegeneric_OpEq8_10(v *Value) bool {
+func rewriteValuegeneric_OpEq8_20(v *Value) bool {
        b := v.Block
        typ := &b.Func.Config.Types
-       // match: (Eq8 x (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s])))))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
-       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       // match: (Eq8 x (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (SignExt8to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31])))))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
@@ -18641,11 +25128,12 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                c := v_1_0.AuxInt
                v_1_1 := v_1.Args[1]
-               if v_1_1.Op != OpTrunc32to8 {
+               if v_1_1.Op != OpSub8 {
                        break
                }
+               _ = v_1_1.Args[1]
                v_1_1_0 := v_1_1.Args[0]
-               if v_1_1_0.Op != OpRsh32Ux64 {
+               if v_1_1_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_1_1_0.Args[1]
@@ -18655,7 +25143,7 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpZeroExt8to32 {
+               if mul_0.Op != OpSignExt8to32 {
                        break
                }
                if x != mul_0.Args[0] {
@@ -18671,29 +25159,53 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                        break
                }
                s := v_1_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+               v_1_1_1 := v_1_1.Args[1]
+               if v_1_1_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_1_1_1.Args[1]
+               v_1_1_1_0 := v_1_1_1.Args[0]
+               if v_1_1_1_0.Op != OpSignExt8to32 {
+                       break
+               }
+               if x != v_1_1_1_0.Args[0] {
+                       break
+               }
+               v_1_1_1_1 := v_1_1_1.Args[1]
+               if v_1_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
                        break
                }
                v.reset(OpLeq8U)
                v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
-               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
-               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(int8(sdivisible(8, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(sdivisible(8, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v3.AuxInt = int64(8 - udivisible(8, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v5.AuxInt = int64(8 - sdivisible(8, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v4.AuxInt = int64(int8(udivisible(8, c).max))
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v6.AuxInt = int64(int8(sdivisible(8, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq8 x (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) (Const8 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
-       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       // match: (Eq8 x (Mul8 (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) (Const8 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
@@ -18703,11 +25215,12 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpTrunc32to8 {
+               if v_1_0.Op != OpSub8 {
                        break
                }
+               _ = v_1_0.Args[1]
                v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpRsh32Ux64 {
+               if v_1_0_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_1_0_0.Args[1]
@@ -18722,7 +25235,7 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpZeroExt8to32 {
+               if mul_1.Op != OpSignExt8to32 {
                        break
                }
                if x != mul_1.Args[0] {
@@ -18733,34 +25246,58 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                        break
                }
                s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               v_1_0_1_0 := v_1_0_1.Args[0]
+               if v_1_0_1_0.Op != OpSignExt8to32 {
+                       break
+               }
+               if x != v_1_0_1_0.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 31 {
+                       break
+               }
                v_1_1 := v_1.Args[1]
                if v_1_1.Op != OpConst8 {
                        break
                }
                c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
                        break
                }
                v.reset(OpLeq8U)
                v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
-               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
-               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(int8(sdivisible(8, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(sdivisible(8, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v3.AuxInt = int64(8 - udivisible(8, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v5.AuxInt = int64(8 - sdivisible(8, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v4.AuxInt = int64(int8(udivisible(8, c).max))
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v6.AuxInt = int64(int8(sdivisible(8, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq8 x (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s]))) (Const8 [c])))
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
-       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       // match: (Eq8 x (Mul8 (Sub8 (Rsh32x64 mul:(Mul32 (SignExt8to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) (Const8 [c])))
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
        for {
                _ = v.Args[1]
                x := v.Args[0]
@@ -18770,11 +25307,12 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                _ = v_1.Args[1]
                v_1_0 := v_1.Args[0]
-               if v_1_0.Op != OpTrunc32to8 {
+               if v_1_0.Op != OpSub8 {
                        break
                }
+               _ = v_1_0.Args[1]
                v_1_0_0 := v_1_0.Args[0]
-               if v_1_0_0.Op != OpRsh32Ux64 {
+               if v_1_0_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_1_0_0.Args[1]
@@ -18784,7 +25322,7 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpZeroExt8to32 {
+               if mul_0.Op != OpSignExt8to32 {
                        break
                }
                if x != mul_0.Args[0] {
@@ -18800,34 +25338,58 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                        break
                }
                s := v_1_0_0_1.AuxInt
+               v_1_0_1 := v_1_0.Args[1]
+               if v_1_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_1_0_1.Args[1]
+               v_1_0_1_0 := v_1_0_1.Args[0]
+               if v_1_0_1_0.Op != OpSignExt8to32 {
+                       break
+               }
+               if x != v_1_0_1_0.Args[0] {
+                       break
+               }
+               v_1_0_1_1 := v_1_0_1.Args[1]
+               if v_1_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_1_0_1_1.AuxInt != 31 {
+                       break
+               }
                v_1_1 := v_1.Args[1]
                if v_1_1.Op != OpConst8 {
                        break
                }
                c := v_1_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
                        break
                }
                v.reset(OpLeq8U)
                v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
-               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
-               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(int8(sdivisible(8, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(sdivisible(8, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v3.AuxInt = int64(8 - udivisible(8, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v5.AuxInt = int64(8 - sdivisible(8, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v4.AuxInt = int64(int8(udivisible(8, c).max))
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v6.AuxInt = int64(int8(sdivisible(8, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq8 (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s])))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
-       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       // match: (Eq8 (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
@@ -18841,11 +25403,12 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                c := v_0_0.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpTrunc32to8 {
+               if v_0_1.Op != OpSub8 {
                        break
                }
+               _ = v_0_1.Args[1]
                v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpRsh32Ux64 {
+               if v_0_1_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_0_1_0.Args[1]
@@ -18860,7 +25423,7 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpZeroExt8to32 {
+               if mul_1.Op != OpSignExt8to32 {
                        break
                }
                if x != mul_1.Args[0] {
@@ -18871,29 +25434,53 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                        break
                }
                s := v_0_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               v_0_1_1_0 := v_0_1_1.Args[0]
+               if v_0_1_1_0.Op != OpSignExt8to32 {
+                       break
+               }
+               if x != v_0_1_1_0.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
                        break
                }
                v.reset(OpLeq8U)
                v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
-               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
-               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(int8(sdivisible(8, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(sdivisible(8, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v3.AuxInt = int64(8 - udivisible(8, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v5.AuxInt = int64(8 - sdivisible(8, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v4.AuxInt = int64(int8(udivisible(8, c).max))
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v6.AuxInt = int64(int8(sdivisible(8, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq8 (Mul8 (Const8 [c]) (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s])))) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
-       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       // match: (Eq8 (Mul8 (Const8 [c]) (Sub8 (Rsh32x64 mul:(Mul32 (SignExt8to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31])))) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
@@ -18907,11 +25494,12 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                c := v_0_0.AuxInt
                v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpTrunc32to8 {
+               if v_0_1.Op != OpSub8 {
                        break
                }
+               _ = v_0_1.Args[1]
                v_0_1_0 := v_0_1.Args[0]
-               if v_0_1_0.Op != OpRsh32Ux64 {
+               if v_0_1_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_0_1_0.Args[1]
@@ -18921,7 +25509,7 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpZeroExt8to32 {
+               if mul_0.Op != OpSignExt8to32 {
                        break
                }
                if x != mul_0.Args[0] {
@@ -18937,29 +25525,53 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                        break
                }
                s := v_0_1_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+               v_0_1_1 := v_0_1.Args[1]
+               if v_0_1_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_1_1.Args[1]
+               v_0_1_1_0 := v_0_1_1.Args[0]
+               if v_0_1_1_0.Op != OpSignExt8to32 {
+                       break
+               }
+               if x != v_0_1_1_0.Args[0] {
+                       break
+               }
+               v_0_1_1_1 := v_0_1_1.Args[1]
+               if v_0_1_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_1_1_1.AuxInt != 31 {
+                       break
+               }
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
                        break
                }
                v.reset(OpLeq8U)
                v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
-               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
-               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(int8(sdivisible(8, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(sdivisible(8, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v3.AuxInt = int64(8 - udivisible(8, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v5.AuxInt = int64(8 - sdivisible(8, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v4.AuxInt = int64(int8(udivisible(8, c).max))
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v6.AuxInt = int64(int8(sdivisible(8, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq8 (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt8to32 x)) (Const64 [s]))) (Const8 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
-       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       // match: (Eq8 (Mul8 (Sub8 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt8to32 x)) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) (Const8 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
@@ -18968,11 +25580,12 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpTrunc32to8 {
+               if v_0_0.Op != OpSub8 {
                        break
                }
+               _ = v_0_0.Args[1]
                v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpRsh32Ux64 {
+               if v_0_0_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_0_0_0.Args[1]
@@ -18987,7 +25600,7 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                m := mul_0.AuxInt
                mul_1 := mul.Args[1]
-               if mul_1.Op != OpZeroExt8to32 {
+               if mul_1.Op != OpSignExt8to32 {
                        break
                }
                if x != mul_1.Args[0] {
@@ -18998,34 +25611,58 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                        break
                }
                s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               v_0_0_1_0 := v_0_0_1.Args[0]
+               if v_0_0_1_0.Op != OpSignExt8to32 {
+                       break
+               }
+               if x != v_0_0_1_0.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 31 {
+                       break
+               }
                v_0_1 := v_0.Args[1]
                if v_0_1.Op != OpConst8 {
                        break
                }
                c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
                        break
                }
                v.reset(OpLeq8U)
                v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
-               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
-               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(int8(sdivisible(8, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(sdivisible(8, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v3.AuxInt = int64(8 - udivisible(8, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v5.AuxInt = int64(8 - sdivisible(8, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v4.AuxInt = int64(int8(udivisible(8, c).max))
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v6.AuxInt = int64(int8(sdivisible(8, c).max))
+               v.AddArg(v6)
                return true
        }
-       // match: (Eq8 (Mul8 (Trunc32to8 (Rsh32Ux64 mul:(Mul32 (ZeroExt8to32 x) (Const32 [m])) (Const64 [s]))) (Const8 [c])) x)
-       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8,c).m) && s == 8+umagic(8,c).s && x.Op != OpConst8 && udivisibleOK(8,c)
-       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(8-udivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(udivisible(8,c).max))]) )
+       // match: (Eq8 (Mul8 (Sub8 (Rsh32x64 mul:(Mul32 (SignExt8to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt8to32 x) (Const64 [31]))) (Const8 [c])) x)
+       // cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8,c).m) && s == 8+smagic(8,c).s && x.Op != OpConst8 && sdivisibleOK(8,c)
+       // result: (Leq8U (RotateLeft8 <typ.UInt8> (Add8 <typ.UInt8> (Mul8 <typ.UInt8> (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).m))]) x) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).a))]) ) (Const8 <typ.UInt8> [int64(8-sdivisible(8,c).k)]) ) (Const8 <typ.UInt8> [int64(int8(sdivisible(8,c).max))]) )
        for {
                x := v.Args[1]
                v_0 := v.Args[0]
@@ -19034,11 +25671,12 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                _ = v_0.Args[1]
                v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpTrunc32to8 {
+               if v_0_0.Op != OpSub8 {
                        break
                }
+               _ = v_0_0.Args[1]
                v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpRsh32Ux64 {
+               if v_0_0_0.Op != OpRsh32x64 {
                        break
                }
                _ = v_0_0_0.Args[1]
@@ -19048,7 +25686,7 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                }
                _ = mul.Args[1]
                mul_0 := mul.Args[0]
-               if mul_0.Op != OpZeroExt8to32 {
+               if mul_0.Op != OpSignExt8to32 {
                        break
                }
                if x != mul_0.Args[0] {
@@ -19064,29 +25702,53 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
                        break
                }
                s := v_0_0_0_1.AuxInt
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpRsh32x64 {
+                       break
+               }
+               _ = v_0_0_1.Args[1]
+               v_0_0_1_0 := v_0_0_1.Args[0]
+               if v_0_0_1_0.Op != OpSignExt8to32 {
+                       break
+               }
+               if x != v_0_0_1_0.Args[0] {
+                       break
+               }
+               v_0_0_1_1 := v_0_0_1.Args[1]
+               if v_0_0_1_1.Op != OpConst64 {
+                       break
+               }
+               if v_0_0_1_1.AuxInt != 31 {
+                       break
+               }
                v_0_1 := v_0.Args[1]
                if v_0_1.Op != OpConst8 {
                        break
                }
                c := v_0_1.AuxInt
-               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<8+umagic(8, c).m) && s == 8+umagic(8, c).s && x.Op != OpConst8 && udivisibleOK(8, c)) {
+               if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(8, c).m) && s == 8+smagic(8, c).s && x.Op != OpConst8 && sdivisibleOK(8, c)) {
                        break
                }
                v.reset(OpLeq8U)
                v0 := b.NewValue0(v.Pos, OpRotateLeft8, typ.UInt8)
-               v1 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
-               v2 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v2.AuxInt = int64(int8(udivisible(8, c).m))
+               v1 := b.NewValue0(v.Pos, OpAdd8, typ.UInt8)
+               v2 := b.NewValue0(v.Pos, OpMul8, typ.UInt8)
+               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v3.AuxInt = int64(int8(sdivisible(8, c).m))
+               v2.AddArg(v3)
+               v2.AddArg(x)
                v1.AddArg(v2)
-               v1.AddArg(x)
+               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v4.AuxInt = int64(int8(sdivisible(8, c).a))
+               v1.AddArg(v4)
                v0.AddArg(v1)
-               v3 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v3.AuxInt = int64(8 - udivisible(8, c).k)
-               v0.AddArg(v3)
+               v5 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v5.AuxInt = int64(8 - sdivisible(8, c).k)
+               v0.AddArg(v5)
                v.AddArg(v0)
-               v4 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
-               v4.AuxInt = int64(int8(udivisible(8, c).max))
-               v.AddArg(v4)
+               v6 := b.NewValue0(v.Pos, OpConst8, typ.UInt8)
+               v6.AuxInt = int64(int8(sdivisible(8, c).max))
+               v.AddArg(v6)
                return true
        }
        // match: (Eq8 n (Lsh8x64 (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
@@ -19375,7 +26037,7 @@ func rewriteValuegeneric_OpEq8_10(v *Value) bool {
        }
        return false
 }
-func rewriteValuegeneric_OpEq8_20(v *Value) bool {
+func rewriteValuegeneric_OpEq8_30(v *Value) bool {
        b := v.Block
        typ := &b.Func.Config.Types
        // match: (Eq8 (Lsh8x64 (Rsh8x64 (Add8 <t> (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
@@ -25308,7 +31970,7 @@ func rewriteValuegeneric_OpMod8u_0(v *Value) bool {
                return true
        }
        // match: (Mod8u <t> x (Const8 [c]))
-       // cond: x.Op != OpConst8 && c > 0 && umagicOK(8,c)
+       // cond: x.Op != OpConst8 && c > 0 && umagicOK(8, c)
        // result: (Sub8 x (Mul8 <t> (Div8u <t> x (Const8 <t> [c])) (Const8 <t> [c])))
        for {
                t := v.Type
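
The sdivisible patterns above collapse the expanded signed magic-division form of x/c (matched only after CSE, and only when the quotient has no other use, hence the mul.Uses == 1 condition) into a multiply, an add, an optional rotate and one unsigned compare: x%c == 0 iff (x*m + a) rotated left by n-k (equivalently right by k) is <= max. For an odd divisor k is 0 and the rotate drops out. The sketch below is a standalone check of that identity for int8 and c = 5; the constants are derived by hand from the Granlund-Montgomery / Hacker's Delight method and are illustrative, not necessarily the exact values sdivisible(8, 5) computes.

package main

import "fmt"

// Exhaustively checks the odd-divisor form of the signed divisibility
// identity used by the new rules: x%c == 0  iff  uint8(x*m + a) <= max.
func main() {
	const (
		c   = 5   // odd divisor, so k = 0 and the rotate is not needed
		m   = 205 // multiplicative inverse of 5 mod 2^8: 5*205 == 4*256 + 1
		a   = 25  // floor(2^7/5): shifts the signed quotient range to start at 0
		max = 50  // floor(127/5) + floor(128/5): largest shifted quotient
	)
	for x := -128; x <= 127; x++ {
		got := uint8(x)*m+a <= max
		want := x%c == 0
		if got != want {
			fmt.Println("mismatch at", x)
			return
		}
	}
	fmt.Println("identity holds for every int8 value")
}

The 64-bit analogues of these constants are what the codegen test in arithmetic.go below looks for in the emitted assembly.
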
index 16158b2f4bc870ae1acf6fe725f8aae9c88abaaa..dcbc6d3f97655ff84c431491c6694ed08abb5408 100644 (file)
@@ -216,14 +216,14 @@ func ConstMods(n1 uint, n2 int) (uint, int) {
 }
 
 // Check that divisibility checks x%c==0 are converted to MULs and rotates
-func Divisible(n uint) (even, odd bool) {
+func Divisible(n1 uint, n2 int) (bool, bool, bool, bool) {
        // amd64:"MOVQ\t[$]-6148914691236517205","IMULQ","ROLQ\t[$]63",-"DIVQ"
        // 386:"IMUL3L\t[$]-1431655765","ROLL\t[$]31",-"DIVQ"
        // arm64:"MOVD\t[$]-6148914691236517205","MUL","ROR",-"DIV"
        // arm:"MUL","CMP\t[$]715827882",-".*udiv"
        // ppc64:"MULLD","ROTL\t[$]63"
        // ppc64le:"MULLD","ROTL\t[$]63"
-       even = n%6 == 0
+       evenU := n1%6 == 0
 
        // amd64:"MOVQ\t[$]-8737931403336103397","IMULQ",-"ROLQ",-"DIVQ"
        // 386:"IMUL3L\t[$]678152731",-"ROLL",-"DIVQ"
@@ -231,8 +231,25 @@ func Divisible(n uint) (even, odd bool) {
        // arm:"MUL","CMP\t[$]226050910",-".*udiv"
        // ppc64:"MULLD",-"ROTL"
        // ppc64le:"MULLD",-"ROTL"
-       odd = n%19 == 0
-       return
+       oddU := n1%19 == 0
+
+       // amd64:"IMULQ","ADD","ROLQ\t[$]63",-"DIVQ"
+       // 386:"IMUL3L\t[$]-1431655765","ADDL\t[$]715827882","ROLL\t[$]31",-"DIVQ"
+       // arm64:"MUL","ADD\t[$]3074457345618258602","ROR",-"DIV"
+       // arm:"MUL","ADD\t[$]715827882",-".*udiv"
+       // ppc64:"MULLD","ADD","ROTL\t[$]63"
+       // ppc64le:"MULLD","ADD","ROTL\t[$]63"
+       evenS := n2%6 == 0
+
+       // amd64:"IMULQ","ADD",-"ROLQ",-"DIVQ"
+       // 386:"IMUL3L\t[$]678152731","ADDL\t[$]113025455",-"ROLL",-"DIVQ"
+       // arm64:"MUL","ADD\t[$]485440633518672410",-"ROR",-"DIV"
+       // arm:"MUL","ADD\t[$]113025455",-".*udiv"
+       // ppc64:"MULLD","ADD",-"ROTL"
+       // ppc64le:"MULLD","ADD",-"ROTL"
+       oddS := n2%19 == 0
+
+       return evenU, oddU, evenS, oddS
 }
 
 // Check that fix-up code is not generated for divisions where it has been proven that
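
For an even divisor c = c0 * 2^k the same identity needs both the add and the rotate: m is the inverse of the odd part c0, the offset a keeps the k low bits clear, so the low k bits of x*m + a are zero exactly when x is divisible by 2^k, and rotating right by k moves them to the top of the word, where the single unsigned compare against max rejects any value with a nonzero low part. That is why the even signed case in Divisible above expects an ADD plus ROLL $31 / ROLQ $63, while the odd case expects an ADD but no rotate. Below is a standalone sketch of the even case for int16 and c = 6, again with hand-derived illustrative constants rather than the compiler's sdivisible output.

package main

import (
	"fmt"
	"math/bits"
)

// Exhaustively checks the even-divisor form of the signed divisibility
// identity: x%c == 0  iff  rotr16(x*m + a, k) <= max.
func main() {
	const (
		c   = 6     // 6 = 3 * 2^1, so the odd part is 3 and k = 1
		m   = 43691 // multiplicative inverse of 3 mod 2^16: 3*43691 == 1<<17 + 1
		a   = 10922 // 2 * floor(2^15/6): shifts the signed quotient range to start at 0
		k   = 1     // trailing zero count of the divisor
		max = 10922 // floor(32767/6) + floor(32768/6): largest shifted quotient
	)
	for x := -32768; x <= 32767; x++ {
		// Rotate left by n-k, as the rewrite result does; this is a rotate right by k.
		got := bits.RotateLeft16(uint16(x)*m+a, 16-k) <= max
		want := x%c == 0
		if got != want {
			fmt.Println("mismatch at", x)
			return
		}
	}
	fmt.Println("identity holds for every int16 value")
}

The 32-bit constants in the codegen comments above (IMUL3L with -1431655765, ADDL with 715827882, ROLL $31) are the same quantities for a 32-bit word.
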