cmd/compile: optimize integer-in-range checks
author    Michael Munday <mike.munday@ibm.com>
Mon, 20 May 2019 18:55:56 +0000 (11:55 -0700)
committer Michael Munday <mike.munday@ibm.com>
Tue, 3 Mar 2020 14:30:26 +0000 (14:30 +0000)
This CL incorporates code from CL 201206 by Josh Bleecher Snyder
(thanks Josh).

This CL restores the integer-in-range optimizations in the SSA
backend. The fuse pass is enhanced to detect inequalities that
could be merged and to fuse their associated blocks, while the
generic rewrite rules then optimize the combined condition into a
single unsigned comparison.

For example, the inequality `x >= 0 && x < 10` will now be optimized
to `unsigned(x) < 10`.
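
As a minimal, hypothetical sketch of the source-level pattern this
affects (the function name and constants are invented for
illustration; the real codegen tests live in test/codegen/fuse.go,
added below):

    package main

    import "fmt"

    // inRange reports whether x lies in the half-open interval [0, 10).
    // Under this CL the two signed comparisons in the return expression
    // are fused in the SSA backend and rewritten into a single unsigned
    // comparison, roughly uint(x) < 10.
    func inRange(x int) bool {
        return x >= 0 && x < 10
    }

    func main() {
        fmt.Println(inRange(3), inRange(-1), inRange(10)) // true false false
    }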

Overall this change has a fairly positive impact on binary sizes.

name                      old time/op       new time/op       delta
Template                        192ms ± 1%        192ms ± 1%    ~     (p=0.757 n=17+18)
Unicode                        76.6ms ± 2%       76.5ms ± 2%    ~     (p=0.603 n=19+19)
GoTypes                         694ms ± 1%        693ms ± 1%    ~     (p=0.569 n=19+20)
Compiler                        3.26s ± 0%        3.27s ± 0%  +0.25%  (p=0.000 n=20+20)
SSA                             7.41s ± 0%        7.49s ± 0%  +1.10%  (p=0.000 n=17+19)
Flate                           120ms ± 1%        120ms ± 1%  +0.38%  (p=0.003 n=19+19)
GoParser                        152ms ± 1%        152ms ± 1%    ~     (p=0.061 n=17+19)
Reflect                         422ms ± 1%        425ms ± 2%  +0.76%  (p=0.001 n=18+20)
Tar                             167ms ± 1%        167ms ± 0%    ~     (p=0.730 n=18+19)
XML                             233ms ± 4%        231ms ± 1%    ~     (p=0.752 n=20+17)
LinkCompiler                    927ms ± 8%        928ms ± 8%    ~     (p=0.857 n=19+20)
ExternalLinkCompiler            1.81s ± 2%        1.81s ± 2%    ~     (p=0.513 n=19+20)
LinkWithoutDebugCompiler        556ms ±10%        583ms ±13%  +4.95%  (p=0.007 n=20+20)
[Geo mean]                      478ms             481ms       +0.52%

name                      old user-time/op  new user-time/op  delta
Template                        270ms ± 5%        269ms ± 7%    ~     (p=0.925 n=20+20)
Unicode                         134ms ± 7%        131ms ±14%    ~     (p=0.593 n=18+20)
GoTypes                         981ms ± 3%        987ms ± 2%  +0.63%  (p=0.049 n=19+18)
Compiler                        4.50s ± 2%        4.50s ± 1%    ~     (p=0.588 n=19+20)
SSA                             10.6s ± 2%        10.6s ± 1%    ~     (p=0.141 n=20+19)
Flate                           164ms ± 8%        165ms ±10%    ~     (p=0.738 n=20+20)
GoParser                        202ms ± 5%        203ms ± 6%    ~     (p=0.820 n=20+20)
Reflect                         587ms ± 6%        597ms ± 3%    ~     (p=0.087 n=20+18)
Tar                             230ms ± 6%        228ms ± 8%    ~     (p=0.569 n=19+20)
XML                             311ms ± 6%        314ms ± 5%    ~     (p=0.369 n=20+20)
LinkCompiler                    878ms ± 8%        887ms ± 7%    ~     (p=0.289 n=20+20)
ExternalLinkCompiler            1.60s ± 7%        1.60s ± 7%    ~     (p=0.820 n=20+20)
LinkWithoutDebugCompiler        498ms ±12%        489ms ±11%    ~     (p=0.398 n=20+20)
[Geo mean]                      611ms             611ms       +0.05%

name                      old alloc/op      new alloc/op      delta
Template                       36.1MB ± 0%       36.0MB ± 0%  -0.32%  (p=0.000 n=20+20)
Unicode                        28.3MB ± 0%       28.3MB ± 0%  -0.03%  (p=0.000 n=19+20)
GoTypes                         121MB ± 0%        121MB ± 0%    ~     (p=0.226 n=16+20)
Compiler                        563MB ± 0%        563MB ± 0%    ~     (p=0.166 n=20+19)
SSA                            1.32GB ± 0%       1.33GB ± 0%  +0.88%  (p=0.000 n=20+19)
Flate                          22.7MB ± 0%       22.7MB ± 0%  -0.02%  (p=0.033 n=19+20)
GoParser                       27.9MB ± 0%       27.9MB ± 0%  -0.02%  (p=0.001 n=20+20)
Reflect                        78.3MB ± 0%       78.2MB ± 0%  -0.01%  (p=0.019 n=20+20)
Tar                            34.0MB ± 0%       34.0MB ± 0%  -0.04%  (p=0.000 n=20+20)
XML                            43.9MB ± 0%       43.9MB ± 0%  -0.07%  (p=0.000 n=20+19)
LinkCompiler                    205MB ± 0%        205MB ± 0%  +0.44%  (p=0.000 n=20+18)
ExternalLinkCompiler            223MB ± 0%        223MB ± 0%  +0.03%  (p=0.000 n=20+20)
LinkWithoutDebugCompiler        139MB ± 0%        142MB ± 0%  +1.75%  (p=0.000 n=20+20)
[Geo mean]                     93.7MB            93.9MB       +0.20%

name                      old allocs/op     new allocs/op     delta
Template                         363k ± 0%         361k ± 0%  -0.58%  (p=0.000 n=20+19)
Unicode                          329k ± 0%         329k ± 0%  -0.06%  (p=0.000 n=19+20)
GoTypes                         1.28M ± 0%        1.28M ± 0%  -0.01%  (p=0.000 n=20+20)
Compiler                        5.40M ± 0%        5.40M ± 0%  -0.01%  (p=0.000 n=20+20)
SSA                             12.7M ± 0%        12.8M ± 0%  +0.80%  (p=0.000 n=20+20)
Flate                            228k ± 0%         228k ± 0%    ~     (p=0.194 n=20+20)
GoParser                         295k ± 0%         295k ± 0%  -0.04%  (p=0.000 n=20+20)
Reflect                          949k ± 0%         949k ± 0%  -0.01%  (p=0.000 n=20+20)
Tar                              337k ± 0%         337k ± 0%  -0.06%  (p=0.000 n=20+20)
XML                              418k ± 0%         417k ± 0%  -0.17%  (p=0.000 n=20+20)
LinkCompiler                     553k ± 0%         554k ± 0%  +0.22%  (p=0.000 n=20+19)
ExternalLinkCompiler            1.52M ± 0%        1.52M ± 0%  +0.27%  (p=0.000 n=20+20)
LinkWithoutDebugCompiler         186k ± 0%         186k ± 0%  +0.06%  (p=0.000 n=20+20)
[Geo mean]                       723k              723k       +0.03%

name                      old text-bytes    new text-bytes    delta
HelloSize                       828kB ± 0%        828kB ± 0%  -0.01%  (p=0.000 n=20+20)

name                      old data-bytes    new data-bytes    delta
HelloSize                      13.4kB ± 0%       13.4kB ± 0%    ~     (all equal)

name                      old bss-bytes     new bss-bytes     delta
HelloSize                       180kB ± 0%        180kB ± 0%    ~     (all equal)

name                      old exe-bytes     new exe-bytes     delta
HelloSize                      1.23MB ± 0%       1.23MB ± 0%  -0.33%  (p=0.000 n=20+20)

file      before    after     Δ       %
addr2line 4320075   4311883   -8192   -0.190%
asm       5191932   5187836   -4096   -0.079%
buildid   2835338   2831242   -4096   -0.144%
compile   20531717  20569099  +37382  +0.182%
cover     5322511   5318415   -4096   -0.077%
dist      3723749   3719653   -4096   -0.110%
doc       4743515   4739419   -4096   -0.086%
fix       3413960   3409864   -4096   -0.120%
link      6690119   6686023   -4096   -0.061%
nm        4269616   4265520   -4096   -0.096%
pprof     14942189  14929901  -12288  -0.082%
trace     11807164  11790780  -16384  -0.139%
vet       8384104   8388200   +4096   +0.049%
go        15339076  15334980  -4096   -0.027%
total     132258257 132226007 -32250  -0.024%

Fixes #30645.

Change-Id: If551ac5996097f3685870d083151b5843170aab0
Reviewed-on: https://go-review.googlesource.com/c/go/+/165998
Run-TryBot: Michael Munday <mike.munday@ibm.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
src/cmd/compile/internal/ssa/branchelim.go
src/cmd/compile/internal/ssa/compile.go
src/cmd/compile/internal/ssa/fuse.go
src/cmd/compile/internal/ssa/fuse_comparisons.go [new file with mode: 0644]
src/cmd/compile/internal/ssa/fuse_test.go
src/cmd/compile/internal/ssa/gen/generic.rules
src/cmd/compile/internal/ssa/nilcheck_test.go
src/cmd/compile/internal/ssa/rewritegeneric.go
test/codegen/fuse.go [new file with mode: 0644]

index c7c3f8c15f1c7d87c36aea22370b2c737f825ead..4f9fd8e22e1a0f4e49de9c63f50ed9e3f7e091f9 100644 (file)
@@ -148,7 +148,7 @@ func elimIf(f *Func, loadAddr *sparseSet, dom *Block) bool {
        // the number of useless instructions executed.
        const maxfuseinsts = 2
 
-       if len(simple.Values) > maxfuseinsts || !allTrivial(simple) {
+       if len(simple.Values) > maxfuseinsts || !canSpeculativelyExecute(simple) {
                return false
        }
 
@@ -305,10 +305,10 @@ func elimIfElse(f *Func, loadAddr *sparseSet, b *Block) bool {
                return false
        }
        yes, no := b.Succs[0].Block(), b.Succs[1].Block()
-       if !isLeafPlain(yes) || len(yes.Values) > 1 || !allTrivial(yes) {
+       if !isLeafPlain(yes) || len(yes.Values) > 1 || !canSpeculativelyExecute(yes) {
                return false
        }
-       if !isLeafPlain(no) || len(no.Values) > 1 || !allTrivial(no) {
+       if !isLeafPlain(no) || len(no.Values) > 1 || !canSpeculativelyExecute(no) {
                return false
        }
        if b.Succs[0].Block().Succs[0].Block() != b.Succs[1].Block().Succs[0].Block() {
@@ -415,7 +415,15 @@ func shouldElimIfElse(no, yes, post *Block, arch string) bool {
        }
 }
 
-func allTrivial(b *Block) bool {
+// canSpeculativelyExecute reports whether every value in the block can
+// be evaluated without causing any observable side effects (memory
+// accesses, panics and so on) except for execution time changes. It
+// also ensures that the block does not contain any phis which we can't
+// speculatively execute.
+// Warning: this function cannot currently detect values that represent
+// instructions the execution of which need to be guarded with CPU
+// hardware feature checks. See issue #34950.
+func canSpeculativelyExecute(b *Block) bool {
        // don't fuse memory ops, Phi ops, divides (can panic),
        // or anything else with side-effects
        for _, v := range b.Values {
index 448b1cf8147f7c33758697131b2d19a1558e1c27..2de4e133bfbfbcd2ddabb58fcd05ccdf297e1c9f 100644 (file)
@@ -428,7 +428,7 @@ var passes = [...]pass{
        {name: "gcse deadcode", fn: deadcode, required: true}, // clean out after cse and phiopt
        {name: "nilcheckelim", fn: nilcheckelim},
        {name: "prove", fn: prove},
-       {name: "fuse plain", fn: fusePlain},
+       {name: "early fuse", fn: fuseEarly},
        {name: "decompose builtin", fn: decomposeBuiltIn, required: true},
        {name: "softfloat", fn: softfloat, required: true},
        {name: "late opt", fn: opt, required: true}, // TODO: split required rules and optimizing rules
@@ -436,7 +436,7 @@ var passes = [...]pass{
        {name: "generic deadcode", fn: deadcode, required: true}, // remove dead stores, which otherwise mess up store chain
        {name: "check bce", fn: checkbce},
        {name: "branchelim", fn: branchelim},
-       {name: "fuse", fn: fuseAll},
+       {name: "late fuse", fn: fuseLate},
        {name: "dse", fn: dse},
        {name: "writebarrier", fn: writebarrier, required: true}, // expand write barrier ops
        {name: "insert resched checks", fn: insertLoopReschedChecks,
@@ -491,7 +491,7 @@ var passOrder = [...]constraint{
        // allow deadcode to clean up after nilcheckelim
        {"nilcheckelim", "generic deadcode"},
        // nilcheckelim generates sequences of plain basic blocks
-       {"nilcheckelim", "fuse"},
+       {"nilcheckelim", "late fuse"},
        // nilcheckelim relies on opt to rewrite user nil checks
        {"opt", "nilcheckelim"},
        // tighten will be most effective when as many values have been removed as possible
index c2d4051da832c691847c7cb7f23769eea0dd8e03..f80ec0dc5d503c00e61defe06da26ba2d82fa3ee 100644 (file)
@@ -8,18 +8,18 @@ import (
        "cmd/internal/src"
 )
 
-// fusePlain runs fuse(f, fuseTypePlain).
-func fusePlain(f *Func) { fuse(f, fuseTypePlain) }
+// fuseEarly runs fuse(f, fuseTypePlain|fuseTypeIntInRange).
+func fuseEarly(f *Func) { fuse(f, fuseTypePlain|fuseTypeIntInRange) }
 
-// fuseAll runs fuse(f, fuseTypeAll).
-func fuseAll(f *Func) { fuse(f, fuseTypeAll) }
+// fuseLate runs fuse(f, fuseTypePlain|fuseTypeIf).
+func fuseLate(f *Func) { fuse(f, fuseTypePlain|fuseTypeIf) }
 
 type fuseType uint8
 
 const (
        fuseTypePlain fuseType = 1 << iota
        fuseTypeIf
-       fuseTypeAll = fuseTypePlain | fuseTypeIf
+       fuseTypeIntInRange
 )
 
 // fuse simplifies control flow by joining basic blocks.
@@ -32,6 +32,9 @@ func fuse(f *Func, typ fuseType) {
                        if typ&fuseTypeIf != 0 {
                                changed = fuseBlockIf(b) || changed
                        }
+                       if typ&fuseTypeIntInRange != 0 {
+                               changed = fuseIntegerComparisons(b) || changed
+                       }
                        if typ&fuseTypePlain != 0 {
                                changed = fuseBlockPlain(b) || changed
                        }
diff --git a/src/cmd/compile/internal/ssa/fuse_comparisons.go b/src/cmd/compile/internal/ssa/fuse_comparisons.go
new file mode 100644 (file)
index 0000000..d843fc3
--- /dev/null
@@ -0,0 +1,157 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package ssa
+
+// fuseIntegerComparisons optimizes inequalities such as '1 <= x && x < 5',
+// which can be optimized to 'unsigned(x-1) < 4'.
+//
+// Look for branch structure like:
+//
+//   p
+//   |\
+//   | b
+//   |/ \
+//   s0 s1
+//
+// In our example, p has control '1 <= x', b has control 'x < 5',
+// and s0 and s1 are the if and else results of the comparison.
+//
+// This will be optimized into:
+//
+//   p
+//    \
+//     b
+//    / \
+//   s0 s1
+//
+// where b has the combined control value 'unsigned(x-1) < 4'.
+// Later passes will then fuse p and b.
+func fuseIntegerComparisons(b *Block) bool {
+       if len(b.Preds) != 1 {
+               return false
+       }
+       p := b.Preds[0].Block()
+       if b.Kind != BlockIf || p.Kind != BlockIf {
+               return false
+       }
+
+       // Don't merge control values if b is likely to be bypassed anyway.
+       if p.Likely == BranchLikely && p.Succs[0].Block() != b {
+               return false
+       }
+       if p.Likely == BranchUnlikely && p.Succs[1].Block() != b {
+               return false
+       }
+
+       // Check if the control values combine to make an integer inequality that
+       // can be further optimized later.
+       bc := b.Controls[0]
+       pc := p.Controls[0]
+       if !areMergeableInequalities(bc, pc) {
+               return false
+       }
+
+       // If the first (true) successors match then we have a disjunction (||).
+       // If the second (false) successors match then we have a conjunction (&&).
+       for i, op := range [2]Op{OpOrB, OpAndB} {
+               if p.Succs[i].Block() != b.Succs[i].Block() {
+                       continue
+               }
+
+               // TODO(mundaym): should we also check the cost of executing b?
+               // Currently we might speculatively execute b even if b contains
+               // a lot of instructions. We could just check that len(b.Values)
+               // is lower than a fixed amount. Bear in mind however that the
+               // other optimization passes might yet reduce the cost of b
+               // significantly so we shouldn't be overly conservative.
+               if !canSpeculativelyExecute(b) {
+                       return false
+               }
+
+               // Logically combine the control values for p and b.
+               v := b.NewValue0(bc.Pos, op, bc.Type)
+               v.AddArg(pc)
+               v.AddArg(bc)
+
+               // Set the combined control value as the control value for b.
+               b.SetControl(v)
+
+               // Modify p so that it jumps directly to b.
+               p.removeEdge(i)
+               p.Kind = BlockPlain
+               p.Likely = BranchUnknown
+               p.ResetControls()
+
+               return true
+       }
+
+       // TODO: could negate condition(s) to merge controls.
+       return false
+}
+
+// getConstIntArgIndex returns the index of the first argument that is a
+// constant integer or -1 if no such argument exists.
+func getConstIntArgIndex(v *Value) int {
+       for i, a := range v.Args {
+               switch a.Op {
+               case OpConst8, OpConst16, OpConst32, OpConst64:
+                       return i
+               }
+       }
+       return -1
+}
+
+// isSignedInequality reports whether op represents the inequality < or ≤
+// in the signed domain.
+func isSignedInequality(v *Value) bool {
+       switch v.Op {
+       case OpLess64, OpLess32, OpLess16, OpLess8,
+               OpLeq64, OpLeq32, OpLeq16, OpLeq8:
+               return true
+       }
+       return false
+}
+
+// isUnsignedInequality reports whether op represents the inequality < or ≤
+// in the unsigned domain.
+func isUnsignedInequality(v *Value) bool {
+       switch v.Op {
+       case OpLess64U, OpLess32U, OpLess16U, OpLess8U,
+               OpLeq64U, OpLeq32U, OpLeq16U, OpLeq8U:
+               return true
+       }
+       return false
+}
+
+func areMergeableInequalities(x, y *Value) bool {
+       // We need both inequalities to be either in the signed or unsigned domain.
+       // TODO(mundaym): it would also be good to merge when we have an Eq op that
+       // could be transformed into a Less/Leq. For example in the unsigned
+       // domain 'x == 0 || 3 < x' is equivalent to 'x <= 0 || 3 < x'
+       inequalityChecks := [...]func(*Value) bool{
+               isSignedInequality,
+               isUnsignedInequality,
+       }
+       for _, f := range inequalityChecks {
+               if !f(x) || !f(y) {
+                       continue
+               }
+
+               // Check that both inequalities are comparisons with constants.
+               xi := getConstIntArgIndex(x)
+               if xi < 0 {
+                       return false
+               }
+               yi := getConstIntArgIndex(y)
+               if yi < 0 {
+                       return false
+               }
+
+               // Check that the non-constant arguments to the inequalities
+               // are the same.
+               return x.Args[xi^1] == y.Args[yi^1]
+       }
+       return false
+}
index 77d2aad5c114485f085fe211e2c4b156b4119dc5..5fe3da93ca018689f32e2b85226fbbfd5ceb5262 100644 (file)
@@ -26,7 +26,7 @@ func TestFuseEliminatesOneBranch(t *testing.T) {
                        Exit("mem")))
 
        CheckFunc(fun.f)
-       fuseAll(fun.f)
+       fuseLate(fun.f)
 
        for _, b := range fun.f.Blocks {
                if b == fun.blocks["then"] && b.Kind != BlockInvalid {
@@ -56,7 +56,7 @@ func TestFuseEliminatesBothBranches(t *testing.T) {
                        Exit("mem")))
 
        CheckFunc(fun.f)
-       fuseAll(fun.f)
+       fuseLate(fun.f)
 
        for _, b := range fun.f.Blocks {
                if b == fun.blocks["then"] && b.Kind != BlockInvalid {
@@ -90,7 +90,7 @@ func TestFuseHandlesPhis(t *testing.T) {
                        Exit("mem")))
 
        CheckFunc(fun.f)
-       fuseAll(fun.f)
+       fuseLate(fun.f)
 
        for _, b := range fun.f.Blocks {
                if b == fun.blocks["then"] && b.Kind != BlockInvalid {
@@ -122,7 +122,7 @@ func TestFuseEliminatesEmptyBlocks(t *testing.T) {
                ))
 
        CheckFunc(fun.f)
-       fuseAll(fun.f)
+       fuseLate(fun.f)
 
        for k, b := range fun.blocks {
                if k[:1] == "z" && b.Kind != BlockInvalid {
@@ -153,7 +153,7 @@ func TestFuseSideEffects(t *testing.T) {
                        Goto("loop")))
 
        CheckFunc(fun.f)
-       fuseAll(fun.f)
+       fuseLate(fun.f)
 
        for _, b := range fun.f.Blocks {
                if b == fun.blocks["then"] && b.Kind == BlockInvalid {
@@ -196,7 +196,7 @@ func BenchmarkFuse(b *testing.B) {
                        b.ResetTimer()
                        for i := 0; i < b.N; i++ {
                                fun := c.Fun("entry", blocks...)
-                               fuseAll(fun.f)
+                               fuseLate(fun.f)
                        }
                })
        }
index 54c5ed646f5671a29528c38f1c8e73d9e1e8025d..bc16f5a7af11f40e3b0d6d51036fff6597237996 100644 (file)
 (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) -> (Neq16 (Const16 <t> [int64(int16(c-d))]) x)
 (Neq8  (Const8  <t> [c]) (Add8  (Const8  <t> [d]) x)) -> (Neq8 (Const8 <t> [int64(int8(c-d))]) x)
 
+// signed integer range: ( c <= x && x (<|<=) d ) -> ( unsigned(x-c) (<|<=) unsigned(d-c) )
+(AndB (Leq64 (Const64 [c]) x) ((Less|Leq)64 x (Const64 [d]))) && d >= c -> ((Less|Leq)64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
+(AndB (Leq32 (Const32 [c]) x) ((Less|Leq)32 x (Const32 [d]))) && d >= c -> ((Less|Leq)32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
+(AndB (Leq16 (Const16 [c]) x) ((Less|Leq)16 x (Const16 [d]))) && d >= c -> ((Less|Leq)16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
+(AndB (Leq8  (Const8  [c]) x) ((Less|Leq)8  x (Const8  [d]))) && d >= c -> ((Less|Leq)8U  (Sub8  <x.Type> x (Const8  <x.Type> [c])) (Const8  <x.Type> [d-c]))
+
+// signed integer range: ( c < x && x (<|<=) d ) -> ( unsigned(x-(c+1)) (<|<=) unsigned(d-(c+1)) )
+(AndB (Less64 (Const64 [c]) x) ((Less|Leq)64 x (Const64 [d]))) && d >= c+1 && int64(c+1) > int64(c) -> ((Less|Leq)64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
+(AndB (Less32 (Const32 [c]) x) ((Less|Leq)32 x (Const32 [d]))) && d >= c+1 && int32(c+1) > int32(c) -> ((Less|Leq)32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
+(AndB (Less16 (Const16 [c]) x) ((Less|Leq)16 x (Const16 [d]))) && d >= c+1 && int16(c+1) > int16(c) -> ((Less|Leq)16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
+(AndB (Less8  (Const8  [c]) x) ((Less|Leq)8  x (Const8  [d]))) && d >= c+1 && int8(c+1)  > int8(c)  -> ((Less|Leq)8U  (Sub8  <x.Type> x (Const8  <x.Type> [c+1])) (Const8  <x.Type> [d-c-1]))
+
+// unsigned integer range: ( c <= x && x (<|<=) d ) -> ( x-c (<|<=) d-c )
+(AndB (Leq64U (Const64 [c]) x) ((Less|Leq)64U x (Const64 [d]))) && uint64(d) >= uint64(c) -> ((Less|Leq)64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
+(AndB (Leq32U (Const32 [c]) x) ((Less|Leq)32U x (Const32 [d]))) && uint32(d) >= uint32(c) -> ((Less|Leq)32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [int64(int32(d-c))]))
+(AndB (Leq16U (Const16 [c]) x) ((Less|Leq)16U x (Const16 [d]))) && uint16(d) >= uint16(c) -> ((Less|Leq)16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [int64(int16(d-c))]))
+(AndB (Leq8U  (Const8  [c]) x) ((Less|Leq)8U  x (Const8  [d]))) && uint8(d)  >= uint8(c)  -> ((Less|Leq)8U  (Sub8  <x.Type> x (Const8  <x.Type> [c])) (Const8  <x.Type> [int64(int8(d-c))]))
+
+// unsigned integer range: ( c < x && x (<|<=) d ) -> ( x-(c+1) (<|<=) d-(c+1) )
+(AndB (Less64U (Const64 [c]) x) ((Less|Leq)64U x (Const64 [d]))) && uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c) -> ((Less|Leq)64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
+(AndB (Less32U (Const32 [c]) x) ((Less|Leq)32U x (Const32 [d]))) && uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c) -> ((Less|Leq)32U (Sub32 <x.Type> x (Const32 <x.Type> [int64(int32(c+1))])) (Const32 <x.Type> [int64(int32(d-c-1))]))
+(AndB (Less16U (Const16 [c]) x) ((Less|Leq)16U x (Const16 [d]))) && uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c) -> ((Less|Leq)16U (Sub16 <x.Type> x (Const16 <x.Type> [int64(int16(c+1))])) (Const16 <x.Type> [int64(int16(d-c-1))]))
+(AndB (Less8U  (Const8  [c]) x) ((Less|Leq)8U  x (Const8  [d]))) && uint8(d)  >= uint8(c+1)  && uint8(c+1)  > uint8(c)  -> ((Less|Leq)8U  (Sub8  <x.Type> x (Const8  <x.Type> [int64(int8(c+1))]))  (Const8  <x.Type> [int64(int8(d-c-1))]))
+
+// signed integer range: ( c (<|<=) x || x < d ) -> ( unsigned(c-d) (<|<=) unsigned(x-d) )
+(OrB ((Less|Leq)64 (Const64 [c]) x) (Less64 x (Const64 [d]))) && c >= d -> ((Less|Leq)64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
+(OrB ((Less|Leq)32 (Const32 [c]) x) (Less32 x (Const32 [d]))) && c >= d -> ((Less|Leq)32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
+(OrB ((Less|Leq)16 (Const16 [c]) x) (Less16 x (Const16 [d]))) && c >= d -> ((Less|Leq)16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
+(OrB ((Less|Leq)8  (Const8  [c]) x) (Less8  x (Const8  [d]))) && c >= d -> ((Less|Leq)8U  (Const8  <x.Type> [c-d]) (Sub8  <x.Type> x (Const8  <x.Type> [d])))
+
+// signed integer range: ( c (<|<=) x || x <= d ) -> ( unsigned(c-(d+1)) (<|<=) unsigned(x-(d+1)) )
+(OrB ((Less|Leq)64 (Const64 [c]) x) (Leq64 x (Const64 [d]))) && c >= d+1 && int64(d+1) > int64(d) -> ((Less|Leq)64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
+(OrB ((Less|Leq)32 (Const32 [c]) x) (Leq32 x (Const32 [d]))) && c >= d+1 && int32(d+1) > int32(d) -> ((Less|Leq)32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
+(OrB ((Less|Leq)16 (Const16 [c]) x) (Leq16 x (Const16 [d]))) && c >= d+1 && int16(d+1) > int16(d) -> ((Less|Leq)16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
+(OrB ((Less|Leq)8  (Const8  [c]) x) (Leq8  x (Const8  [d]))) && c >= d+1 && int8(d+1)  > int8(d)  -> ((Less|Leq)8U  (Const8  <x.Type> [c-d-1]) (Sub8  <x.Type> x (Const8  <x.Type> [d+1])))
+
+// unsigned integer range: ( c (<|<=) x || x < d ) -> ( c-d (<|<=) x-d )
+(OrB ((Less|Leq)64U (Const64 [c]) x) (Less64U x (Const64 [d]))) && uint64(c) >= uint64(d) -> ((Less|Leq)64U (Const64 <x.Type>               [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
+(OrB ((Less|Leq)32U (Const32 [c]) x) (Less32U x (Const32 [d]))) && uint32(c) >= uint32(d) -> ((Less|Leq)32U (Const32 <x.Type> [int64(int32(c-d))]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
+(OrB ((Less|Leq)16U (Const16 [c]) x) (Less16U x (Const16 [d]))) && uint16(c) >= uint16(d) -> ((Less|Leq)16U (Const16 <x.Type> [int64(int16(c-d))]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
+(OrB ((Less|Leq)8U  (Const8  [c]) x) (Less8U  x (Const8  [d]))) && uint8(c)  >= uint8(d)  -> ((Less|Leq)8U  (Const8  <x.Type> [int64( int8(c-d))]) (Sub8  <x.Type> x (Const8  <x.Type> [d])))
+
+// unsigned integer range: ( c (<|<=) x || x <= d ) -> ( c-(d+1) (<|<=) x-(d+1) )
+(OrB ((Less|Leq)64U (Const64 [c]) x) (Leq64U x (Const64 [d]))) && uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d) -> ((Less|Leq)64U (Const64 <x.Type>               [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
+(OrB ((Less|Leq)32U (Const32 [c]) x) (Leq32U x (Const32 [d]))) && uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d) -> ((Less|Leq)32U (Const32 <x.Type> [int64(int32(c-d-1))]) (Sub32 <x.Type> x (Const32 <x.Type> [int64(int32(d+1))])))
+(OrB ((Less|Leq)16U (Const16 [c]) x) (Leq16U x (Const16 [d]))) && uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d) -> ((Less|Leq)16U (Const16 <x.Type> [int64(int16(c-d-1))]) (Sub16 <x.Type> x (Const16 <x.Type> [int64(int16(d+1))])))
+(OrB ((Less|Leq)8U  (Const8  [c]) x) (Leq8U  x (Const8  [d]))) && uint8(c)  >= uint8(d+1)  && uint8(d+1)  > uint8(d)  -> ((Less|Leq)8U  (Const8  <x.Type> [int64( int8(c-d-1))]) (Sub8  <x.Type> x (Const8  <x.Type> [int64( int8(d+1))])))
+
 // Canonicalize x-const to x+(-const)
 (Sub64 x (Const64 <t> [c])) && x.Op != OpConst64 -> (Add64 (Const64 <t> [-c]) x)
 (Sub32 x (Const32 <t> [c])) && x.Op != OpConst32 -> (Add32 (Const32 <t> [int64(int32(-c))]) x)
index f728e8ee25d7f549674c430e93810c248f00ced7..16d94614d815c575435a220ee1a4e385d61e5dbc 100644 (file)
@@ -87,7 +87,7 @@ func TestNilcheckSimple(t *testing.T) {
        nilcheckelim(fun.f)
 
        // clean up the removed nil check
-       fusePlain(fun.f)
+       fuse(fun.f, fuseTypePlain)
        deadcode(fun.f)
 
        CheckFunc(fun.f)
@@ -124,7 +124,7 @@ func TestNilcheckDomOrder(t *testing.T) {
        nilcheckelim(fun.f)
 
        // clean up the removed nil check
-       fusePlain(fun.f)
+       fuse(fun.f, fuseTypePlain)
        deadcode(fun.f)
 
        CheckFunc(fun.f)
@@ -157,7 +157,7 @@ func TestNilcheckAddr(t *testing.T) {
        nilcheckelim(fun.f)
 
        // clean up the removed nil check
-       fusePlain(fun.f)
+       fuse(fun.f, fuseTypePlain)
        deadcode(fun.f)
 
        CheckFunc(fun.f)
@@ -191,7 +191,7 @@ func TestNilcheckAddPtr(t *testing.T) {
        nilcheckelim(fun.f)
 
        // clean up the removed nil check
-       fusePlain(fun.f)
+       fuse(fun.f, fuseTypePlain)
        deadcode(fun.f)
 
        CheckFunc(fun.f)
@@ -235,7 +235,7 @@ func TestNilcheckPhi(t *testing.T) {
        nilcheckelim(fun.f)
 
        // clean up the removed nil check
-       fusePlain(fun.f)
+       fuse(fun.f, fuseTypePlain)
        deadcode(fun.f)
 
        CheckFunc(fun.f)
@@ -276,7 +276,7 @@ func TestNilcheckKeepRemove(t *testing.T) {
        nilcheckelim(fun.f)
 
        // clean up the removed nil check
-       fusePlain(fun.f)
+       fuse(fun.f, fuseTypePlain)
        deadcode(fun.f)
 
        CheckFunc(fun.f)
@@ -323,7 +323,7 @@ func TestNilcheckInFalseBranch(t *testing.T) {
        nilcheckelim(fun.f)
 
        // clean up the removed nil check
-       fusePlain(fun.f)
+       fuse(fun.f, fuseTypePlain)
        deadcode(fun.f)
 
        CheckFunc(fun.f)
@@ -374,7 +374,7 @@ func TestNilcheckUser(t *testing.T) {
        nilcheckelim(fun.f)
 
        // clean up the removed nil check
-       fusePlain(fun.f)
+       fuse(fun.f, fuseTypePlain)
        deadcode(fun.f)
 
        CheckFunc(fun.f)
@@ -418,7 +418,7 @@ func TestNilcheckBug(t *testing.T) {
        nilcheckelim(fun.f)
 
        // clean up the removed nil check
-       fusePlain(fun.f)
+       fuse(fun.f, fuseTypePlain)
        deadcode(fun.f)
 
        CheckFunc(fun.f)
index 0a4879a8adb0c785bcbb995f0e6b53c0b75818ec..9e743838ab961f803ce6a6120a7acf8ee4358bad 100644 (file)
@@ -30,6 +30,8 @@ func rewriteValuegeneric(v *Value) bool {
                return rewriteValuegeneric_OpAnd64(v)
        case OpAnd8:
                return rewriteValuegeneric_OpAnd8(v)
+       case OpAndB:
+               return rewriteValuegeneric_OpAndB(v)
        case OpArraySelect:
                return rewriteValuegeneric_OpArraySelect(v)
        case OpCom16:
@@ -278,6 +280,8 @@ func rewriteValuegeneric(v *Value) bool {
                return rewriteValuegeneric_OpOr64(v)
        case OpOr8:
                return rewriteValuegeneric_OpOr8(v)
+       case OpOrB:
+               return rewriteValuegeneric_OpOrB(v)
        case OpPhi:
                return rewriteValuegeneric_OpPhi(v)
        case OpPtrIndex:
@@ -2328,125 +2332,1443 @@ func rewriteValuegeneric_OpAnd8(v *Value) bool {
        }
        return false
 }
-func rewriteValuegeneric_OpArraySelect(v *Value) bool {
+func rewriteValuegeneric_OpAndB(v *Value) bool {
+       v_1 := v.Args[1]
        v_0 := v.Args[0]
-       // match: (ArraySelect (ArrayMake1 x))
-       // result: x
+       b := v.Block
+       // match: (AndB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d])))
+       // cond: d >= c
+       // result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
        for {
-               if v_0.Op != OpArrayMake1 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq64 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess64 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c) {
+                               continue
+                       }
+                       v.reset(OpLess64U)
+                       v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d - c
+                       v.AddArg2(v0, v2)
+                       return true
                }
-               x := v_0.Args[0]
-               v.copyOf(x)
-               return true
+               break
        }
-       // match: (ArraySelect [0] (IData x))
-       // result: (IData x)
+       // match: (AndB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
+       // cond: d >= c
+       // result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
        for {
-               if v.AuxInt != 0 || v_0.Op != OpIData {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq64 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq64 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c) {
+                               continue
+                       }
+                       v.reset(OpLeq64U)
+                       v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d - c
+                       v.AddArg2(v0, v2)
+                       return true
                }
-               x := v_0.Args[0]
-               v.reset(OpIData)
-               v.AddArg(x)
-               return true
+               break
        }
-       return false
-}
-func rewriteValuegeneric_OpCom16(v *Value) bool {
-       v_0 := v.Args[0]
-       // match: (Com16 (Com16 x))
-       // result: x
+       // match: (AndB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d])))
+       // cond: d >= c
+       // result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
        for {
-               if v_0.Op != OpCom16 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq32 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess32 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c) {
+                               continue
+                       }
+                       v.reset(OpLess32U)
+                       v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = d - c
+                       v.AddArg2(v0, v2)
+                       return true
                }
-               x := v_0.Args[0]
-               v.copyOf(x)
-               return true
+               break
        }
-       // match: (Com16 (Const16 [c]))
-       // result: (Const16 [^c])
+       // match: (AndB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
+       // cond: d >= c
+       // result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [d-c]))
        for {
-               if v_0.Op != OpConst16 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq32 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq32 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c) {
+                               continue
+                       }
+                       v.reset(OpLeq32U)
+                       v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = d - c
+                       v.AddArg2(v0, v2)
+                       return true
                }
-               c := v_0.AuxInt
-               v.reset(OpConst16)
-               v.AuxInt = ^c
-               return true
+               break
        }
-       // match: (Com16 (Add16 (Const16 [-1]) x))
-       // result: (Neg16 x)
+       // match: (AndB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d])))
+       // cond: d >= c
+       // result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
        for {
-               if v_0.Op != OpAdd16 {
-                       break
-               }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               v_0_1 := v_0.Args[1]
-               for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
-                       if v_0_0.Op != OpConst16 || v_0_0.AuxInt != -1 {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq16 {
                                continue
                        }
-                       x := v_0_1
-                       v.reset(OpNeg16)
-                       v.AddArg(x)
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess16 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c) {
+                               continue
+                       }
+                       v.reset(OpLess16U)
+                       v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = d - c
+                       v.AddArg2(v0, v2)
                        return true
                }
                break
        }
-       return false
-}
-func rewriteValuegeneric_OpCom32(v *Value) bool {
-       v_0 := v.Args[0]
-       // match: (Com32 (Com32 x))
-       // result: x
+       // match: (AndB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
+       // cond: d >= c
+       // result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [d-c]))
        for {
-               if v_0.Op != OpCom32 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq16 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq16 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c) {
+                               continue
+                       }
+                       v.reset(OpLeq16U)
+                       v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = d - c
+                       v.AddArg2(v0, v2)
+                       return true
                }
-               x := v_0.Args[0]
-               v.copyOf(x)
-               return true
+               break
        }
-       // match: (Com32 (Const32 [c]))
-       // result: (Const32 [^c])
+       // match: (AndB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d])))
+       // cond: d >= c
+       // result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
        for {
-               if v_0.Op != OpConst32 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq8 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess8 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c) {
+                               continue
+                       }
+                       v.reset(OpLess8U)
+                       v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = d - c
+                       v.AddArg2(v0, v2)
+                       return true
                }
-               c := v_0.AuxInt
-               v.reset(OpConst32)
-               v.AuxInt = ^c
-               return true
+               break
        }
-       // match: (Com32 (Add32 (Const32 [-1]) x))
-       // result: (Neg32 x)
+       // match: (AndB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
+       // cond: d >= c
+       // result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [d-c]))
        for {
-               if v_0.Op != OpAdd32 {
-                       break
-               }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               v_0_1 := v_0.Args[1]
-               for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
-                       if v_0_0.Op != OpConst32 || v_0_0.AuxInt != -1 {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq8 {
                                continue
                        }
-                       x := v_0_1
-                       v.reset(OpNeg32)
-                       v.AddArg(x)
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq8 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c) {
+                               continue
+                       }
+                       v.reset(OpLeq8U)
+                       v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = d - c
+                       v.AddArg2(v0, v2)
                        return true
                }
                break
        }
-       return false
-}
-func rewriteValuegeneric_OpCom64(v *Value) bool {
-       v_0 := v.Args[0]
-       // match: (Com64 (Com64 x))
-       // result: x
+       // match: (AndB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d])))
+       // cond: d >= c+1 && int64(c+1) > int64(c)
+       // result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess64 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess64 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c+1 && int64(c+1) > int64(c)) {
+                               continue
+                       }
+                       v.reset(OpLess64U)
+                       v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v1.AuxInt = c + 1
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d - c - 1
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
+       // cond: d >= c+1 && int64(c+1) > int64(c)
+       // result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess64 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq64 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c+1 && int64(c+1) > int64(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq64U)
+                       v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v1.AuxInt = c + 1
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d - c - 1
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d])))
+       // cond: d >= c+1 && int32(c+1) > int32(c)
+       // result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess32 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess32 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c+1 && int32(c+1) > int32(c)) {
+                               continue
+                       }
+                       v.reset(OpLess32U)
+                       v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v1.AuxInt = c + 1
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = d - c - 1
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
+       // cond: d >= c+1 && int32(c+1) > int32(c)
+       // result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c+1])) (Const32 <x.Type> [d-c-1]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess32 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq32 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c+1 && int32(c+1) > int32(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq32U)
+                       v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v1.AuxInt = c + 1
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = d - c - 1
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less16 (Const16 [c]) x) (Less16 x (Const16 [d])))
+       // cond: d >= c+1 && int16(c+1) > int16(c)
+       // result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess16 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess16 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c+1 && int16(c+1) > int16(c)) {
+                               continue
+                       }
+                       v.reset(OpLess16U)
+                       v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v1.AuxInt = c + 1
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = d - c - 1
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
+       // cond: d >= c+1 && int16(c+1) > int16(c)
+       // result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c+1])) (Const16 <x.Type> [d-c-1]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess16 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq16 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c+1 && int16(c+1) > int16(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq16U)
+                       v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v1.AuxInt = c + 1
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = d - c - 1
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less8 (Const8 [c]) x) (Less8 x (Const8 [d])))
+       // cond: d >= c+1 && int8(c+1) > int8(c)
+       // result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess8 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess8 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c+1 && int8(c+1) > int8(c)) {
+                               continue
+                       }
+                       v.reset(OpLess8U)
+                       v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v1.AuxInt = c + 1
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = d - c - 1
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
+       // cond: d >= c+1 && int8(c+1) > int8(c)
+       // result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c+1])) (Const8 <x.Type> [d-c-1]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess8 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq8 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(d >= c+1 && int8(c+1) > int8(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq8U)
+                       v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v1.AuxInt = c + 1
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = d - c - 1
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Leq64U (Const64 [c]) x) (Less64U x (Const64 [d])))
+       // cond: uint64(d) >= uint64(c)
+       // result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq64U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess64U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint64(d) >= uint64(c)) {
+                               continue
+                       }
+                       v.reset(OpLess64U)
+                       v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d - c
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Leq64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
+       // cond: uint64(d) >= uint64(c)
+       // result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c])) (Const64 <x.Type> [d-c]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq64U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq64U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint64(d) >= uint64(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq64U)
+                       v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d - c
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Leq32U (Const32 [c]) x) (Less32U x (Const32 [d])))
+       // cond: uint32(d) >= uint32(c)
+       // result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [int64(int32(d-c))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq32U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess32U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint32(d) >= uint32(c)) {
+                               continue
+                       }
+                       v.reset(OpLess32U)
+                       v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = int64(int32(d - c))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Leq32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
+       // cond: uint32(d) >= uint32(c)
+       // result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [c])) (Const32 <x.Type> [int64(int32(d-c))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq32U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq32U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint32(d) >= uint32(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq32U)
+                       v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = int64(int32(d - c))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Leq16U (Const16 [c]) x) (Less16U x (Const16 [d])))
+       // cond: uint16(d) >= uint16(c)
+       // result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [int64(int16(d-c))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq16U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess16U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint16(d) >= uint16(c)) {
+                               continue
+                       }
+                       v.reset(OpLess16U)
+                       v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = int64(int16(d - c))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Leq16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
+       // cond: uint16(d) >= uint16(c)
+       // result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [c])) (Const16 <x.Type> [int64(int16(d-c))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq16U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq16U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint16(d) >= uint16(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq16U)
+                       v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = int64(int16(d - c))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Leq8U (Const8 [c]) x) (Less8U x (Const8 [d])))
+       // cond: uint8(d) >= uint8(c)
+       // result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [int64(int8(d-c))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq8U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess8U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint8(d) >= uint8(c)) {
+                               continue
+                       }
+                       v.reset(OpLess8U)
+                       v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = int64(int8(d - c))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Leq8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
+       // cond: uint8(d) >= uint8(c)
+       // result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [c])) (Const8 <x.Type> [int64(int8(d-c))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq8U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq8U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint8(d) >= uint8(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq8U)
+                       v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v1.AuxInt = c
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = int64(int8(d - c))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less64U (Const64 [c]) x) (Less64U x (Const64 [d])))
+       // cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)
+       // result: (Less64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess64U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess64U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) {
+                               continue
+                       }
+                       v.reset(OpLess64U)
+                       v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v1.AuxInt = c + 1
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d - c - 1
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
+       // cond: uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)
+       // result: (Leq64U (Sub64 <x.Type> x (Const64 <x.Type> [c+1])) (Const64 <x.Type> [d-c-1]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess64U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq64U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint64(d) >= uint64(c+1) && uint64(c+1) > uint64(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq64U)
+                       v0 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v1.AuxInt = c + 1
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d - c - 1
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d])))
+       // cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)
+       // result: (Less32U (Sub32 <x.Type> x (Const32 <x.Type> [int64(int32(c+1))])) (Const32 <x.Type> [int64(int32(d-c-1))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess32U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess32U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) {
+                               continue
+                       }
+                       v.reset(OpLess32U)
+                       v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v1.AuxInt = int64(int32(c + 1))
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = int64(int32(d - c - 1))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
+       // cond: uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)
+       // result: (Leq32U (Sub32 <x.Type> x (Const32 <x.Type> [int64(int32(c+1))])) (Const32 <x.Type> [int64(int32(d-c-1))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess32U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq32U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint32(d) >= uint32(c+1) && uint32(c+1) > uint32(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq32U)
+                       v0 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v1.AuxInt = int64(int32(c + 1))
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = int64(int32(d - c - 1))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less16U (Const16 [c]) x) (Less16U x (Const16 [d])))
+       // cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)
+       // result: (Less16U (Sub16 <x.Type> x (Const16 <x.Type> [int64(int16(c+1))])) (Const16 <x.Type> [int64(int16(d-c-1))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess16U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess16U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) {
+                               continue
+                       }
+                       v.reset(OpLess16U)
+                       v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v1.AuxInt = int64(int16(c + 1))
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = int64(int16(d - c - 1))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
+       // cond: uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)
+       // result: (Leq16U (Sub16 <x.Type> x (Const16 <x.Type> [int64(int16(c+1))])) (Const16 <x.Type> [int64(int16(d-c-1))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess16U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq16U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint16(d) >= uint16(c+1) && uint16(c+1) > uint16(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq16U)
+                       v0 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v1.AuxInt = int64(int16(c + 1))
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = int64(int16(d - c - 1))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less8U (Const8 [c]) x) (Less8U x (Const8 [d])))
+       // cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)
+       // result: (Less8U (Sub8 <x.Type> x (Const8 <x.Type> [int64(int8(c+1))])) (Const8 <x.Type> [int64(int8(d-c-1))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess8U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess8U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) {
+                               continue
+                       }
+                       v.reset(OpLess8U)
+                       v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v1.AuxInt = int64(int8(c + 1))
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = int64(int8(d - c - 1))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       // match: (AndB (Less8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
+       // cond: uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)
+       // result: (Leq8U (Sub8 <x.Type> x (Const8 <x.Type> [int64(int8(c+1))])) (Const8 <x.Type> [int64(int8(d-c-1))]))
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess8U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq8U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint8(d) >= uint8(c+1) && uint8(c+1) > uint8(c)) {
+                               continue
+                       }
+                       v.reset(OpLeq8U)
+                       v0 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v1 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v1.AuxInt = int64(int8(c + 1))
+                       v0.AddArg2(x, v1)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = int64(int8(d - c - 1))
+                       v.AddArg2(v0, v2)
+                       return true
+               }
+               break
+       }
+       return false
+}
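The AndB rules above are the machine-generated form of the range-check fusion described in the commit message: a pair of signed bounds checks `c < x && x < d` collapses into one unsigned comparison once `c+1` is subtracted out, provided `d >= c+1` and `c+1` does not overflow. A minimal sketch of that identity at the Go source level (illustrative only, not part of the generated file):

// Illustrative sketch only: the identity the Less/Leq -> unsigned-compare
// rewrites rely on, written out in ordinary Go.
func inRangeSigned(x, c, d int32) bool { return c < x && x < d }

func inRangeUnsigned(x, c, d int32) bool {
	// valid whenever d >= c+1 and c+1 does not overflow int32
	return uint32(x-(c+1)) < uint32(d-c-1)
}

For example, with c = 0 and d = 10 both report true exactly for x in 1..9; the second form is what the rewritten SSA computes with a single Sub and an unsigned Less/Leq.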
+func rewriteValuegeneric_OpArraySelect(v *Value) bool {
+       v_0 := v.Args[0]
+       // match: (ArraySelect (ArrayMake1 x))
+       // result: x
+       for {
+               if v_0.Op != OpArrayMake1 {
+                       break
+               }
+               x := v_0.Args[0]
+               v.copyOf(x)
+               return true
+       }
+       // match: (ArraySelect [0] (IData x))
+       // result: (IData x)
+       for {
+               if v.AuxInt != 0 || v_0.Op != OpIData {
+                       break
+               }
+               x := v_0.Args[0]
+               v.reset(OpIData)
+               v.AddArg(x)
+               return true
+       }
+       return false
+}
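The ArraySelect rules fold a load of the sole element of a one-element array value back into the value it was built from. Roughly, at the source level (sketch only, assuming the usual SSA lowering of one-element arrays via ArrayMake1):

// Sketch: ArrayMake1 followed by ArraySelect collapses to the original value.
func roundTrip(x int) int {
	a := [1]int{x} // built via ArrayMake1
	return a[0]    // ArraySelect [0], folded back to x
}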
+func rewriteValuegeneric_OpCom16(v *Value) bool {
+       v_0 := v.Args[0]
+       // match: (Com16 (Com16 x))
+       // result: x
+       for {
+               if v_0.Op != OpCom16 {
+                       break
+               }
+               x := v_0.Args[0]
+               v.copyOf(x)
+               return true
+       }
+       // match: (Com16 (Const16 [c]))
+       // result: (Const16 [^c])
+       for {
+               if v_0.Op != OpConst16 {
+                       break
+               }
+               c := v_0.AuxInt
+               v.reset(OpConst16)
+               v.AuxInt = ^c
+               return true
+       }
+       // match: (Com16 (Add16 (Const16 [-1]) x))
+       // result: (Neg16 x)
+       for {
+               if v_0.Op != OpAdd16 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               v_0_1 := v_0.Args[1]
+               for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
+                       if v_0_0.Op != OpConst16 || v_0_0.AuxInt != -1 {
+                               continue
+                       }
+                       x := v_0_1
+                       v.reset(OpNeg16)
+                       v.AddArg(x)
+                       return true
+               }
+               break
+       }
+       return false
+}
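The Com16 rules (and their 32- and 64-bit counterparts below) fold double complements, complement constants, and turn `^(x + -1)` into a negation using the two's-complement identity `^(x-1) == -x`. A hedged sketch of that last identity:

// Sketch only: in two's complement, ^(x-1) equals -x, which is what the
// (Com16 (Add16 (Const16 [-1]) x)) => (Neg16 x) rule exploits.
func negViaCom(x int16) int16 {
	return ^(x - 1)
}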
+func rewriteValuegeneric_OpCom32(v *Value) bool {
+       v_0 := v.Args[0]
+       // match: (Com32 (Com32 x))
+       // result: x
+       for {
+               if v_0.Op != OpCom32 {
+                       break
+               }
+               x := v_0.Args[0]
+               v.copyOf(x)
+               return true
+       }
+       // match: (Com32 (Const32 [c]))
+       // result: (Const32 [^c])
+       for {
+               if v_0.Op != OpConst32 {
+                       break
+               }
+               c := v_0.AuxInt
+               v.reset(OpConst32)
+               v.AuxInt = ^c
+               return true
+       }
+       // match: (Com32 (Add32 (Const32 [-1]) x))
+       // result: (Neg32 x)
+       for {
+               if v_0.Op != OpAdd32 {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               v_0_1 := v_0.Args[1]
+               for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
+                       if v_0_0.Op != OpConst32 || v_0_0.AuxInt != -1 {
+                               continue
+                       }
+                       x := v_0_1
+                       v.reset(OpNeg32)
+                       v.AddArg(x)
+                       return true
+               }
+               break
+       }
+       return false
+}
+func rewriteValuegeneric_OpCom64(v *Value) bool {
+       v_0 := v.Args[0]
+       // match: (Com64 (Com64 x))
+       // result: x
        for {
                if v_0.Op != OpCom64 {
                        break
@@ -14402,1423 +15724,2741 @@ func rewriteValuegeneric_OpNeqPtr(v *Value) bool {
                        if v_1.Op != OpOffPtr {
                                continue
                        }
-                       o2 := v_1.AuxInt
-                       p2 := v_1.Args[0]
-                       if !(isSamePtr(p1, p2)) {
+                       o2 := v_1.AuxInt
+                       p2 := v_1.Args[0]
+                       if !(isSamePtr(p1, p2)) {
+                               continue
+                       }
+                       v.reset(OpConstBool)
+                       v.AuxInt = b2i(o1 != o2)
+                       return true
+               }
+               break
+       }
+       // match: (NeqPtr (Const32 [c]) (Const32 [d]))
+       // result: (ConstBool [b2i(c != d)])
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0.AuxInt
+                       if v_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1.AuxInt
+                       v.reset(OpConstBool)
+                       v.AuxInt = b2i(c != d)
+                       return true
+               }
+               break
+       }
+       // match: (NeqPtr (Const64 [c]) (Const64 [d]))
+       // result: (ConstBool [b2i(c != d)])
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0.AuxInt
+                       if v_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1.AuxInt
+                       v.reset(OpConstBool)
+                       v.AuxInt = b2i(c != d)
+                       return true
+               }
+               break
+       }
+       // match: (NeqPtr (LocalAddr _ _) (Addr _))
+       // result: (ConstBool [1])
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLocalAddr || v_1.Op != OpAddr {
+                               continue
+                       }
+                       v.reset(OpConstBool)
+                       v.AuxInt = 1
+                       return true
+               }
+               break
+       }
+       // match: (NeqPtr (OffPtr (LocalAddr _ _)) (Addr _))
+       // result: (ConstBool [1])
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpOffPtr {
+                               continue
+                       }
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpLocalAddr || v_1.Op != OpAddr {
+                               continue
+                       }
+                       v.reset(OpConstBool)
+                       v.AuxInt = 1
+                       return true
+               }
+               break
+       }
+       // match: (NeqPtr (LocalAddr _ _) (OffPtr (Addr _)))
+       // result: (ConstBool [1])
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
+                               continue
+                       }
+                       v_1_0 := v_1.Args[0]
+                       if v_1_0.Op != OpAddr {
+                               continue
+                       }
+                       v.reset(OpConstBool)
+                       v.AuxInt = 1
+                       return true
+               }
+               break
+       }
+       // match: (NeqPtr (OffPtr (LocalAddr _ _)) (OffPtr (Addr _)))
+       // result: (ConstBool [1])
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpOffPtr {
+                               continue
+                       }
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
+                               continue
+                       }
+                       v_1_0 := v_1.Args[0]
+                       if v_1_0.Op != OpAddr {
                                continue
                        }
                        v.reset(OpConstBool)
-                       v.AuxInt = b2i(o1 != o2)
+                       v.AuxInt = 1
                        return true
                }
                break
        }
-       // match: (NeqPtr (Const32 [c]) (Const32 [d]))
-       // result: (ConstBool [b2i(c != d)])
+       // match: (NeqPtr (AddPtr p1 o1) p2)
+       // cond: isSamePtr(p1, p2)
+       // result: (IsNonNil o1)
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst32 {
+                       if v_0.Op != OpAddPtr {
                                continue
                        }
-                       c := v_0.AuxInt
-                       if v_1.Op != OpConst32 {
+                       o1 := v_0.Args[1]
+                       p1 := v_0.Args[0]
+                       p2 := v_1
+                       if !(isSamePtr(p1, p2)) {
                                continue
                        }
-                       d := v_1.AuxInt
-                       v.reset(OpConstBool)
-                       v.AuxInt = b2i(c != d)
+                       v.reset(OpIsNonNil)
+                       v.AddArg(o1)
                        return true
                }
                break
        }
-       // match: (NeqPtr (Const64 [c]) (Const64 [d]))
-       // result: (ConstBool [b2i(c != d)])
+       // match: (NeqPtr (Const32 [0]) p)
+       // result: (IsNonNil p)
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst64 {
-                               continue
-                       }
-                       c := v_0.AuxInt
-                       if v_1.Op != OpConst64 {
+                       if v_0.Op != OpConst32 || v_0.AuxInt != 0 {
                                continue
                        }
-                       d := v_1.AuxInt
-                       v.reset(OpConstBool)
-                       v.AuxInt = b2i(c != d)
+                       p := v_1
+                       v.reset(OpIsNonNil)
+                       v.AddArg(p)
                        return true
                }
                break
        }
-       // match: (NeqPtr (LocalAddr _ _) (Addr _))
-       // result: (ConstBool [1])
+       // match: (NeqPtr (Const64 [0]) p)
+       // result: (IsNonNil p)
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpLocalAddr || v_1.Op != OpAddr {
+                       if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
                                continue
                        }
-                       v.reset(OpConstBool)
-                       v.AuxInt = 1
+                       p := v_1
+                       v.reset(OpIsNonNil)
+                       v.AddArg(p)
                        return true
                }
                break
        }
-       // match: (NeqPtr (OffPtr (LocalAddr _ _)) (Addr _))
-       // result: (ConstBool [1])
+       // match: (NeqPtr (ConstNil) p)
+       // result: (IsNonNil p)
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpOffPtr {
-                               continue
-                       }
-                       v_0_0 := v_0.Args[0]
-                       if v_0_0.Op != OpLocalAddr || v_1.Op != OpAddr {
+                       if v_0.Op != OpConstNil {
                                continue
                        }
-                       v.reset(OpConstBool)
-                       v.AuxInt = 1
+                       p := v_1
+                       v.reset(OpIsNonNil)
+                       v.AddArg(p)
                        return true
                }
-               break
+               break
+       }
+       return false
+}
+func rewriteValuegeneric_OpNeqSlice(v *Value) bool {
+       v_1 := v.Args[1]
+       v_0 := v.Args[0]
+       b := v.Block
+       typ := &b.Func.Config.Types
+       // match: (NeqSlice x y)
+       // result: (NeqPtr (SlicePtr x) (SlicePtr y))
+       for {
+               x := v_0
+               y := v_1
+               v.reset(OpNeqPtr)
+               v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
+               v1.AddArg(y)
+               v.AddArg2(v0, v1)
+               return true
+       }
+}
+func rewriteValuegeneric_OpNilCheck(v *Value) bool {
+       v_1 := v.Args[1]
+       v_0 := v.Args[0]
+       b := v.Block
+       config := b.Func.Config
+       fe := b.Func.fe
+       // match: (NilCheck (GetG mem) mem)
+       // result: mem
+       for {
+               if v_0.Op != OpGetG {
+                       break
+               }
+               mem := v_0.Args[0]
+               if mem != v_1 {
+                       break
+               }
+               v.copyOf(mem)
+               return true
+       }
+       // match: (NilCheck (Load (OffPtr [c] (SP)) (StaticCall {sym} _)) _)
+       // cond: isSameSym(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")
+       // result: (Invalid)
+       for {
+               if v_0.Op != OpLoad {
+                       break
+               }
+               _ = v_0.Args[1]
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpOffPtr {
+                       break
+               }
+               c := v_0_0.AuxInt
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpSP {
+                       break
+               }
+               v_0_1 := v_0.Args[1]
+               if v_0_1.Op != OpStaticCall {
+                       break
+               }
+               sym := v_0_1.Aux
+               if !(isSameSym(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
+                       break
+               }
+               v.reset(OpInvalid)
+               return true
+       }
+       // match: (NilCheck (OffPtr (Load (OffPtr [c] (SP)) (StaticCall {sym} _))) _)
+       // cond: isSameSym(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")
+       // result: (Invalid)
+       for {
+               if v_0.Op != OpOffPtr {
+                       break
+               }
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpLoad {
+                       break
+               }
+               _ = v_0_0.Args[1]
+               v_0_0_0 := v_0_0.Args[0]
+               if v_0_0_0.Op != OpOffPtr {
+                       break
+               }
+               c := v_0_0_0.AuxInt
+               v_0_0_0_0 := v_0_0_0.Args[0]
+               if v_0_0_0_0.Op != OpSP {
+                       break
+               }
+               v_0_0_1 := v_0_0.Args[1]
+               if v_0_0_1.Op != OpStaticCall {
+                       break
+               }
+               sym := v_0_0_1.Aux
+               if !(isSameSym(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
+                       break
+               }
+               v.reset(OpInvalid)
+               return true
+       }
+       return false
+}
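The NilCheck rules above drop checks that are provably unnecessary: the result of `runtime.newobject` is never nil, so a nil check on a pointer loaded straight from that call is rewritten to Invalid and removed. A source-level situation this covers (sketch, assuming the usual lowering of `new` to runtime.newobject):

// Sketch: `new` compiles to a call to runtime.newobject, whose result is
// non-nil by construction, so the implicit nil check before the store goes away.
func alloc() *int {
	p := new(int)
	*p = 1 // nil check on p elided
	return p
}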
+func rewriteValuegeneric_OpNot(v *Value) bool {
+       v_0 := v.Args[0]
+       // match: (Not (ConstBool [c]))
+       // result: (ConstBool [1-c])
+       for {
+               if v_0.Op != OpConstBool {
+                       break
+               }
+               c := v_0.AuxInt
+               v.reset(OpConstBool)
+               v.AuxInt = 1 - c
+               return true
+       }
+       // match: (Not (Eq64 x y))
+       // result: (Neq64 x y)
+       for {
+               if v_0.Op != OpEq64 {
+                       break
+               }
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpNeq64)
+               v.AddArg2(x, y)
+               return true
+       }
+       // match: (Not (Eq32 x y))
+       // result: (Neq32 x y)
+       for {
+               if v_0.Op != OpEq32 {
+                       break
+               }
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpNeq32)
+               v.AddArg2(x, y)
+               return true
+       }
+       // match: (Not (Eq16 x y))
+       // result: (Neq16 x y)
+       for {
+               if v_0.Op != OpEq16 {
+                       break
+               }
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpNeq16)
+               v.AddArg2(x, y)
+               return true
+       }
+       // match: (Not (Eq8 x y))
+       // result: (Neq8 x y)
+       for {
+               if v_0.Op != OpEq8 {
+                       break
+               }
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpNeq8)
+               v.AddArg2(x, y)
+               return true
+       }
+       // match: (Not (EqB x y))
+       // result: (NeqB x y)
+       for {
+               if v_0.Op != OpEqB {
+                       break
+               }
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpNeqB)
+               v.AddArg2(x, y)
+               return true
+       }
+       // match: (Not (EqPtr x y))
+       // result: (NeqPtr x y)
+       for {
+               if v_0.Op != OpEqPtr {
+                       break
+               }
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpNeqPtr)
+               v.AddArg2(x, y)
+               return true
+       }
+       // match: (Not (Eq64F x y))
+       // result: (Neq64F x y)
+       for {
+               if v_0.Op != OpEq64F {
+                       break
+               }
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpNeq64F)
+               v.AddArg2(x, y)
+               return true
        }
-       // match: (NeqPtr (LocalAddr _ _) (OffPtr (Addr _)))
-       // result: (ConstBool [1])
+       // match: (Not (Eq32F x y))
+       // result: (Neq32F x y)
        for {
-               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
-                               continue
-                       }
-                       v_1_0 := v_1.Args[0]
-                       if v_1_0.Op != OpAddr {
-                               continue
-                       }
-                       v.reset(OpConstBool)
-                       v.AuxInt = 1
-                       return true
+               if v_0.Op != OpEq32F {
+                       break
                }
-               break
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpNeq32F)
+               v.AddArg2(x, y)
+               return true
        }
-       // match: (NeqPtr (OffPtr (LocalAddr _ _)) (OffPtr (Addr _)))
-       // result: (ConstBool [1])
+       // match: (Not (Neq64 x y))
+       // result: (Eq64 x y)
        for {
-               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpOffPtr {
-                               continue
-                       }
-                       v_0_0 := v_0.Args[0]
-                       if v_0_0.Op != OpLocalAddr || v_1.Op != OpOffPtr {
-                               continue
-                       }
-                       v_1_0 := v_1.Args[0]
-                       if v_1_0.Op != OpAddr {
-                               continue
-                       }
-                       v.reset(OpConstBool)
-                       v.AuxInt = 1
-                       return true
+               if v_0.Op != OpNeq64 {
+                       break
                }
-               break
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpEq64)
+               v.AddArg2(x, y)
+               return true
        }
-       // match: (NeqPtr (AddPtr p1 o1) p2)
-       // cond: isSamePtr(p1, p2)
-       // result: (IsNonNil o1)
+       // match: (Not (Neq32 x y))
+       // result: (Eq32 x y)
        for {
-               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpAddPtr {
-                               continue
-                       }
-                       o1 := v_0.Args[1]
-                       p1 := v_0.Args[0]
-                       p2 := v_1
-                       if !(isSamePtr(p1, p2)) {
-                               continue
-                       }
-                       v.reset(OpIsNonNil)
-                       v.AddArg(o1)
-                       return true
+               if v_0.Op != OpNeq32 {
+                       break
                }
-               break
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpEq32)
+               v.AddArg2(x, y)
+               return true
        }
-       // match: (NeqPtr (Const32 [0]) p)
-       // result: (IsNonNil p)
+       // match: (Not (Neq16 x y))
+       // result: (Eq16 x y)
        for {
-               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst32 || v_0.AuxInt != 0 {
-                               continue
-                       }
-                       p := v_1
-                       v.reset(OpIsNonNil)
-                       v.AddArg(p)
-                       return true
+               if v_0.Op != OpNeq16 {
+                       break
                }
-               break
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpEq16)
+               v.AddArg2(x, y)
+               return true
        }
-       // match: (NeqPtr (Const64 [0]) p)
-       // result: (IsNonNil p)
+       // match: (Not (Neq8 x y))
+       // result: (Eq8 x y)
        for {
-               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
-                               continue
-                       }
-                       p := v_1
-                       v.reset(OpIsNonNil)
-                       v.AddArg(p)
-                       return true
+               if v_0.Op != OpNeq8 {
+                       break
                }
-               break
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpEq8)
+               v.AddArg2(x, y)
+               return true
        }
-       // match: (NeqPtr (ConstNil) p)
-       // result: (IsNonNil p)
+       // match: (Not (NeqB x y))
+       // result: (EqB x y)
        for {
-               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConstNil {
-                               continue
-                       }
-                       p := v_1
-                       v.reset(OpIsNonNil)
-                       v.AddArg(p)
-                       return true
+               if v_0.Op != OpNeqB {
+                       break
                }
-               break
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpEqB)
+               v.AddArg2(x, y)
+               return true
        }
-       return false
-}
-func rewriteValuegeneric_OpNeqSlice(v *Value) bool {
-       v_1 := v.Args[1]
-       v_0 := v.Args[0]
-       b := v.Block
-       typ := &b.Func.Config.Types
-       // match: (NeqSlice x y)
-       // result: (NeqPtr (SlicePtr x) (SlicePtr y))
+       // match: (Not (NeqPtr x y))
+       // result: (EqPtr x y)
        for {
-               x := v_0
-               y := v_1
-               v.reset(OpNeqPtr)
-               v0 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
-               v0.AddArg(x)
-               v1 := b.NewValue0(v.Pos, OpSlicePtr, typ.BytePtr)
-               v1.AddArg(y)
-               v.AddArg2(v0, v1)
+               if v_0.Op != OpNeqPtr {
+                       break
+               }
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpEqPtr)
+               v.AddArg2(x, y)
                return true
        }
-}
-func rewriteValuegeneric_OpNilCheck(v *Value) bool {
-       v_1 := v.Args[1]
-       v_0 := v.Args[0]
-       b := v.Block
-       config := b.Func.Config
-       fe := b.Func.fe
-       // match: (NilCheck (GetG mem) mem)
-       // result: mem
+       // match: (Not (Neq64F x y))
+       // result: (Eq64F x y)
        for {
-               if v_0.Op != OpGetG {
+               if v_0.Op != OpNeq64F {
                        break
                }
-               mem := v_0.Args[0]
-               if mem != v_1 {
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpEq64F)
+               v.AddArg2(x, y)
+               return true
+       }
+       // match: (Not (Neq32F x y))
+       // result: (Eq32F x y)
+       for {
+               if v_0.Op != OpNeq32F {
                        break
                }
-               v.copyOf(mem)
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpEq32F)
+               v.AddArg2(x, y)
                return true
        }
-       // match: (NilCheck (Load (OffPtr [c] (SP)) (StaticCall {sym} _)) _)
-       // cond: isSameSym(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")
-       // result: (Invalid)
+       // match: (Not (Less64 x y))
+       // result: (Leq64 y x)
        for {
-               if v_0.Op != OpLoad {
+               if v_0.Op != OpLess64 {
                        break
                }
-               _ = v_0.Args[1]
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpOffPtr {
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLeq64)
+               v.AddArg2(y, x)
+               return true
+       }
+       // match: (Not (Less32 x y))
+       // result: (Leq32 y x)
+       for {
+               if v_0.Op != OpLess32 {
                        break
                }
-               c := v_0_0.AuxInt
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpSP {
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLeq32)
+               v.AddArg2(y, x)
+               return true
+       }
+       // match: (Not (Less16 x y))
+       // result: (Leq16 y x)
+       for {
+               if v_0.Op != OpLess16 {
                        break
                }
-               v_0_1 := v_0.Args[1]
-               if v_0_1.Op != OpStaticCall {
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLeq16)
+               v.AddArg2(y, x)
+               return true
+       }
+       // match: (Not (Less8 x y))
+       // result: (Leq8 y x)
+       for {
+               if v_0.Op != OpLess8 {
                        break
                }
-               sym := v_0_1.Aux
-               if !(isSameSym(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLeq8)
+               v.AddArg2(y, x)
+               return true
+       }
+       // match: (Not (Less64U x y))
+       // result: (Leq64U y x)
+       for {
+               if v_0.Op != OpLess64U {
                        break
                }
-               v.reset(OpInvalid)
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLeq64U)
+               v.AddArg2(y, x)
                return true
        }
-       // match: (NilCheck (OffPtr (Load (OffPtr [c] (SP)) (StaticCall {sym} _))) _)
-       // cond: isSameSym(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")
-       // result: (Invalid)
+       // match: (Not (Less32U x y))
+       // result: (Leq32U y x)
        for {
-               if v_0.Op != OpOffPtr {
+               if v_0.Op != OpLess32U {
                        break
                }
-               v_0_0 := v_0.Args[0]
-               if v_0_0.Op != OpLoad {
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLeq32U)
+               v.AddArg2(y, x)
+               return true
+       }
+       // match: (Not (Less16U x y))
+       // result: (Leq16U y x)
+       for {
+               if v_0.Op != OpLess16U {
                        break
                }
-               _ = v_0_0.Args[1]
-               v_0_0_0 := v_0_0.Args[0]
-               if v_0_0_0.Op != OpOffPtr {
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLeq16U)
+               v.AddArg2(y, x)
+               return true
+       }
+       // match: (Not (Less8U x y))
+       // result: (Leq8U y x)
+       for {
+               if v_0.Op != OpLess8U {
                        break
                }
-               c := v_0_0_0.AuxInt
-               v_0_0_0_0 := v_0_0_0.Args[0]
-               if v_0_0_0_0.Op != OpSP {
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLeq8U)
+               v.AddArg2(y, x)
+               return true
+       }
+       // match: (Not (Leq64 x y))
+       // result: (Less64 y x)
+       for {
+               if v_0.Op != OpLeq64 {
                        break
                }
-               v_0_0_1 := v_0_0.Args[1]
-               if v_0_0_1.Op != OpStaticCall {
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLess64)
+               v.AddArg2(y, x)
+               return true
+       }
+       // match: (Not (Leq32 x y))
+       // result: (Less32 y x)
+       for {
+               if v_0.Op != OpLeq32 {
                        break
                }
-               sym := v_0_0_1.Aux
-               if !(isSameSym(sym, "runtime.newobject") && c == config.ctxt.FixedFrameSize()+config.RegSize && warnRule(fe.Debug_checknil(), v, "removed nil check")) {
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLess32)
+               v.AddArg2(y, x)
+               return true
+       }
+       // match: (Not (Leq16 x y))
+       // result: (Less16 y x)
+       for {
+               if v_0.Op != OpLeq16 {
                        break
                }
-               v.reset(OpInvalid)
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLess16)
+               v.AddArg2(y, x)
                return true
        }
-       return false
-}
-func rewriteValuegeneric_OpNot(v *Value) bool {
-       v_0 := v.Args[0]
-       // match: (Not (ConstBool [c]))
-       // result: (ConstBool [1-c])
+       // match: (Not (Leq8 x y))
+       // result: (Less8 y x)
        for {
-               if v_0.Op != OpConstBool {
+               if v_0.Op != OpLeq8 {
                        break
                }
-               c := v_0.AuxInt
-               v.reset(OpConstBool)
-               v.AuxInt = 1 - c
+               y := v_0.Args[1]
+               x := v_0.Args[0]
+               v.reset(OpLess8)
+               v.AddArg2(y, x)
                return true
        }
-       // match: (Not (Eq64 x y))
-       // result: (Neq64 x y)
+       // match: (Not (Leq64U x y))
+       // result: (Less64U y x)
        for {
-               if v_0.Op != OpEq64 {
+               if v_0.Op != OpLeq64U {
                        break
                }
                y := v_0.Args[1]
                x := v_0.Args[0]
-               v.reset(OpNeq64)
-               v.AddArg2(x, y)
+               v.reset(OpLess64U)
+               v.AddArg2(y, x)
                return true
        }
-       // match: (Not (Eq32 x y))
-       // result: (Neq32 x y)
+       // match: (Not (Leq32U x y))
+       // result: (Less32U y x)
        for {
-               if v_0.Op != OpEq32 {
+               if v_0.Op != OpLeq32U {
                        break
                }
                y := v_0.Args[1]
                x := v_0.Args[0]
-               v.reset(OpNeq32)
-               v.AddArg2(x, y)
+               v.reset(OpLess32U)
+               v.AddArg2(y, x)
                return true
        }
-       // match: (Not (Eq16 x y))
-       // result: (Neq16 x y)
+       // match: (Not (Leq16U x y))
+       // result: (Less16U y x)
        for {
-               if v_0.Op != OpEq16 {
+               if v_0.Op != OpLeq16U {
                        break
                }
                y := v_0.Args[1]
                x := v_0.Args[0]
-               v.reset(OpNeq16)
-               v.AddArg2(x, y)
+               v.reset(OpLess16U)
+               v.AddArg2(y, x)
                return true
        }
-       // match: (Not (Eq8 x y))
-       // result: (Neq8 x y)
+       // match: (Not (Leq8U x y))
+       // result: (Less8U y x)
        for {
-               if v_0.Op != OpEq8 {
+               if v_0.Op != OpLeq8U {
                        break
                }
                y := v_0.Args[1]
                x := v_0.Args[0]
-               v.reset(OpNeq8)
-               v.AddArg2(x, y)
+               v.reset(OpLess8U)
+               v.AddArg2(y, x)
                return true
        }
-       // match: (Not (EqB x y))
-       // result: (NeqB x y)
+       return false
+}
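
The Not rules above fold an explicit boolean negation into the comparison it wraps: a negated equality becomes the corresponding inequality, and a negated ordered comparison becomes its complement with the operands swapped. A minimal source-level sketch, not part of the patch (function names are hypothetical):

// Not(Less64 x y) rewrites to Leq64 y x, so no explicit NOT is emitted.
func ge(x, y int64) bool { return !(x < y) }

// Not(Neq64 x y) rewrites to Eq64 x y.
func eq(x, y int64) bool { return !(x != y) }
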
+func rewriteValuegeneric_OpOffPtr(v *Value) bool {
+       v_0 := v.Args[0]
+       // match: (OffPtr (OffPtr p [b]) [a])
+       // result: (OffPtr p [a+b])
        for {
-               if v_0.Op != OpEqB {
+               a := v.AuxInt
+               if v_0.Op != OpOffPtr {
+                       break
+               }
+               b := v_0.AuxInt
+               p := v_0.Args[0]
+               v.reset(OpOffPtr)
+               v.AuxInt = a + b
+               v.AddArg(p)
+               return true
+       }
+       // match: (OffPtr p [0])
+       // cond: v.Type.Compare(p.Type) == types.CMPeq
+       // result: p
+       for {
+               if v.AuxInt != 0 {
+                       break
+               }
+               p := v_0
+               if !(v.Type.Compare(p.Type) == types.CMPeq) {
+                       break
+               }
+               v.copyOf(p)
+               return true
+       }
+       return false
+}
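
The two OffPtr rules above collapse chained pointer offsets into a single offset and drop a zero offset whose type already matches the pointee. A hypothetical sketch of the kind of code that can produce nested OffPtr values, not part of the patch:

type inner struct {
	pad [8]byte
	v   int64
}

type outer struct {
	pad [16]byte
	in  inner
}

// Accessing o.in.v can lower to OffPtr [8] (OffPtr [16] o); the first rule
// folds that into a single OffPtr [24] o.
func load(o *outer) int64 {
	return o.in.v
}
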
+func rewriteValuegeneric_OpOr16(v *Value) bool {
+       v_1 := v.Args[1]
+       v_0 := v.Args[0]
+       b := v.Block
+       // match: (Or16 (Const16 [c]) (Const16 [d]))
+       // result: (Const16 [int64(int16(c|d))])
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0.AuxInt
+                       if v_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1.AuxInt
+                       v.reset(OpConst16)
+                       v.AuxInt = int64(int16(c | d))
+                       return true
+               }
+               break
+       }
+       // match: (Or16 x x)
+       // result: x
+       for {
+               x := v_0
+               if x != v_1 {
                        break
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpNeqB)
-               v.AddArg2(x, y)
-               return true
+               v.copyOf(x)
+               return true
+       }
+       // match: (Or16 (Const16 [0]) x)
+       // result: x
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst16 || v_0.AuxInt != 0 {
+                               continue
+                       }
+                       x := v_1
+                       v.copyOf(x)
+                       return true
+               }
+               break
+       }
+       // match: (Or16 (Const16 [-1]) _)
+       // result: (Const16 [-1])
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst16 || v_0.AuxInt != -1 {
+                               continue
+                       }
+                       v.reset(OpConst16)
+                       v.AuxInt = -1
+                       return true
+               }
+               break
+       }
+       // match: (Or16 x (Or16 x y))
+       // result: (Or16 x y)
+       for {
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       x := v_0
+                       if v_1.Op != OpOr16 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       v_1_0 := v_1.Args[0]
+                       v_1_1 := v_1.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+                               if x != v_1_0 {
+                                       continue
+                               }
+                               y := v_1_1
+                               v.reset(OpOr16)
+                               v.AddArg2(x, y)
+                               return true
+                       }
+               }
+               break
        }
-       // match: (Not (EqPtr x y))
-       // result: (NeqPtr x y)
+       // match: (Or16 (And16 x (Const16 [c2])) (Const16 <t> [c1]))
+       // cond: ^(c1 | c2) == 0
+       // result: (Or16 (Const16 <t> [c1]) x)
        for {
-               if v_0.Op != OpEqPtr {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpAnd16 {
+                               continue
+                       }
+                       _ = v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       v_0_1 := v_0.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
+                               x := v_0_0
+                               if v_0_1.Op != OpConst16 {
+                                       continue
+                               }
+                               c2 := v_0_1.AuxInt
+                               if v_1.Op != OpConst16 {
+                                       continue
+                               }
+                               t := v_1.Type
+                               c1 := v_1.AuxInt
+                               if !(^(c1 | c2) == 0) {
+                                       continue
+                               }
+                               v.reset(OpOr16)
+                               v0 := b.NewValue0(v.Pos, OpConst16, t)
+                               v0.AuxInt = c1
+                               v.AddArg2(v0, x)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpNeqPtr)
-               v.AddArg2(x, y)
-               return true
+               break
        }
-       // match: (Not (Eq64F x y))
-       // result: (Neq64F x y)
+       // match: (Or16 (Or16 i:(Const16 <t>) z) x)
+       // cond: (z.Op != OpConst16 && x.Op != OpConst16)
+       // result: (Or16 i (Or16 <t> z x))
        for {
-               if v_0.Op != OpEq64F {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpOr16 {
+                               continue
+                       }
+                       _ = v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       v_0_1 := v_0.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
+                               i := v_0_0
+                               if i.Op != OpConst16 {
+                                       continue
+                               }
+                               t := i.Type
+                               z := v_0_1
+                               x := v_1
+                               if !(z.Op != OpConst16 && x.Op != OpConst16) {
+                                       continue
+                               }
+                               v.reset(OpOr16)
+                               v0 := b.NewValue0(v.Pos, OpOr16, t)
+                               v0.AddArg2(z, x)
+                               v.AddArg2(i, v0)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpNeq64F)
-               v.AddArg2(x, y)
-               return true
+               break
        }
-       // match: (Not (Eq32F x y))
-       // result: (Neq32F x y)
+       // match: (Or16 (Const16 <t> [c]) (Or16 (Const16 <t> [d]) x))
+       // result: (Or16 (Const16 <t> [int64(int16(c|d))]) x)
        for {
-               if v_0.Op != OpEq32F {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst16 {
+                               continue
+                       }
+                       t := v_0.Type
+                       c := v_0.AuxInt
+                       if v_1.Op != OpOr16 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       v_1_0 := v_1.Args[0]
+                       v_1_1 := v_1.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+                               if v_1_0.Op != OpConst16 || v_1_0.Type != t {
+                                       continue
+                               }
+                               d := v_1_0.AuxInt
+                               x := v_1_1
+                               v.reset(OpOr16)
+                               v0 := b.NewValue0(v.Pos, OpConst16, t)
+                               v0.AuxInt = int64(int16(c | d))
+                               v.AddArg2(v0, x)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpNeq32F)
-               v.AddArg2(x, y)
-               return true
+               break
        }
-       // match: (Not (Neq64 x y))
-       // result: (Eq64 x y)
+       return false
+}
+func rewriteValuegeneric_OpOr32(v *Value) bool {
+       v_1 := v.Args[1]
+       v_0 := v.Args[0]
+       b := v.Block
+       // match: (Or32 (Const32 [c]) (Const32 [d]))
+       // result: (Const32 [int64(int32(c|d))])
        for {
-               if v_0.Op != OpNeq64 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0.AuxInt
+                       if v_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1.AuxInt
+                       v.reset(OpConst32)
+                       v.AuxInt = int64(int32(c | d))
+                       return true
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpEq64)
-               v.AddArg2(x, y)
-               return true
+               break
        }
-       // match: (Not (Neq32 x y))
-       // result: (Eq32 x y)
+       // match: (Or32 x x)
+       // result: x
        for {
-               if v_0.Op != OpNeq32 {
+               x := v_0
+               if x != v_1 {
                        break
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpEq32)
-               v.AddArg2(x, y)
+               v.copyOf(x)
                return true
        }
-       // match: (Not (Neq16 x y))
-       // result: (Eq16 x y)
+       // match: (Or32 (Const32 [0]) x)
+       // result: x
        for {
-               if v_0.Op != OpNeq16 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst32 || v_0.AuxInt != 0 {
+                               continue
+                       }
+                       x := v_1
+                       v.copyOf(x)
+                       return true
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpEq16)
-               v.AddArg2(x, y)
-               return true
+               break
        }
-       // match: (Not (Neq8 x y))
-       // result: (Eq8 x y)
+       // match: (Or32 (Const32 [-1]) _)
+       // result: (Const32 [-1])
        for {
-               if v_0.Op != OpNeq8 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst32 || v_0.AuxInt != -1 {
+                               continue
+                       }
+                       v.reset(OpConst32)
+                       v.AuxInt = -1
+                       return true
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpEq8)
-               v.AddArg2(x, y)
-               return true
+               break
        }
-       // match: (Not (NeqB x y))
-       // result: (EqB x y)
+       // match: (Or32 x (Or32 x y))
+       // result: (Or32 x y)
        for {
-               if v_0.Op != OpNeqB {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       x := v_0
+                       if v_1.Op != OpOr32 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       v_1_0 := v_1.Args[0]
+                       v_1_1 := v_1.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+                               if x != v_1_0 {
+                                       continue
+                               }
+                               y := v_1_1
+                               v.reset(OpOr32)
+                               v.AddArg2(x, y)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpEqB)
-               v.AddArg2(x, y)
-               return true
+               break
        }
-       // match: (Not (NeqPtr x y))
-       // result: (EqPtr x y)
+       // match: (Or32 (And32 x (Const32 [c2])) (Const32 <t> [c1]))
+       // cond: ^(c1 | c2) == 0
+       // result: (Or32 (Const32 <t> [c1]) x)
        for {
-               if v_0.Op != OpNeqPtr {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpAnd32 {
+                               continue
+                       }
+                       _ = v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       v_0_1 := v_0.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
+                               x := v_0_0
+                               if v_0_1.Op != OpConst32 {
+                                       continue
+                               }
+                               c2 := v_0_1.AuxInt
+                               if v_1.Op != OpConst32 {
+                                       continue
+                               }
+                               t := v_1.Type
+                               c1 := v_1.AuxInt
+                               if !(^(c1 | c2) == 0) {
+                                       continue
+                               }
+                               v.reset(OpOr32)
+                               v0 := b.NewValue0(v.Pos, OpConst32, t)
+                               v0.AuxInt = c1
+                               v.AddArg2(v0, x)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpEqPtr)
-               v.AddArg2(x, y)
-               return true
+               break
        }
-       // match: (Not (Neq64F x y))
-       // result: (Eq64F x y)
+       // match: (Or32 (Or32 i:(Const32 <t>) z) x)
+       // cond: (z.Op != OpConst32 && x.Op != OpConst32)
+       // result: (Or32 i (Or32 <t> z x))
        for {
-               if v_0.Op != OpNeq64F {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpOr32 {
+                               continue
+                       }
+                       _ = v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       v_0_1 := v_0.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
+                               i := v_0_0
+                               if i.Op != OpConst32 {
+                                       continue
+                               }
+                               t := i.Type
+                               z := v_0_1
+                               x := v_1
+                               if !(z.Op != OpConst32 && x.Op != OpConst32) {
+                                       continue
+                               }
+                               v.reset(OpOr32)
+                               v0 := b.NewValue0(v.Pos, OpOr32, t)
+                               v0.AddArg2(z, x)
+                               v.AddArg2(i, v0)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpEq64F)
-               v.AddArg2(x, y)
-               return true
+               break
        }
-       // match: (Not (Neq32F x y))
-       // result: (Eq32F x y)
+       // match: (Or32 (Const32 <t> [c]) (Or32 (Const32 <t> [d]) x))
+       // result: (Or32 (Const32 <t> [int64(int32(c|d))]) x)
        for {
-               if v_0.Op != OpNeq32F {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst32 {
+                               continue
+                       }
+                       t := v_0.Type
+                       c := v_0.AuxInt
+                       if v_1.Op != OpOr32 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       v_1_0 := v_1.Args[0]
+                       v_1_1 := v_1.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+                               if v_1_0.Op != OpConst32 || v_1_0.Type != t {
+                                       continue
+                               }
+                               d := v_1_0.AuxInt
+                               x := v_1_1
+                               v.reset(OpOr32)
+                               v0 := b.NewValue0(v.Pos, OpConst32, t)
+                               v0.AuxInt = int64(int32(c | d))
+                               v.AddArg2(v0, x)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpEq32F)
-               v.AddArg2(x, y)
-               return true
+               break
        }
-       // match: (Not (Less64 x y))
-       // result: (Leq64 y x)
+       return false
+}
+func rewriteValuegeneric_OpOr64(v *Value) bool {
+       v_1 := v.Args[1]
+       v_0 := v.Args[0]
+       b := v.Block
+       // match: (Or64 (Const64 [c]) (Const64 [d]))
+       // result: (Const64 [c|d])
        for {
-               if v_0.Op != OpLess64 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0.AuxInt
+                       if v_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1.AuxInt
+                       v.reset(OpConst64)
+                       v.AuxInt = c | d
+                       return true
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLeq64)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Less32 x y))
-       // result: (Leq32 y x)
+       // match: (Or64 x x)
+       // result: x
        for {
-               if v_0.Op != OpLess32 {
+               x := v_0
+               if x != v_1 {
                        break
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLeq32)
-               v.AddArg2(y, x)
+               v.copyOf(x)
                return true
        }
-       // match: (Not (Less16 x y))
-       // result: (Leq16 y x)
+       // match: (Or64 (Const64 [0]) x)
+       // result: x
        for {
-               if v_0.Op != OpLess16 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
+                               continue
+                       }
+                       x := v_1
+                       v.copyOf(x)
+                       return true
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLeq16)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Less8 x y))
-       // result: (Leq8 y x)
+       // match: (Or64 (Const64 [-1]) _)
+       // result: (Const64 [-1])
        for {
-               if v_0.Op != OpLess8 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst64 || v_0.AuxInt != -1 {
+                               continue
+                       }
+                       v.reset(OpConst64)
+                       v.AuxInt = -1
+                       return true
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLeq8)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Less64U x y))
-       // result: (Leq64U y x)
+       // match: (Or64 x (Or64 x y))
+       // result: (Or64 x y)
        for {
-               if v_0.Op != OpLess64U {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       x := v_0
+                       if v_1.Op != OpOr64 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       v_1_0 := v_1.Args[0]
+                       v_1_1 := v_1.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+                               if x != v_1_0 {
+                                       continue
+                               }
+                               y := v_1_1
+                               v.reset(OpOr64)
+                               v.AddArg2(x, y)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLeq64U)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Less32U x y))
-       // result: (Leq32U y x)
+       // match: (Or64 (And64 x (Const64 [c2])) (Const64 <t> [c1]))
+       // cond: ^(c1 | c2) == 0
+       // result: (Or64 (Const64 <t> [c1]) x)
        for {
-               if v_0.Op != OpLess32U {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpAnd64 {
+                               continue
+                       }
+                       _ = v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       v_0_1 := v_0.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
+                               x := v_0_0
+                               if v_0_1.Op != OpConst64 {
+                                       continue
+                               }
+                               c2 := v_0_1.AuxInt
+                               if v_1.Op != OpConst64 {
+                                       continue
+                               }
+                               t := v_1.Type
+                               c1 := v_1.AuxInt
+                               if !(^(c1 | c2) == 0) {
+                                       continue
+                               }
+                               v.reset(OpOr64)
+                               v0 := b.NewValue0(v.Pos, OpConst64, t)
+                               v0.AuxInt = c1
+                               v.AddArg2(v0, x)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLeq32U)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Less16U x y))
-       // result: (Leq16U y x)
+       // match: (Or64 (Or64 i:(Const64 <t>) z) x)
+       // cond: (z.Op != OpConst64 && x.Op != OpConst64)
+       // result: (Or64 i (Or64 <t> z x))
        for {
-               if v_0.Op != OpLess16U {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpOr64 {
+                               continue
+                       }
+                       _ = v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       v_0_1 := v_0.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
+                               i := v_0_0
+                               if i.Op != OpConst64 {
+                                       continue
+                               }
+                               t := i.Type
+                               z := v_0_1
+                               x := v_1
+                               if !(z.Op != OpConst64 && x.Op != OpConst64) {
+                                       continue
+                               }
+                               v.reset(OpOr64)
+                               v0 := b.NewValue0(v.Pos, OpOr64, t)
+                               v0.AddArg2(z, x)
+                               v.AddArg2(i, v0)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLeq16U)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Less8U x y))
-       // result: (Leq8U y x)
+       // match: (Or64 (Const64 <t> [c]) (Or64 (Const64 <t> [d]) x))
+       // result: (Or64 (Const64 <t> [c|d]) x)
        for {
-               if v_0.Op != OpLess8U {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst64 {
+                               continue
+                       }
+                       t := v_0.Type
+                       c := v_0.AuxInt
+                       if v_1.Op != OpOr64 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       v_1_0 := v_1.Args[0]
+                       v_1_1 := v_1.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+                               if v_1_0.Op != OpConst64 || v_1_0.Type != t {
+                                       continue
+                               }
+                               d := v_1_0.AuxInt
+                               x := v_1_1
+                               v.reset(OpOr64)
+                               v0 := b.NewValue0(v.Pos, OpConst64, t)
+                               v0.AuxInt = c | d
+                               v.AddArg2(v0, x)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLeq8U)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Leq64 x y))
-       // result: (Less64 y x)
+       return false
+}
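
The Or rules above (shown here for Or64; the Or8, Or16 and Or32 variants are identical apart from the width) fold constant operands, apply the identities x|0 = x, x|-1 = -1 and x|x = x, and re-associate so that at most one constant operand survives. A hypothetical illustration, not part of the patch:

// (x | 0xF0) | 0x0F re-associates and folds to x | 0xFF.
func setLow(x int64) int64 {
	return (x | 0xF0) | 0x0F
}

// x | 0 is an identity and folds to x.
func ident(x int64) int64 {
	return x | 0
}
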
+func rewriteValuegeneric_OpOr8(v *Value) bool {
+       v_1 := v.Args[1]
+       v_0 := v.Args[0]
+       b := v.Block
+       // match: (Or8 (Const8 [c]) (Const8 [d]))
+       // result: (Const8 [int64(int8(c|d))])
        for {
-               if v_0.Op != OpLeq64 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0.AuxInt
+                       if v_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1.AuxInt
+                       v.reset(OpConst8)
+                       v.AuxInt = int64(int8(c | d))
+                       return true
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLess64)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Leq32 x y))
-       // result: (Less32 y x)
+       // match: (Or8 x x)
+       // result: x
        for {
-               if v_0.Op != OpLeq32 {
+               x := v_0
+               if x != v_1 {
                        break
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLess32)
-               v.AddArg2(y, x)
+               v.copyOf(x)
                return true
        }
-       // match: (Not (Leq16 x y))
-       // result: (Less16 y x)
+       // match: (Or8 (Const8 [0]) x)
+       // result: x
        for {
-               if v_0.Op != OpLeq16 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst8 || v_0.AuxInt != 0 {
+                               continue
+                       }
+                       x := v_1
+                       v.copyOf(x)
+                       return true
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLess16)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Leq8 x y))
-       // result: (Less8 y x)
+       // match: (Or8 (Const8 [-1]) _)
+       // result: (Const8 [-1])
        for {
-               if v_0.Op != OpLeq8 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst8 || v_0.AuxInt != -1 {
+                               continue
+                       }
+                       v.reset(OpConst8)
+                       v.AuxInt = -1
+                       return true
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLess8)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Leq64U x y))
-       // result: (Less64U y x)
+       // match: (Or8 x (Or8 x y))
+       // result: (Or8 x y)
        for {
-               if v_0.Op != OpLeq64U {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       x := v_0
+                       if v_1.Op != OpOr8 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       v_1_0 := v_1.Args[0]
+                       v_1_1 := v_1.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+                               if x != v_1_0 {
+                                       continue
+                               }
+                               y := v_1_1
+                               v.reset(OpOr8)
+                               v.AddArg2(x, y)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLess64U)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Leq32U x y))
-       // result: (Less32U y x)
+       // match: (Or8 (And8 x (Const8 [c2])) (Const8 <t> [c1]))
+       // cond: ^(c1 | c2) == 0
+       // result: (Or8 (Const8 <t> [c1]) x)
        for {
-               if v_0.Op != OpLeq32U {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpAnd8 {
+                               continue
+                       }
+                       _ = v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       v_0_1 := v_0.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
+                               x := v_0_0
+                               if v_0_1.Op != OpConst8 {
+                                       continue
+                               }
+                               c2 := v_0_1.AuxInt
+                               if v_1.Op != OpConst8 {
+                                       continue
+                               }
+                               t := v_1.Type
+                               c1 := v_1.AuxInt
+                               if !(^(c1 | c2) == 0) {
+                                       continue
+                               }
+                               v.reset(OpOr8)
+                               v0 := b.NewValue0(v.Pos, OpConst8, t)
+                               v0.AuxInt = c1
+                               v.AddArg2(v0, x)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLess32U)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Leq16U x y))
-       // result: (Less16U y x)
+       // match: (Or8 (Or8 i:(Const8 <t>) z) x)
+       // cond: (z.Op != OpConst8 && x.Op != OpConst8)
+       // result: (Or8 i (Or8 <t> z x))
        for {
-               if v_0.Op != OpLeq16U {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpOr8 {
+                               continue
+                       }
+                       _ = v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       v_0_1 := v_0.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
+                               i := v_0_0
+                               if i.Op != OpConst8 {
+                                       continue
+                               }
+                               t := i.Type
+                               z := v_0_1
+                               x := v_1
+                               if !(z.Op != OpConst8 && x.Op != OpConst8) {
+                                       continue
+                               }
+                               v.reset(OpOr8)
+                               v0 := b.NewValue0(v.Pos, OpOr8, t)
+                               v0.AddArg2(z, x)
+                               v.AddArg2(i, v0)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLess16U)
-               v.AddArg2(y, x)
-               return true
+               break
        }
-       // match: (Not (Leq8U x y))
-       // result: (Less8U y x)
+       // match: (Or8 (Const8 <t> [c]) (Or8 (Const8 <t> [d]) x))
+       // result: (Or8 (Const8 <t> [int64(int8(c|d))]) x)
        for {
-               if v_0.Op != OpLeq8U {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpConst8 {
+                               continue
+                       }
+                       t := v_0.Type
+                       c := v_0.AuxInt
+                       if v_1.Op != OpOr8 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       v_1_0 := v_1.Args[0]
+                       v_1_1 := v_1.Args[1]
+                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
+                               if v_1_0.Op != OpConst8 || v_1_0.Type != t {
+                                       continue
+                               }
+                               d := v_1_0.AuxInt
+                               x := v_1_1
+                               v.reset(OpOr8)
+                               v0 := b.NewValue0(v.Pos, OpConst8, t)
+                               v0.AuxInt = int64(int8(c | d))
+                               v.AddArg2(v0, x)
+                               return true
+                       }
                }
-               y := v_0.Args[1]
-               x := v_0.Args[0]
-               v.reset(OpLess8U)
-               v.AddArg2(y, x)
-               return true
+               break
        }
        return false
 }
-func rewriteValuegeneric_OpOffPtr(v *Value) bool {
+func rewriteValuegeneric_OpOrB(v *Value) bool {
+       v_1 := v.Args[1]
        v_0 := v.Args[0]
-       // match: (OffPtr (OffPtr p [b]) [a])
-       // result: (OffPtr p [a+b])
+       b := v.Block
+       // match: (OrB (Less64 (Const64 [c]) x) (Less64 x (Const64 [d])))
+       // cond: c >= d
+       // result: (Less64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
        for {
-               a := v.AuxInt
-               if v_0.Op != OpOffPtr {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess64 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess64 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d) {
+                               continue
+                       }
+                       v.reset(OpLess64U)
+                       v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v0.AuxInt = c - d
+                       v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
-               b := v_0.AuxInt
-               p := v_0.Args[0]
-               v.reset(OpOffPtr)
-               v.AuxInt = a + b
-               v.AddArg(p)
-               return true
+               break
        }
-       // match: (OffPtr p [0])
-       // cond: v.Type.Compare(p.Type) == types.CMPeq
-       // result: p
+       // match: (OrB (Leq64 (Const64 [c]) x) (Less64 x (Const64 [d])))
+       // cond: c >= d
+       // result: (Leq64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
        for {
-               if v.AuxInt != 0 {
-                       break
-               }
-               p := v_0
-               if !(v.Type.Compare(p.Type) == types.CMPeq) {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq64 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess64 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d) {
+                               continue
+                       }
+                       v.reset(OpLeq64U)
+                       v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v0.AuxInt = c - d
+                       v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
-               v.copyOf(p)
-               return true
+               break
        }
-       return false
-}
-func rewriteValuegeneric_OpOr16(v *Value) bool {
-       v_1 := v.Args[1]
-       v_0 := v.Args[0]
-       b := v.Block
-       // match: (Or16 (Const16 [c]) (Const16 [d]))
-       // result: (Const16 [int64(int16(c|d))])
+       // match: (OrB (Less32 (Const32 [c]) x) (Less32 x (Const32 [d])))
+       // cond: c >= d
+       // result: (Less32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst16 {
+                       if v_0.Op != OpLess32 {
                                continue
                        }
-                       c := v_0.AuxInt
-                       if v_1.Op != OpConst16 {
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
                                continue
                        }
-                       d := v_1.AuxInt
-                       v.reset(OpConst16)
-                       v.AuxInt = int64(int16(c | d))
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess32 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d) {
+                               continue
+                       }
+                       v.reset(OpLess32U)
+                       v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v0.AuxInt = c - d
+                       v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or16 x x)
-       // result: x
+       // match: (OrB (Leq32 (Const32 [c]) x) (Less32 x (Const32 [d])))
+       // cond: c >= d
+       // result: (Leq32U (Const32 <x.Type> [c-d]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
        for {
-               x := v_0
-               if x != v_1 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLeq32 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess32 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d) {
+                               continue
+                       }
+                       v.reset(OpLeq32U)
+                       v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v0.AuxInt = c - d
+                       v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
-               v.copyOf(x)
-               return true
+               break
        }
-       // match: (Or16 (Const16 [0]) x)
-       // result: x
+       // match: (OrB (Less16 (Const16 [c]) x) (Less16 x (Const16 [d])))
+       // cond: c >= d
+       // result: (Less16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst16 || v_0.AuxInt != 0 {
+                       if v_0.Op != OpLess16 {
                                continue
                        }
-                       x := v_1
-                       v.copyOf(x)
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess16 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d) {
+                               continue
+                       }
+                       v.reset(OpLess16U)
+                       v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v0.AuxInt = c - d
+                       v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or16 (Const16 [-1]) _)
-       // result: (Const16 [-1])
+       // match: (OrB (Leq16 (Const16 [c]) x) (Less16 x (Const16 [d])))
+       // cond: c >= d
+       // result: (Leq16U (Const16 <x.Type> [c-d]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst16 || v_0.AuxInt != -1 {
+                       if v_0.Op != OpLeq16 {
                                continue
                        }
-                       v.reset(OpConst16)
-                       v.AuxInt = -1
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess16 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d) {
+                               continue
+                       }
+                       v.reset(OpLeq16U)
+                       v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v0.AuxInt = c - d
+                       v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or16 x (Or16 x y))
-       // result: (Or16 x y)
+       // match: (OrB (Less8 (Const8 [c]) x) (Less8 x (Const8 [d])))
+       // cond: c >= d
+       // result: (Less8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       x := v_0
-                       if v_1.Op != OpOr16 {
+                       if v_0.Op != OpLess8 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess8 {
                                continue
                        }
                        _ = v_1.Args[1]
-                       v_1_0 := v_1.Args[0]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
                        v_1_1 := v_1.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
-                               if x != v_1_0 {
-                                       continue
-                               }
-                               y := v_1_1
-                               v.reset(OpOr16)
-                               v.AddArg2(x, y)
-                               return true
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d) {
+                               continue
                        }
+                       v.reset(OpLess8U)
+                       v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v0.AuxInt = c - d
+                       v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or16 (And16 x (Const16 [c2])) (Const16 <t> [c1]))
-       // cond: ^(c1 | c2) == 0
-       // result: (Or16 (Const16 <t> [c1]) x)
+       // match: (OrB (Leq8 (Const8 [c]) x) (Less8 x (Const8 [d])))
+       // cond: c >= d
+       // result: (Leq8U (Const8 <x.Type> [c-d]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpAnd16 {
+                       if v_0.Op != OpLeq8 {
                                continue
                        }
-                       _ = v_0.Args[1]
+                       x := v_0.Args[1]
                        v_0_0 := v_0.Args[0]
-                       v_0_1 := v_0.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
-                               x := v_0_0
-                               if v_0_1.Op != OpConst16 {
-                                       continue
-                               }
-                               c2 := v_0_1.AuxInt
-                               if v_1.Op != OpConst16 {
-                                       continue
-                               }
-                               t := v_1.Type
-                               c1 := v_1.AuxInt
-                               if !(^(c1 | c2) == 0) {
-                                       continue
-                               }
-                               v.reset(OpOr16)
-                               v0 := b.NewValue0(v.Pos, OpConst16, t)
-                               v0.AuxInt = c1
-                               v.AddArg2(v0, x)
-                               return true
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess8 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
                        }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d) {
+                               continue
+                       }
+                       v.reset(OpLeq8U)
+                       v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v0.AuxInt = c - d
+                       v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or16 (Or16 i:(Const16 <t>) z) x)
-       // cond: (z.Op != OpConst16 && x.Op != OpConst16)
-       // result: (Or16 i (Or16 <t> z x))
+       // match: (OrB (Less64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
+       // cond: c >= d+1 && int64(d+1) > int64(d)
+       // result: (Less64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
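+       // Here the upper arm is inclusive, so the range is shifted by d+1 rather than d,
+       // and the extra condition int64(d+1) > int64(d) guards against d+1 overflowing.
+       // Illustrative example (assumed values, c=10 and d=0): x > 10 || x <= 0
+       // becomes 9 < uint64(x-1).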
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpOr16 {
+                       if v_0.Op != OpLess64 {
                                continue
                        }
-                       _ = v_0.Args[1]
+                       x := v_0.Args[1]
                        v_0_0 := v_0.Args[0]
-                       v_0_1 := v_0.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
-                               i := v_0_0
-                               if i.Op != OpConst16 {
-                                       continue
-                               }
-                               t := i.Type
-                               z := v_0_1
-                               x := v_1
-                               if !(z.Op != OpConst16 && x.Op != OpConst16) {
-                                       continue
-                               }
-                               v.reset(OpOr16)
-                               v0 := b.NewValue0(v.Pos, OpOr16, t)
-                               v0.AddArg2(z, x)
-                               v.AddArg2(i, v0)
-                               return true
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq64 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d+1 && int64(d+1) > int64(d)) {
+                               continue
                        }
+                       v.reset(OpLess64U)
+                       v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v0.AuxInt = c - d - 1
+                       v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d + 1
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or16 (Const16 <t> [c]) (Or16 (Const16 <t> [d]) x))
-       // result: (Or16 (Const16 <t> [int64(int16(c|d))]) x)
+       // match: (OrB (Leq64 (Const64 [c]) x) (Leq64 x (Const64 [d])))
+       // cond: c >= d+1 && int64(d+1) > int64(d)
+       // result: (Leq64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
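+       // Both arms inclusive. Illustrative example (assumed values, c=10 and d=0):
+       // x >= 10 || x <= 0 becomes 9 <= uint64(x-1).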
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst16 {
+                       if v_0.Op != OpLeq64 {
                                continue
                        }
-                       t := v_0.Type
-                       c := v_0.AuxInt
-                       if v_1.Op != OpOr16 {
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq64 {
                                continue
                        }
                        _ = v_1.Args[1]
-                       v_1_0 := v_1.Args[0]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
                        v_1_1 := v_1.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
-                               if v_1_0.Op != OpConst16 || v_1_0.Type != t {
-                                       continue
-                               }
-                               d := v_1_0.AuxInt
-                               x := v_1_1
-                               v.reset(OpOr16)
-                               v0 := b.NewValue0(v.Pos, OpConst16, t)
-                               v0.AuxInt = int64(int16(c | d))
-                               v.AddArg2(v0, x)
-                               return true
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d+1 && int64(d+1) > int64(d)) {
+                               continue
                        }
+                       v.reset(OpLeq64U)
+                       v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v0.AuxInt = c - d - 1
+                       v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d + 1
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       return false
-}
-func rewriteValuegeneric_OpOr32(v *Value) bool {
-       v_1 := v.Args[1]
-       v_0 := v.Args[0]
-       b := v.Block
-       // match: (Or32 (Const32 [c]) (Const32 [d]))
-       // result: (Const32 [int64(int32(c|d))])
+       // match: (OrB (Less32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
+       // cond: c >= d+1 && int32(d+1) > int32(d)
+       // result: (Less32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst32 {
+                       if v_0.Op != OpLess32 {
                                continue
                        }
-                       c := v_0.AuxInt
-                       if v_1.Op != OpConst32 {
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
                                continue
                        }
-                       d := v_1.AuxInt
-                       v.reset(OpConst32)
-                       v.AuxInt = int64(int32(c | d))
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq32 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d+1 && int32(d+1) > int32(d)) {
+                               continue
+                       }
+                       v.reset(OpLess32U)
+                       v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v0.AuxInt = c - d - 1
+                       v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = d + 1
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or32 x x)
-       // result: x
-       for {
-               x := v_0
-               if x != v_1 {
-                       break
-               }
-               v.copyOf(x)
-               return true
-       }
-       // match: (Or32 (Const32 [0]) x)
-       // result: x
+       // match: (OrB (Leq32 (Const32 [c]) x) (Leq32 x (Const32 [d])))
+       // cond: c >= d+1 && int32(d+1) > int32(d)
+       // result: (Leq32U (Const32 <x.Type> [c-d-1]) (Sub32 <x.Type> x (Const32 <x.Type> [d+1])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst32 || v_0.AuxInt != 0 {
+                       if v_0.Op != OpLeq32 {
                                continue
                        }
-                       x := v_1
-                       v.copyOf(x)
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq32 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d+1 && int32(d+1) > int32(d)) {
+                               continue
+                       }
+                       v.reset(OpLeq32U)
+                       v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v0.AuxInt = c - d - 1
+                       v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = d + 1
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or32 (Const32 [-1]) _)
-       // result: (Const32 [-1])
+       // match: (OrB (Less16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
+       // cond: c >= d+1 && int16(d+1) > int16(d)
+       // result: (Less16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst32 || v_0.AuxInt != -1 {
+                       if v_0.Op != OpLess16 {
                                continue
                        }
-                       v.reset(OpConst32)
-                       v.AuxInt = -1
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq16 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d+1 && int16(d+1) > int16(d)) {
+                               continue
+                       }
+                       v.reset(OpLess16U)
+                       v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v0.AuxInt = c - d - 1
+                       v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = d + 1
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or32 x (Or32 x y))
-       // result: (Or32 x y)
+       // match: (OrB (Leq16 (Const16 [c]) x) (Leq16 x (Const16 [d])))
+       // cond: c >= d+1 && int16(d+1) > int16(d)
+       // result: (Leq16U (Const16 <x.Type> [c-d-1]) (Sub16 <x.Type> x (Const16 <x.Type> [d+1])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       x := v_0
-                       if v_1.Op != OpOr32 {
+                       if v_0.Op != OpLeq16 {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq16 {
                                continue
                        }
                        _ = v_1.Args[1]
-                       v_1_0 := v_1.Args[0]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
                        v_1_1 := v_1.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
-                               if x != v_1_0 {
-                                       continue
-                               }
-                               y := v_1_1
-                               v.reset(OpOr32)
-                               v.AddArg2(x, y)
-                               return true
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d+1 && int16(d+1) > int16(d)) {
+                               continue
                        }
+                       v.reset(OpLeq16U)
+                       v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v0.AuxInt = c - d - 1
+                       v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = d + 1
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or32 (And32 x (Const32 [c2])) (Const32 <t> [c1]))
-       // cond: ^(c1 | c2) == 0
-       // result: (Or32 (Const32 <t> [c1]) x)
+       // match: (OrB (Less8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
+       // cond: c >= d+1 && int8(d+1) > int8(d)
+       // result: (Less8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpAnd32 {
+                       if v_0.Op != OpLess8 {
                                continue
                        }
-                       _ = v_0.Args[1]
+                       x := v_0.Args[1]
                        v_0_0 := v_0.Args[0]
-                       v_0_1 := v_0.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
-                               x := v_0_0
-                               if v_0_1.Op != OpConst32 {
-                                       continue
-                               }
-                               c2 := v_0_1.AuxInt
-                               if v_1.Op != OpConst32 {
-                                       continue
-                               }
-                               t := v_1.Type
-                               c1 := v_1.AuxInt
-                               if !(^(c1 | c2) == 0) {
-                                       continue
-                               }
-                               v.reset(OpOr32)
-                               v0 := b.NewValue0(v.Pos, OpConst32, t)
-                               v0.AuxInt = c1
-                               v.AddArg2(v0, x)
-                               return true
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq8 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(c >= d+1 && int8(d+1) > int8(d)) {
+                               continue
                        }
+                       v.reset(OpLess8U)
+                       v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v0.AuxInt = c - d - 1
+                       v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = d + 1
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or32 (Or32 i:(Const32 <t>) z) x)
-       // cond: (z.Op != OpConst32 && x.Op != OpConst32)
-       // result: (Or32 i (Or32 <t> z x))
+       // match: (OrB (Leq8 (Const8 [c]) x) (Leq8 x (Const8 [d])))
+       // cond: c >= d+1 && int8(d+1) > int8(d)
+       // result: (Leq8U (Const8 <x.Type> [c-d-1]) (Sub8 <x.Type> x (Const8 <x.Type> [d+1])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpOr32 {
+                       if v_0.Op != OpLeq8 {
                                continue
                        }
-                       _ = v_0.Args[1]
+                       x := v_0.Args[1]
                        v_0_0 := v_0.Args[0]
-                       v_0_1 := v_0.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
-                               i := v_0_0
-                               if i.Op != OpConst32 {
-                                       continue
-                               }
-                               t := i.Type
-                               z := v_0_1
-                               x := v_1
-                               if !(z.Op != OpConst32 && x.Op != OpConst32) {
-                                       continue
-                               }
-                               v.reset(OpOr32)
-                               v0 := b.NewValue0(v.Pos, OpOr32, t)
-                               v0.AddArg2(z, x)
-                               v.AddArg2(i, v0)
-                               return true
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq8 {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
                        }
+                       d := v_1_1.AuxInt
+                       if !(c >= d+1 && int8(d+1) > int8(d)) {
+                               continue
+                       }
+                       v.reset(OpLeq8U)
+                       v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v0.AuxInt = c - d - 1
+                       v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = d + 1
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or32 (Const32 <t> [c]) (Or32 (Const32 <t> [d]) x))
-       // result: (Or32 (Const32 <t> [int64(int32(c|d))]) x)
+       // match: (OrB (Less64U (Const64 [c]) x) (Less64U x (Const64 [d])))
+       // cond: uint64(c) >= uint64(d)
+       // result: (Less64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
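+       // Unsigned variant: the subtraction x-d wraps around for x < d, which is
+       // exactly what makes the single comparison equivalent to the disjunction.
+       // Illustrative example (assumed values, c=10 and d=5): for unsigned x,
+       // x > 10 || x < 5 becomes x-5 > 5.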
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst32 {
+                       if v_0.Op != OpLess64U {
                                continue
                        }
-                       t := v_0.Type
-                       c := v_0.AuxInt
-                       if v_1.Op != OpOr32 {
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess64U {
                                continue
                        }
                        _ = v_1.Args[1]
-                       v_1_0 := v_1.Args[0]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
                        v_1_1 := v_1.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
-                               if v_1_0.Op != OpConst32 || v_1_0.Type != t {
-                                       continue
-                               }
-                               d := v_1_0.AuxInt
-                               x := v_1_1
-                               v.reset(OpOr32)
-                               v0 := b.NewValue0(v.Pos, OpConst32, t)
-                               v0.AuxInt = int64(int32(c | d))
-                               v.AddArg2(v0, x)
-                               return true
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint64(c) >= uint64(d)) {
+                               continue
                        }
+                       v.reset(OpLess64U)
+                       v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v0.AuxInt = c - d
+                       v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       return false
-}
-func rewriteValuegeneric_OpOr64(v *Value) bool {
-       v_1 := v.Args[1]
-       v_0 := v.Args[0]
-       b := v.Block
-       // match: (Or64 (Const64 [c]) (Const64 [d]))
-       // result: (Const64 [c|d])
+       // match: (OrB (Leq64U (Const64 [c]) x) (Less64U x (Const64 [d])))
+       // cond: uint64(c) >= uint64(d)
+       // result: (Leq64U (Const64 <x.Type> [c-d]) (Sub64 <x.Type> x (Const64 <x.Type> [d])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst64 {
+                       if v_0.Op != OpLeq64U {
                                continue
                        }
-                       c := v_0.AuxInt
-                       if v_1.Op != OpConst64 {
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
                                continue
                        }
-                       d := v_1.AuxInt
-                       v.reset(OpConst64)
-                       v.AuxInt = c | d
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess64U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint64(c) >= uint64(d)) {
+                               continue
+                       }
+                       v.reset(OpLeq64U)
+                       v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v0.AuxInt = c - d
+                       v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or64 x x)
-       // result: x
+       // match: (OrB (Less32U (Const32 [c]) x) (Less32U x (Const32 [d])))
+       // cond: uint32(c) >= uint32(d)
+       // result: (Less32U (Const32 <x.Type> [int64(int32(c-d))]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
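+       // Same as the corresponding 64-bit rule above, except that the constant is
+       // truncated to 32 bits and sign-extended, since AuxInt is stored as an int64.
+       // Illustrative example (assumed values, c=10 and d=5): for a uint32 x,
+       // x > 10 || x < 5 becomes x-5 > 5.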
        for {
-               x := v_0
-               if x != v_1 {
-                       break
+               for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+                       if v_0.Op != OpLess32U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess32U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint32(c) >= uint32(d)) {
+                               continue
+                       }
+                       v.reset(OpLess32U)
+                       v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v0.AuxInt = int64(int32(c - d))
+                       v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
-               v.copyOf(x)
-               return true
+               break
        }
-       // match: (Or64 (Const64 [0]) x)
-       // result: x
+       // match: (OrB (Leq32U (Const32 [c]) x) (Less32U x (Const32 [d])))
+       // cond: uint32(c) >= uint32(d)
+       // result: (Leq32U (Const32 <x.Type> [int64(int32(c-d))]) (Sub32 <x.Type> x (Const32 <x.Type> [d])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
+                       if v_0.Op != OpLeq32U {
                                continue
                        }
-                       x := v_1
-                       v.copyOf(x)
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess32U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint32(c) >= uint32(d)) {
+                               continue
+                       }
+                       v.reset(OpLeq32U)
+                       v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v0.AuxInt = int64(int32(c - d))
+                       v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or64 (Const64 [-1]) _)
-       // result: (Const64 [-1])
+       // match: (OrB (Less16U (Const16 [c]) x) (Less16U x (Const16 [d])))
+       // cond: uint16(c) >= uint16(d)
+       // result: (Less16U (Const16 <x.Type> [int64(int16(c-d))]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst64 || v_0.AuxInt != -1 {
+                       if v_0.Op != OpLess16U {
                                continue
                        }
-                       v.reset(OpConst64)
-                       v.AuxInt = -1
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess16U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint16(c) >= uint16(d)) {
+                               continue
+                       }
+                       v.reset(OpLess16U)
+                       v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v0.AuxInt = int64(int16(c - d))
+                       v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or64 x (Or64 x y))
-       // result: (Or64 x y)
+       // match: (OrB (Leq16U (Const16 [c]) x) (Less16U x (Const16 [d])))
+       // cond: uint16(c) >= uint16(d)
+       // result: (Leq16U (Const16 <x.Type> [int64(int16(c-d))]) (Sub16 <x.Type> x (Const16 <x.Type> [d])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       x := v_0
-                       if v_1.Op != OpOr64 {
+                       if v_0.Op != OpLeq16U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess16U {
                                continue
                        }
                        _ = v_1.Args[1]
-                       v_1_0 := v_1.Args[0]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
                        v_1_1 := v_1.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
-                               if x != v_1_0 {
-                                       continue
-                               }
-                               y := v_1_1
-                               v.reset(OpOr64)
-                               v.AddArg2(x, y)
-                               return true
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint16(c) >= uint16(d)) {
+                               continue
                        }
+                       v.reset(OpLeq16U)
+                       v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v0.AuxInt = int64(int16(c - d))
+                       v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or64 (And64 x (Const64 [c2])) (Const64 <t> [c1]))
-       // cond: ^(c1 | c2) == 0
-       // result: (Or64 (Const64 <t> [c1]) x)
+       // match: (OrB (Less8U (Const8 [c]) x) (Less8U x (Const8 [d])))
+       // cond: uint8(c) >= uint8(d)
+       // result: (Less8U (Const8 <x.Type> [int64( int8(c-d))]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpAnd64 {
+                       if v_0.Op != OpLess8U {
                                continue
                        }
-                       _ = v_0.Args[1]
+                       x := v_0.Args[1]
                        v_0_0 := v_0.Args[0]
-                       v_0_1 := v_0.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
-                               x := v_0_0
-                               if v_0_1.Op != OpConst64 {
-                                       continue
-                               }
-                               c2 := v_0_1.AuxInt
-                               if v_1.Op != OpConst64 {
-                                       continue
-                               }
-                               t := v_1.Type
-                               c1 := v_1.AuxInt
-                               if !(^(c1 | c2) == 0) {
-                                       continue
-                               }
-                               v.reset(OpOr64)
-                               v0 := b.NewValue0(v.Pos, OpConst64, t)
-                               v0.AuxInt = c1
-                               v.AddArg2(v0, x)
-                               return true
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess8U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
                        }
+                       d := v_1_1.AuxInt
+                       if !(uint8(c) >= uint8(d)) {
+                               continue
+                       }
+                       v.reset(OpLess8U)
+                       v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v0.AuxInt = int64(int8(c - d))
+                       v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or64 (Or64 i:(Const64 <t>) z) x)
-       // cond: (z.Op != OpConst64 && x.Op != OpConst64)
-       // result: (Or64 i (Or64 <t> z x))
+       // match: (OrB (Leq8U (Const8 [c]) x) (Less8U x (Const8 [d])))
+       // cond: uint8(c) >= uint8(d)
+       // result: (Leq8U (Const8 <x.Type> [int64( int8(c-d))]) (Sub8 <x.Type> x (Const8 <x.Type> [d])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpOr64 {
+                       if v_0.Op != OpLeq8U {
                                continue
                        }
-                       _ = v_0.Args[1]
+                       x := v_0.Args[1]
                        v_0_0 := v_0.Args[0]
-                       v_0_1 := v_0.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
-                               i := v_0_0
-                               if i.Op != OpConst64 {
-                                       continue
-                               }
-                               t := i.Type
-                               z := v_0_1
-                               x := v_1
-                               if !(z.Op != OpConst64 && x.Op != OpConst64) {
-                                       continue
-                               }
-                               v.reset(OpOr64)
-                               v0 := b.NewValue0(v.Pos, OpOr64, t)
-                               v0.AddArg2(z, x)
-                               v.AddArg2(i, v0)
-                               return true
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLess8U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint8(c) >= uint8(d)) {
+                               continue
                        }
+                       v.reset(OpLeq8U)
+                       v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v0.AuxInt = int64(int8(c - d))
+                       v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = d
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or64 (Const64 <t> [c]) (Or64 (Const64 <t> [d]) x))
-       // result: (Or64 (Const64 <t> [c|d]) x)
+       // match: (OrB (Less64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
+       // cond: uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)
+       // result: (Less64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
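+       // Unsigned, with an inclusive lower arm: the guard uint64(d+1) > uint64(d)
+       // rejects d == 1<<64-1, where d+1 would wrap to zero.
+       // Illustrative example (assumed values, c=10 and d=5): for unsigned x,
+       // x > 10 || x <= 5 becomes x-6 > 4.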
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst64 {
+                       if v_0.Op != OpLess64U {
                                continue
                        }
-                       t := v_0.Type
-                       c := v_0.AuxInt
-                       if v_1.Op != OpOr64 {
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq64U {
                                continue
                        }
                        _ = v_1.Args[1]
-                       v_1_0 := v_1.Args[0]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
                        v_1_1 := v_1.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
-                               if v_1_0.Op != OpConst64 || v_1_0.Type != t {
-                                       continue
-                               }
-                               d := v_1_0.AuxInt
-                               x := v_1_1
-                               v.reset(OpOr64)
-                               v0 := b.NewValue0(v.Pos, OpConst64, t)
-                               v0.AuxInt = c | d
-                               v.AddArg2(v0, x)
-                               return true
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)) {
+                               continue
                        }
+                       v.reset(OpLess64U)
+                       v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v0.AuxInt = c - d - 1
+                       v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d + 1
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       return false
-}
-func rewriteValuegeneric_OpOr8(v *Value) bool {
-       v_1 := v.Args[1]
-       v_0 := v.Args[0]
-       b := v.Block
-       // match: (Or8 (Const8 [c]) (Const8 [d]))
-       // result: (Const8 [int64(int8(c|d))])
+       // match: (OrB (Leq64U (Const64 [c]) x) (Leq64U x (Const64 [d])))
+       // cond: uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)
+       // result: (Leq64U (Const64 <x.Type> [c-d-1]) (Sub64 <x.Type> x (Const64 <x.Type> [d+1])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst8 {
+                       if v_0.Op != OpLeq64U {
                                continue
                        }
-                       c := v_0.AuxInt
-                       if v_1.Op != OpConst8 {
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst64 {
                                continue
                        }
-                       d := v_1.AuxInt
-                       v.reset(OpConst8)
-                       v.AuxInt = int64(int8(c | d))
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq64U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst64 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint64(c) >= uint64(d+1) && uint64(d+1) > uint64(d)) {
+                               continue
+                       }
+                       v.reset(OpLeq64U)
+                       v0 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v0.AuxInt = c - d - 1
+                       v1 := b.NewValue0(v.Pos, OpSub64, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst64, x.Type)
+                       v2.AuxInt = d + 1
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or8 x x)
-       // result: x
-       for {
-               x := v_0
-               if x != v_1 {
-                       break
-               }
-               v.copyOf(x)
-               return true
-       }
-       // match: (Or8 (Const8 [0]) x)
-       // result: x
+       // match: (OrB (Less32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
+       // cond: uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)
+       // result: (Less32U (Const32 <x.Type> [int64(int32(c-d-1))]) (Sub32 <x.Type> x (Const32 <x.Type> [int64(int32(d+1))])))
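+       // 32-bit version of the corresponding 64-bit rule above: both c-d-1 and d+1
+       // are truncated to 32 bits and sign-extended before being stored in AuxInt.
+       // Illustrative example (assumed values, c=10 and d=5): for a uint32 x,
+       // x > 10 || x <= 5 becomes x-6 > 4.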
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst8 || v_0.AuxInt != 0 {
+                       if v_0.Op != OpLess32U {
                                continue
                        }
-                       x := v_1
-                       v.copyOf(x)
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq32U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)) {
+                               continue
+                       }
+                       v.reset(OpLess32U)
+                       v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v0.AuxInt = int64(int32(c - d - 1))
+                       v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = int64(int32(d + 1))
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or8 (Const8 [-1]) _)
-       // result: (Const8 [-1])
+       // match: (OrB (Leq32U (Const32 [c]) x) (Leq32U x (Const32 [d])))
+       // cond: uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)
+       // result: (Leq32U (Const32 <x.Type> [int64(int32(c-d-1))]) (Sub32 <x.Type> x (Const32 <x.Type> [int64(int32(d+1))])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst8 || v_0.AuxInt != -1 {
+                       if v_0.Op != OpLeq32U {
                                continue
                        }
-                       v.reset(OpConst8)
-                       v.AuxInt = -1
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst32 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq32U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst32 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint32(c) >= uint32(d+1) && uint32(d+1) > uint32(d)) {
+                               continue
+                       }
+                       v.reset(OpLeq32U)
+                       v0 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v0.AuxInt = int64(int32(c - d - 1))
+                       v1 := b.NewValue0(v.Pos, OpSub32, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst32, x.Type)
+                       v2.AuxInt = int64(int32(d + 1))
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
                        return true
                }
                break
        }
-       // match: (Or8 x (Or8 x y))
-       // result: (Or8 x y)
+       // match: (OrB (Less16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
+       // cond: uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)
+       // result: (Less16U (Const16 <x.Type> [int64(int16(c-d-1))]) (Sub16 <x.Type> x (Const16 <x.Type> [int64(int16(d+1))])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       x := v_0
-                       if v_1.Op != OpOr8 {
+                       if v_0.Op != OpLess16U {
+                               continue
+                       }
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq16U {
                                continue
                        }
                        _ = v_1.Args[1]
-                       v_1_0 := v_1.Args[0]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
                        v_1_1 := v_1.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
-                               if x != v_1_0 {
-                                       continue
-                               }
-                               y := v_1_1
-                               v.reset(OpOr8)
-                               v.AddArg2(x, y)
-                               return true
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)) {
+                               continue
                        }
+                       v.reset(OpLess16U)
+                       v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v0.AuxInt = int64(int16(c - d - 1))
+                       v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = int64(int16(d + 1))
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or8 (And8 x (Const8 [c2])) (Const8 <t> [c1]))
-       // cond: ^(c1 | c2) == 0
-       // result: (Or8 (Const8 <t> [c1]) x)
+       // match: (OrB (Leq16U (Const16 [c]) x) (Leq16U x (Const16 [d])))
+       // cond: uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)
+       // result: (Leq16U (Const16 <x.Type> [int64(int16(c-d-1))]) (Sub16 <x.Type> x (Const16 <x.Type> [int64(int16(d+1))])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpAnd8 {
+                       if v_0.Op != OpLeq16U {
                                continue
                        }
-                       _ = v_0.Args[1]
+                       x := v_0.Args[1]
                        v_0_0 := v_0.Args[0]
-                       v_0_1 := v_0.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
-                               x := v_0_0
-                               if v_0_1.Op != OpConst8 {
-                                       continue
-                               }
-                               c2 := v_0_1.AuxInt
-                               if v_1.Op != OpConst8 {
-                                       continue
-                               }
-                               t := v_1.Type
-                               c1 := v_1.AuxInt
-                               if !(^(c1 | c2) == 0) {
-                                       continue
-                               }
-                               v.reset(OpOr8)
-                               v0 := b.NewValue0(v.Pos, OpConst8, t)
-                               v0.AuxInt = c1
-                               v.AddArg2(v0, x)
-                               return true
+                       if v_0_0.Op != OpConst16 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq16U {
+                               continue
                        }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst16 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint16(c) >= uint16(d+1) && uint16(d+1) > uint16(d)) {
+                               continue
+                       }
+                       v.reset(OpLeq16U)
+                       v0 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v0.AuxInt = int64(int16(c - d - 1))
+                       v1 := b.NewValue0(v.Pos, OpSub16, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst16, x.Type)
+                       v2.AuxInt = int64(int16(d + 1))
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or8 (Or8 i:(Const8 <t>) z) x)
-       // cond: (z.Op != OpConst8 && x.Op != OpConst8)
-       // result: (Or8 i (Or8 <t> z x))
+       // match: (OrB (Less8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
+       // cond: uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)
+       // result: (Less8U (Const8 <x.Type> [int64( int8(c-d-1))]) (Sub8 <x.Type> x (Const8 <x.Type> [int64( int8(d+1))])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpOr8 {
+                       if v_0.Op != OpLess8U {
                                continue
                        }
-                       _ = v_0.Args[1]
+                       x := v_0.Args[1]
                        v_0_0 := v_0.Args[0]
-                       v_0_1 := v_0.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_0_0, v_0_1 = _i1+1, v_0_1, v_0_0 {
-                               i := v_0_0
-                               if i.Op != OpConst8 {
-                                       continue
-                               }
-                               t := i.Type
-                               z := v_0_1
-                               x := v_1
-                               if !(z.Op != OpConst8 && x.Op != OpConst8) {
-                                       continue
-                               }
-                               v.reset(OpOr8)
-                               v0 := b.NewValue0(v.Pos, OpOr8, t)
-                               v0.AddArg2(z, x)
-                               v.AddArg2(i, v0)
-                               return true
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq8U {
+                               continue
+                       }
+                       _ = v_1.Args[1]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
+                       v_1_1 := v_1.Args[1]
+                       if v_1_1.Op != OpConst8 {
+                               continue
+                       }
+                       d := v_1_1.AuxInt
+                       if !(uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)) {
+                               continue
                        }
+                       v.reset(OpLess8U)
+                       v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v0.AuxInt = int64(int8(c - d - 1))
+                       v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = int64(int8(d + 1))
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
-       // match: (Or8 (Const8 <t> [c]) (Or8 (Const8 <t> [d]) x))
-       // result: (Or8 (Const8 <t> [int64(int8(c|d))]) x)
+       // match: (OrB (Leq8U (Const8 [c]) x) (Leq8U x (Const8 [d])))
+       // cond: uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)
+       // result: (Leq8U (Const8 <x.Type> [int64( int8(c-d-1))]) (Sub8 <x.Type> x (Const8 <x.Type> [int64( int8(d+1))])))
        for {
                for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
-                       if v_0.Op != OpConst8 {
+                       if v_0.Op != OpLeq8U {
                                continue
                        }
-                       t := v_0.Type
-                       c := v_0.AuxInt
-                       if v_1.Op != OpOr8 {
+                       x := v_0.Args[1]
+                       v_0_0 := v_0.Args[0]
+                       if v_0_0.Op != OpConst8 {
+                               continue
+                       }
+                       c := v_0_0.AuxInt
+                       if v_1.Op != OpLeq8U {
                                continue
                        }
                        _ = v_1.Args[1]
-                       v_1_0 := v_1.Args[0]
+                       if x != v_1.Args[0] {
+                               continue
+                       }
                        v_1_1 := v_1.Args[1]
-                       for _i1 := 0; _i1 <= 1; _i1, v_1_0, v_1_1 = _i1+1, v_1_1, v_1_0 {
-                               if v_1_0.Op != OpConst8 || v_1_0.Type != t {
-                                       continue
-                               }
-                               d := v_1_0.AuxInt
-                               x := v_1_1
-                               v.reset(OpOr8)
-                               v0 := b.NewValue0(v.Pos, OpConst8, t)
-                               v0.AuxInt = int64(int8(c | d))
-                               v.AddArg2(v0, x)
-                               return true
+                       if v_1_1.Op != OpConst8 {
+                               continue
                        }
+                       d := v_1_1.AuxInt
+                       if !(uint8(c) >= uint8(d+1) && uint8(d+1) > uint8(d)) {
+                               continue
+                       }
+                       v.reset(OpLeq8U)
+                       v0 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v0.AuxInt = int64(int8(c - d - 1))
+                       v1 := b.NewValue0(v.Pos, OpSub8, x.Type)
+                       v2 := b.NewValue0(v.Pos, OpConst8, x.Type)
+                       v2.AuxInt = int64(int8(d + 1))
+                       v1.AddArg2(x, v2)
+                       v.AddArg2(v0, v1)
+                       return true
                }
                break
        }
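The OrB rewrites above all rely on the same range-shifting identity: under the stated condition (c at least d+1 as an unsigned value, and d+1 not wrapping), the unsigned disjunction x <= d || x > c holds exactly when x-(d+1) exceeds c-d-1 in a single unsigned compare, so two comparisons collapse into one subtraction plus one compare. A minimal standalone sketch of that identity, separate from the generated rules, with invented names and uint8 chosen only so it can be checked exhaustively:

package main

import "fmt"

// outsideRange2 is the two-compare form matched by the rules: x <= d || x > c.
func outsideRange2(x, c, d uint8) bool { return x <= d || x > c }

// outsideRange1 is the rewritten form: shift the window (d, c] down to
// [0, c-d-1] and do a single unsigned compare.
func outsideRange1(x, c, d uint8) bool { return x-(d+1) > c-d-1 }

func main() {
	const c, d = 50, 9 // reject x in (9, 50], accept x <= 9 or x > 50
	for x := 0; x < 256; x++ {
		if outsideRange2(uint8(x), c, d) != outsideRange1(uint8(x), c, d) {
			fmt.Println("mismatch at", x) // never reached while d+1 <= c
		}
	}
	fmt.Println("identity holds for every uint8 value")
}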
diff --git a/test/codegen/fuse.go b/test/codegen/fuse.go
new file mode 100644 (file)
index 0000000..79dd337
--- /dev/null
+++ b/test/codegen/fuse.go
@@ -0,0 +1,197 @@
+// asmcheck
+
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package codegen
+
+// Notes:
+// - these examples use channels to provide a source of
+//   unknown values that cannot be optimized away
+// - these examples use for loops to force branches
+//   backward (predicted taken)
+
+// ---------------------------------- //
+// signed integer range (conjunction) //
+// ---------------------------------- //
+
+func si1c(c <-chan int64) {
+       // amd64:"CMPQ\t.+, [$]256"
+       // s390x:"CLGIJ\t[$]12, R[0-9]+, [$]255"
+       for x := <-c; x >= 0 && x < 256; x = <-c {
+       }
+}
+
+func si2c(c <-chan int32) {
+       // amd64:"CMPL\t.+, [$]256"
+       // s390x:"CLIJ\t[$]12, R[0-9]+, [$]255"
+       for x := <-c; x >= 0 && x < 256; x = <-c {
+       }
+}
+
+func si3c(c <-chan int16) {
+       // amd64:"CMPW\t.+, [$]256"
+       // s390x:"CLIJ\t[$]12, R[0-9]+, [$]255"
+       for x := <-c; x >= 0 && x < 256; x = <-c {
+       }
+}
+
+func si4c(c <-chan int8) {
+       // amd64:"CMPB\t.+, [$]10"
+       // s390x:"CLIJ\t[$]4, R[0-9]+, [$]10"
+       for x := <-c; x >= 0 && x < 10; x = <-c {
+       }
+}
+
+func si5c(c <-chan int64) {
+       // amd64:"CMPQ\t.+, [$]251","ADDQ\t[$]-5,"
+       // s390x:"CLGIJ\t[$]4, R[0-9]+, [$]251","ADD\t[$]-5,"
+       for x := <-c; x < 256 && x > 4; x = <-c {
+       }
+}
+
+func si6c(c <-chan int32) {
+       // amd64:"CMPL\t.+, [$]255","DECL\t"
+       // s390x:"CLIJ\t[$]12, R[0-9]+, [$]255","ADDW\t[$]-1,"
+       for x := <-c; x > 0 && x <= 256; x = <-c {
+       }
+}
+
+func si7c(c <-chan int16) {
+       // amd64:"CMPW\t.+, [$]60","ADDL\t[$]10,"
+       // s390x:"CLIJ\t[$]12, R[0-9]+, [$]60","ADDW\t[$]10,"
+       for x := <-c; x >= -10 && x <= 50; x = <-c {
+       }
+}
+
+func si8c(c <-chan int8) {
+       // amd64:"CMPB\t.+, [$]126","ADDL\t[$]126,"
+       // s390x:"CLIJ\t[$]4, R[0-9]+, [$]126","ADDW\t[$]126,"
+       for x := <-c; x >= -126 && x < 0; x = <-c {
+       }
+}
+
+// ---------------------------------- //
+// signed integer range (disjunction) //
+// ---------------------------------- //
+
+func si1d(c <-chan int64) {
+       // amd64:"CMPQ\t.+, [$]256"
+       // s390x:"CLGIJ\t[$]2, R[0-9]+, [$]255"
+       for x := <-c; x < 0 || x >= 256; x = <-c {
+       }
+}
+
+func si2d(c <-chan int32) {
+       // amd64:"CMPL\t.+, [$]256"
+       // s390x:"CLIJ\t[$]2, R[0-9]+, [$]255"
+       for x := <-c; x < 0 || x >= 256; x = <-c {
+       }
+}
+
+func si3d(c <-chan int16) {
+       // amd64:"CMPW\t.+, [$]256"
+       // s390x:"CLIJ\t[$]2, R[0-9]+, [$]255"
+       for x := <-c; x < 0 || x >= 256; x = <-c {
+       }
+}
+
+func si4d(c <-chan int8) {
+       // amd64:"CMPB\t.+, [$]10"
+       // s390x:"CLIJ\t[$]10, R[0-9]+, [$]10"
+       for x := <-c; x < 0 || x >= 10; x = <-c {
+       }
+}
+
+func si5d(c <-chan int64) {
+       // amd64:"CMPQ\t.+, [$]251","ADDQ\t[$]-5,"
+       // s390x:"CLGIJ\t[$]10, R[0-9]+, [$]251","ADD\t[$]-5,"
+       for x := <-c; x >= 256 || x <= 4; x = <-c {
+       }
+}
+
+func si6d(c <-chan int32) {
+       // amd64:"CMPL\t.+, [$]255","DECL\t"
+       // s390x:"CLIJ\t[$]2, R[0-9]+, [$]255","ADDW\t[$]-1,"
+       for x := <-c; x <= 0 || x > 256; x = <-c {
+       }
+}
+
+func si7d(c <-chan int16) {
+       // amd64:"CMPW\t.+, [$]60","ADDL\t[$]10,"
+       // s390x:"CLIJ\t[$]2, R[0-9]+, [$]60","ADDW\t[$]10,"
+       for x := <-c; x < -10 || x > 50; x = <-c {
+       }
+}
+
+func si8d(c <-chan int8) {
+       // amd64:"CMPB\t.+, [$]126","ADDL\t[$]126,"
+       // s390x:"CLIJ\t[$]10, R[0-9]+, [$]126","ADDW\t[$]126,"
+       for x := <-c; x < -126 || x >= 0; x = <-c {
+       }
+}
+
+// ------------------------------------ //
+// unsigned integer range (conjunction) //
+// ------------------------------------ //
+
+func ui1c(c <-chan uint64) {
+       // amd64:"CMPQ\t.+, [$]251","ADDQ\t[$]-5,"
+       // s390x:"CLGIJ\t[$]4, R[0-9]+, [$]251","ADD\t[$]-5,"
+       for x := <-c; x < 256 && x > 4; x = <-c {
+       }
+}
+
+func ui2c(c <-chan uint32) {
+       // amd64:"CMPL\t.+, [$]255","DECL\t"
+       // s390x:"CLIJ\t[$]12, R[0-9]+, [$]255","ADDW\t[$]-1,"
+       for x := <-c; x > 0 && x <= 256; x = <-c {
+       }
+}
+
+func ui3c(c <-chan uint16) {
+       // amd64:"CMPW\t.+, [$]40","ADDL\t[$]-10,"
+       // s390x:"CLIJ\t[$]12, R[0-9]+, [$]40","ADDW\t[$]-10,"
+       for x := <-c; x >= 10 && x <= 50; x = <-c {
+       }
+}
+
+func ui4c(c <-chan uint8) {
+       // amd64:"CMPB\t.+, [$]2","ADDL\t[$]-126,"
+       // s390x:"CLIJ\t[$]4, R[0-9]+, [$]2","ADDW\t[$]-126,"
+       for x := <-c; x >= 126 && x < 128; x = <-c {
+       }
+}
+
+// ------------------------------------ //
+// unsigned integer range (disjunction) //
+// ------------------------------------ //
+
+func ui1d(c <-chan uint64) {
+       // amd64:"CMPQ\t.+, [$]251","ADDQ\t[$]-5,"
+       // s390x:"CLGIJ\t[$]10, R[0-9]+, [$]251","ADD\t[$]-5,"
+       for x := <-c; x >= 256 || x <= 4; x = <-c {
+       }
+}
+
+func ui2d(c <-chan uint32) {
+       // amd64:"CMPL\t.+, [$]254","ADDL\t[$]-2,"
+       // s390x:"CLIJ\t[$]2, R[0-9]+, [$]254","ADDW\t[$]-2,"
+       for x := <-c; x <= 1 || x > 256; x = <-c {
+       }
+}
+
+func ui3d(c <-chan uint16) {
+       // amd64:"CMPW\t.+, [$]40","ADDL\t[$]-10,"
+       // s390x:"CLIJ\t[$]2, R[0-9]+, [$]40","ADDW\t[$]-10,"
+       for x := <-c; x < 10 || x > 50; x = <-c {
+       }
+}
+
+func ui4d(c <-chan uint8) {
+       // amd64:"CMPB\t.+, [$]2","ADDL\t[$]-126,"
+       // s390x:"CLIJ\t[$]10, R[0-9]+, [$]2","ADDW\t[$]-126,"
+       for x := <-c; x < 126 || x >= 128; x = <-c {
+       }
+}
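The conjunction cases in this test rely on the mirror-image identity: for constants lo <= hi, the signed test lo <= x && x < hi is equivalent to the single unsigned test uint(x-lo) < uint(hi-lo), which is why the assembly checks above expect one ADD (shifting by -lo) followed by one unsigned compare, and no ADD at all when the lower bound is zero (si1c through si4c). A minimal standalone sketch of that identity, separate from the test file, with invented names and int8 chosen only so it can be checked exhaustively:

package main

import "fmt"

// inRange2 is the shape the test functions are written in: lo <= x && x < hi.
func inRange2(x, lo, hi int8) bool { return x >= lo && x < hi }

// inRange1 is the single-compare form: shift the window [lo, hi) down to
// [0, hi-lo) and compare once, unsigned.
func inRange1(x, lo, hi int8) bool { return uint8(x-lo) < uint8(hi-lo) }

func main() {
	const lo, hi = -10, 51 // same bounds as si7c: -10 <= x <= 50
	for x := -128; x < 128; x++ {
		if inRange2(int8(x), lo, hi) != inRange1(int8(x), lo, hi) {
			fmt.Println("mismatch at", x) // never reached while lo <= hi
		}
	}
	fmt.Println("identity holds for every int8 value")
}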