Cypherpunks repositories - gostls13.git/commitdiff
cmd/compile: add wasm architecture
author: Richard Musiol <mail@richard-musiol.de>
Wed, 28 Mar 2018 22:55:53 +0000 (00:55 +0200)
committer: Brad Fitzpatrick <bradfitz@golang.org>
Fri, 4 May 2018 17:56:12 +0000 (17:56 +0000)
This commit adds the wasm architecture to the compile command.
A later commit will contain the corresponding linker changes.

Design doc: https://docs.google.com/document/d/131vjr4DH6JFnb-blm_uRdaC0_Nv3OUwjEY5qVCxCup4

The following files are generated:
- src/cmd/compile/internal/ssa/opGen.go
- src/cmd/compile/internal/ssa/rewriteWasm.go
- src/cmd/internal/obj/wasm/anames.go

Updates #18892

Change-Id: Ifb4a96a3e427aac2362a1c97967d5667450fba3b
Reviewed-on: https://go-review.googlesource.com/103295
Run-TryBot: Brad Fitzpatrick <bradfitz@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Cherry Zhang <cherryyz@google.com>
25 files changed:
src/cmd/asm/internal/arch/arch.go
src/cmd/asm/internal/asm/asm.go
src/cmd/compile/internal/gc/go.go
src/cmd/compile/internal/gc/main.go
src/cmd/compile/internal/gc/ssa.go
src/cmd/compile/internal/ssa/config.go
src/cmd/compile/internal/ssa/gen/WASM.rules [new file with mode: 0644]
src/cmd/compile/internal/ssa/gen/WASMOps.go [new file with mode: 0644]
src/cmd/compile/internal/ssa/opGen.go
src/cmd/compile/internal/ssa/rewriteWasm.go [new file with mode: 0644]
src/cmd/compile/internal/ssa/schedule.go
src/cmd/compile/internal/ssa/tighten.go
src/cmd/compile/internal/wasm/ssa.go [new file with mode: 0644]
src/cmd/compile/main.go
src/cmd/dist/buildtool.go
src/cmd/go/internal/work/gc.go
src/cmd/internal/obj/link.go
src/cmd/internal/obj/stringer.go
src/cmd/internal/obj/util.go
src/cmd/internal/obj/wasm/a.out.go [new file with mode: 0644]
src/cmd/internal/obj/wasm/anames.go [new file with mode: 0644]
src/cmd/internal/obj/wasm/wasmobj.go [new file with mode: 0644]
src/cmd/internal/objabi/head.go
src/cmd/internal/objabi/reloctype.go
src/cmd/internal/sys/arch.go

index 357ec757bc16136916a1b4d179be6329b2422662..0e4d63744b3c383361a6e4affecf441dc0fad803 100644 (file)
@@ -11,6 +11,7 @@ import (
        "cmd/internal/obj/mips"
        "cmd/internal/obj/ppc64"
        "cmd/internal/obj/s390x"
+       "cmd/internal/obj/wasm"
        "cmd/internal/obj/x86"
        "fmt"
        "strings"
@@ -87,6 +88,8 @@ func Set(GOARCH string) *Arch {
                a := archS390x()
                a.LinkArch = &s390x.Links390x
                return a
+       case "wasm":
+               return archWasm()
        }
        return nil
 }
@@ -95,6 +98,10 @@ func jumpX86(word string) bool {
        return word[0] == 'J' || word == "CALL" || strings.HasPrefix(word, "LOOP") || word == "XBEGIN"
 }
 
+func jumpWasm(word string) bool {
+       return word == "JMP" || word == "CALL" || word == "Call" || word == "Br" || word == "BrIf"
+}
+
 func archX86(linkArch *obj.LinkArch) *Arch {
        register := make(map[string]int16)
        // Create maps for easy lookup of instruction names etc.
@@ -577,3 +584,24 @@ func archS390x() *Arch {
                IsJump:         jumpS390x,
        }
 }
+
+func archWasm() *Arch {
+       instructions := make(map[string]obj.As)
+       for i, s := range obj.Anames {
+               instructions[s] = obj.As(i)
+       }
+       for i, s := range wasm.Anames {
+               if obj.As(i) >= obj.A_ARCHSPECIFIC {
+                       instructions[s] = obj.As(i) + obj.ABaseWasm
+               }
+       }
+
+       return &Arch{
+               LinkArch:       &wasm.Linkwasm,
+               Instructions:   instructions,
+               Register:       wasm.Register,
+               RegisterPrefix: nil,
+               RegisterNumber: nilRegisterNumber,
+               IsJump:         jumpWasm,
+       }
+}
index 4c256f62b201c8df27ba063f5ad406283662fd27..17572e731df102fdebc5b06f96ecfd0597812f08 100644 (file)
@@ -343,6 +343,13 @@ func (p *Parser) asmJump(op obj.As, cond string, a []obj.Addr) {
                As:   op,
        }
        switch len(a) {
+       case 0:
+               if p.arch.Family == sys.Wasm {
+                       target = &obj.Addr{Type: obj.TYPE_NONE}
+                       break
+               }
+               p.errorf("wrong number of arguments to %s instruction", op)
+               return
        case 1:
                target = &a[0]
        case 2:
@@ -445,6 +452,8 @@ func (p *Parser) asmJump(op obj.As, cond string, a []obj.Addr) {
        case target.Type == obj.TYPE_CONST:
                // JMP $4
                prog.To = a[0]
+       case target.Type == obj.TYPE_NONE:
+               // JMP
        default:
                p.errorf("cannot assemble jump %+v", target)
                return
index 1cc542a28de470b05f58736843072889faa62bbb..a471a909d65b1830db415c94bd0d2dd21c9ee664 100644 (file)
@@ -311,4 +311,12 @@ var (
        // GO386=387
        ControlWord64trunc,
        ControlWord32 *obj.LSym
+
+       // Wasm
+       WasmMove,
+       WasmZero,
+       WasmDiv,
+       WasmTruncS,
+       WasmTruncU,
+       SigPanic *obj.LSym
 )
index 203903d10e94a4ad58b99fc778c12a25c20dc0e6..74590ccc392644934e52f70fdcef06c3e06b5866 100644 (file)
@@ -180,6 +180,7 @@ func Main(archInit func(*Arch)) {
        gopkg = types.NewPkg("go", "")
 
        Nacl = objabi.GOOS == "nacl"
+       Wasm := objabi.GOARCH == "wasm"
 
        flag.BoolVar(&compiling_runtime, "+", false, "compiling runtime")
        flag.BoolVar(&compiling_std, "std", false, "compiling standard library")
@@ -200,7 +201,7 @@ func Main(archInit func(*Arch)) {
        flag.IntVar(&nBackendWorkers, "c", 1, "concurrency during compilation, 1 means no concurrency")
        flag.BoolVar(&pure_go, "complete", false, "compiling complete package (no C or assembly)")
        flag.StringVar(&debugstr, "d", "", "print debug information about items in `list`; try -d help")
-       flag.BoolVar(&flagDWARF, "dwarf", true, "generate DWARF symbols")
+       flag.BoolVar(&flagDWARF, "dwarf", !Wasm, "generate DWARF symbols")
        flag.BoolVar(&Ctxt.Flag_locationlists, "dwarflocationlists", true, "add location lists to DWARF in optimized mode")
        flag.IntVar(&genDwarfInline, "gendwarfinl", 2, "generate DWARF inline info records")
        objabi.Flagcount("e", "no limit on number of errors reported", &Debug['e'])
@@ -265,6 +266,7 @@ func Main(archInit func(*Arch)) {
        } else {
                // turn off inline generation if no dwarf at all
                genDwarfInline = 0
+               Ctxt.Flag_locationlists = false
        }
 
        if flag.NArg() < 1 && debugstr != "help" && debugstr != "ssa/help" {
index 62dafcb4dcaf03929bb62772d344d916863fb951..aa324984dc62557c556c863ef8cbec79c075313b 100644 (file)
@@ -87,6 +87,14 @@ func initssaconfig() {
        // GO386=387 runtime functions
        ControlWord64trunc = sysfunc("controlWord64trunc")
        ControlWord32 = sysfunc("controlWord32")
+
+       // Wasm
+       WasmMove = sysfunc("wasmMove")
+       WasmZero = sysfunc("wasmZero")
+       WasmDiv = sysfunc("wasmDiv")
+       WasmTruncS = sysfunc("wasmTruncS")
+       WasmTruncU = sysfunc("wasmTruncU")
+       SigPanic = sysfunc("sigpanic")
 }
 
 // buildssa builds an SSA function for fn.
@@ -1794,7 +1802,7 @@ func (s *state) expr(n *Node) *ssa.Value {
                                        conv = conv1
                                }
                        }
-                       if thearch.LinkArch.Family == sys.ARM64 || s.softFloat {
+                       if thearch.LinkArch.Family == sys.ARM64 || thearch.LinkArch.Family == sys.Wasm || s.softFloat {
                                if conv1, ok1 := uint64fpConvOpToSSA[twoTypes{s.concreteEtype(ft), s.concreteEtype(tt)}]; ok1 {
                                        conv = conv1
                                }
@@ -5222,7 +5230,7 @@ func (s *SSAGenState) Call(v *ssa.Value) *obj.Prog {
        } else {
                // TODO(mdempsky): Can these differences be eliminated?
                switch thearch.LinkArch.Family {
-               case sys.AMD64, sys.I386, sys.PPC64, sys.S390X:
+               case sys.AMD64, sys.I386, sys.PPC64, sys.S390X, sys.Wasm:
                        p.To.Type = obj.TYPE_REG
                case sys.ARM, sys.ARM64, sys.MIPS, sys.MIPS64:
                        p.To.Type = obj.TYPE_MEM
index 6ae1c5a87076b1b7de7863239a65d8517a8cf0fa..3bf62294670b0f202a0c13bfd6c49ee4aa3e118f 100644 (file)
@@ -311,6 +311,20 @@ func NewConfig(arch string, types Types, ctxt *obj.Link, optimize bool) *Config
                c.LinkReg = linkRegMIPS
                c.hasGReg = true
                c.noDuffDevice = true
+       case "wasm":
+               c.PtrSize = 8
+               c.RegSize = 8
+               c.lowerBlock = rewriteBlockWasm
+               c.lowerValue = rewriteValueWasm
+               c.registers = registersWasm[:]
+               c.gpRegMask = gpRegMaskWasm
+               c.fpRegMask = fpRegMaskWasm
+               c.FPReg = framepointerRegWasm
+               c.LinkReg = linkRegWasm
+               c.hasGReg = true
+               c.noDuffDevice = true
+               c.useAvg = false
+               c.useHmul = false
        default:
                ctxt.Diag("arch %s not implemented", arch)
        }
diff --git a/src/cmd/compile/internal/ssa/gen/WASM.rules b/src/cmd/compile/internal/ssa/gen/WASM.rules
new file mode 100644 (file)
index 0000000..cede359
--- /dev/null
@@ -0,0 +1,391 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Lowering arithmetic
+(Add(64|32|16|8|Ptr) x y) -> (I64Add x y)
+(Add(64|32)F x y) -> (F64Add x y)
+
+(Sub(64|32|16|8|Ptr) x y) -> (I64Sub x y)
+(Sub(64|32)F x y) -> (F64Sub x y)
+
+(Mul(64|32|16|8) x y) -> (I64Mul x y)
+(Mul(64|32)F x y) -> (F64Mul x y)
+
+(Div64  x y) -> (I64DivS x y)
+(Div64u x y) -> (I64DivU x y)
+(Div32  x y) -> (I64DivS (SignExt32to64 x) (SignExt32to64 y))
+(Div32u x y) -> (I64DivU (ZeroExt32to64 x) (ZeroExt32to64 y))
+(Div16  x y) -> (I64DivS (SignExt16to64 x) (SignExt16to64 y))
+(Div16u x y) -> (I64DivU (ZeroExt16to64 x) (ZeroExt16to64 y))
+(Div8   x y) -> (I64DivS (SignExt8to64 x) (SignExt8to64 y))
+(Div8u  x y) -> (I64DivU (ZeroExt8to64 x) (ZeroExt8to64 y))
+(Div(64|32)F x y) -> (F64Div x y)
+
+(Mod64  x y) -> (I64RemS x y)
+(Mod64u x y) -> (I64RemU x y)
+(Mod32  x y) -> (I64RemS (SignExt32to64 x) (SignExt32to64 y))
+(Mod32u x y) -> (I64RemU (ZeroExt32to64 x) (ZeroExt32to64 y))
+(Mod16  x y) -> (I64RemS (SignExt16to64 x) (SignExt16to64 y))
+(Mod16u x y) -> (I64RemU (ZeroExt16to64 x) (ZeroExt16to64 y))
+(Mod8   x y) -> (I64RemS (SignExt8to64  x) (SignExt8to64  y))
+(Mod8u  x y) -> (I64RemU (ZeroExt8to64  x) (ZeroExt8to64  y))
+
+(And(64|32|16|8|B) x y) -> (I64And x y)
+
+(Or(64|32|16|8|B) x y) -> (I64Or x y)
+
+(Xor(64|32|16|8) x y) -> (I64Xor x y)
+
+(Neg(64|32|16|8) x) -> (I64Sub (I64Const [0]) x)
+(Neg32F x) -> (F64Neg x)
+(Neg64F x) -> (F64Neg x)
+
+(Com(64|32|16|8) x) -> (I64Xor x (I64Const [-1]))
+
+(Not x) -> (I64Eqz x)
+
+// Lowering pointer arithmetic
+(OffPtr [0] ptr) -> ptr
+(OffPtr [off] ptr) && off > 0 -> (I64AddConst [off] ptr)
+
+// Lowering extension
+(SignExt32to64        x) -> (I64ShrS (I64Shl x (I64Const [32])) (I64Const [32]))
+(SignExt16to(64|32)   x) -> (I64ShrS (I64Shl x (I64Const [48])) (I64Const [48]))
+(SignExt8to(64|32|16) x) -> (I64ShrS (I64Shl x (I64Const [56])) (I64Const [56]))
+
+(ZeroExt32to64        x) -> (I64ShrU (I64Shl x (I64Const [32])) (I64Const [32]))
+(ZeroExt16to(64|32)   x) -> (I64ShrU (I64Shl x (I64Const [48])) (I64Const [48]))
+(ZeroExt8to(64|32|16) x) -> (I64ShrU (I64Shl x (I64Const [56])) (I64Const [56]))
+
+(Slicemask x) -> (I64ShrS (I64Sub (I64Const [0]) x) (I64Const [63]))
+
+// Lowering truncation
+// Because we ignore the high parts, truncates are just copies.
+(Trunc64to(32|16|8) x) -> x
+(Trunc32to(16|8)    x) -> x
+(Trunc16to8         x) -> x
+
+// Lowering float <-> int
+(Cvt32to32F x) -> (LoweredRound32F (F64ConvertSI64 (SignExt32to64 x)))
+(Cvt32to64F x) -> (F64ConvertSI64 (SignExt32to64 x))
+(Cvt64to32F x) -> (LoweredRound32F (F64ConvertSI64 x))
+(Cvt64to64F x) -> (F64ConvertSI64 x)
+(Cvt32Uto32F x) -> (LoweredRound32F (F64ConvertUI64 (ZeroExt32to64 x)))
+(Cvt32Uto64F x) -> (F64ConvertUI64 (ZeroExt32to64 x))
+(Cvt64Uto32F x) -> (LoweredRound32F (F64ConvertUI64 x))
+(Cvt64Uto64F x) -> (F64ConvertUI64 x)
+
+(Cvt32Fto32 x) -> (I64TruncSF64 x)
+(Cvt32Fto64 x) -> (I64TruncSF64 x)
+(Cvt64Fto32 x) -> (I64TruncSF64 x)
+(Cvt64Fto64 x) -> (I64TruncSF64 x)
+(Cvt32Fto32U x) -> (I64TruncUF64 x)
+(Cvt32Fto64U x) -> (I64TruncUF64 x)
+(Cvt64Fto32U x) -> (I64TruncUF64 x)
+(Cvt64Fto64U x) -> (I64TruncUF64 x)
+
+(Cvt32Fto64F x) -> x
+(Cvt64Fto32F x) -> (LoweredRound32F x)
+
+(Round32F x) -> (LoweredRound32F x)
+(Round64F x) -> x
+
+// Lowering shifts
+// Unsigned shifts need to return 0 if shift amount is >= width of shifted value.
+
+(Lsh64x64 x y) -> (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64])))
+(Lsh64x32 x y) -> (Lsh64x64 x (ZeroExt32to64 y))
+(Lsh64x16 x y) -> (Lsh64x64 x (ZeroExt16to64 y))
+(Lsh64x8  x y) -> (Lsh64x64 x (ZeroExt8to64  y))
+
+(Lsh32x64 x y) -> (Lsh64x64 x y)
+(Lsh32x32 x y) -> (Lsh64x64 x (ZeroExt32to64 y))
+(Lsh32x16 x y) -> (Lsh64x64 x (ZeroExt16to64 y))
+(Lsh32x8  x y) -> (Lsh64x64 x (ZeroExt8to64  y))
+
+(Lsh16x64 x y) -> (Lsh64x64 x y)
+(Lsh16x32 x y) -> (Lsh64x64 x (ZeroExt32to64 y))
+(Lsh16x16 x y) -> (Lsh64x64 x (ZeroExt16to64 y))
+(Lsh16x8  x y) -> (Lsh64x64 x (ZeroExt8to64  y))
+
+(Lsh8x64  x y) -> (Lsh64x64 x y)
+(Lsh8x32  x y) -> (Lsh64x64 x (ZeroExt32to64 y))
+(Lsh8x16  x y) -> (Lsh64x64 x (ZeroExt16to64 y))
+(Lsh8x8   x y) -> (Lsh64x64 x (ZeroExt8to64  y))
+
+(Rsh64Ux64 x y) -> (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64])))
+(Rsh64Ux32 x y) -> (Rsh64Ux64 x (ZeroExt32to64 y))
+(Rsh64Ux16 x y) -> (Rsh64Ux64 x (ZeroExt16to64 y))
+(Rsh64Ux8  x y) -> (Rsh64Ux64 x (ZeroExt8to64  y))
+
+(Rsh32Ux64 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) y)
+(Rsh32Ux32 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt32to64 y))
+(Rsh32Ux16 x y) -> (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt16to64 y))
+(Rsh32Ux8  x y) -> (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt8to64  y))
+
+(Rsh16Ux64 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) y)
+(Rsh16Ux32 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt32to64 y))
+(Rsh16Ux16 x y) -> (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt16to64 y))
+(Rsh16Ux8  x y) -> (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt8to64  y))
+
+(Rsh8Ux64  x y) -> (Rsh64Ux64 (ZeroExt8to64 x) y)
+(Rsh8Ux32  x y) -> (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt32to64 y))
+(Rsh8Ux16  x y) -> (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt16to64 y))
+(Rsh8Ux8   x y) -> (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt8to64  y))
+
+// Signed right shift needs to return 0/-1 if shift amount is >= width of shifted value.
+// We implement this by setting the shift value to (width - 1) if the shift value is >= width.
+
+(Rsh64x64 x y) -> (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64]))))
+(Rsh64x32 x y) -> (Rsh64x64 x (ZeroExt32to64 y))
+(Rsh64x16 x y) -> (Rsh64x64 x (ZeroExt16to64 y))
+(Rsh64x8  x y) -> (Rsh64x64 x (ZeroExt8to64  y))
+
+(Rsh32x64 x y) -> (Rsh64x64 (SignExt32to64 x) y)
+(Rsh32x32 x y) -> (Rsh64x64 (SignExt32to64 x) (ZeroExt32to64 y))
+(Rsh32x16 x y) -> (Rsh64x64 (SignExt32to64 x) (ZeroExt16to64 y))
+(Rsh32x8  x y) -> (Rsh64x64 (SignExt32to64 x) (ZeroExt8to64  y))
+
+(Rsh16x64 x y) -> (Rsh64x64 (SignExt16to64 x) y)
+(Rsh16x32 x y) -> (Rsh64x64 (SignExt16to64 x) (ZeroExt32to64 y))
+(Rsh16x16 x y) -> (Rsh64x64 (SignExt16to64 x) (ZeroExt16to64 y))
+(Rsh16x8  x y) -> (Rsh64x64 (SignExt16to64 x) (ZeroExt8to64  y))
+
+(Rsh8x64 x y)  -> (Rsh64x64 (SignExt8to64 x) y)
+(Rsh8x32 x y)  -> (Rsh64x64 (SignExt8to64 x) (ZeroExt32to64 y))
+(Rsh8x16 x y)  -> (Rsh64x64 (SignExt8to64 x) (ZeroExt16to64 y))
+(Rsh8x8  x y)  -> (Rsh64x64 (SignExt8to64 x) (ZeroExt8to64  y))
+
+// Lowering comparisons
+(Less64  x y) -> (I64LtS x y)
+(Less32  x y) -> (I64LtS (SignExt32to64 x) (SignExt32to64 y))
+(Less16  x y) -> (I64LtS (SignExt16to64 x) (SignExt16to64 y))
+(Less8   x y) -> (I64LtS (SignExt8to64  x) (SignExt8to64  y))
+(Less64U x y) -> (I64LtU x y)
+(Less32U x y) -> (I64LtU (ZeroExt32to64 x) (ZeroExt32to64 y))
+(Less16U x y) -> (I64LtU (ZeroExt16to64 x) (ZeroExt16to64 y))
+(Less8U  x y) -> (I64LtU (ZeroExt8to64  x) (ZeroExt8to64  y))
+(Less64F x y) -> (F64Lt x y)
+(Less32F x y) -> (F64Lt (LoweredRound32F x) (LoweredRound32F y))
+
+(Leq64  x y) -> (I64LeS x y)
+(Leq32  x y) -> (I64LeS (SignExt32to64 x) (SignExt32to64 y))
+(Leq16  x y) -> (I64LeS (SignExt16to64 x) (SignExt16to64 y))
+(Leq8   x y) -> (I64LeS (SignExt8to64  x) (SignExt8to64  y))
+(Leq64U x y) -> (I64LeU x y)
+(Leq32U x y) -> (I64LeU (ZeroExt32to64 x) (ZeroExt32to64 y))
+(Leq16U x y) -> (I64LeU (ZeroExt16to64 x) (ZeroExt16to64 y))
+(Leq8U  x y) -> (I64LeU (ZeroExt8to64  x) (ZeroExt8to64  y))
+(Leq64F x y) -> (F64Le x y)
+(Leq32F x y) -> (F64Le (LoweredRound32F x) (LoweredRound32F y))
+
+(Greater64  x y) -> (I64GtS x y)
+(Greater32  x y) -> (I64GtS (SignExt32to64 x) (SignExt32to64 y))
+(Greater16  x y) -> (I64GtS (SignExt16to64 x) (SignExt16to64 y))
+(Greater8   x y) -> (I64GtS (SignExt8to64  x) (SignExt8to64  y))
+(Greater64U x y) -> (I64GtU x y)
+(Greater32U x y) -> (I64GtU (ZeroExt32to64 x) (ZeroExt32to64 y))
+(Greater16U x y) -> (I64GtU (ZeroExt16to64 x) (ZeroExt16to64 y))
+(Greater8U  x y) -> (I64GtU (ZeroExt8to64  x) (ZeroExt8to64  y))
+(Greater64F x y) -> (F64Gt x y)
+(Greater32F x y) -> (F64Gt (LoweredRound32F x) (LoweredRound32F y))
+
+(Geq64  x y) -> (I64GeS x y)
+(Geq32  x y) -> (I64GeS (SignExt32to64 x) (SignExt32to64 y))
+(Geq16  x y) -> (I64GeS (SignExt16to64 x) (SignExt16to64 y))
+(Geq8   x y) -> (I64GeS (SignExt8to64  x) (SignExt8to64  y))
+(Geq64U x y) -> (I64GeU x y)
+(Geq32U x y) -> (I64GeU (ZeroExt32to64 x) (ZeroExt32to64 y))
+(Geq16U x y) -> (I64GeU (ZeroExt16to64 x) (ZeroExt16to64 y))
+(Geq8U  x y) -> (I64GeU (ZeroExt8to64  x) (ZeroExt8to64  y))
+(Geq64F x y) -> (F64Ge x y)
+(Geq32F x y) -> (F64Ge (LoweredRound32F x) (LoweredRound32F y))
+
+(Eq64  x y) -> (I64Eq x y)
+(Eq32  x y) -> (I64Eq (ZeroExt32to64 x) (ZeroExt32to64 y))
+(Eq16  x y) -> (I64Eq (ZeroExt16to64 x) (ZeroExt16to64 y))
+(Eq8   x y) -> (I64Eq (ZeroExt8to64  x) (ZeroExt8to64  y))
+(EqB   x y) -> (I64Eq x y)
+(EqPtr x y) -> (I64Eq x y)
+(Eq64F x y) -> (F64Eq x y)
+(Eq32F x y) -> (F64Eq (LoweredRound32F x) (LoweredRound32F y))
+
+(Neq64  x y) -> (I64Ne x y)
+(Neq32  x y) -> (I64Ne (ZeroExt32to64 x) (ZeroExt32to64 y))
+(Neq16  x y) -> (I64Ne (ZeroExt16to64 x) (ZeroExt16to64 y))
+(Neq8   x y) -> (I64Ne (ZeroExt8to64  x) (ZeroExt8to64  y))
+(NeqB   x y) -> (I64Ne x y)
+(NeqPtr x y) -> (I64Ne x y)
+(Neq64F x y) -> (F64Ne x y)
+(Neq32F x y) -> (F64Ne (LoweredRound32F x) (LoweredRound32F y))
+
+// Lowering loads
+(Load <t> ptr mem) && is32BitFloat(t) -> (F32Load ptr mem)
+(Load <t> ptr mem) && is64BitFloat(t) -> (F64Load ptr mem)
+(Load <t> ptr mem) && t.Size() == 8 -> (I64Load ptr mem)
+(Load <t> ptr mem) && t.Size() == 4 && !t.IsSigned() -> (I64Load32U ptr mem)
+(Load <t> ptr mem) && t.Size() == 4 &&  t.IsSigned() -> (I64Load32S ptr mem)
+(Load <t> ptr mem) && t.Size() == 2 && !t.IsSigned() -> (I64Load16U ptr mem)
+(Load <t> ptr mem) && t.Size() == 2 &&  t.IsSigned() -> (I64Load16S ptr mem)
+(Load <t> ptr mem) && t.Size() == 1 && !t.IsSigned() -> (I64Load8U ptr mem)
+(Load <t> ptr mem) && t.Size() == 1 &&  t.IsSigned() -> (I64Load8S ptr mem)
+
+// Lowering stores
+(Store {t} ptr val mem) && is64BitFloat(t.(*types.Type)) -> (F64Store ptr val mem)
+(Store {t} ptr val mem) && is32BitFloat(t.(*types.Type)) -> (F32Store ptr val mem)
+(Store {t} ptr val mem) && t.(*types.Type).Size() == 8 -> (I64Store ptr val mem)
+(Store {t} ptr val mem) && t.(*types.Type).Size() == 4 -> (I64Store32 ptr val mem)
+(Store {t} ptr val mem) && t.(*types.Type).Size() == 2 -> (I64Store16 ptr val mem)
+(Store {t} ptr val mem) && t.(*types.Type).Size() == 1 -> (I64Store8 ptr val mem)
+
+// Lowering moves
+(Move [0] _ _ mem) -> mem
+(Move [1] dst src mem) -> (I64Store8 dst (I64Load8U src mem) mem)
+(Move [2] dst src mem) -> (I64Store16 dst (I64Load16U src mem) mem)
+(Move [4] dst src mem) -> (I64Store32 dst (I64Load32U src mem) mem)
+(Move [8] dst src mem) -> (I64Store dst (I64Load src mem) mem)
+(Move [16] dst src mem) ->
+       (I64Store [8] dst (I64Load [8] src mem)
+               (I64Store dst (I64Load src mem) mem))
+(Move [3] dst src mem) ->
+       (I64Store8 [2] dst (I64Load8U [2] src mem)
+               (I64Store16 dst (I64Load16U src mem) mem))
+(Move [5] dst src mem) ->
+       (I64Store8 [4] dst (I64Load8U [4] src mem)
+               (I64Store32 dst (I64Load32U src mem) mem))
+(Move [6] dst src mem) ->
+       (I64Store16 [4] dst (I64Load16U [4] src mem)
+               (I64Store32 dst (I64Load32U src mem) mem))
+(Move [7] dst src mem) ->
+       (I64Store32 [3] dst (I64Load32U [3] src mem)
+               (I64Store32 dst (I64Load32U src mem) mem))
+(Move [s] dst src mem) && s > 8 && s < 16 ->
+       (I64Store [s-8] dst (I64Load [s-8] src mem)
+               (I64Store dst (I64Load src mem) mem))
+
+// Adjust moves to be a multiple of 16 bytes.
+(Move [s] dst src mem)
+       && s > 16 && s%16 != 0 && s%16 <= 8 ->
+       (Move [s-s%16]
+               (OffPtr <dst.Type> dst [s%16])
+               (OffPtr <src.Type> src [s%16])
+               (I64Store dst (I64Load src mem) mem))
+(Move [s] dst src mem)
+       && s > 16 && s%16 != 0 && s%16 > 8 ->
+       (Move [s-s%16]
+               (OffPtr <dst.Type> dst [s%16])
+               (OffPtr <src.Type> src [s%16])
+               (I64Store [8] dst (I64Load [8] src mem)
+                       (I64Store dst (I64Load src mem) mem)))
+
+// Large copying uses helper.
+(Move [s] dst src mem) && s%8 == 0 ->
+       (LoweredMove [s/8] dst src mem)
+
+// Lowering Zero instructions
+(Zero [0] _ mem) -> mem
+(Zero [1] destptr mem) -> (I64Store8 destptr (I64Const [0]) mem)
+(Zero [2] destptr mem) -> (I64Store16 destptr (I64Const [0]) mem)
+(Zero [4] destptr mem) -> (I64Store32 destptr (I64Const [0]) mem)
+(Zero [8] destptr mem) -> (I64Store destptr (I64Const [0]) mem)
+
+(Zero [3] destptr mem) ->
+       (I64Store8 [2] destptr (I64Const [0])
+               (I64Store16 destptr (I64Const [0]) mem))
+(Zero [5] destptr mem) ->
+       (I64Store8 [4] destptr (I64Const [0])
+               (I64Store32 destptr (I64Const [0]) mem))
+(Zero [6] destptr mem) ->
+       (I64Store16 [4] destptr (I64Const [0])
+               (I64Store32 destptr (I64Const [0]) mem))
+(Zero [7] destptr mem) ->
+       (I64Store32 [3] destptr (I64Const [0])
+               (I64Store32 destptr (I64Const [0]) mem))
+
+// Strip off any fractional word zeroing.
+(Zero [s] destptr mem) && s%8 != 0 && s > 8 ->
+       (Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8])
+               (I64Store destptr (I64Const [0]) mem))
+
+// Zero small numbers of words directly.
+(Zero [16] destptr mem) ->
+       (I64Store [8] destptr (I64Const [0])
+               (I64Store destptr (I64Const [0]) mem))
+(Zero [24] destptr mem) ->
+       (I64Store [16] destptr (I64Const [0])
+               (I64Store [8] destptr (I64Const [0])
+                       (I64Store destptr (I64Const [0]) mem)))
+(Zero [32] destptr mem) ->
+       (I64Store [24] destptr (I64Const [0])
+               (I64Store [16] destptr (I64Const [0])
+                       (I64Store [8] destptr (I64Const [0])
+                               (I64Store destptr (I64Const [0]) mem))))
+
+// Large zeroing uses helper.
+(Zero [s] destptr mem) && s%8 == 0 && s > 32 ->
+       (LoweredZero [s/8] destptr mem)
+
+// Lowering constants
+(Const(64|32|16|8) [val]) -> (I64Const [val])
+(Const(64|32)F [val]) -> (F64Const [val])
+(ConstNil) -> (I64Const [0])
+(ConstBool [b]) -> (I64Const [b])
+
+// Lowering calls
+(StaticCall [argwid] {target} mem) -> (LoweredStaticCall [argwid] {target} mem)
+(ClosureCall [argwid] entry closure mem) -> (LoweredClosureCall [argwid] entry closure mem)
+(InterCall [argwid] entry mem) -> (LoweredInterCall [argwid] entry mem)
+
+// Miscellaneous
+(Convert <t> x mem) -> (LoweredConvert <t> x mem)
+(IsNonNil p) -> (I64Eqz (I64Eqz p))
+(IsInBounds idx len) -> (I64LtU idx len)
+(IsSliceInBounds idx len) -> (I64LeU idx len)
+(NilCheck ptr mem) -> (LoweredNilCheck ptr mem)
+(GetClosurePtr) -> (LoweredGetClosurePtr)
+(GetCallerPC) -> (LoweredGetCallerPC)
+(GetCallerSP) -> (LoweredGetCallerSP)
+(Addr {sym} base) -> (LoweredAddr {sym} base)
+
+// Write barrier.
+(WB {fn} destptr srcptr mem) -> (LoweredWB {fn} destptr srcptr mem)
+
+// --- Optimizations ---
+(I64Add (I64Const [x]) (I64Const [y])) -> (I64Const [x + y])
+(I64Mul (I64Const [x]) (I64Const [y])) -> (I64Const [x * y])
+(I64And (I64Const [x]) (I64Const [y])) -> (I64Const [x & y])
+(I64Or  (I64Const [x]) (I64Const [y])) -> (I64Const [x | y])
+(I64Xor (I64Const [x]) (I64Const [y])) -> (I64Const [x ^ y])
+(F64Add (F64Const [x]) (F64Const [y])) -> (F64Const [f2i(i2f(x) + i2f(y))])
+(F64Mul (F64Const [x]) (F64Const [y])) -> (F64Const [f2i(i2f(x) * i2f(y))])
+(I64Eq  (I64Const [x]) (I64Const [y])) && x == y -> (I64Const [1])
+(I64Eq  (I64Const [x]) (I64Const [y])) && x != y -> (I64Const [0])
+(I64Ne  (I64Const [x]) (I64Const [y])) && x == y -> (I64Const [0])
+(I64Ne  (I64Const [x]) (I64Const [y])) && x != y -> (I64Const [1])
+
+(I64Add (I64Const [x]) y) -> (I64Add y (I64Const [x]))
+(I64Mul (I64Const [x]) y) -> (I64Mul y (I64Const [x]))
+(I64And (I64Const [x]) y) -> (I64And y (I64Const [x]))
+(I64Or  (I64Const [x]) y) -> (I64Or  y (I64Const [x]))
+(I64Xor (I64Const [x]) y) -> (I64Xor y (I64Const [x]))
+(F64Add (F64Const [x]) y) -> (F64Add y (F64Const [x]))
+(F64Mul (F64Const [x]) y) -> (F64Mul y (F64Const [x]))
+(I64Eq  (I64Const [x]) y) -> (I64Eq y  (I64Const [x]))
+(I64Ne  (I64Const [x]) y) -> (I64Ne y  (I64Const [x]))
+
+(I64Add x (I64Const [y])) -> (I64AddConst [y] x)
+(I64Eqz (I64Eqz (I64Eqz x))) -> (I64Eqz x)
+
+(I64Store8 [off] (I64AddConst [off2] ptr) val mem) && off+off2 >= 0 -> (I64Store8 [off+off2] ptr val mem)
+(I64Store16 [off] (I64AddConst [off2] ptr) val mem) && off+off2 >= 0 -> (I64Store16 [off+off2] ptr val mem)
+(I64Store32 [off] (I64AddConst [off2] ptr) val mem) && off+off2 >= 0 -> (I64Store32 [off+off2] ptr val mem)
+(I64Store [off] (I64AddConst [off2] ptr) val mem) && off+off2 >= 0 -> (I64Store [off+off2] ptr val mem)
+
+(I64Load8U [off] (I64AddConst [off2] ptr) mem) && off+off2 >= 0 -> (I64Load8U [off+off2] ptr mem)
+(I64Load8S [off] (I64AddConst [off2] ptr) mem) && off+off2 >= 0 -> (I64Load8S [off+off2] ptr mem)
+(I64Load16U [off] (I64AddConst [off2] ptr) mem) && off+off2 >= 0 -> (I64Load16U [off+off2] ptr mem)
+(I64Load16S [off] (I64AddConst [off2] ptr) mem) && off+off2 >= 0 -> (I64Load16S [off+off2] ptr mem)
+(I64Load32U [off] (I64AddConst [off2] ptr) mem) && off+off2 >= 0 -> (I64Load32U [off+off2] ptr mem)
+(I64Load32S [off] (I64AddConst [off2] ptr) mem) && off+off2 >= 0 -> (I64Load32S [off+off2] ptr mem)
+(I64Load [off] (I64AddConst [off2] ptr) mem) && off+off2 >= 0 -> (I64Load [off+off2] ptr mem)
diff --git a/src/cmd/compile/internal/ssa/gen/WASMOps.go b/src/cmd/compile/internal/ssa/gen/WASMOps.go
new file mode 100644 (file)
index 0000000..c8d7677
--- /dev/null
@@ -0,0 +1,208 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// +build ignore
+
+package main
+
+import "strings"
+
+var regNamesWasm = []string{
+       "R0",
+       "R1",
+       "R2",
+       "R3",
+       "R4",
+       "R5",
+       "R6",
+       "R7",
+       "R8",
+       "R9",
+       "R10",
+       "R11",
+       "R12",
+       "R13",
+       "R14",
+       "R15",
+
+       "F0",
+       "F1",
+       "F2",
+       "F3",
+       "F4",
+       "F5",
+       "F6",
+       "F7",
+       "F8",
+       "F9",
+       "F10",
+       "F11",
+       "F12",
+       "F13",
+       "F14",
+       "F15",
+
+       "SP",
+       "g",
+
+       // pseudo-registers
+       "SB",
+}
+
+func init() {
+       // Make map from reg names to reg integers.
+       if len(regNamesWasm) > 64 {
+               panic("too many registers")
+       }
+       num := map[string]int{}
+       for i, name := range regNamesWasm {
+               num[name] = i
+       }
+       buildReg := func(s string) regMask {
+               m := regMask(0)
+               for _, r := range strings.Split(s, " ") {
+                       if n, ok := num[r]; ok {
+                               m |= regMask(1) << uint(n)
+                               continue
+                       }
+                       panic("register " + r + " not found")
+               }
+               return m
+       }
+
+       var (
+               gp     = buildReg("R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15")
+               fp     = buildReg("F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15")
+               gpsp   = gp | buildReg("SP")
+               gpspsb = gpsp | buildReg("SB")
+               // The "registers", which are actually local variables, can get clobbered
+               // if we're switching goroutines, because it unwinds the WebAssembly stack.
+               callerSave = gp | fp | buildReg("g")
+       )
+
+       // Common regInfo
+       var (
+               gp01    = regInfo{inputs: nil, outputs: []regMask{gp}}
+               gp11    = regInfo{inputs: []regMask{gpsp}, outputs: []regMask{gp}}
+               gp21    = regInfo{inputs: []regMask{gpsp, gpsp}, outputs: []regMask{gp}}
+               gp31    = regInfo{inputs: []regMask{gpsp, gpsp, gpsp}, outputs: []regMask{gp}}
+               fp01    = regInfo{inputs: nil, outputs: []regMask{fp}}
+               fp11    = regInfo{inputs: []regMask{fp}, outputs: []regMask{fp}}
+               fp21    = regInfo{inputs: []regMask{fp, fp}, outputs: []regMask{fp}}
+               fp21gp  = regInfo{inputs: []regMask{fp, fp}, outputs: []regMask{gp}}
+               gpload  = regInfo{inputs: []regMask{gpspsb, 0}, outputs: []regMask{gp}}
+               gpstore = regInfo{inputs: []regMask{gpspsb, gpsp, 0}}
+               fpload  = regInfo{inputs: []regMask{gpspsb, 0}, outputs: []regMask{fp}}
+               fpstore = regInfo{inputs: []regMask{gpspsb, fp, 0}}
+               // fpstoreconst = regInfo{inputs: []regMask{fp, 0}}
+       )
+
+       var WasmOps = []opData{
+               {name: "LoweredStaticCall", argLength: 1, reg: regInfo{clobbers: callerSave}, aux: "SymOff", call: true, symEffect: "None"},            // call static function aux.(*obj.LSym). arg0=mem, auxint=argsize, returns mem
+               {name: "LoweredClosureCall", argLength: 3, reg: regInfo{inputs: []regMask{gp, gp, 0}, clobbers: callerSave}, aux: "Int64", call: true}, // call function via closure. arg0=codeptr, arg1=closure, arg2=mem, auxint=argsize, returns mem
+               {name: "LoweredInterCall", argLength: 2, reg: regInfo{inputs: []regMask{gp}, clobbers: callerSave}, aux: "Int64", call: true},          // call fn by pointer. arg0=codeptr, arg1=mem, auxint=argsize, returns mem
+
+               {name: "LoweredAddr", argLength: 1, reg: gp11, aux: "SymOff", rematerializeable: true, symEffect: "Addr"}, // returns base+aux, arg0=base
+               {name: "LoweredMove", argLength: 3, reg: regInfo{inputs: []regMask{gp, gp}}, aux: "Int64"},                // large move. arg0=dst, arg1=src, arg2=mem, auxint=len/8, returns mem
+               {name: "LoweredZero", argLength: 2, reg: regInfo{inputs: []regMask{gp}}, aux: "Int64"},                    // large zeroing. arg0=start, arg1=mem, auxint=len/8, returns mem
+
+               {name: "LoweredGetClosurePtr", reg: gp01},                                                                          // returns wasm.REG_CTXT, the closure pointer
+               {name: "LoweredGetCallerPC", reg: gp01, rematerializeable: true},                                                   // returns the PC of the caller of the current function
+               {name: "LoweredGetCallerSP", reg: gp01, rematerializeable: true},                                                   // returns the SP of the caller of the current function
+               {name: "LoweredNilCheck", argLength: 2, reg: regInfo{inputs: []regMask{gp}}, nilCheck: true, faultOnNilArg0: true}, // panic if arg0 is nil. arg1=mem
+               {name: "LoweredWB", argLength: 3, reg: regInfo{inputs: []regMask{gp, gp}}, aux: "Sym", symEffect: "None"},          // invokes runtime.gcWriteBarrier. arg0=destptr, arg1=srcptr, arg2=mem, aux=runtime.gcWriteBarrier
+               {name: "LoweredRound32F", argLength: 1, reg: fp11, typ: "Float32"},                                                 // rounds arg0 to 32-bit float precision. arg0=value
+
+               // LoweredConvert converts between pointers and integers.
+               // We have a special op for this so as to not confuse GC
+               // (particularly stack maps). It takes a memory arg so it
+               // gets correctly ordered with respect to GC safepoints.
+               // arg0=ptr/int arg1=mem, output=int/ptr
+               //
+               // TODO(neelance): LoweredConvert should not be necessary any more, since OpConvert does not need to be lowered any more (CL 108496).
+               {name: "LoweredConvert", argLength: 2, reg: regInfo{inputs: []regMask{gp}, outputs: []regMask{gp}}},
+
+               // The following are native WebAssembly instructions, see https://webassembly.github.io/spec/core/syntax/instructions.html
+
+               {name: "Select", asm: "Select", argLength: 3, reg: gp31}, // returns arg0 if arg2 != 0, otherwise returns arg1
+
+               {name: "I64Load8U", asm: "I64Load8U", argLength: 2, reg: gpload, aux: "Int64", typ: "UInt8"},    // read unsigned 8-bit integer from address arg0+aux, arg1=mem
+               {name: "I64Load8S", asm: "I64Load8S", argLength: 2, reg: gpload, aux: "Int64", typ: "Int8"},     // read signed 8-bit integer from address arg0+aux, arg1=mem
+               {name: "I64Load16U", asm: "I64Load16U", argLength: 2, reg: gpload, aux: "Int64", typ: "UInt16"}, // read unsigned 16-bit integer from address arg0+aux, arg1=mem
+               {name: "I64Load16S", asm: "I64Load16S", argLength: 2, reg: gpload, aux: "Int64", typ: "Int16"},  // read signed 16-bit integer from address arg0+aux, arg1=mem
+               {name: "I64Load32U", asm: "I64Load32U", argLength: 2, reg: gpload, aux: "Int64", typ: "UInt32"}, // read unsigned 32-bit integer from address arg0+aux, arg1=mem
+               {name: "I64Load32S", asm: "I64Load32S", argLength: 2, reg: gpload, aux: "Int64", typ: "Int32"},  // read signed 32-bit integer from address arg0+aux, arg1=mem
+               {name: "I64Load", asm: "I64Load", argLength: 2, reg: gpload, aux: "Int64", typ: "UInt64"},       // read 64-bit integer from address arg0+aux, arg1=mem
+               {name: "I64Store8", asm: "I64Store8", argLength: 3, reg: gpstore, aux: "Int64", typ: "Mem"},     // store 8-bit integer arg1 at address arg0+aux, arg2=mem, returns mem
+               {name: "I64Store16", asm: "I64Store16", argLength: 3, reg: gpstore, aux: "Int64", typ: "Mem"},   // store 16-bit integer arg1 at address arg0+aux, arg2=mem, returns mem
+               {name: "I64Store32", asm: "I64Store32", argLength: 3, reg: gpstore, aux: "Int64", typ: "Mem"},   // store 32-bit integer arg1 at address arg0+aux, arg2=mem, returns mem
+               {name: "I64Store", asm: "I64Store", argLength: 3, reg: gpstore, aux: "Int64", typ: "Mem"},       // store 64-bit integer arg1 at address arg0+aux, arg2=mem, returns mem
+
+               {name: "F32Load", asm: "F32Load", argLength: 2, reg: fpload, aux: "Int64", typ: "Float64"}, // read 32-bit float from address arg0+aux, arg1=mem
+               {name: "F64Load", asm: "F64Load", argLength: 2, reg: fpload, aux: "Int64", typ: "Float64"}, // read 64-bit float from address arg0+aux, arg1=mem
+               {name: "F32Store", asm: "F32Store", argLength: 3, reg: fpstore, aux: "Int64", typ: "Mem"},  // store 32-bit float arg1 at address arg0+aux, arg2=mem, returns mem
+               {name: "F64Store", asm: "F64Store", argLength: 3, reg: fpstore, aux: "Int64", typ: "Mem"},  // store 64-bit float arg1 at address arg0+aux, arg2=mem, returns mem
+
+               {name: "I64Const", reg: gp01, aux: "Int64", rematerializeable: true, typ: "Int64"},     // returns the constant integer aux
+               {name: "F64Const", reg: fp01, aux: "Float64", rematerializeable: true, typ: "Float64"}, // returns the constant float aux
+
+               {name: "I64Eqz", asm: "I64Eqz", argLength: 1, reg: gp11, typ: "Bool"}, // arg0 == 0
+               {name: "I64Eq", asm: "I64Eq", argLength: 2, reg: gp21, typ: "Bool"},   // arg0 == arg1
+               {name: "I64Ne", asm: "I64Ne", argLength: 2, reg: gp21, typ: "Bool"},   // arg0 != arg1
+               {name: "I64LtS", asm: "I64LtS", argLength: 2, reg: gp21, typ: "Bool"}, // arg0 < arg1 (signed)
+               {name: "I64LtU", asm: "I64LtU", argLength: 2, reg: gp21, typ: "Bool"}, // arg0 < arg1 (unsigned)
+               {name: "I64GtS", asm: "I64GtS", argLength: 2, reg: gp21, typ: "Bool"}, // arg0 > arg1 (signed)
+               {name: "I64GtU", asm: "I64GtU", argLength: 2, reg: gp21, typ: "Bool"}, // arg0 > arg1 (unsigned)
+               {name: "I64LeS", asm: "I64LeS", argLength: 2, reg: gp21, typ: "Bool"}, // arg0 <= arg1 (signed)
+               {name: "I64LeU", asm: "I64LeU", argLength: 2, reg: gp21, typ: "Bool"}, // arg0 <= arg1 (unsigned)
+               {name: "I64GeS", asm: "I64GeS", argLength: 2, reg: gp21, typ: "Bool"}, // arg0 >= arg1 (signed)
+               {name: "I64GeU", asm: "I64GeU", argLength: 2, reg: gp21, typ: "Bool"}, // arg0 >= arg1 (unsigned)
+
+               {name: "F64Eq", asm: "F64Eq", argLength: 2, reg: fp21gp, typ: "Bool"}, // arg0 == arg1
+               {name: "F64Ne", asm: "F64Ne", argLength: 2, reg: fp21gp, typ: "Bool"}, // arg0 != arg1
+               {name: "F64Lt", asm: "F64Lt", argLength: 2, reg: fp21gp, typ: "Bool"}, // arg0 < arg1
+               {name: "F64Gt", asm: "F64Gt", argLength: 2, reg: fp21gp, typ: "Bool"}, // arg0 > arg1
+               {name: "F64Le", asm: "F64Le", argLength: 2, reg: fp21gp, typ: "Bool"}, // arg0 <= arg1
+               {name: "F64Ge", asm: "F64Ge", argLength: 2, reg: fp21gp, typ: "Bool"}, // arg0 >= arg1
+
+               {name: "I64Add", asm: "I64Add", argLength: 2, reg: gp21, typ: "Int64"},                    // arg0 + arg1
+               {name: "I64AddConst", asm: "I64Add", argLength: 1, reg: gp11, aux: "Int64", typ: "Int64"}, // arg0 + aux
+               {name: "I64Sub", asm: "I64Sub", argLength: 2, reg: gp21, typ: "Int64"},                    // arg0 - arg1
+               {name: "I64Mul", asm: "I64Mul", argLength: 2, reg: gp21, typ: "Int64"},                    // arg0 * arg1
+               {name: "I64DivS", asm: "I64DivS", argLength: 2, reg: gp21, typ: "Int64"},                  // arg0 / arg1 (signed)
+               {name: "I64DivU", asm: "I64DivU", argLength: 2, reg: gp21, typ: "Int64"},                  // arg0 / arg1 (unsigned)
+               {name: "I64RemS", asm: "I64RemS", argLength: 2, reg: gp21, typ: "Int64"},                  // arg0 % arg1 (signed)
+               {name: "I64RemU", asm: "I64RemU", argLength: 2, reg: gp21, typ: "Int64"},                  // arg0 % arg1 (unsigned)
+               {name: "I64And", asm: "I64And", argLength: 2, reg: gp21, typ: "Int64"},                    // arg0 & arg1
+               {name: "I64Or", asm: "I64Or", argLength: 2, reg: gp21, typ: "Int64"},                      // arg0 | arg1
+               {name: "I64Xor", asm: "I64Xor", argLength: 2, reg: gp21, typ: "Int64"},                    // arg0 ^ arg1
+               {name: "I64Shl", asm: "I64Shl", argLength: 2, reg: gp21, typ: "Int64"},                    // arg0 << (arg1 % 64)
+               {name: "I64ShrS", asm: "I64ShrS", argLength: 2, reg: gp21, typ: "Int64"},                  // arg0 >> (arg1 % 64) (signed)
+               {name: "I64ShrU", asm: "I64ShrU", argLength: 2, reg: gp21, typ: "Int64"},                  // arg0 >> (arg1 % 64) (unsigned)
+
+               {name: "F64Neg", asm: "F64Neg", argLength: 1, reg: fp11, typ: "Float64"}, // -arg0
+               {name: "F64Add", asm: "F64Add", argLength: 2, reg: fp21, typ: "Float64"}, // arg0 + arg1
+               {name: "F64Sub", asm: "F64Sub", argLength: 2, reg: fp21, typ: "Float64"}, // arg0 - arg1
+               {name: "F64Mul", asm: "F64Mul", argLength: 2, reg: fp21, typ: "Float64"}, // arg0 * arg1
+               {name: "F64Div", asm: "F64Div", argLength: 2, reg: fp21, typ: "Float64"}, // arg0 / arg1
+
+               {name: "I64TruncSF64", asm: "I64TruncSF64", argLength: 1, reg: regInfo{inputs: []regMask{fp}, outputs: []regMask{gp}}, typ: "Int64"},       // truncates the float arg0 to a signed integer
+               {name: "I64TruncUF64", asm: "I64TruncUF64", argLength: 1, reg: regInfo{inputs: []regMask{fp}, outputs: []regMask{gp}}, typ: "Int64"},       // truncates the float arg0 to an unsigned integer
+               {name: "F64ConvertSI64", asm: "F64ConvertSI64", argLength: 1, reg: regInfo{inputs: []regMask{gp}, outputs: []regMask{fp}}, typ: "Float64"}, // converts the signed integer arg0 to a float
+               {name: "F64ConvertUI64", asm: "F64ConvertUI64", argLength: 1, reg: regInfo{inputs: []regMask{gp}, outputs: []regMask{fp}}, typ: "Float64"}, // converts the unsigned integer arg0 to a float
+       }
+
+       archs = append(archs, arch{
+               name:            "Wasm",
+               pkg:             "cmd/internal/obj/wasm",
+               genfile:         "",
+               ops:             WasmOps,
+               blocks:          nil,
+               regnames:        regNamesWasm,
+               gpregmask:       gp,
+               fpregmask:       fp,
+               framepointerreg: -1, // not used
+               linkreg:         -1, // not used
+       })
+}
index d058a08089178d146127f512a6f502c2a562e960..b0d893a2a8a5155a9ea72981d7ee4e2f0d02e2ca 100644 (file)
@@ -9,6 +9,7 @@ import (
        "cmd/internal/obj/mips"
        "cmd/internal/obj/ppc64"
        "cmd/internal/obj/s390x"
+       "cmd/internal/obj/wasm"
        "cmd/internal/obj/x86"
 )
 
@@ -1860,6 +1861,78 @@ const (
        OpS390XLoweredMove
        OpS390XLoweredZero
 
+       OpWasmLoweredStaticCall
+       OpWasmLoweredClosureCall
+       OpWasmLoweredInterCall
+       OpWasmLoweredAddr
+       OpWasmLoweredMove
+       OpWasmLoweredZero
+       OpWasmLoweredGetClosurePtr
+       OpWasmLoweredGetCallerPC
+       OpWasmLoweredGetCallerSP
+       OpWasmLoweredNilCheck
+       OpWasmLoweredWB
+       OpWasmLoweredRound32F
+       OpWasmLoweredConvert
+       OpWasmSelect
+       OpWasmI64Load8U
+       OpWasmI64Load8S
+       OpWasmI64Load16U
+       OpWasmI64Load16S
+       OpWasmI64Load32U
+       OpWasmI64Load32S
+       OpWasmI64Load
+       OpWasmI64Store8
+       OpWasmI64Store16
+       OpWasmI64Store32
+       OpWasmI64Store
+       OpWasmF32Load
+       OpWasmF64Load
+       OpWasmF32Store
+       OpWasmF64Store
+       OpWasmI64Const
+       OpWasmF64Const
+       OpWasmI64Eqz
+       OpWasmI64Eq
+       OpWasmI64Ne
+       OpWasmI64LtS
+       OpWasmI64LtU
+       OpWasmI64GtS
+       OpWasmI64GtU
+       OpWasmI64LeS
+       OpWasmI64LeU
+       OpWasmI64GeS
+       OpWasmI64GeU
+       OpWasmF64Eq
+       OpWasmF64Ne
+       OpWasmF64Lt
+       OpWasmF64Gt
+       OpWasmF64Le
+       OpWasmF64Ge
+       OpWasmI64Add
+       OpWasmI64AddConst
+       OpWasmI64Sub
+       OpWasmI64Mul
+       OpWasmI64DivS
+       OpWasmI64DivU
+       OpWasmI64RemS
+       OpWasmI64RemU
+       OpWasmI64And
+       OpWasmI64Or
+       OpWasmI64Xor
+       OpWasmI64Shl
+       OpWasmI64ShrS
+       OpWasmI64ShrU
+       OpWasmF64Neg
+       OpWasmF64Add
+       OpWasmF64Sub
+       OpWasmF64Mul
+       OpWasmF64Div
+       OpWasmI64TruncSF64
+       OpWasmI64TruncUF64
+       OpWasmF64ConvertSI64
+       OpWasmF64ConvertUI64
+
        OpAdd8
        OpAdd16
        OpAdd32
@@ -24772,148 +24845,1085 @@ var opcodeTable = [...]opInfo{
        },
 
        {
-               name:        "Add8",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
-       },
-       {
-               name:        "Add16",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
-       },
-       {
-               name:        "Add32",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:      "LoweredStaticCall",
+               auxType:   auxSymOff,
+               argLen:    1,
+               call:      true,
+               symEffect: SymNone,
+               reg: regInfo{
+                       clobbers: 12884901887, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 g
+               },
        },
        {
-               name:        "Add64",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:    "LoweredClosureCall",
+               auxType: auxInt64,
+               argLen:  3,
+               call:    true,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                               {1, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+                       clobbers: 12884901887, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 g
+               },
        },
        {
-               name:    "AddPtr",
+               name:    "LoweredInterCall",
+               auxType: auxInt64,
                argLen:  2,
-               generic: true,
+               call:    true,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+                       clobbers: 12884901887, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15 g
+               },
        },
        {
-               name:        "Add32F",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:              "LoweredAddr",
+               auxType:           auxSymOff,
+               argLen:            1,
+               rematerializeable: true,
+               symEffect:         SymAddr,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:        "Add64F",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:    "LoweredMove",
+               auxType: auxInt64,
+               argLen:  3,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                               {1, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:    "Sub8",
+               name:    "LoweredZero",
+               auxType: auxInt64,
                argLen:  2,
-               generic: true,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:    "Sub16",
-               argLen:  2,
-               generic: true,
+               name:   "LoweredGetClosurePtr",
+               argLen: 0,
+               reg: regInfo{
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:    "Sub32",
-               argLen:  2,
-               generic: true,
+               name:              "LoweredGetCallerPC",
+               argLen:            0,
+               rematerializeable: true,
+               reg: regInfo{
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:    "Sub64",
-               argLen:  2,
-               generic: true,
+               name:              "LoweredGetCallerSP",
+               argLen:            0,
+               rematerializeable: true,
+               reg: regInfo{
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:    "SubPtr",
-               argLen:  2,
-               generic: true,
+               name:           "LoweredNilCheck",
+               argLen:         2,
+               nilCheck:       true,
+               faultOnNilArg0: true,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:    "Sub32F",
-               argLen:  2,
-               generic: true,
+               name:      "LoweredWB",
+               auxType:   auxSym,
+               argLen:    3,
+               symEffect: SymNone,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                               {1, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:    "Sub64F",
-               argLen:  2,
-               generic: true,
+               name:   "LoweredRound32F",
+               argLen: 1,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+               },
        },
        {
-               name:        "Mul8",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:   "LoweredConvert",
+               argLen: 2,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:        "Mul16",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:   "Select",
+               argLen: 3,
+               asm:    wasm.ASelect,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {2, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:        "Mul32",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:    "I64Load8U",
+               auxType: auxInt64,
+               argLen:  2,
+               asm:     wasm.AI64Load8U,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:        "Mul64",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:    "I64Load8S",
+               auxType: auxInt64,
+               argLen:  2,
+               asm:     wasm.AI64Load8S,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:        "Mul32F",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:    "I64Load16U",
+               auxType: auxInt64,
+               argLen:  2,
+               asm:     wasm.AI64Load16U,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:        "Mul64F",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:    "I64Load16S",
+               auxType: auxInt64,
+               argLen:  2,
+               asm:     wasm.AI64Load16S,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:    "Div32F",
+               name:    "I64Load32U",
+               auxType: auxInt64,
                argLen:  2,
-               generic: true,
+               asm:     wasm.AI64Load32U,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:    "Div64F",
+               name:    "I64Load32S",
+               auxType: auxInt64,
                argLen:  2,
-               generic: true,
+               asm:     wasm.AI64Load32S,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:        "Hmul32",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:    "I64Load",
+               auxType: auxInt64,
+               argLen:  2,
+               asm:     wasm.AI64Load,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
        },
        {
-               name:        "Hmul32u",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:    "I64Store8",
+               auxType: auxInt64,
+               argLen:  3,
+               asm:     wasm.AI64Store8,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {1, 4295032831},  // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+               },
        },
        {
-               name:        "Hmul64",
-               argLen:      2,
-               commutative: true,
-               generic:     true,
+               name:    "I64Store16",
+               auxType: auxInt64,
+               argLen:  3,
+               asm:     wasm.AI64Store16,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {1, 4295032831},  // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+               },
        },
        {
-               name:        "Hmul64u",
-               argLen:      2,
+               name:    "I64Store32",
+               auxType: auxInt64,
+               argLen:  3,
+               asm:     wasm.AI64Store32,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {1, 4295032831},  // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+               },
+       },
+       {
+               name:    "I64Store",
+               auxType: auxInt64,
+               argLen:  3,
+               asm:     wasm.AI64Store,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {1, 4295032831},  // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+               },
+       },
+       {
+               name:    "F32Load",
+               auxType: auxInt64,
+               argLen:  2,
+               asm:     wasm.AF32Load,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+                       outputs: []outputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+               },
+       },
+       {
+               name:    "F64Load",
+               auxType: auxInt64,
+               argLen:  2,
+               asm:     wasm.AF64Load,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+                       outputs: []outputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+               },
+       },
+       {
+               name:    "F32Store",
+               auxType: auxInt64,
+               argLen:  3,
+               asm:     wasm.AF32Store,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {1, 4294901760},  // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+               },
+       },
+       {
+               name:    "F64Store",
+               auxType: auxInt64,
+               argLen:  3,
+               asm:     wasm.AF64Store,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {1, 4294901760},  // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {0, 21474902015}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP SB
+                       },
+               },
+       },
+       {
+               name:              "I64Const",
+               auxType:           auxInt64,
+               argLen:            0,
+               rematerializeable: true,
+               reg: regInfo{
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:              "F64Const",
+               auxType:           auxFloat64,
+               argLen:            0,
+               rematerializeable: true,
+               reg: regInfo{
+                       outputs: []outputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+               },
+       },
+       {
+               name:   "I64Eqz",
+               argLen: 1,
+               asm:    wasm.AI64Eqz,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64Eq",
+               argLen: 2,
+               asm:    wasm.AI64Eq,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64Ne",
+               argLen: 2,
+               asm:    wasm.AI64Ne,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64LtS",
+               argLen: 2,
+               asm:    wasm.AI64LtS,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64LtU",
+               argLen: 2,
+               asm:    wasm.AI64LtU,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64GtS",
+               argLen: 2,
+               asm:    wasm.AI64GtS,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64GtU",
+               argLen: 2,
+               asm:    wasm.AI64GtU,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64LeS",
+               argLen: 2,
+               asm:    wasm.AI64LeS,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64LeU",
+               argLen: 2,
+               asm:    wasm.AI64LeU,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64GeS",
+               argLen: 2,
+               asm:    wasm.AI64GeS,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64GeU",
+               argLen: 2,
+               asm:    wasm.AI64GeU,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "F64Eq",
+               argLen: 2,
+               asm:    wasm.AF64Eq,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {1, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "F64Ne",
+               argLen: 2,
+               asm:    wasm.AF64Ne,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {1, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "F64Lt",
+               argLen: 2,
+               asm:    wasm.AF64Lt,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {1, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "F64Gt",
+               argLen: 2,
+               asm:    wasm.AF64Gt,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {1, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "F64Le",
+               argLen: 2,
+               asm:    wasm.AF64Le,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {1, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "F64Ge",
+               argLen: 2,
+               asm:    wasm.AF64Ge,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {1, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64Add",
+               argLen: 2,
+               asm:    wasm.AI64Add,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:    "I64AddConst",
+               auxType: auxInt64,
+               argLen:  1,
+               asm:     wasm.AI64Add,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64Sub",
+               argLen: 2,
+               asm:    wasm.AI64Sub,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64Mul",
+               argLen: 2,
+               asm:    wasm.AI64Mul,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64DivS",
+               argLen: 2,
+               asm:    wasm.AI64DivS,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64DivU",
+               argLen: 2,
+               asm:    wasm.AI64DivU,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64RemS",
+               argLen: 2,
+               asm:    wasm.AI64RemS,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64RemU",
+               argLen: 2,
+               asm:    wasm.AI64RemU,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64And",
+               argLen: 2,
+               asm:    wasm.AI64And,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64Or",
+               argLen: 2,
+               asm:    wasm.AI64Or,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64Xor",
+               argLen: 2,
+               asm:    wasm.AI64Xor,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64Shl",
+               argLen: 2,
+               asm:    wasm.AI64Shl,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64ShrS",
+               argLen: 2,
+               asm:    wasm.AI64ShrS,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64ShrU",
+               argLen: 2,
+               asm:    wasm.AI64ShrU,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                               {1, 4295032831}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15 SP
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "F64Neg",
+               argLen: 1,
+               asm:    wasm.AF64Neg,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+               },
+       },
+       {
+               name:   "F64Add",
+               argLen: 2,
+               asm:    wasm.AF64Add,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {1, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+               },
+       },
+       {
+               name:   "F64Sub",
+               argLen: 2,
+               asm:    wasm.AF64Sub,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {1, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+               },
+       },
+       {
+               name:   "F64Mul",
+               argLen: 2,
+               asm:    wasm.AF64Mul,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {1, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+               },
+       },
+       {
+               name:   "F64Div",
+               argLen: 2,
+               asm:    wasm.AF64Div,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                               {1, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+               },
+       },
+       {
+               name:   "I64TruncSF64",
+               argLen: 1,
+               asm:    wasm.AI64TruncSF64,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "I64TruncUF64",
+               argLen: 1,
+               asm:    wasm.AI64TruncUF64,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+                       outputs: []outputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+               },
+       },
+       {
+               name:   "F64ConvertSI64",
+               argLen: 1,
+               asm:    wasm.AF64ConvertSI64,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+                       outputs: []outputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+               },
+       },
+       {
+               name:   "F64ConvertUI64",
+               argLen: 1,
+               asm:    wasm.AF64ConvertUI64,
+               reg: regInfo{
+                       inputs: []inputInfo{
+                               {0, 65535}, // R0 R1 R2 R3 R4 R5 R6 R7 R8 R9 R10 R11 R12 R13 R14 R15
+                       },
+                       outputs: []outputInfo{
+                               {0, 4294901760}, // F0 F1 F2 F3 F4 F5 F6 F7 F8 F9 F10 F11 F12 F13 F14 F15
+                       },
+               },
+       },
+
+       {
+               name:        "Add8",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Add16",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Add32",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Add64",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:    "AddPtr",
+               argLen:  2,
+               generic: true,
+       },
+       {
+               name:        "Add32F",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Add64F",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:    "Sub8",
+               argLen:  2,
+               generic: true,
+       },
+       {
+               name:    "Sub16",
+               argLen:  2,
+               generic: true,
+       },
+       {
+               name:    "Sub32",
+               argLen:  2,
+               generic: true,
+       },
+       {
+               name:    "Sub64",
+               argLen:  2,
+               generic: true,
+       },
+       {
+               name:    "SubPtr",
+               argLen:  2,
+               generic: true,
+       },
+       {
+               name:    "Sub32F",
+               argLen:  2,
+               generic: true,
+       },
+       {
+               name:    "Sub64F",
+               argLen:  2,
+               generic: true,
+       },
+       {
+               name:        "Mul8",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Mul16",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Mul32",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Mul64",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Mul32F",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Mul64F",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:    "Div32F",
+               argLen:  2,
+               generic: true,
+       },
+       {
+               name:    "Div64F",
+               argLen:  2,
+               generic: true,
+       },
+       {
+               name:        "Hmul32",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Hmul32u",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Hmul64",
+               argLen:      2,
+               commutative: true,
+               generic:     true,
+       },
+       {
+               name:        "Hmul64u",
+               argLen:      2,
                commutative: true,
                generic:     true,
        },
@@ -27082,3 +28092,45 @@ var fpRegMaskS390X = regMask(4294901760)
 var specialRegMaskS390X = regMask(0)
 var framepointerRegS390X = int8(-1)
 var linkRegS390X = int8(14)
+var registersWasm = [...]Register{
+       {0, wasm.REG_R0, "R0"},
+       {1, wasm.REG_R1, "R1"},
+       {2, wasm.REG_R2, "R2"},
+       {3, wasm.REG_R3, "R3"},
+       {4, wasm.REG_R4, "R4"},
+       {5, wasm.REG_R5, "R5"},
+       {6, wasm.REG_R6, "R6"},
+       {7, wasm.REG_R7, "R7"},
+       {8, wasm.REG_R8, "R8"},
+       {9, wasm.REG_R9, "R9"},
+       {10, wasm.REG_R10, "R10"},
+       {11, wasm.REG_R11, "R11"},
+       {12, wasm.REG_R12, "R12"},
+       {13, wasm.REG_R13, "R13"},
+       {14, wasm.REG_R14, "R14"},
+       {15, wasm.REG_R15, "R15"},
+       {16, wasm.REG_F0, "F0"},
+       {17, wasm.REG_F1, "F1"},
+       {18, wasm.REG_F2, "F2"},
+       {19, wasm.REG_F3, "F3"},
+       {20, wasm.REG_F4, "F4"},
+       {21, wasm.REG_F5, "F5"},
+       {22, wasm.REG_F6, "F6"},
+       {23, wasm.REG_F7, "F7"},
+       {24, wasm.REG_F8, "F8"},
+       {25, wasm.REG_F9, "F9"},
+       {26, wasm.REG_F10, "F10"},
+       {27, wasm.REG_F11, "F11"},
+       {28, wasm.REG_F12, "F12"},
+       {29, wasm.REG_F13, "F13"},
+       {30, wasm.REG_F14, "F14"},
+       {31, wasm.REG_F15, "F15"},
+       {32, wasm.REGSP, "SP"},
+       {33, wasm.REGG, "g"},
+       {34, 0, "SB"},
+}
+var gpRegMaskWasm = regMask(65535)
+var fpRegMaskWasm = regMask(4294901760)
+var specialRegMaskWasm = regMask(0)
+var framepointerRegWasm = int8(-1)
+var linkRegWasm = int8(-1)
diff --git a/src/cmd/compile/internal/ssa/rewriteWasm.go b/src/cmd/compile/internal/ssa/rewriteWasm.go
new file mode 100644 (file)
index 0000000..f488a93
--- /dev/null
@@ -0,0 +1,6284 @@
+// Code generated from gen/Wasm.rules; DO NOT EDIT.
+// generated with: cd gen; go run *.go
+
+package ssa
+
+import "math"
+import "cmd/internal/obj"
+import "cmd/internal/objabi"
+import "cmd/compile/internal/types"
+
+var _ = math.MinInt8  // in case not otherwise used
+var _ = obj.ANOP      // in case not otherwise used
+var _ = objabi.GOROOT // in case not otherwise used
+var _ = types.TypeMem // in case not otherwise used
+
+func rewriteValueWasm(v *Value) bool {
+       switch v.Op {
+       case OpAdd16:
+               return rewriteValueWasm_OpAdd16_0(v)
+       case OpAdd32:
+               return rewriteValueWasm_OpAdd32_0(v)
+       case OpAdd32F:
+               return rewriteValueWasm_OpAdd32F_0(v)
+       case OpAdd64:
+               return rewriteValueWasm_OpAdd64_0(v)
+       case OpAdd64F:
+               return rewriteValueWasm_OpAdd64F_0(v)
+       case OpAdd8:
+               return rewriteValueWasm_OpAdd8_0(v)
+       case OpAddPtr:
+               return rewriteValueWasm_OpAddPtr_0(v)
+       case OpAddr:
+               return rewriteValueWasm_OpAddr_0(v)
+       case OpAnd16:
+               return rewriteValueWasm_OpAnd16_0(v)
+       case OpAnd32:
+               return rewriteValueWasm_OpAnd32_0(v)
+       case OpAnd64:
+               return rewriteValueWasm_OpAnd64_0(v)
+       case OpAnd8:
+               return rewriteValueWasm_OpAnd8_0(v)
+       case OpAndB:
+               return rewriteValueWasm_OpAndB_0(v)
+       case OpClosureCall:
+               return rewriteValueWasm_OpClosureCall_0(v)
+       case OpCom16:
+               return rewriteValueWasm_OpCom16_0(v)
+       case OpCom32:
+               return rewriteValueWasm_OpCom32_0(v)
+       case OpCom64:
+               return rewriteValueWasm_OpCom64_0(v)
+       case OpCom8:
+               return rewriteValueWasm_OpCom8_0(v)
+       case OpConst16:
+               return rewriteValueWasm_OpConst16_0(v)
+       case OpConst32:
+               return rewriteValueWasm_OpConst32_0(v)
+       case OpConst32F:
+               return rewriteValueWasm_OpConst32F_0(v)
+       case OpConst64:
+               return rewriteValueWasm_OpConst64_0(v)
+       case OpConst64F:
+               return rewriteValueWasm_OpConst64F_0(v)
+       case OpConst8:
+               return rewriteValueWasm_OpConst8_0(v)
+       case OpConstBool:
+               return rewriteValueWasm_OpConstBool_0(v)
+       case OpConstNil:
+               return rewriteValueWasm_OpConstNil_0(v)
+       case OpConvert:
+               return rewriteValueWasm_OpConvert_0(v)
+       case OpCvt32Fto32:
+               return rewriteValueWasm_OpCvt32Fto32_0(v)
+       case OpCvt32Fto32U:
+               return rewriteValueWasm_OpCvt32Fto32U_0(v)
+       case OpCvt32Fto64:
+               return rewriteValueWasm_OpCvt32Fto64_0(v)
+       case OpCvt32Fto64F:
+               return rewriteValueWasm_OpCvt32Fto64F_0(v)
+       case OpCvt32Fto64U:
+               return rewriteValueWasm_OpCvt32Fto64U_0(v)
+       case OpCvt32Uto32F:
+               return rewriteValueWasm_OpCvt32Uto32F_0(v)
+       case OpCvt32Uto64F:
+               return rewriteValueWasm_OpCvt32Uto64F_0(v)
+       case OpCvt32to32F:
+               return rewriteValueWasm_OpCvt32to32F_0(v)
+       case OpCvt32to64F:
+               return rewriteValueWasm_OpCvt32to64F_0(v)
+       case OpCvt64Fto32:
+               return rewriteValueWasm_OpCvt64Fto32_0(v)
+       case OpCvt64Fto32F:
+               return rewriteValueWasm_OpCvt64Fto32F_0(v)
+       case OpCvt64Fto32U:
+               return rewriteValueWasm_OpCvt64Fto32U_0(v)
+       case OpCvt64Fto64:
+               return rewriteValueWasm_OpCvt64Fto64_0(v)
+       case OpCvt64Fto64U:
+               return rewriteValueWasm_OpCvt64Fto64U_0(v)
+       case OpCvt64Uto32F:
+               return rewriteValueWasm_OpCvt64Uto32F_0(v)
+       case OpCvt64Uto64F:
+               return rewriteValueWasm_OpCvt64Uto64F_0(v)
+       case OpCvt64to32F:
+               return rewriteValueWasm_OpCvt64to32F_0(v)
+       case OpCvt64to64F:
+               return rewriteValueWasm_OpCvt64to64F_0(v)
+       case OpDiv16:
+               return rewriteValueWasm_OpDiv16_0(v)
+       case OpDiv16u:
+               return rewriteValueWasm_OpDiv16u_0(v)
+       case OpDiv32:
+               return rewriteValueWasm_OpDiv32_0(v)
+       case OpDiv32F:
+               return rewriteValueWasm_OpDiv32F_0(v)
+       case OpDiv32u:
+               return rewriteValueWasm_OpDiv32u_0(v)
+       case OpDiv64:
+               return rewriteValueWasm_OpDiv64_0(v)
+       case OpDiv64F:
+               return rewriteValueWasm_OpDiv64F_0(v)
+       case OpDiv64u:
+               return rewriteValueWasm_OpDiv64u_0(v)
+       case OpDiv8:
+               return rewriteValueWasm_OpDiv8_0(v)
+       case OpDiv8u:
+               return rewriteValueWasm_OpDiv8u_0(v)
+       case OpEq16:
+               return rewriteValueWasm_OpEq16_0(v)
+       case OpEq32:
+               return rewriteValueWasm_OpEq32_0(v)
+       case OpEq32F:
+               return rewriteValueWasm_OpEq32F_0(v)
+       case OpEq64:
+               return rewriteValueWasm_OpEq64_0(v)
+       case OpEq64F:
+               return rewriteValueWasm_OpEq64F_0(v)
+       case OpEq8:
+               return rewriteValueWasm_OpEq8_0(v)
+       case OpEqB:
+               return rewriteValueWasm_OpEqB_0(v)
+       case OpEqPtr:
+               return rewriteValueWasm_OpEqPtr_0(v)
+       case OpGeq16:
+               return rewriteValueWasm_OpGeq16_0(v)
+       case OpGeq16U:
+               return rewriteValueWasm_OpGeq16U_0(v)
+       case OpGeq32:
+               return rewriteValueWasm_OpGeq32_0(v)
+       case OpGeq32F:
+               return rewriteValueWasm_OpGeq32F_0(v)
+       case OpGeq32U:
+               return rewriteValueWasm_OpGeq32U_0(v)
+       case OpGeq64:
+               return rewriteValueWasm_OpGeq64_0(v)
+       case OpGeq64F:
+               return rewriteValueWasm_OpGeq64F_0(v)
+       case OpGeq64U:
+               return rewriteValueWasm_OpGeq64U_0(v)
+       case OpGeq8:
+               return rewriteValueWasm_OpGeq8_0(v)
+       case OpGeq8U:
+               return rewriteValueWasm_OpGeq8U_0(v)
+       case OpGetCallerPC:
+               return rewriteValueWasm_OpGetCallerPC_0(v)
+       case OpGetCallerSP:
+               return rewriteValueWasm_OpGetCallerSP_0(v)
+       case OpGetClosurePtr:
+               return rewriteValueWasm_OpGetClosurePtr_0(v)
+       case OpGreater16:
+               return rewriteValueWasm_OpGreater16_0(v)
+       case OpGreater16U:
+               return rewriteValueWasm_OpGreater16U_0(v)
+       case OpGreater32:
+               return rewriteValueWasm_OpGreater32_0(v)
+       case OpGreater32F:
+               return rewriteValueWasm_OpGreater32F_0(v)
+       case OpGreater32U:
+               return rewriteValueWasm_OpGreater32U_0(v)
+       case OpGreater64:
+               return rewriteValueWasm_OpGreater64_0(v)
+       case OpGreater64F:
+               return rewriteValueWasm_OpGreater64F_0(v)
+       case OpGreater64U:
+               return rewriteValueWasm_OpGreater64U_0(v)
+       case OpGreater8:
+               return rewriteValueWasm_OpGreater8_0(v)
+       case OpGreater8U:
+               return rewriteValueWasm_OpGreater8U_0(v)
+       case OpInterCall:
+               return rewriteValueWasm_OpInterCall_0(v)
+       case OpIsInBounds:
+               return rewriteValueWasm_OpIsInBounds_0(v)
+       case OpIsNonNil:
+               return rewriteValueWasm_OpIsNonNil_0(v)
+       case OpIsSliceInBounds:
+               return rewriteValueWasm_OpIsSliceInBounds_0(v)
+       case OpLeq16:
+               return rewriteValueWasm_OpLeq16_0(v)
+       case OpLeq16U:
+               return rewriteValueWasm_OpLeq16U_0(v)
+       case OpLeq32:
+               return rewriteValueWasm_OpLeq32_0(v)
+       case OpLeq32F:
+               return rewriteValueWasm_OpLeq32F_0(v)
+       case OpLeq32U:
+               return rewriteValueWasm_OpLeq32U_0(v)
+       case OpLeq64:
+               return rewriteValueWasm_OpLeq64_0(v)
+       case OpLeq64F:
+               return rewriteValueWasm_OpLeq64F_0(v)
+       case OpLeq64U:
+               return rewriteValueWasm_OpLeq64U_0(v)
+       case OpLeq8:
+               return rewriteValueWasm_OpLeq8_0(v)
+       case OpLeq8U:
+               return rewriteValueWasm_OpLeq8U_0(v)
+       case OpLess16:
+               return rewriteValueWasm_OpLess16_0(v)
+       case OpLess16U:
+               return rewriteValueWasm_OpLess16U_0(v)
+       case OpLess32:
+               return rewriteValueWasm_OpLess32_0(v)
+       case OpLess32F:
+               return rewriteValueWasm_OpLess32F_0(v)
+       case OpLess32U:
+               return rewriteValueWasm_OpLess32U_0(v)
+       case OpLess64:
+               return rewriteValueWasm_OpLess64_0(v)
+       case OpLess64F:
+               return rewriteValueWasm_OpLess64F_0(v)
+       case OpLess64U:
+               return rewriteValueWasm_OpLess64U_0(v)
+       case OpLess8:
+               return rewriteValueWasm_OpLess8_0(v)
+       case OpLess8U:
+               return rewriteValueWasm_OpLess8U_0(v)
+       case OpLoad:
+               return rewriteValueWasm_OpLoad_0(v)
+       case OpLsh16x16:
+               return rewriteValueWasm_OpLsh16x16_0(v)
+       case OpLsh16x32:
+               return rewriteValueWasm_OpLsh16x32_0(v)
+       case OpLsh16x64:
+               return rewriteValueWasm_OpLsh16x64_0(v)
+       case OpLsh16x8:
+               return rewriteValueWasm_OpLsh16x8_0(v)
+       case OpLsh32x16:
+               return rewriteValueWasm_OpLsh32x16_0(v)
+       case OpLsh32x32:
+               return rewriteValueWasm_OpLsh32x32_0(v)
+       case OpLsh32x64:
+               return rewriteValueWasm_OpLsh32x64_0(v)
+       case OpLsh32x8:
+               return rewriteValueWasm_OpLsh32x8_0(v)
+       case OpLsh64x16:
+               return rewriteValueWasm_OpLsh64x16_0(v)
+       case OpLsh64x32:
+               return rewriteValueWasm_OpLsh64x32_0(v)
+       case OpLsh64x64:
+               return rewriteValueWasm_OpLsh64x64_0(v)
+       case OpLsh64x8:
+               return rewriteValueWasm_OpLsh64x8_0(v)
+       case OpLsh8x16:
+               return rewriteValueWasm_OpLsh8x16_0(v)
+       case OpLsh8x32:
+               return rewriteValueWasm_OpLsh8x32_0(v)
+       case OpLsh8x64:
+               return rewriteValueWasm_OpLsh8x64_0(v)
+       case OpLsh8x8:
+               return rewriteValueWasm_OpLsh8x8_0(v)
+       case OpMod16:
+               return rewriteValueWasm_OpMod16_0(v)
+       case OpMod16u:
+               return rewriteValueWasm_OpMod16u_0(v)
+       case OpMod32:
+               return rewriteValueWasm_OpMod32_0(v)
+       case OpMod32u:
+               return rewriteValueWasm_OpMod32u_0(v)
+       case OpMod64:
+               return rewriteValueWasm_OpMod64_0(v)
+       case OpMod64u:
+               return rewriteValueWasm_OpMod64u_0(v)
+       case OpMod8:
+               return rewriteValueWasm_OpMod8_0(v)
+       case OpMod8u:
+               return rewriteValueWasm_OpMod8u_0(v)
+       case OpMove:
+               return rewriteValueWasm_OpMove_0(v) || rewriteValueWasm_OpMove_10(v)
+       case OpMul16:
+               return rewriteValueWasm_OpMul16_0(v)
+       case OpMul32:
+               return rewriteValueWasm_OpMul32_0(v)
+       case OpMul32F:
+               return rewriteValueWasm_OpMul32F_0(v)
+       case OpMul64:
+               return rewriteValueWasm_OpMul64_0(v)
+       case OpMul64F:
+               return rewriteValueWasm_OpMul64F_0(v)
+       case OpMul8:
+               return rewriteValueWasm_OpMul8_0(v)
+       case OpNeg16:
+               return rewriteValueWasm_OpNeg16_0(v)
+       case OpNeg32:
+               return rewriteValueWasm_OpNeg32_0(v)
+       case OpNeg32F:
+               return rewriteValueWasm_OpNeg32F_0(v)
+       case OpNeg64:
+               return rewriteValueWasm_OpNeg64_0(v)
+       case OpNeg64F:
+               return rewriteValueWasm_OpNeg64F_0(v)
+       case OpNeg8:
+               return rewriteValueWasm_OpNeg8_0(v)
+       case OpNeq16:
+               return rewriteValueWasm_OpNeq16_0(v)
+       case OpNeq32:
+               return rewriteValueWasm_OpNeq32_0(v)
+       case OpNeq32F:
+               return rewriteValueWasm_OpNeq32F_0(v)
+       case OpNeq64:
+               return rewriteValueWasm_OpNeq64_0(v)
+       case OpNeq64F:
+               return rewriteValueWasm_OpNeq64F_0(v)
+       case OpNeq8:
+               return rewriteValueWasm_OpNeq8_0(v)
+       case OpNeqB:
+               return rewriteValueWasm_OpNeqB_0(v)
+       case OpNeqPtr:
+               return rewriteValueWasm_OpNeqPtr_0(v)
+       case OpNilCheck:
+               return rewriteValueWasm_OpNilCheck_0(v)
+       case OpNot:
+               return rewriteValueWasm_OpNot_0(v)
+       case OpOffPtr:
+               return rewriteValueWasm_OpOffPtr_0(v)
+       case OpOr16:
+               return rewriteValueWasm_OpOr16_0(v)
+       case OpOr32:
+               return rewriteValueWasm_OpOr32_0(v)
+       case OpOr64:
+               return rewriteValueWasm_OpOr64_0(v)
+       case OpOr8:
+               return rewriteValueWasm_OpOr8_0(v)
+       case OpOrB:
+               return rewriteValueWasm_OpOrB_0(v)
+       case OpRound32F:
+               return rewriteValueWasm_OpRound32F_0(v)
+       case OpRound64F:
+               return rewriteValueWasm_OpRound64F_0(v)
+       case OpRsh16Ux16:
+               return rewriteValueWasm_OpRsh16Ux16_0(v)
+       case OpRsh16Ux32:
+               return rewriteValueWasm_OpRsh16Ux32_0(v)
+       case OpRsh16Ux64:
+               return rewriteValueWasm_OpRsh16Ux64_0(v)
+       case OpRsh16Ux8:
+               return rewriteValueWasm_OpRsh16Ux8_0(v)
+       case OpRsh16x16:
+               return rewriteValueWasm_OpRsh16x16_0(v)
+       case OpRsh16x32:
+               return rewriteValueWasm_OpRsh16x32_0(v)
+       case OpRsh16x64:
+               return rewriteValueWasm_OpRsh16x64_0(v)
+       case OpRsh16x8:
+               return rewriteValueWasm_OpRsh16x8_0(v)
+       case OpRsh32Ux16:
+               return rewriteValueWasm_OpRsh32Ux16_0(v)
+       case OpRsh32Ux32:
+               return rewriteValueWasm_OpRsh32Ux32_0(v)
+       case OpRsh32Ux64:
+               return rewriteValueWasm_OpRsh32Ux64_0(v)
+       case OpRsh32Ux8:
+               return rewriteValueWasm_OpRsh32Ux8_0(v)
+       case OpRsh32x16:
+               return rewriteValueWasm_OpRsh32x16_0(v)
+       case OpRsh32x32:
+               return rewriteValueWasm_OpRsh32x32_0(v)
+       case OpRsh32x64:
+               return rewriteValueWasm_OpRsh32x64_0(v)
+       case OpRsh32x8:
+               return rewriteValueWasm_OpRsh32x8_0(v)
+       case OpRsh64Ux16:
+               return rewriteValueWasm_OpRsh64Ux16_0(v)
+       case OpRsh64Ux32:
+               return rewriteValueWasm_OpRsh64Ux32_0(v)
+       case OpRsh64Ux64:
+               return rewriteValueWasm_OpRsh64Ux64_0(v)
+       case OpRsh64Ux8:
+               return rewriteValueWasm_OpRsh64Ux8_0(v)
+       case OpRsh64x16:
+               return rewriteValueWasm_OpRsh64x16_0(v)
+       case OpRsh64x32:
+               return rewriteValueWasm_OpRsh64x32_0(v)
+       case OpRsh64x64:
+               return rewriteValueWasm_OpRsh64x64_0(v)
+       case OpRsh64x8:
+               return rewriteValueWasm_OpRsh64x8_0(v)
+       case OpRsh8Ux16:
+               return rewriteValueWasm_OpRsh8Ux16_0(v)
+       case OpRsh8Ux32:
+               return rewriteValueWasm_OpRsh8Ux32_0(v)
+       case OpRsh8Ux64:
+               return rewriteValueWasm_OpRsh8Ux64_0(v)
+       case OpRsh8Ux8:
+               return rewriteValueWasm_OpRsh8Ux8_0(v)
+       case OpRsh8x16:
+               return rewriteValueWasm_OpRsh8x16_0(v)
+       case OpRsh8x32:
+               return rewriteValueWasm_OpRsh8x32_0(v)
+       case OpRsh8x64:
+               return rewriteValueWasm_OpRsh8x64_0(v)
+       case OpRsh8x8:
+               return rewriteValueWasm_OpRsh8x8_0(v)
+       case OpSignExt16to32:
+               return rewriteValueWasm_OpSignExt16to32_0(v)
+       case OpSignExt16to64:
+               return rewriteValueWasm_OpSignExt16to64_0(v)
+       case OpSignExt32to64:
+               return rewriteValueWasm_OpSignExt32to64_0(v)
+       case OpSignExt8to16:
+               return rewriteValueWasm_OpSignExt8to16_0(v)
+       case OpSignExt8to32:
+               return rewriteValueWasm_OpSignExt8to32_0(v)
+       case OpSignExt8to64:
+               return rewriteValueWasm_OpSignExt8to64_0(v)
+       case OpSlicemask:
+               return rewriteValueWasm_OpSlicemask_0(v)
+       case OpStaticCall:
+               return rewriteValueWasm_OpStaticCall_0(v)
+       case OpStore:
+               return rewriteValueWasm_OpStore_0(v)
+       case OpSub16:
+               return rewriteValueWasm_OpSub16_0(v)
+       case OpSub32:
+               return rewriteValueWasm_OpSub32_0(v)
+       case OpSub32F:
+               return rewriteValueWasm_OpSub32F_0(v)
+       case OpSub64:
+               return rewriteValueWasm_OpSub64_0(v)
+       case OpSub64F:
+               return rewriteValueWasm_OpSub64F_0(v)
+       case OpSub8:
+               return rewriteValueWasm_OpSub8_0(v)
+       case OpSubPtr:
+               return rewriteValueWasm_OpSubPtr_0(v)
+       case OpTrunc16to8:
+               return rewriteValueWasm_OpTrunc16to8_0(v)
+       case OpTrunc32to16:
+               return rewriteValueWasm_OpTrunc32to16_0(v)
+       case OpTrunc32to8:
+               return rewriteValueWasm_OpTrunc32to8_0(v)
+       case OpTrunc64to16:
+               return rewriteValueWasm_OpTrunc64to16_0(v)
+       case OpTrunc64to32:
+               return rewriteValueWasm_OpTrunc64to32_0(v)
+       case OpTrunc64to8:
+               return rewriteValueWasm_OpTrunc64to8_0(v)
+       case OpWB:
+               return rewriteValueWasm_OpWB_0(v)
+       case OpWasmF64Add:
+               return rewriteValueWasm_OpWasmF64Add_0(v)
+       case OpWasmF64Mul:
+               return rewriteValueWasm_OpWasmF64Mul_0(v)
+       case OpWasmI64Add:
+               return rewriteValueWasm_OpWasmI64Add_0(v)
+       case OpWasmI64And:
+               return rewriteValueWasm_OpWasmI64And_0(v)
+       case OpWasmI64Eq:
+               return rewriteValueWasm_OpWasmI64Eq_0(v)
+       case OpWasmI64Eqz:
+               return rewriteValueWasm_OpWasmI64Eqz_0(v)
+       case OpWasmI64Load:
+               return rewriteValueWasm_OpWasmI64Load_0(v)
+       case OpWasmI64Load16S:
+               return rewriteValueWasm_OpWasmI64Load16S_0(v)
+       case OpWasmI64Load16U:
+               return rewriteValueWasm_OpWasmI64Load16U_0(v)
+       case OpWasmI64Load32S:
+               return rewriteValueWasm_OpWasmI64Load32S_0(v)
+       case OpWasmI64Load32U:
+               return rewriteValueWasm_OpWasmI64Load32U_0(v)
+       case OpWasmI64Load8S:
+               return rewriteValueWasm_OpWasmI64Load8S_0(v)
+       case OpWasmI64Load8U:
+               return rewriteValueWasm_OpWasmI64Load8U_0(v)
+       case OpWasmI64Mul:
+               return rewriteValueWasm_OpWasmI64Mul_0(v)
+       case OpWasmI64Ne:
+               return rewriteValueWasm_OpWasmI64Ne_0(v)
+       case OpWasmI64Or:
+               return rewriteValueWasm_OpWasmI64Or_0(v)
+       case OpWasmI64Store:
+               return rewriteValueWasm_OpWasmI64Store_0(v)
+       case OpWasmI64Store16:
+               return rewriteValueWasm_OpWasmI64Store16_0(v)
+       case OpWasmI64Store32:
+               return rewriteValueWasm_OpWasmI64Store32_0(v)
+       case OpWasmI64Store8:
+               return rewriteValueWasm_OpWasmI64Store8_0(v)
+       case OpWasmI64Xor:
+               return rewriteValueWasm_OpWasmI64Xor_0(v)
+       case OpXor16:
+               return rewriteValueWasm_OpXor16_0(v)
+       case OpXor32:
+               return rewriteValueWasm_OpXor32_0(v)
+       case OpXor64:
+               return rewriteValueWasm_OpXor64_0(v)
+       case OpXor8:
+               return rewriteValueWasm_OpXor8_0(v)
+       case OpZero:
+               return rewriteValueWasm_OpZero_0(v) || rewriteValueWasm_OpZero_10(v)
+       case OpZeroExt16to32:
+               return rewriteValueWasm_OpZeroExt16to32_0(v)
+       case OpZeroExt16to64:
+               return rewriteValueWasm_OpZeroExt16to64_0(v)
+       case OpZeroExt32to64:
+               return rewriteValueWasm_OpZeroExt32to64_0(v)
+       case OpZeroExt8to16:
+               return rewriteValueWasm_OpZeroExt8to16_0(v)
+       case OpZeroExt8to32:
+               return rewriteValueWasm_OpZeroExt8to32_0(v)
+       case OpZeroExt8to64:
+               return rewriteValueWasm_OpZeroExt8to64_0(v)
+       }
+       return false
+}
+func rewriteValueWasm_OpAdd16_0(v *Value) bool {
+       // match: (Add16 x y)
+       // cond:
+       // result: (I64Add x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Add)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpAdd32_0(v *Value) bool {
+       // match: (Add32 x y)
+       // cond:
+       // result: (I64Add x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Add)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpAdd32F_0(v *Value) bool {
+       // match: (Add32F x y)
+       // cond:
+       // result: (F64Add x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Add)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpAdd64_0(v *Value) bool {
+       // match: (Add64 x y)
+       // cond:
+       // result: (I64Add x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Add)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpAdd64F_0(v *Value) bool {
+       // match: (Add64F x y)
+       // cond:
+       // result: (F64Add x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Add)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpAdd8_0(v *Value) bool {
+       // match: (Add8 x y)
+       // cond:
+       // result: (I64Add x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Add)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpAddPtr_0(v *Value) bool {
+       // match: (AddPtr x y)
+       // cond:
+       // result: (I64Add x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Add)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpAddr_0(v *Value) bool {
+       // match: (Addr {sym} base)
+       // cond:
+       // result: (LoweredAddr {sym} base)
+       for {
+               sym := v.Aux
+               base := v.Args[0]
+               v.reset(OpWasmLoweredAddr)
+               v.Aux = sym
+               v.AddArg(base)
+               return true
+       }
+}
+func rewriteValueWasm_OpAnd16_0(v *Value) bool {
+       // match: (And16 x y)
+       // cond:
+       // result: (I64And x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64And)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpAnd32_0(v *Value) bool {
+       // match: (And32 x y)
+       // cond:
+       // result: (I64And x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64And)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpAnd64_0(v *Value) bool {
+       // match: (And64 x y)
+       // cond:
+       // result: (I64And x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64And)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpAnd8_0(v *Value) bool {
+       // match: (And8 x y)
+       // cond:
+       // result: (I64And x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64And)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpAndB_0(v *Value) bool {
+       // match: (AndB x y)
+       // cond:
+       // result: (I64And x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64And)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpClosureCall_0(v *Value) bool {
+       // match: (ClosureCall [argwid] entry closure mem)
+       // cond:
+       // result: (LoweredClosureCall [argwid] entry closure mem)
+       for {
+               argwid := v.AuxInt
+               _ = v.Args[2]
+               entry := v.Args[0]
+               closure := v.Args[1]
+               mem := v.Args[2]
+               v.reset(OpWasmLoweredClosureCall)
+               v.AuxInt = argwid
+               v.AddArg(entry)
+               v.AddArg(closure)
+               v.AddArg(mem)
+               return true
+       }
+}
+func rewriteValueWasm_OpCom16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Com16 x)
+       // cond:
+       // result: (I64Xor x (I64Const [-1]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64Xor)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = -1
+               v.AddArg(v0)
+               return true
+       }
+}
+func rewriteValueWasm_OpCom32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Com32 x)
+       // cond:
+       // result: (I64Xor x (I64Const [-1]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64Xor)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = -1
+               v.AddArg(v0)
+               return true
+       }
+}
+func rewriteValueWasm_OpCom64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Com64 x)
+       // cond:
+       // result: (I64Xor x (I64Const [-1]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64Xor)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = -1
+               v.AddArg(v0)
+               return true
+       }
+}
+func rewriteValueWasm_OpCom8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Com8 x)
+       // cond:
+       // result: (I64Xor x (I64Const [-1]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64Xor)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = -1
+               v.AddArg(v0)
+               return true
+       }
+}
+func rewriteValueWasm_OpConst16_0(v *Value) bool {
+       // match: (Const16 [val])
+       // cond:
+       // result: (I64Const [val])
+       for {
+               val := v.AuxInt
+               v.reset(OpWasmI64Const)
+               v.AuxInt = val
+               return true
+       }
+}
+func rewriteValueWasm_OpConst32_0(v *Value) bool {
+       // match: (Const32 [val])
+       // cond:
+       // result: (I64Const [val])
+       for {
+               val := v.AuxInt
+               v.reset(OpWasmI64Const)
+               v.AuxInt = val
+               return true
+       }
+}
+func rewriteValueWasm_OpConst32F_0(v *Value) bool {
+       // match: (Const32F [val])
+       // cond:
+       // result: (F64Const [val])
+       for {
+               val := v.AuxInt
+               v.reset(OpWasmF64Const)
+               v.AuxInt = val
+               return true
+       }
+}
+func rewriteValueWasm_OpConst64_0(v *Value) bool {
+       // match: (Const64 [val])
+       // cond:
+       // result: (I64Const [val])
+       for {
+               val := v.AuxInt
+               v.reset(OpWasmI64Const)
+               v.AuxInt = val
+               return true
+       }
+}
+func rewriteValueWasm_OpConst64F_0(v *Value) bool {
+       // match: (Const64F [val])
+       // cond:
+       // result: (F64Const [val])
+       for {
+               val := v.AuxInt
+               v.reset(OpWasmF64Const)
+               v.AuxInt = val
+               return true
+       }
+}
+func rewriteValueWasm_OpConst8_0(v *Value) bool {
+       // match: (Const8 [val])
+       // cond:
+       // result: (I64Const [val])
+       for {
+               val := v.AuxInt
+               v.reset(OpWasmI64Const)
+               v.AuxInt = val
+               return true
+       }
+}
+func rewriteValueWasm_OpConstBool_0(v *Value) bool {
+       // match: (ConstBool [b])
+       // cond:
+       // result: (I64Const [b])
+       for {
+               b := v.AuxInt
+               v.reset(OpWasmI64Const)
+               v.AuxInt = b
+               return true
+       }
+}
+func rewriteValueWasm_OpConstNil_0(v *Value) bool {
+       // match: (ConstNil)
+       // cond:
+       // result: (I64Const [0])
+       for {
+               v.reset(OpWasmI64Const)
+               v.AuxInt = 0
+               return true
+       }
+}
+func rewriteValueWasm_OpConvert_0(v *Value) bool {
+       // match: (Convert <t> x mem)
+       // cond:
+       // result: (LoweredConvert <t> x mem)
+       for {
+               t := v.Type
+               _ = v.Args[1]
+               x := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmLoweredConvert)
+               v.Type = t
+               v.AddArg(x)
+               v.AddArg(mem)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt32Fto32_0(v *Value) bool {
+       // match: (Cvt32Fto32 x)
+       // cond:
+       // result: (I64TruncSF64 x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64TruncSF64)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt32Fto32U_0(v *Value) bool {
+       // match: (Cvt32Fto32U x)
+       // cond:
+       // result: (I64TruncUF64 x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64TruncUF64)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt32Fto64_0(v *Value) bool {
+       // match: (Cvt32Fto64 x)
+       // cond:
+       // result: (I64TruncSF64 x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64TruncSF64)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt32Fto64F_0(v *Value) bool {
+       // match: (Cvt32Fto64F x)
+       // cond:
+       // result: x
+       for {
+               x := v.Args[0]
+               v.reset(OpCopy)
+               v.Type = x.Type
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt32Fto64U_0(v *Value) bool {
+       // match: (Cvt32Fto64U x)
+       // cond:
+       // result: (I64TruncUF64 x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64TruncUF64)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt32Uto32F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Cvt32Uto32F x)
+       // cond:
+       // result: (LoweredRound32F (F64ConvertUI64 (ZeroExt32to64 x)))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmLoweredRound32F)
+               v0 := b.NewValue0(v.Pos, OpWasmF64ConvertUI64, typ.Float64)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt32Uto64F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Cvt32Uto64F x)
+       // cond:
+       // result: (F64ConvertUI64 (ZeroExt32to64 x))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmF64ConvertUI64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt32to32F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Cvt32to32F x)
+       // cond:
+       // result: (LoweredRound32F (F64ConvertSI64 (SignExt32to64 x)))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmLoweredRound32F)
+               v0 := b.NewValue0(v.Pos, OpWasmF64ConvertSI64, typ.Float64)
+               v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v1.AddArg(x)
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt32to64F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Cvt32to64F x)
+       // cond:
+       // result: (F64ConvertSI64 (SignExt32to64 x))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmF64ConvertSI64)
+               v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt64Fto32_0(v *Value) bool {
+       // match: (Cvt64Fto32 x)
+       // cond:
+       // result: (I64TruncSF64 x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64TruncSF64)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt64Fto32F_0(v *Value) bool {
+       // match: (Cvt64Fto32F x)
+       // cond:
+       // result: (LoweredRound32F x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmLoweredRound32F)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt64Fto32U_0(v *Value) bool {
+       // match: (Cvt64Fto32U x)
+       // cond:
+       // result: (I64TruncUF64 x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64TruncUF64)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt64Fto64_0(v *Value) bool {
+       // match: (Cvt64Fto64 x)
+       // cond:
+       // result: (I64TruncSF64 x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64TruncSF64)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt64Fto64U_0(v *Value) bool {
+       // match: (Cvt64Fto64U x)
+       // cond:
+       // result: (I64TruncUF64 x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64TruncUF64)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt64Uto32F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Cvt64Uto32F x)
+       // cond:
+       // result: (LoweredRound32F (F64ConvertUI64 x))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmLoweredRound32F)
+               v0 := b.NewValue0(v.Pos, OpWasmF64ConvertUI64, typ.Float64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt64Uto64F_0(v *Value) bool {
+       // match: (Cvt64Uto64F x)
+       // cond:
+       // result: (F64ConvertUI64 x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmF64ConvertUI64)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt64to32F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Cvt64to32F x)
+       // cond:
+       // result: (LoweredRound32F (F64ConvertSI64 x))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmLoweredRound32F)
+               v0 := b.NewValue0(v.Pos, OpWasmF64ConvertSI64, typ.Float64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               return true
+       }
+}
+func rewriteValueWasm_OpCvt64to64F_0(v *Value) bool {
+       // match: (Cvt64to64F x)
+       // cond:
+       // result: (F64ConvertSI64 x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmF64ConvertSI64)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpDiv16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Div16 x y)
+       // cond:
+       // result: (I64DivS (SignExt16to64 x) (SignExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64DivS)
+               v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpDiv16u_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Div16u x y)
+       // cond:
+       // result: (I64DivU (ZeroExt16to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64DivU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpDiv32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Div32 x y)
+       // cond:
+       // result: (I64DivS (SignExt32to64 x) (SignExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64DivS)
+               v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpDiv32F_0(v *Value) bool {
+       // match: (Div32F x y)
+       // cond:
+       // result: (F64Div x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Div)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpDiv32u_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Div32u x y)
+       // cond:
+       // result: (I64DivU (ZeroExt32to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64DivU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpDiv64_0(v *Value) bool {
+       // match: (Div64 x y)
+       // cond:
+       // result: (I64DivS x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64DivS)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpDiv64F_0(v *Value) bool {
+       // match: (Div64F x y)
+       // cond:
+       // result: (F64Div x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Div)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpDiv64u_0(v *Value) bool {
+       // match: (Div64u x y)
+       // cond:
+       // result: (I64DivU x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64DivU)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpDiv8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Div8 x y)
+       // cond:
+       // result: (I64DivS (SignExt8to64 x) (SignExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64DivS)
+               v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpDiv8u_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Div8u x y)
+       // cond:
+       // result: (I64DivU (ZeroExt8to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64DivU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpEq16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Eq16 x y)
+       // cond:
+       // result: (I64Eq (ZeroExt16to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Eq)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpEq32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Eq32 x y)
+       // cond:
+       // result: (I64Eq (ZeroExt32to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Eq)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpEq32F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Eq32F x y)
+       // cond:
+       // result: (F64Eq (LoweredRound32F x) (LoweredRound32F y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Eq)
+               v0 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpEq64_0(v *Value) bool {
+       // match: (Eq64 x y)
+       // cond:
+       // result: (I64Eq x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Eq)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpEq64F_0(v *Value) bool {
+       // match: (Eq64F x y)
+       // cond:
+       // result: (F64Eq x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Eq)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpEq8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Eq8 x y)
+       // cond:
+       // result: (I64Eq (ZeroExt8to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Eq)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpEqB_0(v *Value) bool {
+       // match: (EqB x y)
+       // cond:
+       // result: (I64Eq x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Eq)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpEqPtr_0(v *Value) bool {
+       // match: (EqPtr x y)
+       // cond:
+       // result: (I64Eq x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Eq)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpGeq16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Geq16 x y)
+       // cond:
+       // result: (I64GeS (SignExt16to64 x) (SignExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GeS)
+               v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGeq16U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Geq16U x y)
+       // cond:
+       // result: (I64GeU (ZeroExt16to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GeU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGeq32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Geq32 x y)
+       // cond:
+       // result: (I64GeS (SignExt32to64 x) (SignExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GeS)
+               v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGeq32F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Geq32F x y)
+       // cond:
+       // result: (F64Ge (LoweredRound32F x) (LoweredRound32F y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Ge)
+               v0 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGeq32U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Geq32U x y)
+       // cond:
+       // result: (I64GeU (ZeroExt32to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GeU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGeq64_0(v *Value) bool {
+       // match: (Geq64 x y)
+       // cond:
+       // result: (I64GeS x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GeS)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpGeq64F_0(v *Value) bool {
+       // match: (Geq64F x y)
+       // cond:
+       // result: (F64Ge x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Ge)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpGeq64U_0(v *Value) bool {
+       // match: (Geq64U x y)
+       // cond:
+       // result: (I64GeU x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GeU)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpGeq8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Geq8 x y)
+       // cond:
+       // result: (I64GeS (SignExt8to64 x) (SignExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GeS)
+               v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGeq8U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Geq8U x y)
+       // cond:
+       // result: (I64GeU (ZeroExt8to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GeU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGetCallerPC_0(v *Value) bool {
+       // match: (GetCallerPC)
+       // cond:
+       // result: (LoweredGetCallerPC)
+       for {
+               v.reset(OpWasmLoweredGetCallerPC)
+               return true
+       }
+}
+func rewriteValueWasm_OpGetCallerSP_0(v *Value) bool {
+       // match: (GetCallerSP)
+       // cond:
+       // result: (LoweredGetCallerSP)
+       for {
+               v.reset(OpWasmLoweredGetCallerSP)
+               return true
+       }
+}
+func rewriteValueWasm_OpGetClosurePtr_0(v *Value) bool {
+       // match: (GetClosurePtr)
+       // cond:
+       // result: (LoweredGetClosurePtr)
+       for {
+               v.reset(OpWasmLoweredGetClosurePtr)
+               return true
+       }
+}
+func rewriteValueWasm_OpGreater16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Greater16 x y)
+       // cond:
+       // result: (I64GtS (SignExt16to64 x) (SignExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GtS)
+               v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGreater16U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Greater16U x y)
+       // cond:
+       // result: (I64GtU (ZeroExt16to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GtU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGreater32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Greater32 x y)
+       // cond:
+       // result: (I64GtS (SignExt32to64 x) (SignExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GtS)
+               v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGreater32F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Greater32F x y)
+       // cond:
+       // result: (F64Gt (LoweredRound32F x) (LoweredRound32F y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Gt)
+               v0 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGreater32U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Greater32U x y)
+       // cond:
+       // result: (I64GtU (ZeroExt32to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GtU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGreater64_0(v *Value) bool {
+       // match: (Greater64 x y)
+       // cond:
+       // result: (I64GtS x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GtS)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpGreater64F_0(v *Value) bool {
+       // match: (Greater64F x y)
+       // cond:
+       // result: (F64Gt x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Gt)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpGreater64U_0(v *Value) bool {
+       // match: (Greater64U x y)
+       // cond:
+       // result: (I64GtU x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GtU)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpGreater8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Greater8 x y)
+       // cond:
+       // result: (I64GtS (SignExt8to64 x) (SignExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GtS)
+               v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpGreater8U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Greater8U x y)
+       // cond:
+       // result: (I64GtU (ZeroExt8to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64GtU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpInterCall_0(v *Value) bool {
+       // match: (InterCall [argwid] entry mem)
+       // cond:
+       // result: (LoweredInterCall [argwid] entry mem)
+       for {
+               argwid := v.AuxInt
+               _ = v.Args[1]
+               entry := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmLoweredInterCall)
+               v.AuxInt = argwid
+               v.AddArg(entry)
+               v.AddArg(mem)
+               return true
+       }
+}
+func rewriteValueWasm_OpIsInBounds_0(v *Value) bool {
+       // match: (IsInBounds idx len)
+       // cond:
+       // result: (I64LtU idx len)
+       for {
+               _ = v.Args[1]
+               idx := v.Args[0]
+               len := v.Args[1]
+               v.reset(OpWasmI64LtU)
+               v.AddArg(idx)
+               v.AddArg(len)
+               return true
+       }
+}
+func rewriteValueWasm_OpIsNonNil_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (IsNonNil p)
+       // cond:
+       // result: (I64Eqz (I64Eqz p))
+       for {
+               p := v.Args[0]
+               v.reset(OpWasmI64Eqz)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Eqz, typ.Bool)
+               v0.AddArg(p)
+               v.AddArg(v0)
+               return true
+       }
+}
+func rewriteValueWasm_OpIsSliceInBounds_0(v *Value) bool {
+       // match: (IsSliceInBounds idx len)
+       // cond:
+       // result: (I64LeU idx len)
+       for {
+               _ = v.Args[1]
+               idx := v.Args[0]
+               len := v.Args[1]
+               v.reset(OpWasmI64LeU)
+               v.AddArg(idx)
+               v.AddArg(len)
+               return true
+       }
+}
+func rewriteValueWasm_OpLeq16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Leq16 x y)
+       // cond:
+       // result: (I64LeS (SignExt16to64 x) (SignExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LeS)
+               v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpLeq16U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Leq16U x y)
+       // cond:
+       // result: (I64LeU (ZeroExt16to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LeU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLeq32_0 lowers 32-bit signed <=: both operands are sign-extended to 64 bits, then compared with I64LeS.
+func rewriteValueWasm_OpLeq32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Leq32 x y)
+       // cond:
+       // result: (I64LeS (SignExt32to64 x) (SignExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LeS)
+               v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLeq32F_0 lowers float32 <=: each operand is passed through LoweredRound32F, then compared with the 64-bit float compare F64Le.
+func rewriteValueWasm_OpLeq32F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Leq32F x y)
+       // cond:
+       // result: (F64Le (LoweredRound32F x) (LoweredRound32F y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Le)
+               v0 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLeq32U_0 lowers 32-bit unsigned <=: both operands are zero-extended to 64 bits, then compared with I64LeU.
+func rewriteValueWasm_OpLeq32U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Leq32U x y)
+       // cond:
+       // result: (I64LeU (ZeroExt32to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LeU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLeq64_0 lowers 64-bit signed <= directly to I64LeS; no extension needed at native width.
+func rewriteValueWasm_OpLeq64_0(v *Value) bool {
+       // match: (Leq64 x y)
+       // cond:
+       // result: (I64LeS x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LeS)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpLeq64F_0 lowers float64 <= directly to F64Le.
+func rewriteValueWasm_OpLeq64F_0(v *Value) bool {
+       // match: (Leq64F x y)
+       // cond:
+       // result: (F64Le x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Le)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpLeq64U_0 lowers 64-bit unsigned <= directly to I64LeU.
+func rewriteValueWasm_OpLeq64U_0(v *Value) bool {
+       // match: (Leq64U x y)
+       // cond:
+       // result: (I64LeU x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LeU)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpLeq8_0 lowers 8-bit signed <=: both operands are sign-extended to 64 bits, then compared with I64LeS.
+func rewriteValueWasm_OpLeq8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Leq8 x y)
+       // cond:
+       // result: (I64LeS (SignExt8to64 x) (SignExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LeS)
+               v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLeq8U_0 lowers 8-bit unsigned <=: both operands are zero-extended to 64 bits, then compared with I64LeU.
+func rewriteValueWasm_OpLeq8U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Leq8U x y)
+       // cond:
+       // result: (I64LeU (ZeroExt8to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LeU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLess16_0 lowers 16-bit signed <: both operands are sign-extended to 64 bits, then compared with I64LtS.
+func rewriteValueWasm_OpLess16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Less16 x y)
+       // cond:
+       // result: (I64LtS (SignExt16to64 x) (SignExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LtS)
+               v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLess16U_0 lowers 16-bit unsigned <: both operands are zero-extended to 64 bits, then compared with I64LtU.
+func rewriteValueWasm_OpLess16U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Less16U x y)
+       // cond:
+       // result: (I64LtU (ZeroExt16to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LtU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLess32_0 lowers 32-bit signed <: both operands are sign-extended to 64 bits, then compared with I64LtS.
+func rewriteValueWasm_OpLess32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Less32 x y)
+       // cond:
+       // result: (I64LtS (SignExt32to64 x) (SignExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LtS)
+               v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLess32F_0 lowers float32 <: each operand is passed through LoweredRound32F, then compared with the 64-bit float compare F64Lt.
+func rewriteValueWasm_OpLess32F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Less32F x y)
+       // cond:
+       // result: (F64Lt (LoweredRound32F x) (LoweredRound32F y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Lt)
+               v0 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLess32U_0 lowers 32-bit unsigned <: both operands are zero-extended to 64 bits, then compared with I64LtU.
+func rewriteValueWasm_OpLess32U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Less32U x y)
+       // cond:
+       // result: (I64LtU (ZeroExt32to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LtU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLess64_0 lowers 64-bit signed < directly to I64LtS; no extension needed at native width.
+func rewriteValueWasm_OpLess64_0(v *Value) bool {
+       // match: (Less64 x y)
+       // cond:
+       // result: (I64LtS x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LtS)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpLess64F_0 lowers float64 < directly to F64Lt.
+func rewriteValueWasm_OpLess64F_0(v *Value) bool {
+       // match: (Less64F x y)
+       // cond:
+       // result: (F64Lt x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Lt)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpLess64U_0 lowers 64-bit unsigned < directly to I64LtU.
+func rewriteValueWasm_OpLess64U_0(v *Value) bool {
+       // match: (Less64U x y)
+       // cond:
+       // result: (I64LtU x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LtU)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpLess8_0 lowers 8-bit signed <: both operands are sign-extended to 64 bits, then compared with I64LtS.
+func rewriteValueWasm_OpLess8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Less8 x y)
+       // cond:
+       // result: (I64LtS (SignExt8to64 x) (SignExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LtS)
+               v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLess8U_0 lowers 8-bit unsigned <: both operands are zero-extended to 64 bits, then compared with I64LtU.
+func rewriteValueWasm_OpLess8U_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Less8U x y)
+       // cond:
+       // result: (I64LtU (ZeroExt8to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64LtU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpLoad_0 dispatches a generic Load on its result type:
+// floats go to F32Load/F64Load; integers go to the i64 load variant matching
+// the type's size (8/4/2/1 bytes) and signedness (S = sign-extending,
+// U = zero-extending). Returns false if no case matched.
+func rewriteValueWasm_OpLoad_0(v *Value) bool {
+       // match: (Load <t> ptr mem)
+       // cond: is32BitFloat(t)
+       // result: (F32Load ptr mem)
+       for {
+               t := v.Type
+               _ = v.Args[1]
+               ptr := v.Args[0]
+               mem := v.Args[1]
+               if !(is32BitFloat(t)) {
+                       break
+               }
+               v.reset(OpWasmF32Load)
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Load <t> ptr mem)
+       // cond: is64BitFloat(t)
+       // result: (F64Load ptr mem)
+       for {
+               t := v.Type
+               _ = v.Args[1]
+               ptr := v.Args[0]
+               mem := v.Args[1]
+               if !(is64BitFloat(t)) {
+                       break
+               }
+               v.reset(OpWasmF64Load)
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Load <t> ptr mem)
+       // cond: t.Size() == 8
+       // result: (I64Load ptr mem)
+       for {
+               t := v.Type
+               _ = v.Args[1]
+               ptr := v.Args[0]
+               mem := v.Args[1]
+               if !(t.Size() == 8) {
+                       break
+               }
+               v.reset(OpWasmI64Load)
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Load <t> ptr mem)
+       // cond: t.Size() == 4 && !t.IsSigned()
+       // result: (I64Load32U ptr mem)
+       for {
+               t := v.Type
+               _ = v.Args[1]
+               ptr := v.Args[0]
+               mem := v.Args[1]
+               if !(t.Size() == 4 && !t.IsSigned()) {
+                       break
+               }
+               v.reset(OpWasmI64Load32U)
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Load <t> ptr mem)
+       // cond: t.Size() == 4 && t.IsSigned()
+       // result: (I64Load32S ptr mem)
+       for {
+               t := v.Type
+               _ = v.Args[1]
+               ptr := v.Args[0]
+               mem := v.Args[1]
+               if !(t.Size() == 4 && t.IsSigned()) {
+                       break
+               }
+               v.reset(OpWasmI64Load32S)
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Load <t> ptr mem)
+       // cond: t.Size() == 2 && !t.IsSigned()
+       // result: (I64Load16U ptr mem)
+       for {
+               t := v.Type
+               _ = v.Args[1]
+               ptr := v.Args[0]
+               mem := v.Args[1]
+               if !(t.Size() == 2 && !t.IsSigned()) {
+                       break
+               }
+               v.reset(OpWasmI64Load16U)
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Load <t> ptr mem)
+       // cond: t.Size() == 2 && t.IsSigned()
+       // result: (I64Load16S ptr mem)
+       for {
+               t := v.Type
+               _ = v.Args[1]
+               ptr := v.Args[0]
+               mem := v.Args[1]
+               if !(t.Size() == 2 && t.IsSigned()) {
+                       break
+               }
+               v.reset(OpWasmI64Load16S)
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Load <t> ptr mem)
+       // cond: t.Size() == 1 && !t.IsSigned()
+       // result: (I64Load8U ptr mem)
+       for {
+               t := v.Type
+               _ = v.Args[1]
+               ptr := v.Args[0]
+               mem := v.Args[1]
+               if !(t.Size() == 1 && !t.IsSigned()) {
+                       break
+               }
+               v.reset(OpWasmI64Load8U)
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Load <t> ptr mem)
+       // cond: t.Size() == 1 && t.IsSigned()
+       // result: (I64Load8S ptr mem)
+       for {
+               t := v.Type
+               _ = v.Args[1]
+               ptr := v.Args[0]
+               mem := v.Args[1]
+               if !(t.Size() == 1 && t.IsSigned()) {
+                       break
+               }
+               v.reset(OpWasmI64Load8S)
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpLsh16x16_0 funnels the 16x16 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh16x16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh16x16 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh16x32_0 funnels the 16x32 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh16x32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh16x32 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh16x64_0 funnels the 16x64 shift into the canonical Lsh64x64 form; the count is already 64-bit.
+func rewriteValueWasm_OpLsh16x64_0(v *Value) bool {
+       // match: (Lsh16x64 x y)
+       // cond:
+       // result: (Lsh64x64 x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh16x8_0 funnels the 16x8 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh16x8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh16x8 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh32x16_0 funnels the 32x16 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh32x16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh32x16 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh32x32_0 funnels the 32x32 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh32x32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh32x32 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh32x64_0 funnels the 32x64 shift into the canonical Lsh64x64 form; the count is already 64-bit.
+func rewriteValueWasm_OpLsh32x64_0(v *Value) bool {
+       // match: (Lsh32x64 x y)
+       // cond:
+       // result: (Lsh64x64 x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh32x8_0 funnels the 32x8 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh32x8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh32x8 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh64x16_0 funnels the 64x16 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh64x16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh64x16 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh64x32_0 funnels the 64x32 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh64x32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh64x32 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh64x64_0 lowers the canonical 64-bit left shift.
+// Go requires a shift with count >= 64 to produce 0, so the I64Shl result is
+// selected only when y < 64 and an I64Const 0 otherwise.
+func rewriteValueWasm_OpLsh64x64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh64x64 x y)
+       // cond:
+       // result: (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64])))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmSelect)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 0
+               v.AddArg(v1)
+               v2 := b.NewValue0(v.Pos, OpWasmI64LtU, typ.Bool)
+               v2.AddArg(y)
+               v3 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v3.AuxInt = 64
+               v2.AddArg(v3)
+               v.AddArg(v2)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh64x8_0 funnels the 64x8 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh64x8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh64x8 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh8x16_0 funnels the 8x16 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh8x16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh8x16 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh8x32_0 funnels the 8x32 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh8x32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh8x32 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh8x64_0 funnels the 8x64 shift into the canonical Lsh64x64 form; the count is already 64-bit.
+func rewriteValueWasm_OpLsh8x64_0(v *Value) bool {
+       // match: (Lsh8x64 x y)
+       // cond:
+       // result: (Lsh64x64 x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpLsh8x8_0 funnels the 8x8 shift into the canonical Lsh64x64 form, zero-extending the shift count to 64 bits.
+func rewriteValueWasm_OpLsh8x8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Lsh8x8 x y)
+       // cond:
+       // result: (Lsh64x64 x (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpLsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// rewriteValueWasm_OpMod16_0 lowers 16-bit signed remainder: both operands are sign-extended to 64 bits, then I64RemS is applied.
+func rewriteValueWasm_OpMod16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Mod16 x y)
+       // cond:
+       // result: (I64RemS (SignExt16to64 x) (SignExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64RemS)
+               v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpMod16u_0 lowers 16-bit unsigned remainder: both operands are zero-extended to 64 bits, then I64RemU is applied.
+func rewriteValueWasm_OpMod16u_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Mod16u x y)
+       // cond:
+       // result: (I64RemU (ZeroExt16to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64RemU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpMod32_0 lowers 32-bit signed remainder: both operands are sign-extended to 64 bits, then I64RemS is applied.
+func rewriteValueWasm_OpMod32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Mod32 x y)
+       // cond:
+       // result: (I64RemS (SignExt32to64 x) (SignExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64RemS)
+               v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpMod32u_0 lowers 32-bit unsigned remainder: both operands are zero-extended to 64 bits, then I64RemU is applied.
+func rewriteValueWasm_OpMod32u_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Mod32u x y)
+       // cond:
+       // result: (I64RemU (ZeroExt32to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64RemU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpMod64_0 lowers 64-bit signed remainder directly to I64RemS.
+func rewriteValueWasm_OpMod64_0(v *Value) bool {
+       // match: (Mod64 x y)
+       // cond:
+       // result: (I64RemS x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64RemS)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpMod64u_0 lowers 64-bit unsigned remainder directly to I64RemU.
+func rewriteValueWasm_OpMod64u_0(v *Value) bool {
+       // match: (Mod64u x y)
+       // cond:
+       // result: (I64RemU x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64RemU)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpMod8_0 lowers 8-bit signed remainder: both operands are sign-extended to 64 bits, then I64RemS is applied.
+func rewriteValueWasm_OpMod8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Mod8 x y)
+       // cond:
+       // result: (I64RemS (SignExt8to64 x) (SignExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64RemS)
+               v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// rewriteValueWasm_OpMod8u_0 lowers 8-bit unsigned remainder: both operands are zero-extended to 64 bits, then I64RemU is applied.
+func rewriteValueWasm_OpMod8u_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Mod8u x y)
+       // cond:
+       // result: (I64RemU (ZeroExt8to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64RemU)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpMove_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Move [0] _ _ mem)
+       // cond:
+       // result: mem
+       for {
+               if v.AuxInt != 0 {
+                       break
+               }
+               _ = v.Args[2]
+               mem := v.Args[2]
+               v.reset(OpCopy)
+               v.Type = mem.Type
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Move [1] dst src mem)
+       // cond:
+       // result: (I64Store8 dst (I64Load8U src mem) mem)
+       for {
+               if v.AuxInt != 1 {
+                       break
+               }
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               v.reset(OpWasmI64Store8)
+               v.AddArg(dst)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Load8U, typ.UInt8)
+               v0.AddArg(src)
+               v0.AddArg(mem)
+               v.AddArg(v0)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Move [2] dst src mem)
+       // cond:
+       // result: (I64Store16 dst (I64Load16U src mem) mem)
+       for {
+               if v.AuxInt != 2 {
+                       break
+               }
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               v.reset(OpWasmI64Store16)
+               v.AddArg(dst)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Load16U, typ.UInt16)
+               v0.AddArg(src)
+               v0.AddArg(mem)
+               v.AddArg(v0)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Move [4] dst src mem)
+       // cond:
+       // result: (I64Store32 dst (I64Load32U src mem) mem)
+       for {
+               if v.AuxInt != 4 {
+                       break
+               }
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               v.reset(OpWasmI64Store32)
+               v.AddArg(dst)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Load32U, typ.UInt32)
+               v0.AddArg(src)
+               v0.AddArg(mem)
+               v.AddArg(v0)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Move [8] dst src mem)
+       // cond:
+       // result: (I64Store dst (I64Load src mem) mem)
+       for {
+               if v.AuxInt != 8 {
+                       break
+               }
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               v.reset(OpWasmI64Store)
+               v.AddArg(dst)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Load, typ.UInt64)
+               v0.AddArg(src)
+               v0.AddArg(mem)
+               v.AddArg(v0)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Move [16] dst src mem)
+       // cond:
+       // result: (I64Store [8] dst (I64Load [8] src mem) (I64Store dst (I64Load src mem) mem))
+       for {
+               if v.AuxInt != 16 {
+                       break
+               }
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               v.reset(OpWasmI64Store)
+               v.AuxInt = 8
+               v.AddArg(dst)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Load, typ.UInt64)
+               v0.AuxInt = 8
+               v0.AddArg(src)
+               v0.AddArg(mem)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v1.AddArg(dst)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Load, typ.UInt64)
+               v2.AddArg(src)
+               v2.AddArg(mem)
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Move [3] dst src mem)
+       // cond:
+       // result: (I64Store8 [2] dst (I64Load8U [2] src mem) (I64Store16 dst (I64Load16U src mem) mem))
+       for {
+               if v.AuxInt != 3 {
+                       break
+               }
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               v.reset(OpWasmI64Store8)
+               v.AuxInt = 2
+               v.AddArg(dst)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Load8U, typ.UInt8)
+               v0.AuxInt = 2
+               v0.AddArg(src)
+               v0.AddArg(mem)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store16, types.TypeMem)
+               v1.AddArg(dst)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Load16U, typ.UInt16)
+               v2.AddArg(src)
+               v2.AddArg(mem)
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Move [5] dst src mem)
+       // cond:
+       // result: (I64Store8 [4] dst (I64Load8U [4] src mem) (I64Store32 dst (I64Load32U src mem) mem))
+       for {
+               if v.AuxInt != 5 {
+                       break
+               }
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               v.reset(OpWasmI64Store8)
+               v.AuxInt = 4
+               v.AddArg(dst)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Load8U, typ.UInt8)
+               v0.AuxInt = 4
+               v0.AddArg(src)
+               v0.AddArg(mem)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store32, types.TypeMem)
+               v1.AddArg(dst)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Load32U, typ.UInt32)
+               v2.AddArg(src)
+               v2.AddArg(mem)
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Move [6] dst src mem)
+       // cond:
+       // result: (I64Store16 [4] dst (I64Load16U [4] src mem) (I64Store32 dst (I64Load32U src mem) mem))
+       for {
+               if v.AuxInt != 6 {
+                       break
+               }
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               v.reset(OpWasmI64Store16)
+               v.AuxInt = 4
+               v.AddArg(dst)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Load16U, typ.UInt16)
+               v0.AuxInt = 4
+               v0.AddArg(src)
+               v0.AddArg(mem)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store32, types.TypeMem)
+               v1.AddArg(dst)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Load32U, typ.UInt32)
+               v2.AddArg(src)
+               v2.AddArg(mem)
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Move [7] dst src mem)
+       // cond:
+       // result: (I64Store32 [3] dst (I64Load32U [3] src mem) (I64Store32 dst (I64Load32U src mem) mem))
+       for {
+               if v.AuxInt != 7 {
+                       break
+               }
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               v.reset(OpWasmI64Store32)
+               v.AuxInt = 3
+               v.AddArg(dst)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Load32U, typ.UInt32)
+               v0.AuxInt = 3
+               v0.AddArg(src)
+               v0.AddArg(mem)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store32, types.TypeMem)
+               v1.AddArg(dst)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Load32U, typ.UInt32)
+               v2.AddArg(src)
+               v2.AddArg(mem)
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       return false
+}
+func rewriteValueWasm_OpMove_10(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Move [s] dst src mem)
+       // cond: s > 8 && s < 16
+       // result: (I64Store [s-8] dst (I64Load [s-8] src mem) (I64Store dst (I64Load src mem) mem))
+       for {
+               s := v.AuxInt
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               if !(s > 8 && s < 16) {
+                       break
+               }
+               v.reset(OpWasmI64Store)
+               v.AuxInt = s - 8
+               v.AddArg(dst)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Load, typ.UInt64)
+               v0.AuxInt = s - 8
+               v0.AddArg(src)
+               v0.AddArg(mem)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v1.AddArg(dst)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Load, typ.UInt64)
+               v2.AddArg(src)
+               v2.AddArg(mem)
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Move [s] dst src mem)
+       // cond: s > 16 && s%16 != 0 && s%16 <= 8
+       // result: (Move [s-s%16] (OffPtr <dst.Type> dst [s%16]) (OffPtr <src.Type> src [s%16]) (I64Store dst (I64Load src mem) mem))
+       for {
+               s := v.AuxInt
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               if !(s > 16 && s%16 != 0 && s%16 <= 8) {
+                       break
+               }
+               v.reset(OpMove)
+               v.AuxInt = s - s%16
+               v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
+               v0.AuxInt = s % 16
+               v0.AddArg(dst)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
+               v1.AuxInt = s % 16
+               v1.AddArg(src)
+               v.AddArg(v1)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v2.AddArg(dst)
+               v3 := b.NewValue0(v.Pos, OpWasmI64Load, typ.UInt64)
+               v3.AddArg(src)
+               v3.AddArg(mem)
+               v2.AddArg(v3)
+               v2.AddArg(mem)
+               v.AddArg(v2)
+               return true
+       }
+       // match: (Move [s] dst src mem)
+       // cond: s > 16 && s%16 != 0 && s%16 > 8
+       // result: (Move [s-s%16] (OffPtr <dst.Type> dst [s%16]) (OffPtr <src.Type> src [s%16]) (I64Store [8] dst (I64Load [8] src mem) (I64Store dst (I64Load src mem) mem)))
+       for {
+               s := v.AuxInt
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               if !(s > 16 && s%16 != 0 && s%16 > 8) {
+                       break
+               }
+               v.reset(OpMove)
+               v.AuxInt = s - s%16
+               v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
+               v0.AuxInt = s % 16
+               v0.AddArg(dst)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
+               v1.AuxInt = s % 16
+               v1.AddArg(src)
+               v.AddArg(v1)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v2.AuxInt = 8
+               v2.AddArg(dst)
+               v3 := b.NewValue0(v.Pos, OpWasmI64Load, typ.UInt64)
+               v3.AuxInt = 8
+               v3.AddArg(src)
+               v3.AddArg(mem)
+               v2.AddArg(v3)
+               v4 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v4.AddArg(dst)
+               v5 := b.NewValue0(v.Pos, OpWasmI64Load, typ.UInt64)
+               v5.AddArg(src)
+               v5.AddArg(mem)
+               v4.AddArg(v5)
+               v4.AddArg(mem)
+               v2.AddArg(v4)
+               v.AddArg(v2)
+               return true
+       }
+       // match: (Move [s] dst src mem)
+       // cond: s%8 == 0
+       // result: (LoweredMove [s/8] dst src mem)
+       for {
+               s := v.AuxInt
+               _ = v.Args[2]
+               dst := v.Args[0]
+               src := v.Args[1]
+               mem := v.Args[2]
+               if !(s%8 == 0) {
+                       break
+               }
+               v.reset(OpWasmLoweredMove)
+               v.AuxInt = s / 8
+               v.AddArg(dst)
+               v.AddArg(src)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+func rewriteValueWasm_OpMul16_0(v *Value) bool {
+       // match: (Mul16 x y)
+       // cond:
+       // result: (I64Mul x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Mul)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpMul32_0(v *Value) bool {
+       // match: (Mul32 x y)
+       // cond:
+       // result: (I64Mul x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Mul)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpMul32F_0(v *Value) bool {
+       // match: (Mul32F x y)
+       // cond:
+       // result: (F64Mul x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Mul)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpMul64_0(v *Value) bool {
+       // match: (Mul64 x y)
+       // cond:
+       // result: (I64Mul x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Mul)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpMul64F_0(v *Value) bool {
+       // match: (Mul64F x y)
+       // cond:
+       // result: (F64Mul x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Mul)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpMul8_0(v *Value) bool {
+       // match: (Mul8 x y)
+       // cond:
+       // result: (I64Mul x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Mul)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeg16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Neg16 x)
+       // cond:
+       // result: (I64Sub (I64Const [0]) x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64Sub)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeg32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Neg32 x)
+       // cond:
+       // result: (I64Sub (I64Const [0]) x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64Sub)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeg32F_0(v *Value) bool {
+       // match: (Neg32F x)
+       // cond:
+       // result: (F64Neg x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmF64Neg)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeg64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Neg64 x)
+       // cond:
+       // result: (I64Sub (I64Const [0]) x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64Sub)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeg64F_0(v *Value) bool {
+       // match: (Neg64F x)
+       // cond:
+       // result: (F64Neg x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmF64Neg)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeg8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Neg8 x)
+       // cond:
+       // result: (I64Sub (I64Const [0]) x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64Sub)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeq16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Neq16 x y)
+       // cond:
+       // result: (I64Ne (ZeroExt16to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Ne)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeq32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Neq32 x y)
+       // cond:
+       // result: (I64Ne (ZeroExt32to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Ne)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeq32F_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Neq32F x y)
+       // cond:
+       // result: (F64Ne (LoweredRound32F x) (LoweredRound32F y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Ne)
+               v0 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmLoweredRound32F, typ.Float32)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeq64_0(v *Value) bool {
+       // match: (Neq64 x y)
+       // cond:
+       // result: (I64Ne x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Ne)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeq64F_0(v *Value) bool {
+       // match: (Neq64F x y)
+       // cond:
+       // result: (F64Ne x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Ne)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeq8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Neq8 x y)
+       // cond:
+       // result: (I64Ne (ZeroExt8to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Ne)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeqB_0(v *Value) bool {
+       // match: (NeqB x y)
+       // cond:
+       // result: (I64Ne x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Ne)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpNeqPtr_0(v *Value) bool {
+       // match: (NeqPtr x y)
+       // cond:
+       // result: (I64Ne x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Ne)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpNilCheck_0(v *Value) bool {
+       // match: (NilCheck ptr mem)
+       // cond:
+       // result: (LoweredNilCheck ptr mem)
+       for {
+               _ = v.Args[1]
+               ptr := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmLoweredNilCheck)
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+}
+func rewriteValueWasm_OpNot_0(v *Value) bool {
+       // match: (Not x)
+       // cond:
+       // result: (I64Eqz x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64Eqz)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpOffPtr_0(v *Value) bool {
+       // match: (OffPtr [0] ptr)
+       // cond:
+       // result: ptr
+       for {
+               if v.AuxInt != 0 {
+                       break
+               }
+               ptr := v.Args[0]
+               v.reset(OpCopy)
+               v.Type = ptr.Type
+               v.AddArg(ptr)
+               return true
+       }
+       // match: (OffPtr [off] ptr)
+       // cond: off > 0
+       // result: (I64AddConst [off] ptr)
+       for {
+               off := v.AuxInt
+               ptr := v.Args[0]
+               if !(off > 0) {
+                       break
+               }
+               v.reset(OpWasmI64AddConst)
+               v.AuxInt = off
+               v.AddArg(ptr)
+               return true
+       }
+       return false
+}
+func rewriteValueWasm_OpOr16_0(v *Value) bool {
+       // match: (Or16 x y)
+       // cond:
+       // result: (I64Or x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Or)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpOr32_0(v *Value) bool {
+       // match: (Or32 x y)
+       // cond:
+       // result: (I64Or x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Or)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpOr64_0(v *Value) bool {
+       // match: (Or64 x y)
+       // cond:
+       // result: (I64Or x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Or)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpOr8_0(v *Value) bool {
+       // match: (Or8 x y)
+       // cond:
+       // result: (I64Or x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Or)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpOrB_0(v *Value) bool {
+       // match: (OrB x y)
+       // cond:
+       // result: (I64Or x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Or)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpRound32F_0(v *Value) bool {
+       // match: (Round32F x)
+       // cond:
+       // result: (LoweredRound32F x)
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmLoweredRound32F)
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpRound64F_0(v *Value) bool {
+       // match: (Round64F x)
+       // cond:
+       // result: x
+       for {
+               x := v.Args[0]
+               v.reset(OpCopy)
+               v.Type = x.Type
+               v.AddArg(x)
+               return true
+       }
+}
+func rewriteValueWasm_OpRsh16Ux16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh16Ux16 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpRsh16Ux32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh16Ux32 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpRsh16Ux64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh16Ux64 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt16to64 x) y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpRsh16Ux8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh16Ux8 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpRsh16x16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh16x16 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt16to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpRsh16x32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh16x32 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt16to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpRsh16x64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh16x64 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt16to64 x) y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v.AddArg(y)
+               return true
+       }
+}
+func rewriteValueWasm_OpRsh16x8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh16x8 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt16to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpRsh32Ux16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh32Ux16 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpRsh32Ux32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh32Ux32 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+func rewriteValueWasm_OpRsh32Ux64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh32Ux64 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt32to64 x) y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v.AddArg(y)
+               return true
+       }
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// 32-bit right-shift lowerings: wasm keeps all integers in 64-bit locals,
+// so each Rsh32* is rewritten to a 64-bit shift after extending the value
+// (zero-extend for unsigned, sign-extend for signed) and zero-extending
+// the shift amount to 64 bits.
+func rewriteValueWasm_OpRsh32Ux8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh32Ux8 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// Signed 32-bit shift by 16-bit amount; sign-extend the value, zero-extend the shift count.
+func rewriteValueWasm_OpRsh32x16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh32x16 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt32to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// Signed 32-bit shift by 32-bit amount.
+func rewriteValueWasm_OpRsh32x32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh32x32 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt32to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// Signed 32-bit shift by 64-bit amount; shift count is already full width.
+func rewriteValueWasm_OpRsh32x64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh32x64 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt32to64 x) y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v.AddArg(y)
+               return true
+       }
+}
+// Signed 32-bit shift by 8-bit amount.
+func rewriteValueWasm_OpRsh32x8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh32x8 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt32to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// 64-bit right-shift lowerings. Narrow shift amounts are zero-extended and
+// funneled into the canonical Rsh64Ux64/Rsh64x64 forms below, which then
+// lower directly to wasm shift instructions guarded against Go's
+// shift-amount semantics (shift >= 64 must not be wasm-undefined).
+func rewriteValueWasm_OpRsh64Ux16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh64Ux16 x y)
+       // cond:
+       // result: (Rsh64Ux64 x (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// Unsigned 64-bit shift by 32-bit amount.
+func rewriteValueWasm_OpRsh64Ux32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh64Ux32 x y)
+       // cond:
+       // result: (Rsh64Ux64 x (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// Canonical unsigned 64-bit shift: result is (x >> y) when y < 64, else 0,
+// selected at runtime so a shift amount >= 64 yields Go's defined result.
+func rewriteValueWasm_OpRsh64Ux64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh64Ux64 x y)
+       // cond:
+       // result: (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64])))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmSelect)
+               v0 := b.NewValue0(v.Pos, OpWasmI64ShrU, typ.Int64)
+               v0.AddArg(x)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 0
+               v.AddArg(v1)
+               v2 := b.NewValue0(v.Pos, OpWasmI64LtU, typ.Bool)
+               v2.AddArg(y)
+               v3 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v3.AuxInt = 64
+               v2.AddArg(v3)
+               v.AddArg(v2)
+               return true
+       }
+}
+// Unsigned 64-bit shift by 8-bit amount.
+func rewriteValueWasm_OpRsh64Ux8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh64Ux8 x y)
+       // cond:
+       // result: (Rsh64Ux64 x (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// Signed 64-bit shift by 16-bit amount.
+func rewriteValueWasm_OpRsh64x16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh64x16 x y)
+       // cond:
+       // result: (Rsh64x64 x (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// Signed 64-bit shift by 32-bit amount.
+func rewriteValueWasm_OpRsh64x32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh64x32 x y)
+       // cond:
+       // result: (Rsh64x64 x (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// Canonical signed 64-bit shift: clamp the shift amount to 63 when y >= 64
+// (an arithmetic shift by 63 reproduces Go's "shift fills with sign bit"
+// semantics for oversized counts), then do a single I64ShrS.
+func rewriteValueWasm_OpRsh64x64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh64x64 x y)
+       // cond:
+       // result: (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64]))))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64ShrS)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpWasmSelect, typ.Int64)
+               v0.AddArg(y)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 63
+               v0.AddArg(v1)
+               v2 := b.NewValue0(v.Pos, OpWasmI64LtU, typ.Bool)
+               v2.AddArg(y)
+               v3 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v3.AuxInt = 64
+               v2.AddArg(v3)
+               v0.AddArg(v2)
+               v.AddArg(v0)
+               return true
+       }
+}
+// Signed 64-bit shift by 8-bit amount.
+func rewriteValueWasm_OpRsh64x8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh64x8 x y)
+       // cond:
+       // result: (Rsh64x64 x (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v.AddArg(x)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(y)
+               v.AddArg(v0)
+               return true
+       }
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// 8-bit right-shift lowerings: extend the value to 64 bits (zero-extend for
+// unsigned, sign-extend for signed), zero-extend the shift amount, and reuse
+// the canonical 64-bit shift rules.
+func rewriteValueWasm_OpRsh8Ux16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh8Ux16 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// Unsigned 8-bit shift by 32-bit amount.
+func rewriteValueWasm_OpRsh8Ux32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh8Ux32 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// Unsigned 8-bit shift by 64-bit amount; shift count is already full width.
+func rewriteValueWasm_OpRsh8Ux64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh8Ux64 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt8to64 x) y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v.AddArg(y)
+               return true
+       }
+}
+// Unsigned 8-bit shift by 8-bit amount.
+func rewriteValueWasm_OpRsh8Ux8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh8Ux8 x y)
+       // cond:
+       // result: (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64Ux64)
+               v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// Signed 8-bit shift by 16-bit amount.
+func rewriteValueWasm_OpRsh8x16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh8x16 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt8to64 x) (ZeroExt16to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// Signed 8-bit shift by 32-bit amount.
+func rewriteValueWasm_OpRsh8x32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh8x32 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt8to64 x) (ZeroExt32to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// Signed 8-bit shift by 64-bit amount.
+func rewriteValueWasm_OpRsh8x64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh8x64 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt8to64 x) y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v.AddArg(y)
+               return true
+       }
+}
+// Signed 8-bit shift by 8-bit amount.
+func rewriteValueWasm_OpRsh8x8_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Rsh8x8 x y)
+       // cond:
+       // result: (Rsh64x64 (SignExt8to64 x) (ZeroExt8to64 y))
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpRsh64x64)
+               v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
+               v1.AddArg(y)
+               v.AddArg(v1)
+               return true
+       }
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// Sign-extension lowerings. Values live in 64-bit wasm locals, so
+// SignExtNtoM is implemented as a shift-left/arithmetic-shift-right pair by
+// (64 - N) bits, which replicates bit N-1 through the upper bits. The
+// destination width M is irrelevant to the emitted code, which is why e.g.
+// SignExt16to32 and SignExt16to64 produce identical shifts by 48.
+func rewriteValueWasm_OpSignExt16to32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (SignExt16to32 x)
+       // cond:
+       // result: (I64ShrS (I64Shl x (I64Const [48])) (I64Const [48]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrS)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 48
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 48
+               v.AddArg(v2)
+               return true
+       }
+}
+// Sign-extend 16 -> 64: shift pair by 64-16 = 48 bits.
+func rewriteValueWasm_OpSignExt16to64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (SignExt16to64 x)
+       // cond:
+       // result: (I64ShrS (I64Shl x (I64Const [48])) (I64Const [48]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrS)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 48
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 48
+               v.AddArg(v2)
+               return true
+       }
+}
+// Sign-extend 32 -> 64: shift pair by 64-32 = 32 bits.
+func rewriteValueWasm_OpSignExt32to64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (SignExt32to64 x)
+       // cond:
+       // result: (I64ShrS (I64Shl x (I64Const [32])) (I64Const [32]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrS)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 32
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 32
+               v.AddArg(v2)
+               return true
+       }
+}
+// Sign-extend 8 -> 16: shift pair by 64-8 = 56 bits.
+func rewriteValueWasm_OpSignExt8to16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (SignExt8to16 x)
+       // cond:
+       // result: (I64ShrS (I64Shl x (I64Const [56])) (I64Const [56]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrS)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 56
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 56
+               v.AddArg(v2)
+               return true
+       }
+}
+// Sign-extend 8 -> 32: shift pair by 56 bits.
+func rewriteValueWasm_OpSignExt8to32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (SignExt8to32 x)
+       // cond:
+       // result: (I64ShrS (I64Shl x (I64Const [56])) (I64Const [56]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrS)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 56
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 56
+               v.AddArg(v2)
+               return true
+       }
+}
+// Sign-extend 8 -> 64: shift pair by 56 bits.
+func rewriteValueWasm_OpSignExt8to64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (SignExt8to64 x)
+       // cond:
+       // result: (I64ShrS (I64Shl x (I64Const [56])) (I64Const [56]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrS)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 56
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 56
+               v.AddArg(v2)
+               return true
+       }
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// Slicemask produces 0 for x == 0 and all-ones for x > 0: (0 - x) makes the
+// sign bit 1 for any nonzero x, and the arithmetic shift by 63 smears that
+// sign bit across the whole word.
+func rewriteValueWasm_OpSlicemask_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Slicemask x)
+       // cond:
+       // result: (I64ShrS (I64Sub (I64Const [0]) x) (I64Const [63]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrS)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Sub, typ.Int64)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 0
+               v0.AddArg(v1)
+               v0.AddArg(x)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 63
+               v.AddArg(v2)
+               return true
+       }
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// StaticCall lowers 1:1 to the wasm-specific LoweredStaticCall, carrying the
+// argument width (AuxInt) and call target (Aux) through unchanged.
+func rewriteValueWasm_OpStaticCall_0(v *Value) bool {
+       // match: (StaticCall [argwid] {target} mem)
+       // cond:
+       // result: (LoweredStaticCall [argwid] {target} mem)
+       for {
+               argwid := v.AuxInt
+               target := v.Aux
+               mem := v.Args[0]
+               v.reset(OpWasmLoweredStaticCall)
+               v.AuxInt = argwid
+               v.Aux = target
+               v.AddArg(mem)
+               return true
+       }
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// Store is dispatched on the stored type (v.Aux): floats go to F64Store or
+// F32Store, and integers select an I64Store variant by size (8/4/2/1 bytes).
+// The float checks run first so a 4- or 8-byte float is not mistaken for an
+// integer store. Returns false if no rule fires (e.g. an unexpected size).
+func rewriteValueWasm_OpStore_0(v *Value) bool {
+       // match: (Store {t} ptr val mem)
+       // cond: is64BitFloat(t.(*types.Type))
+       // result: (F64Store ptr val mem)
+       for {
+               t := v.Aux
+               _ = v.Args[2]
+               ptr := v.Args[0]
+               val := v.Args[1]
+               mem := v.Args[2]
+               if !(is64BitFloat(t.(*types.Type))) {
+                       break
+               }
+               v.reset(OpWasmF64Store)
+               v.AddArg(ptr)
+               v.AddArg(val)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Store {t} ptr val mem)
+       // cond: is32BitFloat(t.(*types.Type))
+       // result: (F32Store ptr val mem)
+       for {
+               t := v.Aux
+               _ = v.Args[2]
+               ptr := v.Args[0]
+               val := v.Args[1]
+               mem := v.Args[2]
+               if !(is32BitFloat(t.(*types.Type))) {
+                       break
+               }
+               v.reset(OpWasmF32Store)
+               v.AddArg(ptr)
+               v.AddArg(val)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Store {t} ptr val mem)
+       // cond: t.(*types.Type).Size() == 8
+       // result: (I64Store ptr val mem)
+       for {
+               t := v.Aux
+               _ = v.Args[2]
+               ptr := v.Args[0]
+               val := v.Args[1]
+               mem := v.Args[2]
+               if !(t.(*types.Type).Size() == 8) {
+                       break
+               }
+               v.reset(OpWasmI64Store)
+               v.AddArg(ptr)
+               v.AddArg(val)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Store {t} ptr val mem)
+       // cond: t.(*types.Type).Size() == 4
+       // result: (I64Store32 ptr val mem)
+       for {
+               t := v.Aux
+               _ = v.Args[2]
+               ptr := v.Args[0]
+               val := v.Args[1]
+               mem := v.Args[2]
+               if !(t.(*types.Type).Size() == 4) {
+                       break
+               }
+               v.reset(OpWasmI64Store32)
+               v.AddArg(ptr)
+               v.AddArg(val)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Store {t} ptr val mem)
+       // cond: t.(*types.Type).Size() == 2
+       // result: (I64Store16 ptr val mem)
+       for {
+               t := v.Aux
+               _ = v.Args[2]
+               ptr := v.Args[0]
+               val := v.Args[1]
+               mem := v.Args[2]
+               if !(t.(*types.Type).Size() == 2) {
+                       break
+               }
+               v.reset(OpWasmI64Store16)
+               v.AddArg(ptr)
+               v.AddArg(val)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Store {t} ptr val mem)
+       // cond: t.(*types.Type).Size() == 1
+       // result: (I64Store8 ptr val mem)
+       for {
+               t := v.Aux
+               _ = v.Args[2]
+               ptr := v.Args[0]
+               val := v.Args[1]
+               mem := v.Args[2]
+               if !(t.(*types.Type).Size() == 1) {
+                       break
+               }
+               v.reset(OpWasmI64Store8)
+               v.AddArg(ptr)
+               v.AddArg(val)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// Subtraction lowerings: every integer width (and pointers) maps to I64Sub
+// — narrow results are truncated later when observed — and both float
+// widths map to F64Sub, since this backend performs float math in f64.
+func rewriteValueWasm_OpSub16_0(v *Value) bool {
+       // match: (Sub16 x y)
+       // cond:
+       // result: (I64Sub x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Sub)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// 32-bit integer subtraction -> I64Sub.
+func rewriteValueWasm_OpSub32_0(v *Value) bool {
+       // match: (Sub32 x y)
+       // cond:
+       // result: (I64Sub x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Sub)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// 32-bit float subtraction -> F64Sub (f32 values are kept as f64).
+func rewriteValueWasm_OpSub32F_0(v *Value) bool {
+       // match: (Sub32F x y)
+       // cond:
+       // result: (F64Sub x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Sub)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// 64-bit integer subtraction -> I64Sub.
+func rewriteValueWasm_OpSub64_0(v *Value) bool {
+       // match: (Sub64 x y)
+       // cond:
+       // result: (I64Sub x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Sub)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// 64-bit float subtraction -> F64Sub.
+func rewriteValueWasm_OpSub64F_0(v *Value) bool {
+       // match: (Sub64F x y)
+       // cond:
+       // result: (F64Sub x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmF64Sub)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// 8-bit integer subtraction -> I64Sub.
+func rewriteValueWasm_OpSub8_0(v *Value) bool {
+       // match: (Sub8 x y)
+       // cond:
+       // result: (I64Sub x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Sub)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// Pointer subtraction -> I64Sub (pointers are 64-bit integers here).
+func rewriteValueWasm_OpSubPtr_0(v *Value) bool {
+       // match: (SubPtr x y)
+       // cond:
+       // result: (I64Sub x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Sub)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// Truncation lowerings: since every integer already lives in a 64-bit wasm
+// local, truncating to a narrower width is a no-op — each Trunc* becomes a
+// Copy of its operand; the upper bits are masked/extended only when needed.
+func rewriteValueWasm_OpTrunc16to8_0(v *Value) bool {
+       // match: (Trunc16to8 x)
+       // cond:
+       // result: x
+       for {
+               x := v.Args[0]
+               v.reset(OpCopy)
+               v.Type = x.Type
+               v.AddArg(x)
+               return true
+       }
+}
+// Trunc32to16 is a no-op copy.
+func rewriteValueWasm_OpTrunc32to16_0(v *Value) bool {
+       // match: (Trunc32to16 x)
+       // cond:
+       // result: x
+       for {
+               x := v.Args[0]
+               v.reset(OpCopy)
+               v.Type = x.Type
+               v.AddArg(x)
+               return true
+       }
+}
+// Trunc32to8 is a no-op copy.
+func rewriteValueWasm_OpTrunc32to8_0(v *Value) bool {
+       // match: (Trunc32to8 x)
+       // cond:
+       // result: x
+       for {
+               x := v.Args[0]
+               v.reset(OpCopy)
+               v.Type = x.Type
+               v.AddArg(x)
+               return true
+       }
+}
+// Trunc64to16 is a no-op copy.
+func rewriteValueWasm_OpTrunc64to16_0(v *Value) bool {
+       // match: (Trunc64to16 x)
+       // cond:
+       // result: x
+       for {
+               x := v.Args[0]
+               v.reset(OpCopy)
+               v.Type = x.Type
+               v.AddArg(x)
+               return true
+       }
+}
+// Trunc64to32 is a no-op copy.
+func rewriteValueWasm_OpTrunc64to32_0(v *Value) bool {
+       // match: (Trunc64to32 x)
+       // cond:
+       // result: x
+       for {
+               x := v.Args[0]
+               v.reset(OpCopy)
+               v.Type = x.Type
+               v.AddArg(x)
+               return true
+       }
+}
+// Trunc64to8 is a no-op copy.
+func rewriteValueWasm_OpTrunc64to8_0(v *Value) bool {
+       // match: (Trunc64to8 x)
+       // cond:
+       // result: x
+       for {
+               x := v.Args[0]
+               v.reset(OpCopy)
+               v.Type = x.Type
+               v.AddArg(x)
+               return true
+       }
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// WB (write barrier) lowers 1:1 to the wasm LoweredWB, carrying the runtime
+// barrier function (Aux) and the destination/source/memory args through.
+func rewriteValueWasm_OpWB_0(v *Value) bool {
+       // match: (WB {fn} destptr srcptr mem)
+       // cond:
+       // result: (LoweredWB {fn} destptr srcptr mem)
+       for {
+               fn := v.Aux
+               _ = v.Args[2]
+               destptr := v.Args[0]
+               srcptr := v.Args[1]
+               mem := v.Args[2]
+               v.reset(OpWasmLoweredWB)
+               v.Aux = fn
+               v.AddArg(destptr)
+               v.AddArg(srcptr)
+               v.AddArg(mem)
+               return true
+       }
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// F64Add optimizations: fold two float constants at compile time (AuxInt
+// holds the float bits, hence the i2f/f2i round-trip), and otherwise
+// canonicalize a leading constant to the second operand so later rules only
+// need to match the constant on the right.
+func rewriteValueWasm_OpWasmF64Add_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (F64Add (F64Const [x]) (F64Const [y]))
+       // cond:
+       // result: (F64Const [f2i(i2f(x) + i2f(y))])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmF64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmF64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               v.reset(OpWasmF64Const)
+               v.AuxInt = f2i(i2f(x) + i2f(y))
+               return true
+       }
+       // match: (F64Add (F64Const [x]) y)
+       // cond:
+       // result: (F64Add y (F64Const [x]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmF64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               y := v.Args[1]
+               v.reset(OpWasmF64Add)
+               v.AddArg(y)
+               v0 := b.NewValue0(v.Pos, OpWasmF64Const, typ.Float64)
+               v0.AuxInt = x
+               v.AddArg(v0)
+               return true
+       }
+       return false
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// F64Mul optimizations, mirroring F64Add: constant-fold two float constants
+// (via i2f/f2i because AuxInt stores float bits) and move a leading
+// constant to the right-hand side.
+func rewriteValueWasm_OpWasmF64Mul_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (F64Mul (F64Const [x]) (F64Const [y]))
+       // cond:
+       // result: (F64Const [f2i(i2f(x) * i2f(y))])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmF64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmF64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               v.reset(OpWasmF64Const)
+               v.AuxInt = f2i(i2f(x) * i2f(y))
+               return true
+       }
+       // match: (F64Mul (F64Const [x]) y)
+       // cond:
+       // result: (F64Mul y (F64Const [x]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmF64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               y := v.Args[1]
+               v.reset(OpWasmF64Mul)
+               v.AddArg(y)
+               v0 := b.NewValue0(v.Pos, OpWasmF64Const, typ.Float64)
+               v0.AuxInt = x
+               v.AddArg(v0)
+               return true
+       }
+       return false
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// I64Add optimizations, tried in order: fold two integer constants; move a
+// leading constant to the right (canonical form); then absorb a right-hand
+// constant into the immediate form I64AddConst.
+func rewriteValueWasm_OpWasmI64Add_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (I64Add (I64Const [x]) (I64Const [y]))
+       // cond:
+       // result: (I64Const [x + y])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmI64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               v.reset(OpWasmI64Const)
+               v.AuxInt = x + y
+               return true
+       }
+       // match: (I64Add (I64Const [x]) y)
+       // cond:
+       // result: (I64Add y (I64Const [x]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               y := v.Args[1]
+               v.reset(OpWasmI64Add)
+               v.AddArg(y)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = x
+               v.AddArg(v0)
+               return true
+       }
+       // match: (I64Add x (I64Const [y]))
+       // cond:
+       // result: (I64AddConst [y] x)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmI64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               v.reset(OpWasmI64AddConst)
+               v.AuxInt = y
+               v.AddArg(x)
+               return true
+       }
+       return false
+}
+// Machine-generated from gen/WASM.rules — do not edit by hand.
+// I64And optimizations: fold two integer constants to their bitwise AND,
+// and canonicalize a leading constant to the right-hand operand.
+func rewriteValueWasm_OpWasmI64And_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (I64And (I64Const [x]) (I64Const [y]))
+       // cond:
+       // result: (I64Const [x & y])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmI64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               v.reset(OpWasmI64Const)
+               v.AuxInt = x & y
+               return true
+       }
+       // match: (I64And (I64Const [x]) y)
+       // cond:
+       // result: (I64And y (I64Const [x]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               y := v.Args[1]
+               v.reset(OpWasmI64And)
+               v.AddArg(y)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = x
+               v.AddArg(v0)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpWasmI64Eq_0 evaluates I64Eq of two constants at compile
+// time (1 when equal, 0 otherwise) and canonicalizes a constant first operand
+// to the second position (generated from WASM.rules; do not edit by hand).
+func rewriteValueWasm_OpWasmI64Eq_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (I64Eq (I64Const [x]) (I64Const [y]))
+       // cond: x == y
+       // result: (I64Const [1])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmI64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               if !(x == y) {
+                       break
+               }
+               v.reset(OpWasmI64Const)
+               v.AuxInt = 1
+               return true
+       }
+       // match: (I64Eq (I64Const [x]) (I64Const [y]))
+       // cond: x != y
+       // result: (I64Const [0])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmI64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               if !(x != y) {
+                       break
+               }
+               v.reset(OpWasmI64Const)
+               v.AuxInt = 0
+               return true
+       }
+       // match: (I64Eq (I64Const [x]) y)
+       // cond:
+       // result: (I64Eq y (I64Const [x]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               y := v.Args[1]
+               v.reset(OpWasmI64Eq)
+               v.AddArg(y)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = x
+               v.AddArg(v0)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpWasmI64Eqz_0 collapses a triple I64Eqz into a single one:
+// the inner pair already produces a 0/1 value, so the outer pair is an identity
+// on it (generated from WASM.rules; do not edit by hand).
+func rewriteValueWasm_OpWasmI64Eqz_0(v *Value) bool {
+       // match: (I64Eqz (I64Eqz (I64Eqz x)))
+       // cond:
+       // result: (I64Eqz x)
+       for {
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Eqz {
+                       break
+               }
+               v_0_0 := v_0.Args[0]
+               if v_0_0.Op != OpWasmI64Eqz {
+                       break
+               }
+               x := v_0_0.Args[0]
+               v.reset(OpWasmI64Eqz)
+               v.AddArg(x)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpWasmI64Load_0 folds a constant pointer adjustment
+// (I64AddConst) into the load's immediate offset, provided the combined
+// offset stays non-negative as wasm load offsets require
+// (generated from WASM.rules; do not edit by hand).
+func rewriteValueWasm_OpWasmI64Load_0(v *Value) bool {
+       // match: (I64Load [off] (I64AddConst [off2] ptr) mem)
+       // cond: off+off2 >= 0
+       // result: (I64Load [off+off2] ptr mem)
+       for {
+               off := v.AuxInt
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64AddConst {
+                       break
+               }
+               off2 := v_0.AuxInt
+               ptr := v_0.Args[0]
+               mem := v.Args[1]
+               if !(off+off2 >= 0) {
+                       break
+               }
+               v.reset(OpWasmI64Load)
+               v.AuxInt = off + off2
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+
+// rewriteValueWasm_OpWasmI64Load16S_0 is the same offset-folding rule as
+// for I64Load, applied to the sign-extending 16-bit load.
+func rewriteValueWasm_OpWasmI64Load16S_0(v *Value) bool {
+       // match: (I64Load16S [off] (I64AddConst [off2] ptr) mem)
+       // cond: off+off2 >= 0
+       // result: (I64Load16S [off+off2] ptr mem)
+       for {
+               off := v.AuxInt
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64AddConst {
+                       break
+               }
+               off2 := v_0.AuxInt
+               ptr := v_0.Args[0]
+               mem := v.Args[1]
+               if !(off+off2 >= 0) {
+                       break
+               }
+               v.reset(OpWasmI64Load16S)
+               v.AuxInt = off + off2
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+
+// rewriteValueWasm_OpWasmI64Load16U_0 is the same offset-folding rule as
+// for I64Load, applied to the zero-extending 16-bit load.
+func rewriteValueWasm_OpWasmI64Load16U_0(v *Value) bool {
+       // match: (I64Load16U [off] (I64AddConst [off2] ptr) mem)
+       // cond: off+off2 >= 0
+       // result: (I64Load16U [off+off2] ptr mem)
+       for {
+               off := v.AuxInt
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64AddConst {
+                       break
+               }
+               off2 := v_0.AuxInt
+               ptr := v_0.Args[0]
+               mem := v.Args[1]
+               if !(off+off2 >= 0) {
+                       break
+               }
+               v.reset(OpWasmI64Load16U)
+               v.AuxInt = off + off2
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+
+// rewriteValueWasm_OpWasmI64Load32S_0 is the same offset-folding rule as
+// for I64Load, applied to the sign-extending 32-bit load.
+func rewriteValueWasm_OpWasmI64Load32S_0(v *Value) bool {
+       // match: (I64Load32S [off] (I64AddConst [off2] ptr) mem)
+       // cond: off+off2 >= 0
+       // result: (I64Load32S [off+off2] ptr mem)
+       for {
+               off := v.AuxInt
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64AddConst {
+                       break
+               }
+               off2 := v_0.AuxInt
+               ptr := v_0.Args[0]
+               mem := v.Args[1]
+               if !(off+off2 >= 0) {
+                       break
+               }
+               v.reset(OpWasmI64Load32S)
+               v.AuxInt = off + off2
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+
+// rewriteValueWasm_OpWasmI64Load32U_0 is the same offset-folding rule as
+// for I64Load, applied to the zero-extending 32-bit load.
+func rewriteValueWasm_OpWasmI64Load32U_0(v *Value) bool {
+       // match: (I64Load32U [off] (I64AddConst [off2] ptr) mem)
+       // cond: off+off2 >= 0
+       // result: (I64Load32U [off+off2] ptr mem)
+       for {
+               off := v.AuxInt
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64AddConst {
+                       break
+               }
+               off2 := v_0.AuxInt
+               ptr := v_0.Args[0]
+               mem := v.Args[1]
+               if !(off+off2 >= 0) {
+                       break
+               }
+               v.reset(OpWasmI64Load32U)
+               v.AuxInt = off + off2
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+
+// rewriteValueWasm_OpWasmI64Load8S_0 is the same offset-folding rule as
+// for I64Load, applied to the sign-extending 8-bit load.
+func rewriteValueWasm_OpWasmI64Load8S_0(v *Value) bool {
+       // match: (I64Load8S [off] (I64AddConst [off2] ptr) mem)
+       // cond: off+off2 >= 0
+       // result: (I64Load8S [off+off2] ptr mem)
+       for {
+               off := v.AuxInt
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64AddConst {
+                       break
+               }
+               off2 := v_0.AuxInt
+               ptr := v_0.Args[0]
+               mem := v.Args[1]
+               if !(off+off2 >= 0) {
+                       break
+               }
+               v.reset(OpWasmI64Load8S)
+               v.AuxInt = off + off2
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+
+// rewriteValueWasm_OpWasmI64Load8U_0 is the same offset-folding rule as
+// for I64Load, applied to the zero-extending 8-bit load.
+func rewriteValueWasm_OpWasmI64Load8U_0(v *Value) bool {
+       // match: (I64Load8U [off] (I64AddConst [off2] ptr) mem)
+       // cond: off+off2 >= 0
+       // result: (I64Load8U [off+off2] ptr mem)
+       for {
+               off := v.AuxInt
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64AddConst {
+                       break
+               }
+               off2 := v_0.AuxInt
+               ptr := v_0.Args[0]
+               mem := v.Args[1]
+               if !(off+off2 >= 0) {
+                       break
+               }
+               v.reset(OpWasmI64Load8U)
+               v.AuxInt = off + off2
+               v.AddArg(ptr)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpWasmI64Mul_0 folds I64Mul of two constants into a single
+// I64Const, and canonicalizes a constant first operand to the second position
+// (generated from WASM.rules; do not edit by hand).
+func rewriteValueWasm_OpWasmI64Mul_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (I64Mul (I64Const [x]) (I64Const [y]))
+       // cond:
+       // result: (I64Const [x * y])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmI64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               v.reset(OpWasmI64Const)
+               v.AuxInt = x * y
+               return true
+       }
+       // match: (I64Mul (I64Const [x]) y)
+       // cond:
+       // result: (I64Mul y (I64Const [x]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               y := v.Args[1]
+               v.reset(OpWasmI64Mul)
+               v.AddArg(y)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = x
+               v.AddArg(v0)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpWasmI64Ne_0 evaluates I64Ne of two constants at compile
+// time (0 when equal, 1 otherwise) and canonicalizes a constant first operand
+// to the second position (generated from WASM.rules; do not edit by hand).
+func rewriteValueWasm_OpWasmI64Ne_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (I64Ne (I64Const [x]) (I64Const [y]))
+       // cond: x == y
+       // result: (I64Const [0])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmI64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               if !(x == y) {
+                       break
+               }
+               v.reset(OpWasmI64Const)
+               v.AuxInt = 0
+               return true
+       }
+       // match: (I64Ne (I64Const [x]) (I64Const [y]))
+       // cond: x != y
+       // result: (I64Const [1])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmI64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               if !(x != y) {
+                       break
+               }
+               v.reset(OpWasmI64Const)
+               v.AuxInt = 1
+               return true
+       }
+       // match: (I64Ne (I64Const [x]) y)
+       // cond:
+       // result: (I64Ne y (I64Const [x]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               y := v.Args[1]
+               v.reset(OpWasmI64Ne)
+               v.AddArg(y)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = x
+               v.AddArg(v0)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpWasmI64Or_0 folds I64Or of two constants into a single
+// I64Const, and canonicalizes a constant first operand to the second position
+// (generated from WASM.rules; do not edit by hand).
+func rewriteValueWasm_OpWasmI64Or_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (I64Or (I64Const [x]) (I64Const [y]))
+       // cond:
+       // result: (I64Const [x | y])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmI64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               v.reset(OpWasmI64Const)
+               v.AuxInt = x | y
+               return true
+       }
+       // match: (I64Or (I64Const [x]) y)
+       // cond:
+       // result: (I64Or y (I64Const [x]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               y := v.Args[1]
+               v.reset(OpWasmI64Or)
+               v.AddArg(y)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = x
+               v.AddArg(v0)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpWasmI64Store_0 folds a constant pointer adjustment
+// (I64AddConst) into the store's immediate offset, provided the combined
+// offset stays non-negative as wasm store offsets require
+// (generated from WASM.rules; do not edit by hand).
+func rewriteValueWasm_OpWasmI64Store_0(v *Value) bool {
+       // match: (I64Store [off] (I64AddConst [off2] ptr) val mem)
+       // cond: off+off2 >= 0
+       // result: (I64Store [off+off2] ptr val mem)
+       for {
+               off := v.AuxInt
+               _ = v.Args[2]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64AddConst {
+                       break
+               }
+               off2 := v_0.AuxInt
+               ptr := v_0.Args[0]
+               val := v.Args[1]
+               mem := v.Args[2]
+               if !(off+off2 >= 0) {
+                       break
+               }
+               v.reset(OpWasmI64Store)
+               v.AuxInt = off + off2
+               v.AddArg(ptr)
+               v.AddArg(val)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+
+// rewriteValueWasm_OpWasmI64Store16_0 is the same offset-folding rule as
+// for I64Store, applied to the 16-bit store.
+func rewriteValueWasm_OpWasmI64Store16_0(v *Value) bool {
+       // match: (I64Store16 [off] (I64AddConst [off2] ptr) val mem)
+       // cond: off+off2 >= 0
+       // result: (I64Store16 [off+off2] ptr val mem)
+       for {
+               off := v.AuxInt
+               _ = v.Args[2]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64AddConst {
+                       break
+               }
+               off2 := v_0.AuxInt
+               ptr := v_0.Args[0]
+               val := v.Args[1]
+               mem := v.Args[2]
+               if !(off+off2 >= 0) {
+                       break
+               }
+               v.reset(OpWasmI64Store16)
+               v.AuxInt = off + off2
+               v.AddArg(ptr)
+               v.AddArg(val)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+
+// rewriteValueWasm_OpWasmI64Store32_0 is the same offset-folding rule as
+// for I64Store, applied to the 32-bit store.
+func rewriteValueWasm_OpWasmI64Store32_0(v *Value) bool {
+       // match: (I64Store32 [off] (I64AddConst [off2] ptr) val mem)
+       // cond: off+off2 >= 0
+       // result: (I64Store32 [off+off2] ptr val mem)
+       for {
+               off := v.AuxInt
+               _ = v.Args[2]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64AddConst {
+                       break
+               }
+               off2 := v_0.AuxInt
+               ptr := v_0.Args[0]
+               val := v.Args[1]
+               mem := v.Args[2]
+               if !(off+off2 >= 0) {
+                       break
+               }
+               v.reset(OpWasmI64Store32)
+               v.AuxInt = off + off2
+               v.AddArg(ptr)
+               v.AddArg(val)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+
+// rewriteValueWasm_OpWasmI64Store8_0 is the same offset-folding rule as
+// for I64Store, applied to the 8-bit store.
+func rewriteValueWasm_OpWasmI64Store8_0(v *Value) bool {
+       // match: (I64Store8 [off] (I64AddConst [off2] ptr) val mem)
+       // cond: off+off2 >= 0
+       // result: (I64Store8 [off+off2] ptr val mem)
+       for {
+               off := v.AuxInt
+               _ = v.Args[2]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64AddConst {
+                       break
+               }
+               off2 := v_0.AuxInt
+               ptr := v_0.Args[0]
+               val := v.Args[1]
+               mem := v.Args[2]
+               if !(off+off2 >= 0) {
+                       break
+               }
+               v.reset(OpWasmI64Store8)
+               v.AuxInt = off + off2
+               v.AddArg(ptr)
+               v.AddArg(val)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpWasmI64Xor_0 folds I64Xor of two constants into a single
+// I64Const, and canonicalizes a constant first operand to the second position
+// (generated from WASM.rules; do not edit by hand).
+func rewriteValueWasm_OpWasmI64Xor_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (I64Xor (I64Const [x]) (I64Const [y]))
+       // cond:
+       // result: (I64Const [x ^ y])
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               v_1 := v.Args[1]
+               if v_1.Op != OpWasmI64Const {
+                       break
+               }
+               y := v_1.AuxInt
+               v.reset(OpWasmI64Const)
+               v.AuxInt = x ^ y
+               return true
+       }
+       // match: (I64Xor (I64Const [x]) y)
+       // cond:
+       // result: (I64Xor y (I64Const [x]))
+       for {
+               _ = v.Args[1]
+               v_0 := v.Args[0]
+               if v_0.Op != OpWasmI64Const {
+                       break
+               }
+               x := v_0.AuxInt
+               y := v.Args[1]
+               v.reset(OpWasmI64Xor)
+               v.AddArg(y)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = x
+               v.AddArg(v0)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpXor16_0 lowers the generic Xor16 op to the wasm I64Xor
+// instruction; all integer widths share the 64-bit xor
+// (generated from WASM.rules; do not edit by hand).
+func rewriteValueWasm_OpXor16_0(v *Value) bool {
+       // match: (Xor16 x y)
+       // cond:
+       // result: (I64Xor x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Xor)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+
+// rewriteValueWasm_OpXor32_0 lowers the generic Xor32 op to I64Xor.
+func rewriteValueWasm_OpXor32_0(v *Value) bool {
+       // match: (Xor32 x y)
+       // cond:
+       // result: (I64Xor x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Xor)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+
+// rewriteValueWasm_OpXor64_0 lowers the generic Xor64 op to I64Xor.
+func rewriteValueWasm_OpXor64_0(v *Value) bool {
+       // match: (Xor64 x y)
+       // cond:
+       // result: (I64Xor x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Xor)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+
+// rewriteValueWasm_OpXor8_0 lowers the generic Xor8 op to I64Xor.
+func rewriteValueWasm_OpXor8_0(v *Value) bool {
+       // match: (Xor8 x y)
+       // cond:
+       // result: (I64Xor x y)
+       for {
+               _ = v.Args[1]
+               x := v.Args[0]
+               y := v.Args[1]
+               v.reset(OpWasmI64Xor)
+               v.AddArg(x)
+               v.AddArg(y)
+               return true
+       }
+}
+// rewriteValueWasm_OpZero_0 lowers the generic Zero op for small sizes:
+// sizes 0-8 become at most one store; sizes 3/5/6/7 use two overlapping
+// stores; a size that is not a multiple of 8 (and > 8) is peeled down by one
+// 8-byte store so the remainder is 8-aligned. Larger cases continue in
+// rewriteValueWasm_OpZero_10 (generated from WASM.rules; do not edit by hand).
+func rewriteValueWasm_OpZero_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Zero [0] _ mem)
+       // cond:
+       // result: mem
+       for {
+               if v.AuxInt != 0 {
+                       break
+               }
+               _ = v.Args[1]
+               mem := v.Args[1]
+               v.reset(OpCopy)
+               v.Type = mem.Type
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Zero [1] destptr mem)
+       // cond:
+       // result: (I64Store8 destptr (I64Const [0]) mem)
+       for {
+               if v.AuxInt != 1 {
+                       break
+               }
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmI64Store8)
+               v.AddArg(destptr)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Zero [2] destptr mem)
+       // cond:
+       // result: (I64Store16 destptr (I64Const [0]) mem)
+       for {
+               if v.AuxInt != 2 {
+                       break
+               }
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmI64Store16)
+               v.AddArg(destptr)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Zero [4] destptr mem)
+       // cond:
+       // result: (I64Store32 destptr (I64Const [0]) mem)
+       for {
+               if v.AuxInt != 4 {
+                       break
+               }
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmI64Store32)
+               v.AddArg(destptr)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Zero [8] destptr mem)
+       // cond:
+       // result: (I64Store destptr (I64Const [0]) mem)
+       for {
+               if v.AuxInt != 8 {
+                       break
+               }
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmI64Store)
+               v.AddArg(destptr)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v.AddArg(mem)
+               return true
+       }
+       // match: (Zero [3] destptr mem)
+       // cond:
+       // result: (I64Store8 [2] destptr (I64Const [0]) (I64Store16 destptr (I64Const [0]) mem))
+       for {
+               if v.AuxInt != 3 {
+                       break
+               }
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmI64Store8)
+               v.AuxInt = 2
+               v.AddArg(destptr)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store16, types.TypeMem)
+               v1.AddArg(destptr)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 0
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Zero [5] destptr mem)
+       // cond:
+       // result: (I64Store8 [4] destptr (I64Const [0]) (I64Store32 destptr (I64Const [0]) mem))
+       for {
+               if v.AuxInt != 5 {
+                       break
+               }
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmI64Store8)
+               v.AuxInt = 4
+               v.AddArg(destptr)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store32, types.TypeMem)
+               v1.AddArg(destptr)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 0
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Zero [6] destptr mem)
+       // cond:
+       // result: (I64Store16 [4] destptr (I64Const [0]) (I64Store32 destptr (I64Const [0]) mem))
+       for {
+               if v.AuxInt != 6 {
+                       break
+               }
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmI64Store16)
+               v.AuxInt = 4
+               v.AddArg(destptr)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store32, types.TypeMem)
+               v1.AddArg(destptr)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 0
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Zero [7] destptr mem)
+       // cond:
+       // result: (I64Store32 [3] destptr (I64Const [0]) (I64Store32 destptr (I64Const [0]) mem))
+       for {
+               if v.AuxInt != 7 {
+                       break
+               }
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               // two overlapping 4-byte stores cover bytes 0-6
+               v.reset(OpWasmI64Store32)
+               v.AuxInt = 3
+               v.AddArg(destptr)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store32, types.TypeMem)
+               v1.AddArg(destptr)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 0
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Zero [s] destptr mem)
+       // cond: s%8 != 0 && s > 8
+       // result: (Zero [s-s%8] (OffPtr <destptr.Type> destptr [s%8]) (I64Store destptr (I64Const [0]) mem))
+       for {
+               s := v.AuxInt
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               if !(s%8 != 0 && s > 8) {
+                       break
+               }
+               // store 8 bytes at the front, then recurse on the 8-aligned remainder
+               v.reset(OpZero)
+               v.AuxInt = s - s%8
+               v0 := b.NewValue0(v.Pos, OpOffPtr, destptr.Type)
+               v0.AuxInt = s % 8
+               v0.AddArg(destptr)
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v1.AddArg(destptr)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 0
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpZero_10 continues the Zero lowering: sizes 16, 24 and 32
+// become chains of two, three and four 8-byte stores; any other multiple of 8
+// above 32 becomes a LoweredZero loop with the count in 8-byte units
+// (generated from WASM.rules; do not edit by hand).
+func rewriteValueWasm_OpZero_10(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (Zero [16] destptr mem)
+       // cond:
+       // result: (I64Store [8] destptr (I64Const [0]) (I64Store destptr (I64Const [0]) mem))
+       for {
+               if v.AuxInt != 16 {
+                       break
+               }
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmI64Store)
+               v.AuxInt = 8
+               v.AddArg(destptr)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v1.AddArg(destptr)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 0
+               v1.AddArg(v2)
+               v1.AddArg(mem)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Zero [24] destptr mem)
+       // cond:
+       // result: (I64Store [16] destptr (I64Const [0]) (I64Store [8] destptr (I64Const [0]) (I64Store destptr (I64Const [0]) mem)))
+       for {
+               if v.AuxInt != 24 {
+                       break
+               }
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmI64Store)
+               v.AuxInt = 16
+               v.AddArg(destptr)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v1.AuxInt = 8
+               v1.AddArg(destptr)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 0
+               v1.AddArg(v2)
+               v3 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v3.AddArg(destptr)
+               v4 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v4.AuxInt = 0
+               v3.AddArg(v4)
+               v3.AddArg(mem)
+               v1.AddArg(v3)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Zero [32] destptr mem)
+       // cond:
+       // result: (I64Store [24] destptr (I64Const [0]) (I64Store [16] destptr (I64Const [0]) (I64Store [8] destptr (I64Const [0]) (I64Store destptr (I64Const [0]) mem))))
+       for {
+               if v.AuxInt != 32 {
+                       break
+               }
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               v.reset(OpWasmI64Store)
+               v.AuxInt = 24
+               v.AddArg(destptr)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v0.AuxInt = 0
+               v.AddArg(v0)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v1.AuxInt = 16
+               v1.AddArg(destptr)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 0
+               v1.AddArg(v2)
+               v3 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v3.AuxInt = 8
+               v3.AddArg(destptr)
+               v4 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v4.AuxInt = 0
+               v3.AddArg(v4)
+               v5 := b.NewValue0(v.Pos, OpWasmI64Store, types.TypeMem)
+               v5.AddArg(destptr)
+               v6 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v6.AuxInt = 0
+               v5.AddArg(v6)
+               v5.AddArg(mem)
+               v3.AddArg(v5)
+               v1.AddArg(v3)
+               v.AddArg(v1)
+               return true
+       }
+       // match: (Zero [s] destptr mem)
+       // cond: s%8 == 0 && s > 32
+       // result: (LoweredZero [s/8] destptr mem)
+       for {
+               s := v.AuxInt
+               _ = v.Args[1]
+               destptr := v.Args[0]
+               mem := v.Args[1]
+               if !(s%8 == 0 && s > 32) {
+                       break
+               }
+               v.reset(OpWasmLoweredZero)
+               v.AuxInt = s / 8
+               v.AddArg(destptr)
+               v.AddArg(mem)
+               return true
+       }
+       return false
+}
+// rewriteValueWasm_OpZeroExt16to32_0 lowers the generic ZeroExt16to32 op to a
+// shift pair: shift left 48, then logical shift right 48, clearing the upper
+// 48 bits of the 64-bit word. Machine-generated from gen/WASM.rules; do not edit.
+func rewriteValueWasm_OpZeroExt16to32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (ZeroExt16to32 x)
+       // cond:
+       // result: (I64ShrU (I64Shl x (I64Const [48])) (I64Const [48]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrU)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 48
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 48
+               v.AddArg(v2)
+               return true
+       }
+}
+// rewriteValueWasm_OpZeroExt16to64_0 lowers ZeroExt16to64 via shift-left 48
+// then unsigned shift-right 48, zeroing bits 16-63.
+// Machine-generated from gen/WASM.rules; do not edit.
+func rewriteValueWasm_OpZeroExt16to64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (ZeroExt16to64 x)
+       // cond:
+       // result: (I64ShrU (I64Shl x (I64Const [48])) (I64Const [48]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrU)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 48
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 48
+               v.AddArg(v2)
+               return true
+       }
+}
+// rewriteValueWasm_OpZeroExt32to64_0 lowers ZeroExt32to64 via shift-left 32
+// then unsigned shift-right 32, zeroing the upper 32 bits.
+// Machine-generated from gen/WASM.rules; do not edit.
+func rewriteValueWasm_OpZeroExt32to64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (ZeroExt32to64 x)
+       // cond:
+       // result: (I64ShrU (I64Shl x (I64Const [32])) (I64Const [32]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrU)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 32
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 32
+               v.AddArg(v2)
+               return true
+       }
+}
+// rewriteValueWasm_OpZeroExt8to16_0 lowers ZeroExt8to16 via shift-left 56
+// then unsigned shift-right 56, keeping only the low 8 bits.
+// Machine-generated from gen/WASM.rules; do not edit.
+func rewriteValueWasm_OpZeroExt8to16_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (ZeroExt8to16 x)
+       // cond:
+       // result: (I64ShrU (I64Shl x (I64Const [56])) (I64Const [56]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrU)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 56
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 56
+               v.AddArg(v2)
+               return true
+       }
+}
+// rewriteValueWasm_OpZeroExt8to32_0 lowers ZeroExt8to32 via shift-left 56
+// then unsigned shift-right 56, keeping only the low 8 bits.
+// Machine-generated from gen/WASM.rules; do not edit.
+func rewriteValueWasm_OpZeroExt8to32_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (ZeroExt8to32 x)
+       // cond:
+       // result: (I64ShrU (I64Shl x (I64Const [56])) (I64Const [56]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrU)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 56
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 56
+               v.AddArg(v2)
+               return true
+       }
+}
+// rewriteValueWasm_OpZeroExt8to64_0 lowers ZeroExt8to64 via shift-left 56
+// then unsigned shift-right 56, keeping only the low 8 bits.
+// Machine-generated from gen/WASM.rules; do not edit.
+func rewriteValueWasm_OpZeroExt8to64_0(v *Value) bool {
+       b := v.Block
+       _ = b
+       typ := &b.Func.Config.Types
+       _ = typ
+       // match: (ZeroExt8to64 x)
+       // cond:
+       // result: (I64ShrU (I64Shl x (I64Const [56])) (I64Const [56]))
+       for {
+               x := v.Args[0]
+               v.reset(OpWasmI64ShrU)
+               v0 := b.NewValue0(v.Pos, OpWasmI64Shl, typ.Int64)
+               v0.AddArg(x)
+               v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v1.AuxInt = 56
+               v0.AddArg(v1)
+               v.AddArg(v0)
+               v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
+               v2.AuxInt = 56
+               v.AddArg(v2)
+               return true
+       }
+}
+// rewriteBlockWasm applies block-level rewrite rules. The wasm rules file
+// defines none, so the switch is empty and this always reports false
+// (no change). Machine-generated from gen/WASM.rules; do not edit.
+func rewriteBlockWasm(b *Block) bool {
+       config := b.Func.Config
+       _ = config
+       fe := b.Func.fe
+       _ = fe
+       typ := &config.Types
+       _ = typ
+       switch b.Kind {
+       }
+       return false
+}
index 78c72f814641841d97759c23092d905562ba6a9e..f1783a9532efe04be1dba3d2b0e76c1a9b0d8a1d 100644 (file)
@@ -90,7 +90,8 @@ func schedule(f *Func) {
                        case v.Op == OpAMD64LoweredGetClosurePtr || v.Op == OpPPC64LoweredGetClosurePtr ||
                                v.Op == OpARMLoweredGetClosurePtr || v.Op == OpARM64LoweredGetClosurePtr ||
                                v.Op == Op386LoweredGetClosurePtr || v.Op == OpMIPS64LoweredGetClosurePtr ||
-                               v.Op == OpS390XLoweredGetClosurePtr || v.Op == OpMIPSLoweredGetClosurePtr:
+                               v.Op == OpS390XLoweredGetClosurePtr || v.Op == OpMIPSLoweredGetClosurePtr ||
+                               v.Op == OpWasmLoweredGetClosurePtr:
                                // We also score GetLoweredClosurePtr as early as possible to ensure that the
                                // context register is not stomped. GetLoweredClosurePtr should only appear
                                // in the entry block where there are no phi functions, so there is no
@@ -102,7 +103,8 @@ func schedule(f *Func) {
                        case v.Op == OpAMD64LoweredNilCheck || v.Op == OpPPC64LoweredNilCheck ||
                                v.Op == OpARMLoweredNilCheck || v.Op == OpARM64LoweredNilCheck ||
                                v.Op == Op386LoweredNilCheck || v.Op == OpMIPS64LoweredNilCheck ||
-                               v.Op == OpS390XLoweredNilCheck || v.Op == OpMIPSLoweredNilCheck:
+                               v.Op == OpS390XLoweredNilCheck || v.Op == OpMIPSLoweredNilCheck ||
+                               v.Op == OpWasmLoweredNilCheck:
                                // Nil checks must come before loads from the same address.
                                score[v.ID] = ScoreNilCheck
                        case v.Op == OpPhi:
index 2dabeb86269acfd61c6873df7cbaa84c57a7f680..3713269376977942b1c683ec7a08bf1fbea9993a 100644 (file)
@@ -18,7 +18,8 @@ func tighten(f *Func) {
                                OpAMD64LoweredGetClosurePtr, Op386LoweredGetClosurePtr,
                                OpARMLoweredGetClosurePtr, OpARM64LoweredGetClosurePtr,
                                OpMIPSLoweredGetClosurePtr, OpMIPS64LoweredGetClosurePtr,
-                               OpS390XLoweredGetClosurePtr, OpPPC64LoweredGetClosurePtr:
+                               OpS390XLoweredGetClosurePtr, OpPPC64LoweredGetClosurePtr,
+                               OpWasmLoweredGetClosurePtr:
                                // Phis need to stay in their block.
                                // GetClosurePtr & Arg must stay in the entry block.
                                // Tuple selectors must stay with the tuple generator.
diff --git a/src/cmd/compile/internal/wasm/ssa.go b/src/cmd/compile/internal/wasm/ssa.go
new file mode 100644 (file)
index 0000000..d4bd7fb
--- /dev/null
@@ -0,0 +1,430 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package wasm
+
+import (
+       "cmd/compile/internal/gc"
+       "cmd/compile/internal/ssa"
+       "cmd/compile/internal/types"
+       "cmd/internal/obj"
+       "cmd/internal/obj/wasm"
+)
+
+// Init registers the wasm backend with the compiler: it wires up the
+// wasm link architecture, the stack pointer register, and the SSA code
+// generation callbacks implemented in this file.
+func Init(arch *gc.Arch) {
+       arch.LinkArch = &wasm.Linkwasm
+       arch.REGSP = wasm.REG_SP
+       // Very large limit on data widths; wasm has no practical constraint
+       // here — presumably chosen to never be the limiting factor. TODO confirm.
+       arch.MAXWIDTH = 1 << 50
+
+       arch.ZeroRange = zeroRange
+       arch.ZeroAuto = zeroAuto
+       arch.Ginsnop = ginsnop
+
+       arch.SSAMarkMoves = ssaMarkMoves
+       arch.SSAGenValue = ssaGenValue
+       arch.SSAGenBlock = ssaGenBlock
+}
+
+// zeroRange emits code that zeroes cnt bytes of the stack frame starting at
+// offset off from SP, 8 bytes per iteration (push SP, push constant 0,
+// I64Store at off+i). cnt must be a multiple of 8. The state parameter is
+// part of the generic ZeroRange signature and is unused on wasm.
+func zeroRange(pp *gc.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Prog {
+       if cnt == 0 {
+               return p
+       }
+       if cnt%8 != 0 {
+               gc.Fatalf("zerorange count not a multiple of widthptr %d", cnt)
+       }
+
+       for i := int64(0); i < cnt; i += 8 {
+               p = pp.Appendpp(p, wasm.AGet, obj.TYPE_REG, wasm.REG_SP, 0, 0, 0, 0)
+               p = pp.Appendpp(p, wasm.AI64Const, obj.TYPE_CONST, 0, 0, 0, 0, 0)
+               p = pp.Appendpp(p, wasm.AI64Store, 0, 0, 0, obj.TYPE_CONST, 0, off+i)
+       }
+
+       return p
+}
+
+// zeroAuto emits code that zeroes the stack-resident automatic variable n,
+// 8 bytes at a time: push SP, push constant 0, then I64Store to the auto's
+// frame offset. Assumes the variable's size is handled in 8-byte units
+// (the loop rounds up any trailing partial word).
+func zeroAuto(pp *gc.Progs, n *gc.Node) {
+       sym := n.Sym.Linksym()
+       size := n.Type.Size()
+       for i := int64(0); i < size; i += 8 {
+               p := pp.Prog(wasm.AGet)
+               p.From = obj.Addr{Type: obj.TYPE_REG, Reg: wasm.REG_SP}
+
+               p = pp.Prog(wasm.AI64Const)
+               p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: 0}
+
+               p = pp.Prog(wasm.AI64Store)
+               p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_AUTO, Offset: n.Xoffset + i, Sym: sym}
+       }
+}
+
+// ginsnop emits a single wasm Nop instruction.
+func ginsnop(pp *gc.Progs) {
+       pp.Prog(wasm.ANop)
+}
+
+// ssaMarkMoves is the flag-preservation hook of the generic SSA backend.
+// wasm has no condition-flag register, so there is nothing to mark.
+func ssaMarkMoves(s *gc.SSAGenState, b *ssa.Block) {
+}
+
+// ssaGenBlock emits the control-flow epilogue of SSA block b. next is the
+// block that will be emitted immediately after b; jumps to it are elided
+// where a fallthrough is allowed.
+func ssaGenBlock(s *gc.SSAGenState, b, next *ssa.Block) {
+       // goToBlock branches to block, unless it can simply fall through.
+       goToBlock := func(block *ssa.Block, canFallthrough bool) {
+               if canFallthrough && block == next {
+                       return
+               }
+               s.Br(obj.AJMP, block)
+       }
+
+       switch b.Kind {
+       case ssa.BlockPlain:
+               goToBlock(b.Succs[0].Block(), true)
+
+       case ssa.BlockIf:
+               // If the (i32) control value is zero, branch to the "false"
+               // successor; otherwise fall through / jump to the "true" one.
+               getReg32(s, b.Control)
+               s.Prog(wasm.AI32Eqz)
+               s.Prog(wasm.AIf)
+               goToBlock(b.Succs[1].Block(), false)
+               s.Prog(wasm.AEnd)
+               goToBlock(b.Succs[0].Block(), true)
+
+       case ssa.BlockRet:
+               s.Prog(obj.ARET)
+
+       case ssa.BlockRetJmp:
+               // Return-and-jump: RET with an external symbol target.
+               p := s.Prog(obj.ARET)
+               p.To.Type = obj.TYPE_MEM
+               p.To.Name = obj.NAME_EXTERN
+               p.To.Sym = b.Aux.(*obj.LSym)
+
+       case ssa.BlockExit:
+               s.Prog(obj.AUNDEF)
+
+       case ssa.BlockDefer:
+               // defer returns in RET0 whether the deferred call ran;
+               // non-zero means jump to the deferreturn path (Succs[1]).
+               p := s.Prog(wasm.AGet)
+               p.From = obj.Addr{Type: obj.TYPE_REG, Reg: wasm.REG_RET0}
+               s.Prog(wasm.AI64Eqz)
+               s.Prog(wasm.AI32Eqz)
+               s.Prog(wasm.AIf)
+               goToBlock(b.Succs[1].Block(), false)
+               s.Prog(wasm.AEnd)
+               goToBlock(b.Succs[0].Block(), true)
+
+       default:
+               panic("unexpected block")
+       }
+
+       // Entry point for the next block. Used by the JMP in goToBlock.
+       s.Prog(wasm.ARESUMEPOINT)
+}
+
+// ssaGenValue emits code for SSA value v. Ops with side effects (calls,
+// stores, checks) are handled here directly; everything else is computed
+// onto the wasm operand stack by ssaGenValueOnStack and then popped into
+// the value's assigned register.
+func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
+       switch v.Op {
+       case ssa.OpWasmLoweredStaticCall, ssa.OpWasmLoweredClosureCall, ssa.OpWasmLoweredInterCall:
+               s.PrepareCall(v)
+               if v.Aux == gc.Deferreturn {
+                       // add a resume point before call to deferreturn so it can be called again via jmpdefer
+                       s.Prog(wasm.ARESUMEPOINT)
+               }
+               if v.Op == ssa.OpWasmLoweredClosureCall {
+                       // Load the closure context pointer into CTXT before the call.
+                       getReg64(s, v.Args[1])
+                       setReg(s, wasm.REG_CTXT)
+               }
+               if sym, ok := v.Aux.(*obj.LSym); ok {
+                       // Direct call to a known symbol.
+                       p := s.Prog(obj.ACALL)
+                       p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: sym}
+               } else {
+                       // Indirect call: target address is pushed on the wasm stack.
+                       getReg64(s, v.Args[0])
+                       p := s.Prog(obj.ACALL)
+                       p.To = obj.Addr{Type: obj.TYPE_NONE}
+               }
+
+       case ssa.OpWasmLoweredMove:
+               // memmove-style copy: push dst, src, and byte count, then call
+               // the runtime helper wasmMove.
+               getReg32(s, v.Args[0])
+               getReg32(s, v.Args[1])
+               i32Const(s, int32(v.AuxInt))
+               p := s.Prog(wasm.ACall)
+               p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmMove}
+
+       case ssa.OpWasmLoweredZero:
+               // AuxInt is the count in 8-byte units (set to s/8 by the
+               // LoweredZero rewrite rule); call the runtime helper wasmZero.
+               getReg32(s, v.Args[0])
+               i32Const(s, int32(v.AuxInt))
+               p := s.Prog(wasm.ACall)
+               p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmZero}
+
+       case ssa.OpWasmLoweredNilCheck:
+               // If the pointer is zero, call sigpanic (no resume point needed).
+               getReg64(s, v.Args[0])
+               s.Prog(wasm.AI64Eqz)
+               s.Prog(wasm.AIf)
+               p := s.Prog(wasm.ACALLNORESUME)
+               p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.SigPanic}
+               s.Prog(wasm.AEnd)
+               if gc.Debug_checknil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers
+                       gc.Warnl(v.Pos, "generated nil check")
+               }
+
+       case ssa.OpWasmLoweredWB:
+               // Write barrier: push ptr and val, call the barrier function in v.Aux.
+               getReg64(s, v.Args[0])
+               getReg64(s, v.Args[1])
+               p := s.Prog(wasm.ACALLNORESUME) // TODO(neelance): If possible, turn this into a simple wasm.ACall.
+               p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: v.Aux.(*obj.LSym)}
+
+       case ssa.OpWasmI64Store8, ssa.OpWasmI64Store16, ssa.OpWasmI64Store32, ssa.OpWasmI64Store, ssa.OpWasmF32Store, ssa.OpWasmF64Store:
+               // Push address (i32) and value (i64/f64); AuxInt is the store offset.
+               getReg32(s, v.Args[0])
+               getReg64(s, v.Args[1])
+               if v.Op == ssa.OpWasmF32Store {
+                       // Values are kept as f64 on the stack; demote before a 32-bit store.
+                       s.Prog(wasm.AF32DemoteF64)
+               }
+               p := s.Prog(v.Op.Asm())
+               p.To = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}
+
+       case ssa.OpStoreReg:
+               // Spill a register value to its stack slot.
+               getReg(s, wasm.REG_SP)
+               getReg64(s, v.Args[0])
+               if v.Type.Etype == types.TFLOAT32 {
+                       s.Prog(wasm.AF32DemoteF64)
+               }
+               p := s.Prog(storeOp(v.Type))
+               gc.AddrAuto(&p.To, v)
+
+       default:
+               if v.Type.IsMemory() {
+                       return
+               }
+               // Compute the value onto the wasm stack, then pop it into
+               // the register assigned by regalloc.
+               ssaGenValueOnStack(s, v)
+               setReg(s, v.Reg())
+       }
+}
+
+// ssaGenValueOnStack emits code that leaves the result of v on the wasm
+// operand stack (it does not write any register; the caller does that).
+// Integer results are kept as i64 — comparison ops, which produce i32 in
+// WebAssembly, are widened with I64ExtendUI32; f32 values are promoted to f64.
+func ssaGenValueOnStack(s *gc.SSAGenState, v *ssa.Value) {
+       switch v.Op {
+       case ssa.OpWasmLoweredGetClosurePtr:
+               getReg(s, wasm.REG_CTXT)
+
+       case ssa.OpWasmLoweredGetCallerPC:
+               p := s.Prog(wasm.AI64Load)
+               // Caller PC is stored 8 bytes below first parameter.
+               p.From = obj.Addr{
+                       Type:   obj.TYPE_MEM,
+                       Name:   obj.NAME_PARAM,
+                       Offset: -8,
+               }
+
+       case ssa.OpWasmLoweredGetCallerSP:
+               p := s.Prog(wasm.AGet)
+               // Caller SP is the address of the first parameter.
+               p.From = obj.Addr{
+                       Type:   obj.TYPE_ADDR,
+                       Name:   obj.NAME_PARAM,
+                       Reg:    wasm.REG_SP,
+                       Offset: 0,
+               }
+
+       case ssa.OpWasmLoweredAddr:
+               // Address of a symbol (global) or of a local/param on the frame.
+               p := s.Prog(wasm.AGet)
+               switch n := v.Aux.(type) {
+               case *obj.LSym:
+                       p.From = obj.Addr{Type: obj.TYPE_ADDR, Name: obj.NAME_EXTERN, Sym: n}
+               case *gc.Node:
+                       p.From = obj.Addr{
+                               Type:   obj.TYPE_ADDR,
+                               Name:   obj.NAME_AUTO,
+                               Reg:    v.Args[0].Reg(),
+                               Offset: n.Xoffset,
+                       }
+                       if n.Class() == gc.PPARAM || n.Class() == gc.PPARAMOUT {
+                               p.From.Name = obj.NAME_PARAM
+                       }
+               default:
+                       panic("wasm: bad LoweredAddr")
+               }
+
+       case ssa.OpWasmLoweredRound32F:
+               // Round to float32 precision by demoting then promoting.
+               getReg64(s, v.Args[0])
+               s.Prog(wasm.AF32DemoteF64)
+               s.Prog(wasm.AF64PromoteF32)
+
+       case ssa.OpWasmLoweredConvert:
+               getReg64(s, v.Args[0])
+
+       case ssa.OpWasmSelect:
+               // wasm select takes an i32 condition; wrap the i64 condition first.
+               getReg64(s, v.Args[0])
+               getReg64(s, v.Args[1])
+               getReg64(s, v.Args[2])
+               s.Prog(wasm.AI32WrapI64)
+               s.Prog(v.Op.Asm())
+
+       case ssa.OpWasmI64AddConst:
+               getReg64(s, v.Args[0])
+               i64Const(s, v.AuxInt)
+               s.Prog(v.Op.Asm())
+
+       case ssa.OpWasmI64Const:
+               i64Const(s, v.AuxInt)
+
+       case ssa.OpWasmF64Const:
+               f64Const(s, v.AuxFloat())
+
+       case ssa.OpWasmI64Load8U, ssa.OpWasmI64Load8S, ssa.OpWasmI64Load16U, ssa.OpWasmI64Load16S, ssa.OpWasmI64Load32U, ssa.OpWasmI64Load32S, ssa.OpWasmI64Load, ssa.OpWasmF32Load, ssa.OpWasmF64Load:
+               // Loads: push address (i32); AuxInt is the load offset.
+               getReg32(s, v.Args[0])
+               p := s.Prog(v.Op.Asm())
+               p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: v.AuxInt}
+               if v.Op == ssa.OpWasmF32Load {
+                       s.Prog(wasm.AF64PromoteF32)
+               }
+
+       case ssa.OpWasmI64Eqz:
+               getReg64(s, v.Args[0])
+               s.Prog(v.Op.Asm())
+               s.Prog(wasm.AI64ExtendUI32)
+
+       case ssa.OpWasmI64Eq, ssa.OpWasmI64Ne, ssa.OpWasmI64LtS, ssa.OpWasmI64LtU, ssa.OpWasmI64GtS, ssa.OpWasmI64GtU, ssa.OpWasmI64LeS, ssa.OpWasmI64LeU, ssa.OpWasmI64GeS, ssa.OpWasmI64GeU, ssa.OpWasmF64Eq, ssa.OpWasmF64Ne, ssa.OpWasmF64Lt, ssa.OpWasmF64Gt, ssa.OpWasmF64Le, ssa.OpWasmF64Ge:
+               // Comparisons yield i32; widen back to the canonical i64.
+               getReg64(s, v.Args[0])
+               getReg64(s, v.Args[1])
+               s.Prog(v.Op.Asm())
+               s.Prog(wasm.AI64ExtendUI32)
+
+       case ssa.OpWasmI64Add, ssa.OpWasmI64Sub, ssa.OpWasmI64Mul, ssa.OpWasmI64DivU, ssa.OpWasmI64RemS, ssa.OpWasmI64RemU, ssa.OpWasmI64And, ssa.OpWasmI64Or, ssa.OpWasmI64Xor, ssa.OpWasmI64Shl, ssa.OpWasmI64ShrS, ssa.OpWasmI64ShrU, ssa.OpWasmF64Add, ssa.OpWasmF64Sub, ssa.OpWasmF64Mul, ssa.OpWasmF64Div:
+               getReg64(s, v.Args[0])
+               getReg64(s, v.Args[1])
+               s.Prog(v.Op.Asm())
+
+       case ssa.OpWasmI64DivS:
+               getReg64(s, v.Args[0])
+               getReg64(s, v.Args[1])
+               if v.Type.Size() == 8 {
+                       // Division of int64 needs helper function wasmDiv to handle the MinInt64 / -1 case.
+                       p := s.Prog(wasm.ACall)
+                       p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmDiv}
+                       break
+               }
+               s.Prog(wasm.AI64DivS)
+
+       case ssa.OpWasmI64TruncSF64:
+               // Helper avoids trapping on out-of-range float inputs.
+               getReg64(s, v.Args[0])
+               p := s.Prog(wasm.ACall)
+               p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncS}
+
+       case ssa.OpWasmI64TruncUF64:
+               getReg64(s, v.Args[0])
+               p := s.Prog(wasm.ACall)
+               p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: gc.WasmTruncU}
+
+       case ssa.OpWasmF64Neg, ssa.OpWasmF64ConvertSI64, ssa.OpWasmF64ConvertUI64:
+               getReg64(s, v.Args[0])
+               s.Prog(v.Op.Asm())
+
+       case ssa.OpLoadReg:
+               // Reload from a spill slot; promote f32 to the canonical f64.
+               p := s.Prog(loadOp(v.Type))
+               gc.AddrAuto(&p.From, v.Args[0])
+               if v.Type.Etype == types.TFLOAT32 {
+                       s.Prog(wasm.AF64PromoteF32)
+               }
+
+       case ssa.OpCopy:
+               getReg64(s, v.Args[0])
+
+       default:
+               v.Fatalf("unexpected op: %s", v.Op)
+
+       }
+}
+
+// getReg32 pushes v's register onto the wasm stack as an i32, wrapping the
+// canonical i64 representation. SP is exempt because it is already kept as
+// a 32-bit value (see REG_SP).
+func getReg32(s *gc.SSAGenState, v *ssa.Value) {
+       reg := v.Reg()
+       getReg(s, reg)
+       if reg != wasm.REG_SP {
+               s.Prog(wasm.AI32WrapI64)
+       }
+}
+
+// getReg64 pushes v's register onto the wasm stack as an i64. SP is stored
+// as a 32-bit value, so it is zero-extended here; all other registers are
+// already 64-bit.
+func getReg64(s *gc.SSAGenState, v *ssa.Value) {
+       reg := v.Reg()
+       getReg(s, reg)
+       if reg == wasm.REG_SP {
+               s.Prog(wasm.AI64ExtendUI32)
+       }
+}
+
+// i32Const pushes the constant val onto the wasm stack as an i32.
+func i32Const(s *gc.SSAGenState, val int32) {
+       p := s.Prog(wasm.AI32Const)
+       p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: int64(val)}
+}
+
+// i64Const pushes the constant val onto the wasm stack as an i64.
+func i64Const(s *gc.SSAGenState, val int64) {
+       p := s.Prog(wasm.AI64Const)
+       p.From = obj.Addr{Type: obj.TYPE_CONST, Offset: val}
+}
+
+// f64Const pushes the constant val onto the wasm stack as an f64.
+func f64Const(s *gc.SSAGenState, val float64) {
+       p := s.Prog(wasm.AF64Const)
+       p.From = obj.Addr{Type: obj.TYPE_FCONST, Val: val}
+}
+
+// getReg pushes the raw value of reg onto the wasm stack (wasm "get").
+func getReg(s *gc.SSAGenState, reg int16) {
+       p := s.Prog(wasm.AGet)
+       p.From = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
+}
+
+// setReg pops the top of the wasm stack into reg (wasm "set").
+func setReg(s *gc.SSAGenState, reg int16) {
+       p := s.Prog(wasm.ASet)
+       p.To = obj.Addr{Type: obj.TYPE_REG, Reg: reg}
+}
+
+// loadOp returns the wasm load instruction for a value of type t, selecting
+// on size and (for integers) signedness. Integers always load into i64;
+// narrower types use the sign-/zero-extending load variants.
+func loadOp(t *types.Type) obj.As {
+       if t.IsFloat() {
+               switch t.Size() {
+               case 4:
+                       return wasm.AF32Load
+               case 8:
+                       return wasm.AF64Load
+               default:
+                       panic("bad load type")
+               }
+       }
+
+       switch t.Size() {
+       case 1:
+               if t.IsSigned() {
+                       return wasm.AI64Load8S
+               }
+               return wasm.AI64Load8U
+       case 2:
+               if t.IsSigned() {
+                       return wasm.AI64Load16S
+               }
+               return wasm.AI64Load16U
+       case 4:
+               if t.IsSigned() {
+                       return wasm.AI64Load32S
+               }
+               return wasm.AI64Load32U
+       case 8:
+               return wasm.AI64Load
+       default:
+               panic("bad load type")
+       }
+}
+
+// storeOp returns the wasm store instruction for a value of type t.
+// Integer stores narrow from the canonical i64 representation, so only
+// size matters (no signedness distinction, unlike loadOp).
+func storeOp(t *types.Type) obj.As {
+       if t.IsFloat() {
+               switch t.Size() {
+               case 4:
+                       return wasm.AF32Store
+               case 8:
+                       return wasm.AF64Store
+               default:
+                       panic("bad store type")
+               }
+       }
+
+       switch t.Size() {
+       case 1:
+               return wasm.AI64Store8
+       case 2:
+               return wasm.AI64Store16
+       case 4:
+               return wasm.AI64Store32
+       case 8:
+               return wasm.AI64Store
+       default:
+               panic("bad store type")
+       }
+}
index e699b91f5d2b2c2247a80556e270dd9169026ee2..bf4289e8f1c639256a6efc3cb16b8e0b821aab08 100644 (file)
@@ -13,6 +13,7 @@ import (
        "cmd/compile/internal/mips64"
        "cmd/compile/internal/ppc64"
        "cmd/compile/internal/s390x"
+       "cmd/compile/internal/wasm"
        "cmd/compile/internal/x86"
        "cmd/internal/objabi"
        "fmt"
@@ -33,6 +34,7 @@ var archInits = map[string]func(*gc.Arch){
        "ppc64":    ppc64.Init,
        "ppc64le":  ppc64.Init,
        "s390x":    s390x.Init,
+       "wasm":     wasm.Init,
 }
 
 func main() {
index 24d2e1e7d6256f3bf8750413d77ba67162e3e2b0..880b76f32ddde8471a2140653e571fd9fdb7f21d 100644 (file)
@@ -49,6 +49,7 @@ var bootstrapDirs = []string{
        "cmd/compile/internal/ssa",
        "cmd/compile/internal/syntax",
        "cmd/compile/internal/x86",
+       "cmd/compile/internal/wasm",
        "cmd/internal/bio",
        "cmd/internal/gcprog",
        "cmd/internal/dwarf",
@@ -61,6 +62,7 @@ var bootstrapDirs = []string{
        "cmd/internal/obj/ppc64",
        "cmd/internal/obj/s390x",
        "cmd/internal/obj/x86",
+       "cmd/internal/obj/wasm",
        "cmd/internal/src",
        "cmd/internal/sys",
        "cmd/link",
@@ -100,6 +102,8 @@ var ignorePrefixes = []string{
 var ignoreSuffixes = []string{
        "_arm64.s",
        "_arm64.go",
+       "_wasm.s",
+       "_wasm.go",
 }
 
 func bootstrapBuildTools() {
index 88efe8b757bd8692bd7af6affc47dd07df626f69..6e5333ccbc40d45465c6ab42c7430bbf5a91b32e 100644 (file)
@@ -89,7 +89,7 @@ func (gcToolchain) gc(b *Builder, a *Action, archive string, importcfg []byte, a
                gcargs = append(gcargs, "-buildid", a.buildID)
        }
        platform := cfg.Goos + "/" + cfg.Goarch
-       if p.Internal.OmitDebug || platform == "nacl/amd64p32" || cfg.Goos == "plan9" {
+       if p.Internal.OmitDebug || platform == "nacl/amd64p32" || cfg.Goos == "plan9" || cfg.Goarch == "wasm" {
                gcargs = append(gcargs, "-dwarf=false")
        }
        if strings.HasPrefix(runtimeVersion, "go1") && !strings.Contains(os.Args[0], "go_bootstrap") {
index ea1129400059e78d1386b3ca1228e827d202e32b..2fbbf6cb258abd873306a5ce2c19c7393b9e1333 100644 (file)
@@ -364,6 +364,7 @@ const (
        ABaseARM64
        ABaseMIPS
        ABaseS390X
+       ABaseWasm
 
        AllowedOpCodes = 1 << 11            // The number of opcodes available for any given architecture.
        AMask          = AllowedOpCodes - 1 // AND with this to use the opcode as an array index.
@@ -595,7 +596,7 @@ func (ctxt *Link) Logf(format string, args ...interface{}) {
 // the hardware stack pointer and the local variable area.
 func (ctxt *Link) FixedFrameSize() int64 {
        switch ctxt.Arch.Family {
-       case sys.AMD64, sys.I386:
+       case sys.AMD64, sys.I386, sys.Wasm:
                return 0
        case sys.PPC64:
                // PIC code on ppc64le requires 32 bytes of stack, and it's easier to
index c4b3712359453cd8c921083a1284fcbc0355cb28..1c7a962e570117e316e9c09a6804eb51385cd32c 100644 (file)
@@ -26,7 +26,7 @@ var (
        pkg    = flag.String("p", "", "package name")
 )
 
-var Are = regexp.MustCompile(`^\tA([A-Z0-9]+)`)
+var Are = regexp.MustCompile(`^\tA([A-Za-z0-9]+)`)
 
 func main() {
        flag.Parse()
index 89d481e726b1fd6d19ab4fbb6601dccf8adfa071..98475d00cabd7ea50e7cf76df6b94c4ef9135d78 100644 (file)
@@ -394,6 +394,7 @@ const (
        RBaseARM64 = 8 * 1024  // range [8k, 13k)
        RBaseMIPS  = 13 * 1024 // range [13k, 14k)
        RBaseS390X = 14 * 1024 // range [14k, 15k)
+       RBaseWasm  = 16 * 1024
 )
 
 // RegisterRegister binds a pretty-printer (Rconv) for register
diff --git a/src/cmd/internal/obj/wasm/a.out.go b/src/cmd/internal/obj/wasm/a.out.go
new file mode 100644 (file)
index 0000000..9c04be2
--- /dev/null
@@ -0,0 +1,288 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package wasm
+
+import "cmd/internal/obj"
+
+//go:generate go run ../stringer.go -i $GOFILE -o anames.go -p wasm
+
+// Prog mark flags, mirroring the flag sets used by the other obj backends —
+// presumably consumed during assembly in wasmobj.go (not shown); TODO confirm.
+const (
+       /* mark flags */
+       DONE          = 1 << iota
+       PRESERVEFLAGS // not allowed to clobber flags
+)
+
+/*
+ *     wasm
+ */
+// Opcode space for the wasm assembler. The first few ops (ACallImport
+// through ANot) and everything after the "End of low-level WebAssembly
+// instructions" marker are assembler-level pseudo-instructions; the ops
+// in between are the actual WebAssembly MVP instructions, declared in
+// opcode-encoding order as noted below.
+const (
+       ACallImport = obj.ABaseWasm + obj.A_ARCHSPECIFIC + iota
+       AGet
+       ASet
+       ATee
+       ANot // alias for I32Eqz
+
+       // The following are low-level WebAssembly instructions.
+       // Their order matters, since it matches the opcode encoding.
+       // Gaps in the encoding are indicated by comments.
+
+       AUnreachable // opcode 0x00
+       ANop
+       ABlock
+       ALoop
+       AIf
+       AElse
+
+       AEnd // opcode 0x0B
+       ABr
+       ABrIf
+       ABrTable
+       // ACall and AReturn are WebAssembly instructions. obj.ACALL and obj.ARET are higher level instructions
+       // with Go semantics, e.g. they manipulate the Go stack on the linear memory.
+       AReturn
+       ACall
+       ACallIndirect
+
+       ADrop // opcode 0x1A
+       ASelect
+
+       AI32Load // opcode 0x28
+       AI64Load
+       AF32Load
+       AF64Load
+       AI32Load8S
+       AI32Load8U
+       AI32Load16S
+       AI32Load16U
+       AI64Load8S
+       AI64Load8U
+       AI64Load16S
+       AI64Load16U
+       AI64Load32S
+       AI64Load32U
+       AI32Store
+       AI64Store
+       AF32Store
+       AF64Store
+       AI32Store8
+       AI32Store16
+       AI64Store8
+       AI64Store16
+       AI64Store32
+       ACurrentMemory
+       AGrowMemory
+
+       AI32Const
+       AI64Const
+       AF32Const
+       AF64Const
+
+       AI32Eqz
+       AI32Eq
+       AI32Ne
+       AI32LtS
+       AI32LtU
+       AI32GtS
+       AI32GtU
+       AI32LeS
+       AI32LeU
+       AI32GeS
+       AI32GeU
+
+       AI64Eqz
+       AI64Eq
+       AI64Ne
+       AI64LtS
+       AI64LtU
+       AI64GtS
+       AI64GtU
+       AI64LeS
+       AI64LeU
+       AI64GeS
+       AI64GeU
+
+       AF32Eq
+       AF32Ne
+       AF32Lt
+       AF32Gt
+       AF32Le
+       AF32Ge
+
+       AF64Eq
+       AF64Ne
+       AF64Lt
+       AF64Gt
+       AF64Le
+       AF64Ge
+
+       AI32Clz
+       AI32Ctz
+       AI32Popcnt
+       AI32Add
+       AI32Sub
+       AI32Mul
+       AI32DivS
+       AI32DivU
+       AI32RemS
+       AI32RemU
+       AI32And
+       AI32Or
+       AI32Xor
+       AI32Shl
+       AI32ShrS
+       AI32ShrU
+       AI32Rotl
+       AI32Rotr
+
+       AI64Clz
+       AI64Ctz
+       AI64Popcnt
+       AI64Add
+       AI64Sub
+       AI64Mul
+       AI64DivS
+       AI64DivU
+       AI64RemS
+       AI64RemU
+       AI64And
+       AI64Or
+       AI64Xor
+       AI64Shl
+       AI64ShrS
+       AI64ShrU
+       AI64Rotl
+       AI64Rotr
+
+       AF32Abs
+       AF32Neg
+       AF32Ceil
+       AF32Floor
+       AF32Trunc
+       AF32Nearest
+       AF32Sqrt
+       AF32Add
+       AF32Sub
+       AF32Mul
+       AF32Div
+       AF32Min
+       AF32Max
+       AF32Copysign
+
+       AF64Abs
+       AF64Neg
+       AF64Ceil
+       AF64Floor
+       AF64Trunc
+       AF64Nearest
+       AF64Sqrt
+       AF64Add
+       AF64Sub
+       AF64Mul
+       AF64Div
+       AF64Min
+       AF64Max
+       AF64Copysign
+
+       AI32WrapI64
+       AI32TruncSF32
+       AI32TruncUF32
+       AI32TruncSF64
+       AI32TruncUF64
+       AI64ExtendSI32
+       AI64ExtendUI32
+       AI64TruncSF32
+       AI64TruncUF32
+       AI64TruncSF64
+       AI64TruncUF64
+       AF32ConvertSI32
+       AF32ConvertUI32
+       AF32ConvertSI64
+       AF32ConvertUI64
+       AF32DemoteF64
+       AF64ConvertSI32
+       AF64ConvertUI32
+       AF64ConvertSI64
+       AF64ConvertUI64
+       AF64PromoteF32
+       AI32ReinterpretF32
+       AI64ReinterpretF64
+       AF32ReinterpretI32
+       AF64ReinterpretI64
+
+       // End of low-level WebAssembly instructions.
+
+       // ARESUMEPOINT marks a point where a suspended computation may resume
+       // — presumably expanded by the obj backend (wasmobj.go, not shown);
+       // TODO confirm exact semantics.
+       ARESUMEPOINT
+       // ACALLNORESUME is a call which is not followed by a resume point.
+       // It is allowed inside of WebAssembly blocks, whereas obj.ACALL is not.
+       // However, it is not allowed to switch goroutines while inside of an ACALLNORESUME call.
+       ACALLNORESUME
+
+       // Pseudo move instructions (byte/half/word/double) in the style of
+       // the other Go architectures — lowered by the obj backend; TODO confirm.
+       AMOVB
+       AMOVH
+       AMOVW
+       AMOVD
+
+       AWORD
+       ALAST // sentinel: must remain last
+)
+
+const (
+       REG_NONE = 0 // no register (zero value of Addr.Reg)
+)
+
+// Register numbering for the wasm backend, based at obj.RBaseWasm.
+// The "globals" are fixed-purpose registers; the "locals" R0-R15/F0-F15
+// are the general-purpose integer and float registers used by regalloc.
+const (
+       // globals
+       REG_PC_F = obj.RBaseWasm + iota
+       REG_PC_B
+       REG_SP // SP is currently 32-bit, until 64-bit memory operations are available
+       REG_CTXT
+       REG_g
+       // RET* are used by runtime.return0 and runtime.reflectcall. These functions pass return values in registers.
+       REG_RET0
+       REG_RET1
+       REG_RET2
+       REG_RET3
+
+       // locals
+       REG_R0
+       REG_R1
+       REG_R2
+       REG_R3
+       REG_R4
+       REG_R5
+       REG_R6
+       REG_R7
+       REG_R8
+       REG_R9
+       REG_R10
+       REG_R11
+       REG_R12
+       REG_R13
+       REG_R14
+       REG_R15
+       REG_F0
+       REG_F1
+       REG_F2
+       REG_F3
+       REG_F4
+       REG_F5
+       REG_F6
+       REG_F7
+       REG_F8
+       REG_F9
+       REG_F10
+       REG_F11
+       REG_F12
+       REG_F13
+       REG_F14
+       REG_F15
+
+       MAXREG // one past the highest register number
+
+       MINREG  = REG_PC_F
+       REGSP   = REG_SP
+       REGCTXT = REG_CTXT
+       REGG    = REG_g
+)
diff --git a/src/cmd/internal/obj/wasm/anames.go b/src/cmd/internal/obj/wasm/anames.go
new file mode 100644 (file)
index 0000000..20d0444
--- /dev/null
@@ -0,0 +1,189 @@
+// Generated by stringer -i a.out.go -o anames.go -p wasm
+// Do not edit.
+
+package wasm
+
+import "cmd/internal/obj"
+
+var Anames = []string{
+       obj.A_ARCHSPECIFIC: "CallImport",
+       "Get",
+       "Set",
+       "Tee",
+       "Not",
+       "Unreachable",
+       "Nop",
+       "Block",
+       "Loop",
+       "If",
+       "Else",
+       "End",
+       "Br",
+       "BrIf",
+       "BrTable",
+       "Return",
+       "Call",
+       "CallIndirect",
+       "Drop",
+       "Select",
+       "I32Load",
+       "I64Load",
+       "F32Load",
+       "F64Load",
+       "I32Load8S",
+       "I32Load8U",
+       "I32Load16S",
+       "I32Load16U",
+       "I64Load8S",
+       "I64Load8U",
+       "I64Load16S",
+       "I64Load16U",
+       "I64Load32S",
+       "I64Load32U",
+       "I32Store",
+       "I64Store",
+       "F32Store",
+       "F64Store",
+       "I32Store8",
+       "I32Store16",
+       "I64Store8",
+       "I64Store16",
+       "I64Store32",
+       "CurrentMemory",
+       "GrowMemory",
+       "I32Const",
+       "I64Const",
+       "F32Const",
+       "F64Const",
+       "I32Eqz",
+       "I32Eq",
+       "I32Ne",
+       "I32LtS",
+       "I32LtU",
+       "I32GtS",
+       "I32GtU",
+       "I32LeS",
+       "I32LeU",
+       "I32GeS",
+       "I32GeU",
+       "I64Eqz",
+       "I64Eq",
+       "I64Ne",
+       "I64LtS",
+       "I64LtU",
+       "I64GtS",
+       "I64GtU",
+       "I64LeS",
+       "I64LeU",
+       "I64GeS",
+       "I64GeU",
+       "F32Eq",
+       "F32Ne",
+       "F32Lt",
+       "F32Gt",
+       "F32Le",
+       "F32Ge",
+       "F64Eq",
+       "F64Ne",
+       "F64Lt",
+       "F64Gt",
+       "F64Le",
+       "F64Ge",
+       "I32Clz",
+       "I32Ctz",
+       "I32Popcnt",
+       "I32Add",
+       "I32Sub",
+       "I32Mul",
+       "I32DivS",
+       "I32DivU",
+       "I32RemS",
+       "I32RemU",
+       "I32And",
+       "I32Or",
+       "I32Xor",
+       "I32Shl",
+       "I32ShrS",
+       "I32ShrU",
+       "I32Rotl",
+       "I32Rotr",
+       "I64Clz",
+       "I64Ctz",
+       "I64Popcnt",
+       "I64Add",
+       "I64Sub",
+       "I64Mul",
+       "I64DivS",
+       "I64DivU",
+       "I64RemS",
+       "I64RemU",
+       "I64And",
+       "I64Or",
+       "I64Xor",
+       "I64Shl",
+       "I64ShrS",
+       "I64ShrU",
+       "I64Rotl",
+       "I64Rotr",
+       "F32Abs",
+       "F32Neg",
+       "F32Ceil",
+       "F32Floor",
+       "F32Trunc",
+       "F32Nearest",
+       "F32Sqrt",
+       "F32Add",
+       "F32Sub",
+       "F32Mul",
+       "F32Div",
+       "F32Min",
+       "F32Max",
+       "F32Copysign",
+       "F64Abs",
+       "F64Neg",
+       "F64Ceil",
+       "F64Floor",
+       "F64Trunc",
+       "F64Nearest",
+       "F64Sqrt",
+       "F64Add",
+       "F64Sub",
+       "F64Mul",
+       "F64Div",
+       "F64Min",
+       "F64Max",
+       "F64Copysign",
+       "I32WrapI64",
+       "I32TruncSF32",
+       "I32TruncUF32",
+       "I32TruncSF64",
+       "I32TruncUF64",
+       "I64ExtendSI32",
+       "I64ExtendUI32",
+       "I64TruncSF32",
+       "I64TruncUF32",
+       "I64TruncSF64",
+       "I64TruncUF64",
+       "F32ConvertSI32",
+       "F32ConvertUI32",
+       "F32ConvertSI64",
+       "F32ConvertUI64",
+       "F32DemoteF64",
+       "F64ConvertSI32",
+       "F64ConvertUI32",
+       "F64ConvertSI64",
+       "F64ConvertUI64",
+       "F64PromoteF32",
+       "I32ReinterpretF32",
+       "I64ReinterpretF64",
+       "F32ReinterpretI32",
+       "F64ReinterpretI64",
+       "RESUMEPOINT",
+       "CALLNORESUME",
+       "MOVB",
+       "MOVH",
+       "MOVW",
+       "MOVD",
+       "WORD",
+       "LAST",
+}
diff --git a/src/cmd/internal/obj/wasm/wasmobj.go b/src/cmd/internal/obj/wasm/wasmobj.go
new file mode 100644 (file)
index 0000000..2b7e12a
--- /dev/null
@@ -0,0 +1,934 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package wasm
+
+import (
+       "bytes"
+       "cmd/internal/obj"
+       "cmd/internal/objabi"
+       "cmd/internal/sys"
+       "encoding/binary"
+       "fmt"
+       "io"
+       "math"
+)
+
+var Register = map[string]int16{
+       "PC_F": REG_PC_F,
+       "PC_B": REG_PC_B,
+       "SP":   REG_SP,
+       "CTXT": REG_CTXT,
+       "g":    REG_g,
+       "RET0": REG_RET0,
+       "RET1": REG_RET1,
+       "RET2": REG_RET2,
+       "RET3": REG_RET3,
+
+       "R0":  REG_R0,
+       "R1":  REG_R1,
+       "R2":  REG_R2,
+       "R3":  REG_R3,
+       "R4":  REG_R4,
+       "R5":  REG_R5,
+       "R6":  REG_R6,
+       "R7":  REG_R7,
+       "R8":  REG_R8,
+       "R9":  REG_R9,
+       "R10": REG_R10,
+       "R11": REG_R11,
+       "R12": REG_R12,
+       "R13": REG_R13,
+       "R14": REG_R14,
+       "R15": REG_R15,
+
+       "F0":  REG_F0,
+       "F1":  REG_F1,
+       "F2":  REG_F2,
+       "F3":  REG_F3,
+       "F4":  REG_F4,
+       "F5":  REG_F5,
+       "F6":  REG_F6,
+       "F7":  REG_F7,
+       "F8":  REG_F8,
+       "F9":  REG_F9,
+       "F10": REG_F10,
+       "F11": REG_F11,
+       "F12": REG_F12,
+       "F13": REG_F13,
+       "F14": REG_F14,
+       "F15": REG_F15,
+}
+
+var registerNames []string
+
+func init() {
+       obj.RegisterRegister(MINREG, MAXREG, rconv)
+       obj.RegisterOpcode(obj.ABaseWasm, Anames)
+
+       registerNames = make([]string, MAXREG-MINREG)
+       for name, reg := range Register {
+               registerNames[reg-MINREG] = name
+       }
+}
+
+func rconv(r int) string {
+       return registerNames[r-MINREG]
+}
+
+var unaryDst = map[obj.As]bool{
+       ASet:          true,
+       ATee:          true,
+       ACall:         true,
+       ACallIndirect: true,
+       ACallImport:   true,
+       ABr:           true,
+       ABrIf:         true,
+       ABrTable:      true,
+       AI32Store:     true,
+       AI64Store:     true,
+       AF32Store:     true,
+       AF64Store:     true,
+       AI32Store8:    true,
+       AI32Store16:   true,
+       AI64Store8:    true,
+       AI64Store16:   true,
+       AI64Store32:   true,
+       ACALLNORESUME: true,
+}
+
+var Linkwasm = obj.LinkArch{
+       Arch:       sys.ArchWasm,
+       Init:       instinit,
+       Preprocess: preprocess,
+       Assemble:   assemble,
+       UnaryDst:   unaryDst,
+}
+
+var (
+       morestack       *obj.LSym
+       morestackNoCtxt *obj.LSym
+       gcWriteBarrier  *obj.LSym
+       sigpanic        *obj.LSym
+       deferreturn     *obj.LSym
+       jmpdefer        *obj.LSym
+)
+
+const (
+       /* mark flags */
+       WasmImport = 1 << 0
+)
+
+func instinit(ctxt *obj.Link) {
+       morestack = ctxt.Lookup("runtime.morestack")
+       morestackNoCtxt = ctxt.Lookup("runtime.morestack_noctxt")
+       gcWriteBarrier = ctxt.Lookup("runtime.gcWriteBarrier")
+       sigpanic = ctxt.Lookup("runtime.sigpanic")
+       deferreturn = ctxt.Lookup("runtime.deferreturn")
+       jmpdefer = ctxt.Lookup(`"".jmpdefer`)
+}
+
+func preprocess(ctxt *obj.Link, s *obj.LSym, newprog obj.ProgAlloc) {
+       appendp := func(p *obj.Prog, as obj.As, args ...obj.Addr) *obj.Prog {
+               if p.As != obj.ANOP {
+                       p2 := obj.Appendp(p, newprog)
+                       p2.Pc = p.Pc
+                       p = p2
+               }
+               p.As = as
+               switch len(args) {
+               case 0:
+                       p.From = obj.Addr{}
+                       p.To = obj.Addr{}
+               case 1:
+                       if unaryDst[as] {
+                               p.From = obj.Addr{}
+                               p.To = args[0]
+                       } else {
+                               p.From = args[0]
+                               p.To = obj.Addr{}
+                       }
+               case 2:
+                       p.From = args[0]
+                       p.To = args[1]
+               default:
+                       panic("bad args")
+               }
+               return p
+       }
+
+       framesize := s.Func.Text.To.Offset
+       if framesize < 0 {
+               panic("bad framesize")
+       }
+       s.Func.Args = s.Func.Text.To.Val.(int32)
+       s.Func.Locals = int32(framesize)
+
+       if s.Func.Text.From.Sym.Wrapper() {
+               // if g._panic != nil && g._panic.argp == FP {
+               //   g._panic.argp = bottom-of-frame
+               // }
+               //
+               // MOVD g_panic(g), R0
+               // Get R0
+               // I64Eqz
+               // Not
+               // If
+               //   Get SP
+               //   I64ExtendUI32
+               //   I64Const $framesize+8
+               //   I64Add
+               //   I64Load panic_argp(R0)
+               //   I64Eq
+               //   If
+               //     MOVD SP, panic_argp(R0)
+               //   End
+               // End
+
+               gpanic := obj.Addr{
+                       Type:   obj.TYPE_MEM,
+                       Reg:    REGG,
+                       Offset: 4 * 8, // g_panic
+               }
+
+               panicargp := obj.Addr{
+                       Type:   obj.TYPE_MEM,
+                       Reg:    REG_R0,
+                       Offset: 0, // panic.argp
+               }
+
+               p := s.Func.Text
+               p = appendp(p, AMOVD, gpanic, regAddr(REG_R0))
+
+               p = appendp(p, AGet, regAddr(REG_R0))
+               p = appendp(p, AI64Eqz)
+               p = appendp(p, ANot)
+               p = appendp(p, AIf)
+
+               p = appendp(p, AGet, regAddr(REG_SP))
+               p = appendp(p, AI64ExtendUI32)
+               p = appendp(p, AI64Const, constAddr(framesize+8))
+               p = appendp(p, AI64Add)
+               p = appendp(p, AI64Load, panicargp)
+
+               p = appendp(p, AI64Eq)
+               p = appendp(p, AIf)
+               p = appendp(p, AMOVD, regAddr(REG_SP), panicargp)
+               p = appendp(p, AEnd)
+
+               p = appendp(p, AEnd)
+       }
+
+       if framesize > 0 {
+               p := s.Func.Text
+               p = appendp(p, AGet, regAddr(REG_SP))
+               p = appendp(p, AI32Const, constAddr(framesize))
+               p = appendp(p, AI32Sub)
+               p = appendp(p, ASet, regAddr(REG_SP))
+               p.Spadj = int32(framesize)
+       }
+
+       // Introduce resume points for CALL instructions
+       // and collect other explicit resume points.
+       numResumePoints := 0
+       explicitBlockDepth := 0
+       pc := int64(0) // pc is only incremented when necessary, this avoids bloat of the BrTable instruction
+       var tableIdxs []uint64
+       tablePC := int64(0)
+       base := ctxt.PosTable.Pos(s.Func.Text.Pos).Base()
+       for p := s.Func.Text; p != nil; p = p.Link {
+               prevBase := base
+               base = ctxt.PosTable.Pos(p.Pos).Base()
+
+               switch p.As {
+               case ABlock, ALoop, AIf:
+                       explicitBlockDepth++
+
+               case AEnd:
+                       if explicitBlockDepth == 0 {
+                               panic("End without block")
+                       }
+                       explicitBlockDepth--
+
+               case ARESUMEPOINT:
+                       if explicitBlockDepth != 0 {
+                               panic("RESUME can only be used on toplevel")
+                       }
+                       p.As = AEnd
+                       for tablePC <= pc {
+                               tableIdxs = append(tableIdxs, uint64(numResumePoints))
+                               tablePC++
+                       }
+                       numResumePoints++
+                       pc++
+
+               case obj.ACALL:
+                       if explicitBlockDepth != 0 {
+                               panic("CALL can only be used on toplevel, try CALLNORESUME instead")
+                       }
+                       appendp(p, ARESUMEPOINT)
+               }
+
+               p.Pc = pc
+
+               // Increase pc whenever some pc-value table needs a new entry. Don't increase it
+               // more often to avoid bloat of the BrTable instruction.
+               // The "base != prevBase" condition detects inlined instructions. They are an
+               // implicit call, so entering and leaving this section affects the stack trace.
+               if p.As == ACALLNORESUME || p.As == obj.ANOP || p.Spadj != 0 || base != prevBase {
+                       pc++
+               }
+       }
+       tableIdxs = append(tableIdxs, uint64(numResumePoints))
+       s.Size = pc + 1
+
+       if !s.Func.Text.From.Sym.NoSplit() {
+               p := s.Func.Text
+
+               if framesize <= objabi.StackSmall {
+                       // small stack: SP <= stackguard
+                       // Get SP
+                       // Get g
+                       // I32WrapI64
+                       // I32Load $stackguard0
+                       // I32LeU
+
+                       p = appendp(p, AGet, regAddr(REG_SP))
+                       p = appendp(p, AGet, regAddr(REGG))
+                       p = appendp(p, AI32WrapI64)
+                       p = appendp(p, AI32Load, constAddr(2*int64(ctxt.Arch.PtrSize))) // G.stackguard0
+                       p = appendp(p, AI32LeU)
+               } else {
+                       // large stack: SP-framesize <= stackguard-StackSmall
+                       //              SP <= stackguard+(framesize-StackSmall)
+                       // Get SP
+                       // Get g
+                       // I32WrapI64
+                       // I32Load $stackguard0
+                       // I32Const $(framesize-StackSmall)
+                       // I32Add
+                       // I32LeU
+
+                       p = appendp(p, AGet, regAddr(REG_SP))
+                       p = appendp(p, AGet, regAddr(REGG))
+                       p = appendp(p, AI32WrapI64)
+                       p = appendp(p, AI32Load, constAddr(2*int64(ctxt.Arch.PtrSize))) // G.stackguard0
+                       p = appendp(p, AI32Const, constAddr(int64(framesize)-objabi.StackSmall))
+                       p = appendp(p, AI32Add)
+                       p = appendp(p, AI32LeU)
+               }
+               // TODO(neelance): handle wraparound case
+
+               p = appendp(p, AIf)
+               p = appendp(p, obj.ACALL, constAddr(0))
+               if s.Func.Text.From.Sym.NeedCtxt() {
+                       p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: morestack}
+               } else {
+                       p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: morestackNoCtxt}
+               }
+               p = appendp(p, AEnd)
+       }
+
+       // Add Block instructions for resume points and BrTable to jump to selected resume point.
+       if numResumePoints > 0 {
+               p := s.Func.Text
+               p = appendp(p, ALoop) // entryPointLoop, used to jump between basic blocks
+
+               for i := 0; i < numResumePoints+1; i++ {
+                       p = appendp(p, ABlock)
+               }
+               p = appendp(p, AGet, regAddr(REG_PC_B)) // read next basic block from PC_B
+               p = appendp(p, ABrTable, obj.Addr{Val: tableIdxs})
+               p = appendp(p, AEnd) // end of Block
+
+               for p.Link != nil {
+                       p = p.Link
+               }
+
+               p = appendp(p, AEnd) // end of entryPointLoop
+               p = appendp(p, obj.AUNDEF)
+       }
+
+       p := s.Func.Text
+       currentDepth := 0
+       blockDepths := make(map[*obj.Prog]int)
+       for p != nil {
+               switch p.As {
+               case ABlock, ALoop, AIf:
+                       currentDepth++
+                       blockDepths[p] = currentDepth
+               case AEnd:
+                       currentDepth--
+               }
+
+               switch p.As {
+               case ABr, ABrIf:
+                       if p.To.Type == obj.TYPE_BRANCH {
+                               blockDepth, ok := blockDepths[p.To.Val.(*obj.Prog)]
+                               if !ok {
+                                       panic("label not at block")
+                               }
+                               p.To = constAddr(int64(currentDepth - blockDepth))
+                       }
+               case obj.AJMP:
+                       jmp := *p
+                       p.As = obj.ANOP
+
+                       if jmp.To.Type == obj.TYPE_BRANCH {
+                               // jump to basic block
+                               p = appendp(p, AI32Const, constAddr(jmp.To.Val.(*obj.Prog).Pc))
+                               p = appendp(p, ASet, regAddr(REG_PC_B))               // write next basic block to PC_B
+                               p = appendp(p, ABr, constAddr(int64(currentDepth-1))) // jump to beginning of entryPointLoop
+                               break
+                       }
+
+                       // reset PC_B to function entry
+                       p = appendp(p, AI32Const, constAddr(0))
+                       p = appendp(p, ASet, regAddr(REG_PC_B))
+
+                       // low-level WebAssembly call to function
+                       switch jmp.To.Type {
+                       case obj.TYPE_MEM:
+                               p = appendp(p, ACall, jmp.To)
+                       case obj.TYPE_NONE:
+                               // (target PC is on stack)
+                               p = appendp(p, AI32WrapI64)
+                               p = appendp(p, AI32Const, constAddr(16)) // only needs PC_F bits (16-31), PC_B bits (0-15) are zero
+                               p = appendp(p, AI32ShrU)
+                               p = appendp(p, ACallIndirect)
+                       default:
+                               panic("bad target for JMP")
+                       }
+
+                       p = appendp(p, AReturn)
+
+               case obj.ACALL, ACALLNORESUME:
+                       call := *p
+                       p.As = obj.ANOP
+
+                       pcAfterCall := call.Link.Pc
+                       if call.To.Sym == sigpanic {
+                               pcAfterCall-- // sigpanic expects to be called without advancing the pc
+                       }
+
+                       // jmpdefer manipulates the return address on the stack so deferreturn gets called repeatedly.
+                       // Model this in WebAssembly with a loop.
+                       if call.To.Sym == deferreturn {
+                               p = appendp(p, ALoop)
+                       }
+
+                       // SP -= 8
+                       p = appendp(p, AGet, regAddr(REG_SP))
+                       p = appendp(p, AI32Const, constAddr(8))
+                       p = appendp(p, AI32Sub)
+                       p = appendp(p, ASet, regAddr(REG_SP))
+
+                       // write return address to Go stack
+                       p = appendp(p, AGet, regAddr(REG_SP))
+                       p = appendp(p, AI64Const, obj.Addr{
+                               Type:   obj.TYPE_ADDR,
+                               Name:   obj.NAME_EXTERN,
+                               Sym:    s,           // PC_F
+                               Offset: pcAfterCall, // PC_B
+                       })
+                       p = appendp(p, AI64Store, constAddr(0))
+
+                       // reset PC_B to function entry
+                       p = appendp(p, AI32Const, constAddr(0))
+                       p = appendp(p, ASet, regAddr(REG_PC_B))
+
+                       // low-level WebAssembly call to function
+                       switch call.To.Type {
+                       case obj.TYPE_MEM:
+                               p = appendp(p, ACall, call.To)
+                       case obj.TYPE_NONE:
+                               // (target PC is on stack)
+                               p = appendp(p, AI32WrapI64)
+                               p = appendp(p, AI32Const, constAddr(16)) // only needs PC_F bits (16-31), PC_B bits (0-15) are zero
+                               p = appendp(p, AI32ShrU)
+                               p = appendp(p, ACallIndirect)
+                       default:
+                               panic("bad target for CALL")
+                       }
+
+                       // gcWriteBarrier has no return value, it never unwinds the stack
+                       if call.To.Sym == gcWriteBarrier {
+                               break
+                       }
+
+                       // jmpdefer removes the frame of deferreturn from the Go stack.
+                       // However, its WebAssembly function still returns normally,
+                       // so we need to return from deferreturn without removing its
+                       // stack frame (no RET), because the frame is already gone.
+                       if call.To.Sym == jmpdefer {
+                               p = appendp(p, AReturn)
+                               break
+                       }
+
+                       // return value of call is on the top of the stack, indicating whether to unwind the WebAssembly stack
+                       p = appendp(p, AIf)
+                       if call.As == ACALLNORESUME && call.To.Sym != sigpanic { // sigpanic unwinds the stack, but it never resumes
+                               // trying to unwind WebAssembly stack but call has no resume point, terminate with error
+                               p = appendp(p, obj.AUNDEF)
+                       } else {
+                               // unwinding WebAssembly stack to switch goroutine, return 1
+                               p = appendp(p, AI32Const, constAddr(1))
+                               p = appendp(p, AReturn)
+                       }
+                       p = appendp(p, AEnd)
+
+                       // jump to before the call if jmpdefer has reset the return address to the call's PC
+                       if call.To.Sym == deferreturn {
+                               p = appendp(p, AGet, regAddr(REG_PC_B))
+                               p = appendp(p, AI32Const, constAddr(call.Pc))
+                               p = appendp(p, AI32Eq)
+                               p = appendp(p, ABrIf, constAddr(0))
+                               p = appendp(p, AEnd) // end of Loop
+                       }
+
+               case obj.ARET:
+                       ret := *p
+                       p.As = obj.ANOP
+
+                       if framesize > 0 {
+                               // SP += framesize
+                               p = appendp(p, AGet, regAddr(REG_SP))
+                               p = appendp(p, AI32Const, constAddr(framesize))
+                               p = appendp(p, AI32Add)
+                               p = appendp(p, ASet, regAddr(REG_SP))
+                               // TODO(neelance): This should theoretically set Spadj, but it only works without.
+                               // p.Spadj = int32(-framesize)
+                       }
+
+                       if ret.To.Type == obj.TYPE_MEM {
+                               // reset PC_B to function entry
+                               p = appendp(p, AI32Const, constAddr(0))
+                               p = appendp(p, ASet, regAddr(REG_PC_B))
+
+                               // low-level WebAssembly call to function
+                               p = appendp(p, ACall, ret.To)
+                               p = appendp(p, AReturn)
+                               break
+                       }
+
+                       // read return PC_F from Go stack
+                       p = appendp(p, AGet, regAddr(REG_SP))
+                       p = appendp(p, AI32Load16U, constAddr(2))
+                       p = appendp(p, ASet, regAddr(REG_PC_F))
+
+                       // read return PC_B from Go stack
+                       p = appendp(p, AGet, regAddr(REG_SP))
+                       p = appendp(p, AI32Load16U, constAddr(0))
+                       p = appendp(p, ASet, regAddr(REG_PC_B))
+
+                       // SP += 8
+                       p = appendp(p, AGet, regAddr(REG_SP))
+                       p = appendp(p, AI32Const, constAddr(8))
+                       p = appendp(p, AI32Add)
+                       p = appendp(p, ASet, regAddr(REG_SP))
+
+                       // not switching goroutine, return 0
+                       p = appendp(p, AI32Const, constAddr(0))
+                       p = appendp(p, AReturn)
+               }
+
+               p = p.Link
+       }
+
+       p = s.Func.Text
+       for p != nil {
+               switch p.From.Name {
+               case obj.NAME_AUTO:
+                       p.From.Offset += int64(framesize)
+               case obj.NAME_PARAM:
+                       p.From.Reg = REG_SP
+                       p.From.Offset += int64(framesize) + 8 // parameters are after the frame and the 8-byte return address
+               }
+
+               switch p.To.Name {
+               case obj.NAME_AUTO:
+                       p.To.Offset += int64(framesize)
+               case obj.NAME_PARAM:
+                       p.To.Reg = REG_SP
+                       p.To.Offset += int64(framesize) + 8 // parameters are after the frame and the 8-byte return address
+               }
+
+               switch p.As {
+               case AGet:
+                       if p.From.Type == obj.TYPE_ADDR {
+                               get := *p
+                               p.As = obj.ANOP
+
+                               switch get.From.Name {
+                               case obj.NAME_EXTERN:
+                                       p = appendp(p, AI64Const, get.From)
+                               case obj.NAME_AUTO, obj.NAME_PARAM:
+                                       p = appendp(p, AGet, regAddr(get.From.Reg))
+                                       if get.From.Reg == REG_SP {
+                                               p = appendp(p, AI64ExtendUI32)
+                                       }
+                                       if get.From.Offset != 0 {
+                                               p = appendp(p, AI64Const, constAddr(get.From.Offset))
+                                               p = appendp(p, AI64Add)
+                                       }
+                               default:
+                                       panic("bad Get: invalid name")
+                               }
+                       }
+
+               case AI32Load, AI64Load, AF32Load, AF64Load, AI32Load8S, AI32Load8U, AI32Load16S, AI32Load16U, AI64Load8S, AI64Load8U, AI64Load16S, AI64Load16U, AI64Load32S, AI64Load32U:
+                       if p.From.Type == obj.TYPE_MEM {
+                               as := p.As
+                               from := p.From
+
+                               p.As = AGet
+                               p.From = regAddr(from.Reg)
+
+                               if from.Reg != REG_SP {
+                                       p = appendp(p, AI32WrapI64)
+                               }
+
+                               p = appendp(p, as, constAddr(from.Offset))
+                       }
+
+               case AMOVB, AMOVH, AMOVW, AMOVD:
+                       mov := *p
+                       p.As = obj.ANOP
+
+                       var loadAs obj.As
+                       var storeAs obj.As
+                       switch mov.As {
+                       case AMOVB:
+                               loadAs = AI64Load8U
+                               storeAs = AI64Store8
+                       case AMOVH:
+                               loadAs = AI64Load16U
+                               storeAs = AI64Store16
+                       case AMOVW:
+                               loadAs = AI64Load32U
+                               storeAs = AI64Store32
+                       case AMOVD:
+                               loadAs = AI64Load
+                               storeAs = AI64Store
+                       }
+
+                       appendValue := func() {
+                               switch mov.From.Type {
+                               case obj.TYPE_CONST:
+                                       p = appendp(p, AI64Const, constAddr(mov.From.Offset))
+
+                               case obj.TYPE_ADDR:
+                                       switch mov.From.Name {
+                                       case obj.NAME_NONE, obj.NAME_PARAM, obj.NAME_AUTO:
+                                               p = appendp(p, AGet, regAddr(mov.From.Reg))
+                                               if mov.From.Reg == REG_SP {
+                                                       p = appendp(p, AI64ExtendUI32)
+                                               }
+                                               p = appendp(p, AI64Const, constAddr(mov.From.Offset))
+                                               p = appendp(p, AI64Add)
+                                       case obj.NAME_EXTERN:
+                                               p = appendp(p, AI64Const, mov.From)
+                                       default:
+                                               panic("bad name for MOV")
+                                       }
+
+                               case obj.TYPE_REG:
+                                       p = appendp(p, AGet, mov.From)
+                                       if mov.From.Reg == REG_SP {
+                                               p = appendp(p, AI64ExtendUI32)
+                                       }
+
+                               case obj.TYPE_MEM:
+                                       p = appendp(p, AGet, regAddr(mov.From.Reg))
+                                       if mov.From.Reg != REG_SP {
+                                               p = appendp(p, AI32WrapI64)
+                                       }
+                                       p = appendp(p, loadAs, constAddr(mov.From.Offset))
+
+                               default:
+                                       panic("bad MOV type")
+                               }
+                       }
+
+                       switch mov.To.Type {
+                       case obj.TYPE_REG:
+                               appendValue()
+                               if mov.To.Reg == REG_SP {
+                                       p = appendp(p, AI32WrapI64)
+                               }
+                               p = appendp(p, ASet, mov.To)
+
+                       case obj.TYPE_MEM:
+                               switch mov.To.Name {
+                               case obj.NAME_NONE, obj.NAME_PARAM:
+                                       p = appendp(p, AGet, regAddr(mov.To.Reg))
+                                       if mov.To.Reg != REG_SP {
+                                               p = appendp(p, AI32WrapI64)
+                                       }
+                               case obj.NAME_EXTERN:
+                                       p = appendp(p, AI32Const, obj.Addr{Type: obj.TYPE_ADDR, Name: obj.NAME_EXTERN, Sym: mov.To.Sym})
+                               default:
+                                       panic("bad MOV name")
+                               }
+                               appendValue()
+                               p = appendp(p, storeAs, constAddr(mov.To.Offset))
+
+                       default:
+                               panic("bad MOV type")
+                       }
+
+               case ACallImport:
+                       p.As = obj.ANOP
+                       p = appendp(p, AGet, regAddr(REG_SP))
+                       p = appendp(p, ACall, obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: s})
+                       p.Mark = WasmImport
+               }
+
+               p = p.Link
+       }
+}
+
+// constAddr returns an obj.Addr of type TYPE_CONST carrying the given
+// immediate value, e.g. for load/store offsets and branch indices.
+func constAddr(value int64) obj.Addr {
+       return obj.Addr{Type: obj.TYPE_CONST, Offset: value}
+}
+
+// regAddr returns an obj.Addr of type TYPE_REG referring to the given
+// wasm pseudo-register.
+func regAddr(reg int16) obj.Addr {
+       return obj.Addr{Type: obj.TYPE_REG, Reg: reg}
+}
+
+// assemble encodes the Prog list of s into WebAssembly bytecode and
+// stores the result in s.P. Calls and address constants are emitted as
+// relocations (R_CALL / R_WASMIMPORT / R_ADDR) to be resolved by the
+// linker.
+func assemble(ctxt *obj.Link, s *obj.LSym, newprog obj.ProgAlloc) {
+       w := new(bytes.Buffer)
+
+       // Function starts with declaration of locals: numbers and types.
+       // The default layout below (16 x i64 + 16 x f64) backs the
+       // registers REG_R0 through REG_F15, which the Get/Set/Tee cases
+       // further down address as local indices.
+       switch s.Name {
+       case "memchr":
+               // NOTE(review): special-cased functions with hand-chosen
+               // local counts — presumably matching their assembly bodies;
+               // confirm against the corresponding asm sources.
+               writeUleb128(w, 1) // number of sets of locals
+               writeUleb128(w, 3) // number of locals
+               w.WriteByte(0x7F)  // i32
+       case "memcmp":
+               writeUleb128(w, 1) // number of sets of locals
+               writeUleb128(w, 2) // number of locals
+               w.WriteByte(0x7F)  // i32
+       default:
+               writeUleb128(w, 2)  // number of sets of locals
+               writeUleb128(w, 16) // number of locals
+               w.WriteByte(0x7E)   // i64
+               writeUleb128(w, 16) // number of locals
+               w.WriteByte(0x7C)   // f64
+       }
+
+       for p := s.Func.Text; p != nil; p = p.Link {
+               switch p.As {
+               case AGet:
+                       if p.From.Type != obj.TYPE_REG {
+                               panic("bad Get: argument is not a register")
+                       }
+                       reg := p.From.Reg
+                       switch {
+                       // Fixed registers (PC_F..RET3) live in wasm globals;
+                       // general registers (R0..F15) live in wasm locals.
+                       case reg >= REG_PC_F && reg <= REG_RET3:
+                               w.WriteByte(0x23) // get_global
+                               writeUleb128(w, uint64(reg-REG_PC_F))
+                       case reg >= REG_R0 && reg <= REG_F15:
+                               w.WriteByte(0x20) // get_local
+                               writeUleb128(w, uint64(reg-REG_R0))
+                       default:
+                               panic("bad Get: invalid register")
+                       }
+                       continue
+
+               case ASet:
+                       if p.To.Type != obj.TYPE_REG {
+                               panic("bad Set: argument is not a register")
+                       }
+                       reg := p.To.Reg
+                       switch {
+                       case reg >= REG_PC_F && reg <= REG_RET3:
+                               w.WriteByte(0x24) // set_global
+                               writeUleb128(w, uint64(reg-REG_PC_F))
+                       case reg >= REG_R0 && reg <= REG_F15:
+                               // Peephole: a Set immediately followed by a Get
+                               // of the same register is fused into tee_local.
+                               if p.Link.As == AGet && p.Link.From.Reg == reg {
+                                       w.WriteByte(0x22) // tee_local
+                                       p = p.Link
+                               } else {
+                                       w.WriteByte(0x21) // set_local
+                               }
+                               writeUleb128(w, uint64(reg-REG_R0))
+                       default:
+                               panic("bad Set: invalid register")
+                       }
+                       continue
+
+               case ATee:
+                       if p.To.Type != obj.TYPE_REG {
+                               panic("bad Tee: argument is not a register")
+                       }
+                       reg := p.To.Reg
+                       switch {
+                       case reg >= REG_R0 && reg <= REG_F15:
+                               w.WriteByte(0x22) // tee_local
+                               writeUleb128(w, uint64(reg-REG_R0))
+                       default:
+                               panic("bad Tee: invalid register")
+                       }
+                       continue
+
+               case ANot:
+                       w.WriteByte(0x45) // i32.eqz
+                       continue
+
+               case obj.AUNDEF:
+                       w.WriteByte(0x00) // unreachable
+                       continue
+
+               case obj.ANOP, obj.ATEXT, obj.AFUNCDATA, obj.APCDATA:
+                       // ignore
+                       continue
+               }
+
+               // Remaining instructions map to wasm opcodes by offset within
+               // contiguous runs of the A* constants; each case below anchors
+               // one run to its first opcode in the wasm binary format.
+               switch {
+               case p.As < AUnreachable || p.As > AF64ReinterpretI64:
+                       panic(fmt.Sprintf("unexpected assembler op: %s", p.As))
+               case p.As < AEnd:
+                       w.WriteByte(byte(p.As - AUnreachable + 0x00))
+               case p.As < ADrop:
+                       w.WriteByte(byte(p.As - AEnd + 0x0B))
+               case p.As < AI32Load:
+                       w.WriteByte(byte(p.As - ADrop + 0x1A))
+               default:
+                       w.WriteByte(byte(p.As - AI32Load + 0x28))
+               }
+
+               // Emit the instruction's immediate operands, if any.
+               switch p.As {
+               case ABlock, ALoop, AIf:
+                       if p.From.Offset != 0 {
+                               // block type, rarely used, e.g. for code compiled with emscripten
+                               w.WriteByte(0x80 - byte(p.From.Offset))
+                               continue
+                       }
+                       w.WriteByte(0x40) // no block result type
+
+               case ABr, ABrIf:
+                       if p.To.Type != obj.TYPE_CONST {
+                               panic("bad Br/BrIf")
+                       }
+                       writeUleb128(w, uint64(p.To.Offset))
+
+               case ABrTable:
+                       idxs := p.To.Val.([]uint64)
+                       // Count excludes the final entry, which serves as the
+                       // default target in the br_table encoding.
+                       writeUleb128(w, uint64(len(idxs)-1))
+                       for _, idx := range idxs {
+                               writeUleb128(w, idx)
+                       }
+
+               case ACall:
+                       switch p.To.Type {
+                       case obj.TYPE_CONST:
+                               // Direct call by function index.
+                               writeUleb128(w, uint64(p.To.Offset))
+
+                       case obj.TYPE_MEM:
+                               // Call by symbol: record a relocation for the
+                               // linker to resolve to a function index.
+                               if p.To.Name != obj.NAME_EXTERN && p.To.Name != obj.NAME_STATIC {
+                                       fmt.Println(p.To)
+                                       panic("bad name for Call")
+                               }
+                               r := obj.Addrel(s)
+                               r.Off = int32(w.Len())
+                               r.Type = objabi.R_CALL
+                               if p.Mark&WasmImport != 0 {
+                                       r.Type = objabi.R_WASMIMPORT
+                               }
+                               r.Sym = p.To.Sym
+
+                       default:
+                               panic("bad type for Call")
+                       }
+
+               case ACallIndirect:
+                       writeUleb128(w, uint64(p.To.Offset))
+                       w.WriteByte(0x00) // reserved value
+
+               case AI32Const, AI64Const:
+                       if p.From.Name == obj.NAME_EXTERN {
+                               // Symbol address: emit a relocation; the
+                               // immediate bytes are filled in by the linker.
+                               r := obj.Addrel(s)
+                               r.Off = int32(w.Len())
+                               r.Type = objabi.R_ADDR
+                               r.Sym = p.From.Sym
+                               r.Add = p.From.Offset
+                               break
+                       }
+                       writeSleb128(w, p.From.Offset)
+
+               case AF64Const:
+                       // f64 immediates are stored as raw little-endian IEEE 754 bits.
+                       b := make([]byte, 8)
+                       binary.LittleEndian.PutUint64(b, math.Float64bits(p.From.Val.(float64)))
+                       w.Write(b)
+
+               case AI32Load, AI64Load, AF32Load, AF64Load, AI32Load8S, AI32Load8U, AI32Load16S, AI32Load16U, AI64Load8S, AI64Load8U, AI64Load16S, AI64Load16U, AI64Load32S, AI64Load32U:
+                       if p.From.Offset < 0 {
+                               panic("negative offset for *Load")
+                       }
+                       if p.From.Type != obj.TYPE_CONST {
+                               panic("bad type for *Load")
+                       }
+                       // Memory immediates: alignment exponent, then offset.
+                       writeUleb128(w, align(p.As))
+                       writeUleb128(w, uint64(p.From.Offset))
+
+               case AI32Store, AI64Store, AF32Store, AF64Store, AI32Store8, AI32Store16, AI64Store8, AI64Store16, AI64Store32:
+                       if p.To.Offset < 0 {
+                               panic("negative offset")
+                       }
+                       writeUleb128(w, align(p.As))
+                       writeUleb128(w, uint64(p.To.Offset))
+
+               case ACurrentMemory, AGrowMemory:
+                       w.WriteByte(0x00) // reserved memory index
+
+               }
+       }
+
+       w.WriteByte(0x0b) // end
+
+       s.P = w.Bytes()
+}
+
+// align returns the alignment immediate for the given load or store
+// instruction: the base-2 logarithm of the access width in bytes
+// (0 for 8-bit, 1 for 16-bit, 2 for 32-bit, 3 for 64-bit accesses).
+func align(as obj.As) uint64 {
+       switch as {
+       case AI32Load8S, AI32Load8U, AI64Load8S, AI64Load8U, AI32Store8, AI64Store8:
+               return 0
+       case AI32Load16S, AI32Load16U, AI64Load16S, AI64Load16U, AI32Store16, AI64Store16:
+               return 1
+       case AI32Load, AF32Load, AI64Load32S, AI64Load32U, AI32Store, AF32Store, AI64Store32:
+               return 2
+       case AI64Load, AF64Load, AI64Store, AF64Store:
+               return 3
+       default:
+               panic("align: bad op")
+       }
+}
+
+// writeUleb128 writes v to w in unsigned LEB128 encoding: seven value
+// bits per byte, least-significant group first, with the high bit set
+// on every byte except the last.
+func writeUleb128(w io.ByteWriter, v uint64) {
+       more := true
+       for more {
+               c := uint8(v & 0x7f)
+               v >>= 7
+               more = v != 0
+               if more {
+                       c |= 0x80
+               }
+               w.WriteByte(c)
+       }
+}
+
+// writeSleb128 writes v to w in signed LEB128 encoding. Emission stops
+// once the remaining value is pure sign extension: all zero bits with a
+// clear sign bit in the last group, or all one bits with a set sign bit.
+func writeSleb128(w io.ByteWriter, v int64) {
+       more := true
+       for more {
+               c := uint8(v & 0x7f)
+               s := uint8(v & 0x40) // sign bit of the current 7-bit group
+               v >>= 7              // arithmetic shift preserves the sign
+               more = !((v == 0 && s == 0) || (v == -1 && s != 0))
+               if more {
+                       c |= 0x80
+               }
+               w.WriteByte(c)
+       }
+}
index ff19606cd2be6f6f00392dffeb7e713e67d5b363..23c7b62daf62062584eb5dd1393b39b1a98288db 100644 (file)
@@ -40,6 +40,7 @@ const (
        Hdarwin
        Hdragonfly
        Hfreebsd
+       Hjs
        Hlinux
        Hnacl
        Hnetbsd
@@ -57,6 +58,8 @@ func (h *HeadType) Set(s string) error {
                *h = Hdragonfly
        case "freebsd":
                *h = Hfreebsd
+       case "js":
+               *h = Hjs
        case "linux", "android":
                *h = Hlinux
        case "nacl":
@@ -85,6 +88,8 @@ func (h *HeadType) String() string {
                return "dragonfly"
        case Hfreebsd:
                return "freebsd"
+       case Hjs:
+               return "js"
        case Hlinux:
                return "linux"
        case Hnacl:
index ac96b3a71b6d1c3f86ca7fb8b176a5c28c1d6b4b..a3e2868a1bc9403161af4fdb1f519fea0ad103ba 100644 (file)
@@ -193,6 +193,9 @@ const (
        // R_ADDRCUOFF resolves to a pointer-sized offset from the start of the
        // symbol's DWARF compile unit.
        R_ADDRCUOFF
+
+       // R_WASMIMPORT resolves to the index of the WebAssembly function import.
+       R_WASMIMPORT
 )
 
 // IsDirectJump returns whether r is a relocation for a direct jump.
index c761a834b30d9acbba31bcb3f0e42c1b9a7bed97..487c9260e8576a325ff8597de0bf19bdec564387 100644 (file)
@@ -21,6 +21,7 @@ const (
        MIPS64
        PPC64
        S390X
+       Wasm
 )
 
 // Arch represents an individual architecture.
@@ -160,6 +161,15 @@ var ArchS390X = &Arch{
        MinLC:     2,
 }
 
+// ArchWasm describes the WebAssembly target: little-endian data layout
+// with 64-bit pointers and registers. MinLC is 1 — NOTE(review):
+// presumably because wasm bytecode has no fixed instruction alignment;
+// confirm against the other Arch definitions.
+var ArchWasm = &Arch{
+       Name:      "wasm",
+       Family:    Wasm,
+       ByteOrder: binary.LittleEndian,
+       PtrSize:   8,
+       RegSize:   8,
+       MinLC:     1,
+}
+
 var Archs = [...]*Arch{
        Arch386,
        ArchAMD64,
@@ -173,4 +183,5 @@ var Archs = [...]*Arch{
        ArchPPC64,
        ArchPPC64LE,
        ArchS390X,
+       ArchWasm,
 }