cmd/internal/obj/riscv: implement vector segment load/store instructions
author    wangboyao <wangboyao@bytedance.com>
          Wed, 30 Jul 2025 03:14:37 +0000 (11:14 +0800)
committer Meng Zhuo <mengzhuo1203@gmail.com>
          Tue, 16 Sep 2025 01:57:04 +0000 (18:57 -0700)
After https://github.com/riscv/riscv-opcodes/pull/361 was merged,
riscv-opcodes can generate the RVV segment load/store encodings for Go.
Implement the vector segment load/store instructions.

Change-Id: I154bb75be70c0a45e2279a75c67f68b5bb57c36e
Reviewed-on: https://go-review.googlesource.com/c/go/+/691695
Reviewed-by: Mark Freeman <markfreeman@google.com>
Reviewed-by: Michael Knyszek <mknyszek@google.com>
Reviewed-by: Meng Zhuo <mengzhuo1203@gmail.com>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
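
For orientation, a small hand-written sketch (not taken from the CL; the ·segdemo symbol is made up) of how the new mnemonics read in Go assembly. The operand order follows the existing vector load/store convention exercised in the test data below: the base address, then a stride or index operand where the addressing mode needs one, then an optional V0 mask, with the destination vector register group last for loads and the source group first for stores.

	TEXT ·segdemo(SB), $0
		// Unit-stride segment load: two fields of 8-bit elements per
		// segment, loaded from (X10) into the group starting at V8.
		VLSEG2E8V	(X10), V8
		// The same load, predicated on the mask held in V0.
		VLSEG2E8V	(X10), V0, V8
		// Strided segment store: X11 holds the byte stride between segments.
		VSSSEG2E8V	V24, X11, (X10)
		// Ordered indexed segment load: V4 holds per-segment byte offsets.
		VLOXSEG2EI8V	(X10), V4, V8
		RET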

src/cmd/asm/internal/asm/testdata/riscv64.s
src/cmd/asm/internal/asm/testdata/riscv64error.s
src/cmd/asm/internal/asm/testdata/riscv64validation.s
src/cmd/internal/obj/riscv/anames.go
src/cmd/internal/obj/riscv/cpu.go
src/cmd/internal/obj/riscv/inst.go
src/cmd/internal/obj/riscv/obj.go

src/cmd/asm/internal/asm/testdata/riscv64.s
index b216149a19dc8cd5d2db6fb9473fdc7dc8c4c45a..4f7e7acd77c90af1255c671d72b7ebf848da8008 100644
@@ -543,6 +543,578 @@ start:
        VSOXEI64V       V3, V2, (X10)                   // a771250e
        VSOXEI64V       V3, V2, V0, (X10)               // a771250c
 
+       // 31.7.8: Vector Load/Store Segment Instructions
+
+       // 31.7.8.1: Vector Unit-Stride Segment Loads and Stores
+       VLSEG2E8V       (X10), V8                       // 07040522
+       VLSEG2E16V      (X10), V8                       // 07540522
+       VLSEG2E32V      (X10), V8                       // 07640522
+       VLSEG2E64V      (X10), V8                       // 07740522
+       VLSEG2E8V       (X10), V0, V8                   // 07040520
+       VLSEG2E16V      (X10), V0, V8                   // 07540520
+       VLSEG2E32V      (X10), V0, V8                   // 07640520
+       VLSEG2E64V      (X10), V0, V8                   // 07740520
+
+       VLSEG3E8V       (X10), V8                       // 07040542
+       VLSEG3E16V      (X10), V8                       // 07540542
+       VLSEG3E32V      (X10), V8                       // 07640542
+       VLSEG3E64V      (X10), V8                       // 07740542
+       VLSEG3E8V       (X10), V0, V8                   // 07040540
+       VLSEG3E16V      (X10), V0, V8                   // 07540540
+       VLSEG3E32V      (X10), V0, V8                   // 07640540
+       VLSEG3E64V      (X10), V0, V8                   // 07740540
+
+       VLSEG4E8V       (X10), V8                       // 07040562
+       VLSEG4E16V      (X10), V8                       // 07540562
+       VLSEG4E32V      (X10), V8                       // 07640562
+       VLSEG4E64V      (X10), V8                       // 07740562
+       VLSEG4E8V       (X10), V0, V8                   // 07040560
+       VLSEG4E16V      (X10), V0, V8                   // 07540560
+       VLSEG4E32V      (X10), V0, V8                   // 07640560
+       VLSEG4E64V      (X10), V0, V8                   // 07740560
+
+       VLSEG5E8V       (X10), V8                       // 07040582
+       VLSEG5E16V      (X10), V8                       // 07540582
+       VLSEG5E32V      (X10), V8                       // 07640582
+       VLSEG5E64V      (X10), V8                       // 07740582
+       VLSEG5E8V       (X10), V0, V8                   // 07040580
+       VLSEG5E16V      (X10), V0, V8                   // 07540580
+       VLSEG5E32V      (X10), V0, V8                   // 07640580
+       VLSEG5E64V      (X10), V0, V8                   // 07740580
+
+       VLSEG6E8V       (X10), V8                       // 070405a2
+       VLSEG6E16V      (X10), V8                       // 075405a2
+       VLSEG6E32V      (X10), V8                       // 076405a2
+       VLSEG6E64V      (X10), V8                       // 077405a2
+       VLSEG6E8V       (X10), V0, V8                   // 070405a0
+       VLSEG6E16V      (X10), V0, V8                   // 075405a0
+       VLSEG6E32V      (X10), V0, V8                   // 076405a0
+       VLSEG6E64V      (X10), V0, V8                   // 077405a0
+
+       VLSEG7E8V       (X10), V8                       // 070405c2
+       VLSEG7E16V      (X10), V8                       // 075405c2
+       VLSEG7E32V      (X10), V8                       // 076405c2
+       VLSEG7E64V      (X10), V8                       // 077405c2
+       VLSEG7E8V       (X10), V0, V8                   // 070405c0
+       VLSEG7E16V      (X10), V0, V8                   // 075405c0
+       VLSEG7E32V      (X10), V0, V8                   // 076405c0
+       VLSEG7E64V      (X10), V0, V8                   // 077405c0
+
+       VLSEG8E8V       (X10), V8                       // 070405e2
+       VLSEG8E16V      (X10), V8                       // 075405e2
+       VLSEG8E32V      (X10), V8                       // 076405e2
+       VLSEG8E64V      (X10), V8                       // 077405e2
+       VLSEG8E8V       (X10), V0, V8                   // 070405e0
+       VLSEG8E16V      (X10), V0, V8                   // 075405e0
+       VLSEG8E32V      (X10), V0, V8                   // 076405e0
+       VLSEG8E64V      (X10), V0, V8                   // 077405e0
+
+       VSSEG2E8V       V24, (X10)                      // 270c0522
+       VSSEG2E16V      V24, (X10)                      // 275c0522
+       VSSEG2E32V      V24, (X10)                      // 276c0522
+       VSSEG2E64V      V24, (X10)                      // 277c0522
+       VSSEG2E8V       V24, V0, (X10)                  // 270c0520
+       VSSEG2E16V      V24, V0, (X10)                  // 275c0520
+       VSSEG2E32V      V24, V0, (X10)                  // 276c0520
+       VSSEG2E64V      V24, V0, (X10)                  // 277c0520
+
+       VSSEG3E8V       V24, (X10)                      // 270c0542
+       VSSEG3E16V      V24, (X10)                      // 275c0542
+       VSSEG3E32V      V24, (X10)                      // 276c0542
+       VSSEG3E64V      V24, (X10)                      // 277c0542
+       VSSEG3E8V       V24, V0, (X10)                  // 270c0540
+       VSSEG3E16V      V24, V0, (X10)                  // 275c0540
+       VSSEG3E32V      V24, V0, (X10)                  // 276c0540
+       VSSEG3E64V      V24, V0, (X10)                  // 277c0540
+
+       VSSEG4E8V       V24, (X10)                      // 270c0562
+       VSSEG4E16V      V24, (X10)                      // 275c0562
+       VSSEG4E32V      V24, (X10)                      // 276c0562
+       VSSEG4E64V      V24, (X10)                      // 277c0562
+       VSSEG4E8V       V24, V0, (X10)                  // 270c0560
+       VSSEG4E16V      V24, V0, (X10)                  // 275c0560
+       VSSEG4E32V      V24, V0, (X10)                  // 276c0560
+       VSSEG4E64V      V24, V0, (X10)                  // 277c0560
+
+       VSSEG5E8V       V24, (X10)                      // 270c0582
+       VSSEG5E16V      V24, (X10)                      // 275c0582
+       VSSEG5E32V      V24, (X10)                      // 276c0582
+       VSSEG5E64V      V24, (X10)                      // 277c0582
+       VSSEG5E8V       V24, V0, (X10)                  // 270c0580
+       VSSEG5E16V      V24, V0, (X10)                  // 275c0580
+       VSSEG5E32V      V24, V0, (X10)                  // 276c0580
+       VSSEG5E64V      V24, V0, (X10)                  // 277c0580
+
+       VSSEG6E8V       V24, (X10)                      // 270c05a2
+       VSSEG6E16V      V24, (X10)                      // 275c05a2
+       VSSEG6E32V      V24, (X10)                      // 276c05a2
+       VSSEG6E64V      V24, (X10)                      // 277c05a2
+       VSSEG6E8V       V24, V0, (X10)                  // 270c05a0
+       VSSEG6E16V      V24, V0, (X10)                  // 275c05a0
+       VSSEG6E32V      V24, V0, (X10)                  // 276c05a0
+       VSSEG6E64V      V24, V0, (X10)                  // 277c05a0
+
+       VSSEG7E8V       V24, (X10)                      // 270c05c2
+       VSSEG7E16V      V24, (X10)                      // 275c05c2
+       VSSEG7E32V      V24, (X10)                      // 276c05c2
+       VSSEG7E64V      V24, (X10)                      // 277c05c2
+       VSSEG7E8V       V24, V0, (X10)                  // 270c05c0
+       VSSEG7E16V      V24, V0, (X10)                  // 275c05c0
+       VSSEG7E32V      V24, V0, (X10)                  // 276c05c0
+       VSSEG7E64V      V24, V0, (X10)                  // 277c05c0
+
+       VSSEG8E8V       V24, (X10)                      // 270c05e2
+       VSSEG8E16V      V24, (X10)                      // 275c05e2
+       VSSEG8E32V      V24, (X10)                      // 276c05e2
+       VSSEG8E64V      V24, (X10)                      // 277c05e2
+       VSSEG8E8V       V24, V0, (X10)                  // 270c05e0
+       VSSEG8E16V      V24, V0, (X10)                  // 275c05e0
+       VSSEG8E32V      V24, V0, (X10)                  // 276c05e0
+       VSSEG8E64V      V24, V0, (X10)                  // 277c05e0
+
+       VLSEG2E8FFV     (X10), V8                       // 07040523
+       VLSEG2E16FFV    (X10), V8                       // 07540523
+       VLSEG2E32FFV    (X10), V8                       // 07640523
+       VLSEG2E64FFV    (X10), V8                       // 07740523
+       VLSEG2E8FFV     (X10), V0, V8                   // 07040521
+       VLSEG2E16FFV    (X10), V0, V8                   // 07540521
+       VLSEG2E32FFV    (X10), V0, V8                   // 07640521
+       VLSEG2E64FFV    (X10), V0, V8                   // 07740521
+
+       VLSEG3E8FFV     (X10), V8                       // 07040543
+       VLSEG3E16FFV    (X10), V8                       // 07540543
+       VLSEG3E32FFV    (X10), V8                       // 07640543
+       VLSEG3E64FFV    (X10), V8                       // 07740543
+       VLSEG3E8FFV     (X10), V0, V8                   // 07040541
+       VLSEG3E16FFV    (X10), V0, V8                   // 07540541
+       VLSEG3E32FFV    (X10), V0, V8                   // 07640541
+       VLSEG3E64FFV    (X10), V0, V8                   // 07740541
+
+       VLSEG4E8FFV     (X10), V8                       // 07040563
+       VLSEG4E16FFV    (X10), V8                       // 07540563
+       VLSEG4E32FFV    (X10), V8                       // 07640563
+       VLSEG4E64FFV    (X10), V8                       // 07740563
+       VLSEG4E8FFV     (X10), V0, V8                   // 07040561
+       VLSEG4E16FFV    (X10), V0, V8                   // 07540561
+       VLSEG4E32FFV    (X10), V0, V8                   // 07640561
+       VLSEG4E64FFV    (X10), V0, V8                   // 07740561
+
+       VLSEG5E8FFV     (X10), V8                       // 07040583
+       VLSEG5E16FFV    (X10), V8                       // 07540583
+       VLSEG5E32FFV    (X10), V8                       // 07640583
+       VLSEG5E64FFV    (X10), V8                       // 07740583
+       VLSEG5E8FFV     (X10), V0, V8                   // 07040581
+       VLSEG5E16FFV    (X10), V0, V8                   // 07540581
+       VLSEG5E32FFV    (X10), V0, V8                   // 07640581
+       VLSEG5E64FFV    (X10), V0, V8                   // 07740581
+
+       VLSEG6E8FFV     (X10), V8                       // 070405a3
+       VLSEG6E16FFV    (X10), V8                       // 075405a3
+       VLSEG6E32FFV    (X10), V8                       // 076405a3
+       VLSEG6E64FFV    (X10), V8                       // 077405a3
+       VLSEG6E8FFV     (X10), V0, V8                   // 070405a1
+       VLSEG6E16FFV    (X10), V0, V8                   // 075405a1
+       VLSEG6E32FFV    (X10), V0, V8                   // 076405a1
+       VLSEG6E64FFV    (X10), V0, V8                   // 077405a1
+
+       VLSEG7E8FFV     (X10), V8                       // 070405c3
+       VLSEG7E16FFV    (X10), V8                       // 075405c3
+       VLSEG7E32FFV    (X10), V8                       // 076405c3
+       VLSEG7E64FFV    (X10), V8                       // 077405c3
+       VLSEG7E8FFV     (X10), V0, V8                   // 070405c1
+       VLSEG7E16FFV    (X10), V0, V8                   // 075405c1
+       VLSEG7E32FFV    (X10), V0, V8                   // 076405c1
+       VLSEG7E64FFV    (X10), V0, V8                   // 077405c1
+
+       VLSEG8E8FFV     (X10), V8                       // 070405e3
+       VLSEG8E16FFV    (X10), V8                       // 075405e3
+       VLSEG8E32FFV    (X10), V8                       // 076405e3
+       VLSEG8E64FFV    (X10), V8                       // 077405e3
+       VLSEG8E8FFV     (X10), V0, V8                   // 070405e1
+       VLSEG8E16FFV    (X10), V0, V8                   // 075405e1
+       VLSEG8E32FFV    (X10), V0, V8                   // 076405e1
+       VLSEG8E64FFV    (X10), V0, V8                   // 077405e1
+
+       // 31.7.8.2: Vector Strided Segment Loads and Stores
+       VLSSEG2E8V      (X10), X11, V8                  // 0704b52a
+       VLSSEG2E16V     (X10), X11, V8                  // 0754b52a
+       VLSSEG2E32V     (X10), X11, V8                  // 0764b52a
+       VLSSEG2E64V     (X10), X11, V8                  // 0774b52a
+       VLSSEG2E8V      (X10), X11, V0, V8              // 0704b528
+       VLSSEG2E16V     (X10), X11, V0, V8              // 0754b528
+       VLSSEG2E32V     (X10), X11, V0, V8              // 0764b528
+       VLSSEG2E64V     (X10), X11, V0, V8              // 0774b528
+
+       VLSSEG3E8V      (X10), X11, V8                  // 0704b54a
+       VLSSEG3E16V     (X10), X11, V8                  // 0754b54a
+       VLSSEG3E32V     (X10), X11, V8                  // 0764b54a
+       VLSSEG3E64V     (X10), X11, V8                  // 0774b54a
+       VLSSEG3E8V      (X10), X11, V0, V8              // 0704b548
+       VLSSEG3E16V     (X10), X11, V0, V8              // 0754b548
+       VLSSEG3E32V     (X10), X11, V0, V8              // 0764b548
+       VLSSEG3E64V     (X10), X11, V0, V8              // 0774b548
+
+       VLSSEG4E8V      (X10), X11, V8                  // 0704b56a
+       VLSSEG4E16V     (X10), X11, V8                  // 0754b56a
+       VLSSEG4E32V     (X10), X11, V8                  // 0764b56a
+       VLSSEG4E64V     (X10), X11, V8                  // 0774b56a
+       VLSSEG4E8V      (X10), X11, V0, V8              // 0704b568
+       VLSSEG4E16V     (X10), X11, V0, V8              // 0754b568
+       VLSSEG4E32V     (X10), X11, V0, V8              // 0764b568
+       VLSSEG4E64V     (X10), X11, V0, V8              // 0774b568
+
+       VLSSEG5E8V      (X10), X11, V8                  // 0704b58a
+       VLSSEG5E16V     (X10), X11, V8                  // 0754b58a
+       VLSSEG5E32V     (X10), X11, V8                  // 0764b58a
+       VLSSEG5E64V     (X10), X11, V8                  // 0774b58a
+       VLSSEG5E8V      (X10), X11, V0, V8              // 0704b588
+       VLSSEG5E16V     (X10), X11, V0, V8              // 0754b588
+       VLSSEG5E32V     (X10), X11, V0, V8              // 0764b588
+       VLSSEG5E64V     (X10), X11, V0, V8              // 0774b588
+
+       VLSSEG6E8V      (X10), X11, V8                  // 0704b5aa
+       VLSSEG6E16V     (X10), X11, V8                  // 0754b5aa
+       VLSSEG6E32V     (X10), X11, V8                  // 0764b5aa
+       VLSSEG6E64V     (X10), X11, V8                  // 0774b5aa
+       VLSSEG6E8V      (X10), X11, V0, V8              // 0704b5a8
+       VLSSEG6E16V     (X10), X11, V0, V8              // 0754b5a8
+       VLSSEG6E32V     (X10), X11, V0, V8              // 0764b5a8
+       VLSSEG6E64V     (X10), X11, V0, V8              // 0774b5a8
+
+       VLSSEG7E8V      (X10), X11, V8                  // 0704b5ca
+       VLSSEG7E16V     (X10), X11, V8                  // 0754b5ca
+       VLSSEG7E32V     (X10), X11, V8                  // 0764b5ca
+       VLSSEG7E64V     (X10), X11, V8                  // 0774b5ca
+       VLSSEG7E8V      (X10), X11, V0, V8              // 0704b5c8
+       VLSSEG7E16V     (X10), X11, V0, V8              // 0754b5c8
+       VLSSEG7E32V     (X10), X11, V0, V8              // 0764b5c8
+       VLSSEG7E64V     (X10), X11, V0, V8              // 0774b5c8
+
+       VLSSEG8E8V      (X10), X11, V8                  // 0704b5ea
+       VLSSEG8E16V     (X10), X11, V8                  // 0754b5ea
+       VLSSEG8E32V     (X10), X11, V8                  // 0764b5ea
+       VLSSEG8E64V     (X10), X11, V8                  // 0774b5ea
+       VLSSEG8E8V      (X10), X11, V0, V8              // 0704b5e8
+       VLSSEG8E16V     (X10), X11, V0, V8              // 0754b5e8
+       VLSSEG8E32V     (X10), X11, V0, V8              // 0764b5e8
+       VLSSEG8E64V     (X10), X11, V0, V8              // 0774b5e8
+
+       VSSSEG2E8V      V24, X11, (X10)                 // 270cb52a
+       VSSSEG2E16V     V24, X11, (X10)                 // 275cb52a
+       VSSSEG2E32V     V24, X11, (X10)                 // 276cb52a
+       VSSSEG2E64V     V24, X11, (X10)                 // 277cb52a
+       VSSSEG2E8V      V24, X11, V0, (X10)             // 270cb528
+       VSSSEG2E16V     V24, X11, V0, (X10)             // 275cb528
+       VSSSEG2E32V     V24, X11, V0, (X10)             // 276cb528
+       VSSSEG2E64V     V24, X11, V0, (X10)             // 277cb528
+
+       VSSSEG3E8V      V24, X11, (X10)                 // 270cb54a
+       VSSSEG3E16V     V24, X11, (X10)                 // 275cb54a
+       VSSSEG3E32V     V24, X11, (X10)                 // 276cb54a
+       VSSSEG3E64V     V24, X11, (X10)                 // 277cb54a
+       VSSSEG3E8V      V24, X11, V0, (X10)             // 270cb548
+       VSSSEG3E16V     V24, X11, V0, (X10)             // 275cb548
+       VSSSEG3E32V     V24, X11, V0, (X10)             // 276cb548
+       VSSSEG3E64V     V24, X11, V0, (X10)             // 277cb548
+
+       VSSSEG4E8V      V24, X11, (X10)                 // 270cb56a
+       VSSSEG4E16V     V24, X11, (X10)                 // 275cb56a
+       VSSSEG4E32V     V24, X11, (X10)                 // 276cb56a
+       VSSSEG4E64V     V24, X11, (X10)                 // 277cb56a
+       VSSSEG4E8V      V24, X11, V0, (X10)             // 270cb568
+       VSSSEG4E16V     V24, X11, V0, (X10)             // 275cb568
+       VSSSEG4E32V     V24, X11, V0, (X10)             // 276cb568
+       VSSSEG4E64V     V24, X11, V0, (X10)             // 277cb568
+
+       VSSSEG5E8V      V24, X11, (X10)                 // 270cb58a
+       VSSSEG5E16V     V24, X11, (X10)                 // 275cb58a
+       VSSSEG5E32V     V24, X11, (X10)                 // 276cb58a
+       VSSSEG5E64V     V24, X11, (X10)                 // 277cb58a
+       VSSSEG5E8V      V24, X11, V0, (X10)             // 270cb588
+       VSSSEG5E16V     V24, X11, V0, (X10)             // 275cb588
+       VSSSEG5E32V     V24, X11, V0, (X10)             // 276cb588
+       VSSSEG5E64V     V24, X11, V0, (X10)             // 277cb588
+
+       VSSSEG6E8V      V24, X11, (X10)                 // 270cb5aa
+       VSSSEG6E16V     V24, X11, (X10)                 // 275cb5aa
+       VSSSEG6E32V     V24, X11, (X10)                 // 276cb5aa
+       VSSSEG6E64V     V24, X11, (X10)                 // 277cb5aa
+       VSSSEG6E8V      V24, X11, V0, (X10)             // 270cb5a8
+       VSSSEG6E16V     V24, X11, V0, (X10)             // 275cb5a8
+       VSSSEG6E32V     V24, X11, V0, (X10)             // 276cb5a8
+       VSSSEG6E64V     V24, X11, V0, (X10)             // 277cb5a8
+
+       VSSSEG7E8V      V24, X11, (X10)                 // 270cb5ca
+       VSSSEG7E16V     V24, X11, (X10)                 // 275cb5ca
+       VSSSEG7E32V     V24, X11, (X10)                 // 276cb5ca
+       VSSSEG7E64V     V24, X11, (X10)                 // 277cb5ca
+       VSSSEG7E8V      V24, X11, V0, (X10)             // 270cb5c8
+       VSSSEG7E16V     V24, X11, V0, (X10)             // 275cb5c8
+       VSSSEG7E32V     V24, X11, V0, (X10)             // 276cb5c8
+       VSSSEG7E64V     V24, X11, V0, (X10)             // 277cb5c8
+
+       VSSSEG8E8V      V24, X11, (X10)                 // 270cb5ea
+       VSSSEG8E16V     V24, X11, (X10)                 // 275cb5ea
+       VSSSEG8E32V     V24, X11, (X10)                 // 276cb5ea
+       VSSSEG8E64V     V24, X11, (X10)                 // 277cb5ea
+       VSSSEG8E8V      V24, X11, V0, (X10)             // 270cb5e8
+       VSSSEG8E16V     V24, X11, V0, (X10)             // 275cb5e8
+       VSSSEG8E32V     V24, X11, V0, (X10)             // 276cb5e8
+       VSSSEG8E64V     V24, X11, V0, (X10)             // 277cb5e8
+
+       // 31.7.8.3: Vector Indexed Segment Loads and Stores
+
+       VLUXSEG2EI8V    (X10), V4, V8                   // 07044526
+       VLUXSEG2EI16V   (X10), V4, V8                   // 07544526
+       VLUXSEG2EI32V   (X10), V4, V8                   // 07644526
+       VLUXSEG2EI64V   (X10), V4, V8                   // 07744526
+       VLUXSEG2EI8V    (X10), V4, V0, V8               // 07044524
+       VLUXSEG2EI16V   (X10), V4, V0, V8               // 07544524
+       VLUXSEG2EI32V   (X10), V4, V0, V8               // 07644524
+       VLUXSEG2EI64V   (X10), V4, V0, V8               // 07744524
+
+       VLUXSEG3EI8V    (X10), V4, V8                   // 07044546
+       VLUXSEG3EI16V   (X10), V4, V8                   // 07544546
+       VLUXSEG3EI32V   (X10), V4, V8                   // 07644546
+       VLUXSEG3EI64V   (X10), V4, V8                   // 07744546
+       VLUXSEG3EI8V    (X10), V4, V0, V8               // 07044544
+       VLUXSEG3EI16V   (X10), V4, V0, V8               // 07544544
+       VLUXSEG3EI32V   (X10), V4, V0, V8               // 07644544
+       VLUXSEG3EI64V   (X10), V4, V0, V8               // 07744544
+
+       VLUXSEG4EI8V    (X10), V4, V8                   // 07044566
+       VLUXSEG4EI16V   (X10), V4, V8                   // 07544566
+       VLUXSEG4EI32V   (X10), V4, V8                   // 07644566
+       VLUXSEG4EI64V   (X10), V4, V8                   // 07744566
+       VLUXSEG4EI8V    (X10), V4, V0, V8               // 07044564
+       VLUXSEG4EI16V   (X10), V4, V0, V8               // 07544564
+       VLUXSEG4EI32V   (X10), V4, V0, V8               // 07644564
+       VLUXSEG4EI64V   (X10), V4, V0, V8               // 07744564
+
+       VLUXSEG5EI8V    (X10), V4, V8                   // 07044586
+       VLUXSEG5EI16V   (X10), V4, V8                   // 07544586
+       VLUXSEG5EI32V   (X10), V4, V8                   // 07644586
+       VLUXSEG5EI64V   (X10), V4, V8                   // 07744586
+       VLUXSEG5EI8V    (X10), V4, V0, V8               // 07044584
+       VLUXSEG5EI16V   (X10), V4, V0, V8               // 07544584
+       VLUXSEG5EI32V   (X10), V4, V0, V8               // 07644584
+       VLUXSEG5EI64V   (X10), V4, V0, V8               // 07744584
+
+       VLUXSEG6EI8V    (X10), V4, V8                   // 070445a6
+       VLUXSEG6EI16V   (X10), V4, V8                   // 075445a6
+       VLUXSEG6EI32V   (X10), V4, V8                   // 076445a6
+       VLUXSEG6EI64V   (X10), V4, V8                   // 077445a6
+       VLUXSEG6EI8V    (X10), V4, V0, V8               // 070445a4
+       VLUXSEG6EI16V   (X10), V4, V0, V8               // 075445a4
+       VLUXSEG6EI32V   (X10), V4, V0, V8               // 076445a4
+       VLUXSEG6EI64V   (X10), V4, V0, V8               // 077445a4
+
+       VLOXSEG6EI8V    (X10), V4, V8                   // 070445ae
+       VLOXSEG6EI16V   (X10), V4, V8                   // 075445ae
+       VLOXSEG6EI32V   (X10), V4, V8                   // 076445ae
+       VLOXSEG6EI64V   (X10), V4, V8                   // 077445ae
+       VLOXSEG6EI8V    (X10), V4, V0, V8               // 070445ac
+       VLOXSEG6EI16V   (X10), V4, V0, V8               // 075445ac
+       VLOXSEG6EI32V   (X10), V4, V0, V8               // 076445ac
+       VLOXSEG6EI64V   (X10), V4, V0, V8               // 077445ac
+
+       VLUXSEG7EI8V    (X10), V4, V8                   // 070445c6
+       VLUXSEG7EI16V   (X10), V4, V8                   // 075445c6
+       VLUXSEG7EI32V   (X10), V4, V8                   // 076445c6
+       VLUXSEG7EI64V   (X10), V4, V8                   // 077445c6
+       VLUXSEG7EI8V    (X10), V4, V0, V8               // 070445c4
+       VLUXSEG7EI16V   (X10), V4, V0, V8               // 075445c4
+       VLUXSEG7EI32V   (X10), V4, V0, V8               // 076445c4
+       VLUXSEG7EI64V   (X10), V4, V0, V8               // 077445c4
+
+       VLUXSEG8EI8V    (X10), V4, V8                   // 070445e6
+       VLUXSEG8EI16V   (X10), V4, V8                   // 075445e6
+       VLUXSEG8EI32V   (X10), V4, V8                   // 076445e6
+       VLUXSEG8EI64V   (X10), V4, V8                   // 077445e6
+       VLUXSEG8EI8V    (X10), V4, V0, V8               // 070445e4
+       VLUXSEG8EI16V   (X10), V4, V0, V8               // 075445e4
+       VLUXSEG8EI32V   (X10), V4, V0, V8               // 076445e4
+       VLUXSEG8EI64V   (X10), V4, V0, V8               // 077445e4
+
+       VSUXSEG2EI8V    V24, V4, (X10)                  // 270c4526
+       VSUXSEG2EI16V   V24, V4, (X10)                  // 275c4526
+       VSUXSEG2EI32V   V24, V4, (X10)                  // 276c4526
+       VSUXSEG2EI64V   V24, V4, (X10)                  // 277c4526
+       VSUXSEG2EI8V    V24, V4, V0, (X10)              // 270c4524
+       VSUXSEG2EI16V   V24, V4, V0, (X10)              // 275c4524
+       VSUXSEG2EI32V   V24, V4, V0, (X10)              // 276c4524
+       VSUXSEG2EI64V   V24, V4, V0, (X10)              // 277c4524
+
+       VSUXSEG3EI8V    V24, V4, (X10)                  // 270c4546
+       VSUXSEG3EI16V   V24, V4, (X10)                  // 275c4546
+       VSUXSEG3EI32V   V24, V4, (X10)                  // 276c4546
+       VSUXSEG3EI64V   V24, V4, (X10)                  // 277c4546
+       VSUXSEG3EI8V    V24, V4, V0, (X10)              // 270c4544
+       VSUXSEG3EI16V   V24, V4, V0, (X10)              // 275c4544
+       VSUXSEG3EI32V   V24, V4, V0, (X10)              // 276c4544
+       VSUXSEG3EI64V   V24, V4, V0, (X10)              // 277c4544
+
+       VSUXSEG4EI8V    V24, V4, (X10)                  // 270c4566
+       VSUXSEG4EI16V   V24, V4, (X10)                  // 275c4566
+       VSUXSEG4EI32V   V24, V4, (X10)                  // 276c4566
+       VSUXSEG4EI64V   V24, V4, (X10)                  // 277c4566
+       VSUXSEG4EI8V    V24, V4, V0, (X10)              // 270c4564
+       VSUXSEG4EI16V   V24, V4, V0, (X10)              // 275c4564
+       VSUXSEG4EI32V   V24, V4, V0, (X10)              // 276c4564
+       VSUXSEG4EI64V   V24, V4, V0, (X10)              // 277c4564
+
+       VSUXSEG5EI8V    V24, V4, (X10)                  // 270c4586
+       VSUXSEG5EI16V   V24, V4, (X10)                  // 275c4586
+       VSUXSEG5EI32V   V24, V4, (X10)                  // 276c4586
+       VSUXSEG5EI64V   V24, V4, (X10)                  // 277c4586
+       VSUXSEG5EI8V    V24, V4, V0, (X10)              // 270c4584
+       VSUXSEG5EI16V   V24, V4, V0, (X10)              // 275c4584
+       VSUXSEG5EI32V   V24, V4, V0, (X10)              // 276c4584
+       VSUXSEG5EI64V   V24, V4, V0, (X10)              // 277c4584
+
+       VSUXSEG6EI8V    V24, V4, (X10)                  // 270c45a6
+       VSUXSEG6EI16V   V24, V4, (X10)                  // 275c45a6
+       VSUXSEG6EI32V   V24, V4, (X10)                  // 276c45a6
+       VSUXSEG6EI64V   V24, V4, (X10)                  // 277c45a6
+       VSUXSEG6EI8V    V24, V4, V0, (X10)              // 270c45a4
+       VSUXSEG6EI16V   V24, V4, V0, (X10)              // 275c45a4
+       VSUXSEG6EI32V   V24, V4, V0, (X10)              // 276c45a4
+       VSUXSEG6EI64V   V24, V4, V0, (X10)              // 277c45a4
+
+       VSUXSEG7EI8V    V24, V4, (X10)                  // 270c45c6
+       VSUXSEG7EI16V   V24, V4, (X10)                  // 275c45c6
+       VSUXSEG7EI32V   V24, V4, (X10)                  // 276c45c6
+       VSUXSEG7EI64V   V24, V4, (X10)                  // 277c45c6
+       VSUXSEG7EI8V    V24, V4, V0, (X10)              // 270c45c4
+       VSUXSEG7EI16V   V24, V4, V0, (X10)              // 275c45c4
+       VSUXSEG7EI32V   V24, V4, V0, (X10)              // 276c45c4
+       VSUXSEG7EI64V   V24, V4, V0, (X10)              // 277c45c4
+
+       VSUXSEG8EI8V    V24, V4, (X10)                  // 270c45e6
+       VSUXSEG8EI16V   V24, V4, (X10)                  // 275c45e6
+       VSUXSEG8EI32V   V24, V4, (X10)                  // 276c45e6
+       VSUXSEG8EI64V   V24, V4, (X10)                  // 277c45e6
+       VSUXSEG8EI8V    V24, V4, V0, (X10)              // 270c45e4
+       VSUXSEG8EI16V   V24, V4, V0, (X10)              // 275c45e4
+       VSUXSEG8EI32V   V24, V4, V0, (X10)              // 276c45e4
+       VSUXSEG8EI64V   V24, V4, V0, (X10)              // 277c45e4
+
+       VLOXSEG2EI8V    (X10), V4, V8                   // 0704452e
+       VLOXSEG2EI16V   (X10), V4, V8                   // 0754452e
+       VLOXSEG2EI32V   (X10), V4, V8                   // 0764452e
+       VLOXSEG2EI64V   (X10), V4, V8                   // 0774452e
+       VLOXSEG2EI8V    (X10), V4, V0, V8               // 0704452c
+       VLOXSEG2EI16V   (X10), V4, V0, V8               // 0754452c
+       VLOXSEG2EI32V   (X10), V4, V0, V8               // 0764452c
+       VLOXSEG2EI64V   (X10), V4, V0, V8               // 0774452c
+
+       VLOXSEG3EI8V    (X10), V4, V8                   // 0704454e
+       VLOXSEG3EI16V   (X10), V4, V8                   // 0754454e
+       VLOXSEG3EI32V   (X10), V4, V8                   // 0764454e
+       VLOXSEG3EI64V   (X10), V4, V8                   // 0774454e
+       VLOXSEG3EI8V    (X10), V4, V0, V8               // 0704454c
+       VLOXSEG3EI16V   (X10), V4, V0, V8               // 0754454c
+       VLOXSEG3EI32V   (X10), V4, V0, V8               // 0764454c
+       VLOXSEG3EI64V   (X10), V4, V0, V8               // 0774454c
+       VLOXSEG4EI8V    (X10), V4, V8                   // 0704456e
+       VLOXSEG4EI16V   (X10), V4, V8                   // 0754456e
+       VLOXSEG4EI32V   (X10), V4, V8                   // 0764456e
+       VLOXSEG4EI64V   (X10), V4, V8                   // 0774456e
+       VLOXSEG4EI8V    (X10), V4, V0, V8               // 0704456c
+       VLOXSEG4EI16V   (X10), V4, V0, V8               // 0754456c
+       VLOXSEG4EI32V   (X10), V4, V0, V8               // 0764456c
+       VLOXSEG4EI64V   (X10), V4, V0, V8               // 0774456c
+
+       VLOXSEG5EI8V    (X10), V4, V8                   // 0704458e
+       VLOXSEG5EI16V   (X10), V4, V8                   // 0754458e
+       VLOXSEG5EI32V   (X10), V4, V8                   // 0764458e
+       VLOXSEG5EI64V   (X10), V4, V8                   // 0774458e
+       VLOXSEG5EI8V    (X10), V4, V0, V8               // 0704458c
+       VLOXSEG5EI16V   (X10), V4, V0, V8               // 0754458c
+       VLOXSEG5EI32V   (X10), V4, V0, V8               // 0764458c
+       VLOXSEG5EI64V   (X10), V4, V0, V8               // 0774458c
+
+       VLOXSEG7EI8V    (X10), V4, V8                   // 070445ce
+       VLOXSEG7EI16V   (X10), V4, V8                   // 075445ce
+       VLOXSEG7EI32V   (X10), V4, V8                   // 076445ce
+       VLOXSEG7EI64V   (X10), V4, V8                   // 077445ce
+       VLOXSEG7EI8V    (X10), V4, V0, V8               // 070445cc
+       VLOXSEG7EI16V   (X10), V4, V0, V8               // 075445cc
+       VLOXSEG7EI32V   (X10), V4, V0, V8               // 076445cc
+       VLOXSEG7EI64V   (X10), V4, V0, V8               // 077445cc
+
+       VLOXSEG8EI8V    (X10), V4, V8                   // 070445ee
+       VLOXSEG8EI16V   (X10), V4, V8                   // 075445ee
+       VLOXSEG8EI32V   (X10), V4, V8                   // 076445ee
+       VLOXSEG8EI64V   (X10), V4, V8                   // 077445ee
+       VLOXSEG8EI8V    (X10), V4, V0, V8               // 070445ec
+       VLOXSEG8EI16V   (X10), V4, V0, V8               // 075445ec
+       VLOXSEG8EI32V   (X10), V4, V0, V8               // 076445ec
+       VLOXSEG8EI64V   (X10), V4, V0, V8               // 077445ec
+
+       VSOXSEG2EI8V    V24, V4, (X10)                  // 270c452e
+       VSOXSEG2EI16V   V24, V4, (X10)                  // 275c452e
+       VSOXSEG2EI32V   V24, V4, (X10)                  // 276c452e
+       VSOXSEG2EI64V   V24, V4, (X10)                  // 277c452e
+       VSOXSEG2EI8V    V24, V4, V0, (X10)              // 270c452c
+       VSOXSEG2EI16V   V24, V4, V0, (X10)              // 275c452c
+       VSOXSEG2EI32V   V24, V4, V0, (X10)              // 276c452c
+       VSOXSEG2EI64V   V24, V4, V0, (X10)              // 277c452c
+
+       VSOXSEG3EI8V    V24, V4, (X10)                  // 270c454e
+       VSOXSEG3EI16V   V24, V4, (X10)                  // 275c454e
+       VSOXSEG3EI32V   V24, V4, (X10)                  // 276c454e
+       VSOXSEG3EI64V   V24, V4, (X10)                  // 277c454e
+       VSOXSEG3EI8V    V24, V4, V0, (X10)              // 270c454c
+       VSOXSEG3EI16V   V24, V4, V0, (X10)              // 275c454c
+       VSOXSEG3EI32V   V24, V4, V0, (X10)              // 276c454c
+       VSOXSEG3EI64V   V24, V4, V0, (X10)              // 277c454c
+
+       VSOXSEG4EI8V    V24, V4, (X10)                  // 270c456e
+       VSOXSEG4EI16V   V24, V4, (X10)                  // 275c456e
+       VSOXSEG4EI32V   V24, V4, (X10)                  // 276c456e
+       VSOXSEG4EI64V   V24, V4, (X10)                  // 277c456e
+       VSOXSEG4EI8V    V24, V4, V0, (X10)              // 270c456c
+       VSOXSEG4EI16V   V24, V4, V0, (X10)              // 275c456c
+       VSOXSEG4EI32V   V24, V4, V0, (X10)              // 276c456c
+       VSOXSEG4EI64V   V24, V4, V0, (X10)              // 277c456c
+
+       VSOXSEG5EI8V    V24, V4, (X10)                  // 270c458e
+       VSOXSEG5EI16V   V24, V4, (X10)                  // 275c458e
+       VSOXSEG5EI32V   V24, V4, (X10)                  // 276c458e
+       VSOXSEG5EI64V   V24, V4, (X10)                  // 277c458e
+       VSOXSEG5EI8V    V24, V4, V0, (X10)              // 270c458c
+       VSOXSEG5EI16V   V24, V4, V0, (X10)              // 275c458c
+       VSOXSEG5EI32V   V24, V4, V0, (X10)              // 276c458c
+       VSOXSEG5EI64V   V24, V4, V0, (X10)              // 277c458c
+
+       VSOXSEG6EI8V    V24, V4, (X10)                  // 270c45ae
+       VSOXSEG6EI16V   V24, V4, (X10)                  // 275c45ae
+       VSOXSEG6EI32V   V24, V4, (X10)                  // 276c45ae
+       VSOXSEG6EI64V   V24, V4, (X10)                  // 277c45ae
+       VSOXSEG6EI8V    V24, V4, V0, (X10)              // 270c45ac
+       VSOXSEG6EI16V   V24, V4, V0, (X10)              // 275c45ac
+       VSOXSEG6EI32V   V24, V4, V0, (X10)              // 276c45ac
+       VSOXSEG6EI64V   V24, V4, V0, (X10)              // 277c45ac
+
+       VSOXSEG7EI8V    V24, V4, (X10)                  // 270c45ce
+       VSOXSEG7EI16V   V24, V4, (X10)                  // 275c45ce
+       VSOXSEG7EI32V   V24, V4, (X10)                  // 276c45ce
+       VSOXSEG7EI64V   V24, V4, (X10)                  // 277c45ce
+       VSOXSEG7EI8V    V24, V4, V0, (X10)              // 270c45cc
+       VSOXSEG7EI16V   V24, V4, V0, (X10)              // 275c45cc
+       VSOXSEG7EI32V   V24, V4, V0, (X10)              // 276c45cc
+       VSOXSEG7EI64V   V24, V4, V0, (X10)              // 277c45cc
+
+       VSOXSEG8EI8V    V24, V4, (X10)                  // 270c45ee
+       VSOXSEG8EI16V   V24, V4, (X10)                  // 275c45ee
+       VSOXSEG8EI32V   V24, V4, (X10)                  // 276c45ee
+       VSOXSEG8EI64V   V24, V4, (X10)                  // 277c45ee
+       VSOXSEG8EI8V    V24, V4, V0, (X10)              // 270c45ec
+       VSOXSEG8EI16V   V24, V4, V0, (X10)              // 275c45ec
+       VSOXSEG8EI32V   V24, V4, V0, (X10)              // 276c45ec
+       VSOXSEG8EI64V   V24, V4, V0, (X10)              // 277c45ec
+
        // 31.7.9: Vector Load/Store Whole Register Instructions
        VL1RV           (X10), V3                       // 87018502
        VL1RE8V         (X10), V3                       // 87018502
src/cmd/asm/internal/asm/testdata/riscv64error.s
index 434f221fd9871fd7f5ffda96668b4faad633d72a..113b4ad2d6e0499957ac691e2bbf01f74c442bf5 100644
@@ -80,6 +80,15 @@ TEXT errors(SB),$0
        VSUXEI8V        V3, V2, V1, (X10)               // ERROR "invalid vector mask register"
        VLOXEI8V        (X10), V2, V1, V3               // ERROR "invalid vector mask register"
        VSOXEI8V        V3, V2, V1, (X10)               // ERROR "invalid vector mask register"
+       VLSEG2E8V       (X10), V1, V3                   // ERROR "invalid vector mask register"
+       VLSEG2E8FFV     (X10), V1, V3                   // ERROR "invalid vector mask register"
+       VSSEG2E8V       V3, V1, (X10)                   // ERROR "invalid vector mask register"
+       VLSSEG2E8V      (X10), X10, V1, V3              // ERROR "invalid vector mask register"
+       VSSSEG2E8V      V3, X11, V1, (X10)              // ERROR "invalid vector mask register"
+       VLUXSEG2EI8V    (X10), V2, V1, V3               // ERROR "invalid vector mask register"
+       VSUXSEG2EI8V    V3, V2, V1, (X10)               // ERROR "invalid vector mask register"
+       VLOXSEG2EI8V    (X10), V2, V1, V3               // ERROR "invalid vector mask register"
+       VSOXSEG2EI8V    V3, V2, V1, (X10)               // ERROR "invalid vector mask register"
        VL1RV           (X10), V0, V3                   // ERROR "too many operands for instruction"
        VS1RV           V3, V0, (X11)                   // ERROR "too many operands for instruction"
        VADDVV          V1, V2, V4, V3                  // ERROR "invalid vector mask register"
src/cmd/asm/internal/asm/testdata/riscv64validation.s
index 55bf518e68bba5921aaacbc017d55c529462b5fc..eac1a992c3b4a8a7b3d7da67df48f60fb8a87a8f 100644
@@ -43,6 +43,33 @@ TEXT validation(SB),$0
        VSOXEI8V        X10, V2, (X10)                  // ERROR "expected vector register in vd position"
        VSOXEI8V        V3, V2, (V1)                    // ERROR "expected integer register in rs1 position"
        VSOXEI8V        V3, X11, V0, (X10)              // ERROR "expected vector register in vs2 position"
+       VLSEG2E8V       (X10), X10                      // ERROR "expected vector register in vd position"
+       VLSEG2E8V       (V1), V3                        // ERROR "expected integer register in rs1 position"
+       VLSEG2E8FFV     (X10), X10                      // ERROR "expected vector register in vd position"
+       VLSEG2E8FFV     (V1), V3                        // ERROR "expected integer register in rs1 position"
+       VSSEG2E8V       X10, (X10)                      // ERROR "expected vector register in vs1 position"
+       VSSEG2E8V       V3, (V1)                        // ERROR "expected integer register in rd position"
+       VLSSEG2E8V      (X10), V3                       // ERROR "expected integer register in rs2 position"
+       VLSSEG2E8V      (X10), X10, X11                 // ERROR "expected vector register in vd position"
+       VLSSEG2E8V      (V1), X10, V3                   // ERROR "expected integer register in rs1 position"
+       VLSSEG2E8V      (X10), V1, V0, V3               // ERROR "expected integer register in rs2 position"
+       VSSSEG2E8V      V3, (X10)                       // ERROR "expected integer register in rs2 position"
+       VSSSEG2E8V      X10, X11, (X10)                 // ERROR "expected vector register in vd position"
+       VSSSEG2E8V      V3, X11, (V1)                   // ERROR "expected integer register in rs1 position"
+       VSSSEG2E8V      V3, V1, V0, (X10)               // ERROR "expected integer register in rs2 position"
+       VLUXSEG2EI8V    (X10), V2, X11                  // ERROR "expected vector register in vd position"
+       VLUXSEG2EI8V    (V1), V2, V3                    // ERROR "expected integer register in rs1 position"
+       VLUXSEG2EI8V    (X10), X11, V0, V3              // ERROR "expected vector register in vs2 position"
+       VSUXSEG2EI8V    X10, V2, (X10)                  // ERROR "expected vector register in vd position"
+       VSUXSEG2EI8V    V3, V2, (V1)                    // ERROR "expected integer register in rs1 position"
+       VSUXSEG2EI8V    V3, X11, V0, (X10)              // ERROR "expected vector register in vs2 position"
+       VLOXSEG2EI8V    (X10), V2, X11                  // ERROR "expected vector register in vd position"
+       VLOXSEG2EI8V    (V1), V2, V3                    // ERROR "expected integer register in rs1 position"
+       VLOXSEG2EI8V    (X10), X11, V0, V3              // ERROR "expected vector register in vs2 position"
+       VSOXSEG2EI8V    X10, V2, (X10)                  // ERROR "expected vector register in vd position"
+       VSOXSEG2EI8V    V3, V2, (V1)                    // ERROR "expected integer register in rs1 position"
+       VSOXSEG2EI8V    V3, X11, V0, (X10)              // ERROR "expected vector register in vs2 position"
        VL1RV           (X10), X10                      // ERROR "expected vector register in vd position"
        VL1RV           (V1), V3                        // ERROR "expected integer register in rs1 position"
        VS1RV           X11, (X11)                      // ERROR "expected vector register in vs1 position"
src/cmd/internal/obj/riscv/anames.go
index f0be8f6b875060b182328120c2f3245a5896e944..88ac746573b0b8e911c1debe63ea04fc19fc6001 100644
@@ -314,6 +314,258 @@ var Anames = []string{
        "VLE16FFV",
        "VLE32FFV",
        "VLE64FFV",
+       "VLSEG2E8V",
+       "VLSEG3E8V",
+       "VLSEG4E8V",
+       "VLSEG5E8V",
+       "VLSEG6E8V",
+       "VLSEG7E8V",
+       "VLSEG8E8V",
+       "VLSEG2E16V",
+       "VLSEG3E16V",
+       "VLSEG4E16V",
+       "VLSEG5E16V",
+       "VLSEG6E16V",
+       "VLSEG7E16V",
+       "VLSEG8E16V",
+       "VLSEG2E32V",
+       "VLSEG3E32V",
+       "VLSEG4E32V",
+       "VLSEG5E32V",
+       "VLSEG6E32V",
+       "VLSEG7E32V",
+       "VLSEG8E32V",
+       "VLSEG2E64V",
+       "VLSEG3E64V",
+       "VLSEG4E64V",
+       "VLSEG5E64V",
+       "VLSEG6E64V",
+       "VLSEG7E64V",
+       "VLSEG8E64V",
+       "VSSEG2E8V",
+       "VSSEG3E8V",
+       "VSSEG4E8V",
+       "VSSEG5E8V",
+       "VSSEG6E8V",
+       "VSSEG7E8V",
+       "VSSEG8E8V",
+       "VSSEG2E16V",
+       "VSSEG3E16V",
+       "VSSEG4E16V",
+       "VSSEG5E16V",
+       "VSSEG6E16V",
+       "VSSEG7E16V",
+       "VSSEG8E16V",
+       "VSSEG2E32V",
+       "VSSEG3E32V",
+       "VSSEG4E32V",
+       "VSSEG5E32V",
+       "VSSEG6E32V",
+       "VSSEG7E32V",
+       "VSSEG8E32V",
+       "VSSEG2E64V",
+       "VSSEG3E64V",
+       "VSSEG4E64V",
+       "VSSEG5E64V",
+       "VSSEG6E64V",
+       "VSSEG7E64V",
+       "VSSEG8E64V",
+       "VLSEG2E8FFV",
+       "VLSEG3E8FFV",
+       "VLSEG4E8FFV",
+       "VLSEG5E8FFV",
+       "VLSEG6E8FFV",
+       "VLSEG7E8FFV",
+       "VLSEG8E8FFV",
+       "VLSEG2E16FFV",
+       "VLSEG3E16FFV",
+       "VLSEG4E16FFV",
+       "VLSEG5E16FFV",
+       "VLSEG6E16FFV",
+       "VLSEG7E16FFV",
+       "VLSEG8E16FFV",
+       "VLSEG2E32FFV",
+       "VLSEG3E32FFV",
+       "VLSEG4E32FFV",
+       "VLSEG5E32FFV",
+       "VLSEG6E32FFV",
+       "VLSEG7E32FFV",
+       "VLSEG8E32FFV",
+       "VLSEG2E64FFV",
+       "VLSEG3E64FFV",
+       "VLSEG4E64FFV",
+       "VLSEG5E64FFV",
+       "VLSEG6E64FFV",
+       "VLSEG7E64FFV",
+       "VLSEG8E64FFV",
+       "VLSSEG2E8V",
+       "VLSSEG3E8V",
+       "VLSSEG4E8V",
+       "VLSSEG5E8V",
+       "VLSSEG6E8V",
+       "VLSSEG7E8V",
+       "VLSSEG8E8V",
+       "VLSSEG2E16V",
+       "VLSSEG3E16V",
+       "VLSSEG4E16V",
+       "VLSSEG5E16V",
+       "VLSSEG6E16V",
+       "VLSSEG7E16V",
+       "VLSSEG8E16V",
+       "VLSSEG2E32V",
+       "VLSSEG3E32V",
+       "VLSSEG4E32V",
+       "VLSSEG5E32V",
+       "VLSSEG6E32V",
+       "VLSSEG7E32V",
+       "VLSSEG8E32V",
+       "VLSSEG2E64V",
+       "VLSSEG3E64V",
+       "VLSSEG4E64V",
+       "VLSSEG5E64V",
+       "VLSSEG6E64V",
+       "VLSSEG7E64V",
+       "VLSSEG8E64V",
+       "VSSSEG2E8V",
+       "VSSSEG3E8V",
+       "VSSSEG4E8V",
+       "VSSSEG5E8V",
+       "VSSSEG6E8V",
+       "VSSSEG7E8V",
+       "VSSSEG8E8V",
+       "VSSSEG2E16V",
+       "VSSSEG3E16V",
+       "VSSSEG4E16V",
+       "VSSSEG5E16V",
+       "VSSSEG6E16V",
+       "VSSSEG7E16V",
+       "VSSSEG8E16V",
+       "VSSSEG2E32V",
+       "VSSSEG3E32V",
+       "VSSSEG4E32V",
+       "VSSSEG5E32V",
+       "VSSSEG6E32V",
+       "VSSSEG7E32V",
+       "VSSSEG8E32V",
+       "VSSSEG2E64V",
+       "VSSSEG3E64V",
+       "VSSSEG4E64V",
+       "VSSSEG5E64V",
+       "VSSSEG6E64V",
+       "VSSSEG7E64V",
+       "VSSSEG8E64V",
+       "VLOXSEG2EI8V",
+       "VLOXSEG3EI8V",
+       "VLOXSEG4EI8V",
+       "VLOXSEG5EI8V",
+       "VLOXSEG6EI8V",
+       "VLOXSEG7EI8V",
+       "VLOXSEG8EI8V",
+       "VLOXSEG2EI16V",
+       "VLOXSEG3EI16V",
+       "VLOXSEG4EI16V",
+       "VLOXSEG5EI16V",
+       "VLOXSEG6EI16V",
+       "VLOXSEG7EI16V",
+       "VLOXSEG8EI16V",
+       "VLOXSEG2EI32V",
+       "VLOXSEG3EI32V",
+       "VLOXSEG4EI32V",
+       "VLOXSEG5EI32V",
+       "VLOXSEG6EI32V",
+       "VLOXSEG7EI32V",
+       "VLOXSEG8EI32V",
+       "VLOXSEG2EI64V",
+       "VLOXSEG3EI64V",
+       "VLOXSEG4EI64V",
+       "VLOXSEG5EI64V",
+       "VLOXSEG6EI64V",
+       "VLOXSEG7EI64V",
+       "VLOXSEG8EI64V",
+       "VSOXSEG2EI8V",
+       "VSOXSEG3EI8V",
+       "VSOXSEG4EI8V",
+       "VSOXSEG5EI8V",
+       "VSOXSEG6EI8V",
+       "VSOXSEG7EI8V",
+       "VSOXSEG8EI8V",
+       "VSOXSEG2EI16V",
+       "VSOXSEG3EI16V",
+       "VSOXSEG4EI16V",
+       "VSOXSEG5EI16V",
+       "VSOXSEG6EI16V",
+       "VSOXSEG7EI16V",
+       "VSOXSEG8EI16V",
+       "VSOXSEG2EI32V",
+       "VSOXSEG3EI32V",
+       "VSOXSEG4EI32V",
+       "VSOXSEG5EI32V",
+       "VSOXSEG6EI32V",
+       "VSOXSEG7EI32V",
+       "VSOXSEG8EI32V",
+       "VSOXSEG2EI64V",
+       "VSOXSEG3EI64V",
+       "VSOXSEG4EI64V",
+       "VSOXSEG5EI64V",
+       "VSOXSEG6EI64V",
+       "VSOXSEG7EI64V",
+       "VSOXSEG8EI64V",
+       "VLUXSEG2EI8V",
+       "VLUXSEG3EI8V",
+       "VLUXSEG4EI8V",
+       "VLUXSEG5EI8V",
+       "VLUXSEG6EI8V",
+       "VLUXSEG7EI8V",
+       "VLUXSEG8EI8V",
+       "VLUXSEG2EI16V",
+       "VLUXSEG3EI16V",
+       "VLUXSEG4EI16V",
+       "VLUXSEG5EI16V",
+       "VLUXSEG6EI16V",
+       "VLUXSEG7EI16V",
+       "VLUXSEG8EI16V",
+       "VLUXSEG2EI32V",
+       "VLUXSEG3EI32V",
+       "VLUXSEG4EI32V",
+       "VLUXSEG5EI32V",
+       "VLUXSEG6EI32V",
+       "VLUXSEG7EI32V",
+       "VLUXSEG8EI32V",
+       "VLUXSEG2EI64V",
+       "VLUXSEG3EI64V",
+       "VLUXSEG4EI64V",
+       "VLUXSEG5EI64V",
+       "VLUXSEG6EI64V",
+       "VLUXSEG7EI64V",
+       "VLUXSEG8EI64V",
+       "VSUXSEG2EI8V",
+       "VSUXSEG3EI8V",
+       "VSUXSEG4EI8V",
+       "VSUXSEG5EI8V",
+       "VSUXSEG6EI8V",
+       "VSUXSEG7EI8V",
+       "VSUXSEG8EI8V",
+       "VSUXSEG2EI16V",
+       "VSUXSEG3EI16V",
+       "VSUXSEG4EI16V",
+       "VSUXSEG5EI16V",
+       "VSUXSEG6EI16V",
+       "VSUXSEG7EI16V",
+       "VSUXSEG8EI16V",
+       "VSUXSEG2EI32V",
+       "VSUXSEG3EI32V",
+       "VSUXSEG4EI32V",
+       "VSUXSEG5EI32V",
+       "VSUXSEG6EI32V",
+       "VSUXSEG7EI32V",
+       "VSUXSEG8EI32V",
+       "VSUXSEG2EI64V",
+       "VSUXSEG3EI64V",
+       "VSUXSEG4EI64V",
+       "VSUXSEG5EI64V",
+       "VSUXSEG6EI64V",
+       "VSUXSEG7EI64V",
+       "VSUXSEG8EI64V",
        "VL1RE8V",
        "VL1RE16V",
        "VL1RE32V",
src/cmd/internal/obj/riscv/cpu.go
index b0fcda218c715c20edf69f8b9bf7feeddb13e64e..e265e0448230c759c917756788fa0d33cb95ca1d 100644
@@ -744,6 +744,272 @@ const (
        AVLE32FFV
        AVLE64FFV
 
+       // 31.7.8. Vector Load/Store Segment Instructions
+
+       // 31.7.8.1. Vector Unit-Stride Segment Loads and Stores
+       AVLSEG2E8V
+       AVLSEG3E8V
+       AVLSEG4E8V
+       AVLSEG5E8V
+       AVLSEG6E8V
+       AVLSEG7E8V
+       AVLSEG8E8V
+       AVLSEG2E16V
+       AVLSEG3E16V
+       AVLSEG4E16V
+       AVLSEG5E16V
+       AVLSEG6E16V
+       AVLSEG7E16V
+       AVLSEG8E16V
+       AVLSEG2E32V
+       AVLSEG3E32V
+       AVLSEG4E32V
+       AVLSEG5E32V
+       AVLSEG6E32V
+       AVLSEG7E32V
+       AVLSEG8E32V
+       AVLSEG2E64V
+       AVLSEG3E64V
+       AVLSEG4E64V
+       AVLSEG5E64V
+       AVLSEG6E64V
+       AVLSEG7E64V
+       AVLSEG8E64V
+
+       AVSSEG2E8V
+       AVSSEG3E8V
+       AVSSEG4E8V
+       AVSSEG5E8V
+       AVSSEG6E8V
+       AVSSEG7E8V
+       AVSSEG8E8V
+       AVSSEG2E16V
+       AVSSEG3E16V
+       AVSSEG4E16V
+       AVSSEG5E16V
+       AVSSEG6E16V
+       AVSSEG7E16V
+       AVSSEG8E16V
+       AVSSEG2E32V
+       AVSSEG3E32V
+       AVSSEG4E32V
+       AVSSEG5E32V
+       AVSSEG6E32V
+       AVSSEG7E32V
+       AVSSEG8E32V
+       AVSSEG2E64V
+       AVSSEG3E64V
+       AVSSEG4E64V
+       AVSSEG5E64V
+       AVSSEG6E64V
+       AVSSEG7E64V
+       AVSSEG8E64V
+
+       AVLSEG2E8FFV
+       AVLSEG3E8FFV
+       AVLSEG4E8FFV
+       AVLSEG5E8FFV
+       AVLSEG6E8FFV
+       AVLSEG7E8FFV
+       AVLSEG8E8FFV
+       AVLSEG2E16FFV
+       AVLSEG3E16FFV
+       AVLSEG4E16FFV
+       AVLSEG5E16FFV
+       AVLSEG6E16FFV
+       AVLSEG7E16FFV
+       AVLSEG8E16FFV
+       AVLSEG2E32FFV
+       AVLSEG3E32FFV
+       AVLSEG4E32FFV
+       AVLSEG5E32FFV
+       AVLSEG6E32FFV
+       AVLSEG7E32FFV
+       AVLSEG8E32FFV
+       AVLSEG2E64FFV
+       AVLSEG3E64FFV
+       AVLSEG4E64FFV
+       AVLSEG5E64FFV
+       AVLSEG6E64FFV
+       AVLSEG7E64FFV
+       AVLSEG8E64FFV
+
+       // 31.7.8.2. Vector Strided Segment Loads and Stores
+       AVLSSEG2E8V
+       AVLSSEG3E8V
+       AVLSSEG4E8V
+       AVLSSEG5E8V
+       AVLSSEG6E8V
+       AVLSSEG7E8V
+       AVLSSEG8E8V
+       AVLSSEG2E16V
+       AVLSSEG3E16V
+       AVLSSEG4E16V
+       AVLSSEG5E16V
+       AVLSSEG6E16V
+       AVLSSEG7E16V
+       AVLSSEG8E16V
+       AVLSSEG2E32V
+       AVLSSEG3E32V
+       AVLSSEG4E32V
+       AVLSSEG5E32V
+       AVLSSEG6E32V
+       AVLSSEG7E32V
+       AVLSSEG8E32V
+       AVLSSEG2E64V
+       AVLSSEG3E64V
+       AVLSSEG4E64V
+       AVLSSEG5E64V
+       AVLSSEG6E64V
+       AVLSSEG7E64V
+       AVLSSEG8E64V
+
+       AVSSSEG2E8V
+       AVSSSEG3E8V
+       AVSSSEG4E8V
+       AVSSSEG5E8V
+       AVSSSEG6E8V
+       AVSSSEG7E8V
+       AVSSSEG8E8V
+       AVSSSEG2E16V
+       AVSSSEG3E16V
+       AVSSSEG4E16V
+       AVSSSEG5E16V
+       AVSSSEG6E16V
+       AVSSSEG7E16V
+       AVSSSEG8E16V
+       AVSSSEG2E32V
+       AVSSSEG3E32V
+       AVSSSEG4E32V
+       AVSSSEG5E32V
+       AVSSSEG6E32V
+       AVSSSEG7E32V
+       AVSSSEG8E32V
+       AVSSSEG2E64V
+       AVSSSEG3E64V
+       AVSSSEG4E64V
+       AVSSSEG5E64V
+       AVSSSEG6E64V
+       AVSSSEG7E64V
+       AVSSSEG8E64V
+
+       // 31.7.8.3. Vector Indexed Segment Loads and Stores
+       AVLOXSEG2EI8V
+       AVLOXSEG3EI8V
+       AVLOXSEG4EI8V
+       AVLOXSEG5EI8V
+       AVLOXSEG6EI8V
+       AVLOXSEG7EI8V
+       AVLOXSEG8EI8V
+       AVLOXSEG2EI16V
+       AVLOXSEG3EI16V
+       AVLOXSEG4EI16V
+       AVLOXSEG5EI16V
+       AVLOXSEG6EI16V
+       AVLOXSEG7EI16V
+       AVLOXSEG8EI16V
+       AVLOXSEG2EI32V
+       AVLOXSEG3EI32V
+       AVLOXSEG4EI32V
+       AVLOXSEG5EI32V
+       AVLOXSEG6EI32V
+       AVLOXSEG7EI32V
+       AVLOXSEG8EI32V
+       AVLOXSEG2EI64V
+       AVLOXSEG3EI64V
+       AVLOXSEG4EI64V
+       AVLOXSEG5EI64V
+       AVLOXSEG6EI64V
+       AVLOXSEG7EI64V
+       AVLOXSEG8EI64V
+
+       AVSOXSEG2EI8V
+       AVSOXSEG3EI8V
+       AVSOXSEG4EI8V
+       AVSOXSEG5EI8V
+       AVSOXSEG6EI8V
+       AVSOXSEG7EI8V
+       AVSOXSEG8EI8V
+       AVSOXSEG2EI16V
+       AVSOXSEG3EI16V
+       AVSOXSEG4EI16V
+       AVSOXSEG5EI16V
+       AVSOXSEG6EI16V
+       AVSOXSEG7EI16V
+       AVSOXSEG8EI16V
+       AVSOXSEG2EI32V
+       AVSOXSEG3EI32V
+       AVSOXSEG4EI32V
+       AVSOXSEG5EI32V
+       AVSOXSEG6EI32V
+       AVSOXSEG7EI32V
+       AVSOXSEG8EI32V
+       AVSOXSEG2EI64V
+       AVSOXSEG3EI64V
+       AVSOXSEG4EI64V
+       AVSOXSEG5EI64V
+       AVSOXSEG6EI64V
+       AVSOXSEG7EI64V
+       AVSOXSEG8EI64V
+
+       AVLUXSEG2EI8V
+       AVLUXSEG3EI8V
+       AVLUXSEG4EI8V
+       AVLUXSEG5EI8V
+       AVLUXSEG6EI8V
+       AVLUXSEG7EI8V
+       AVLUXSEG8EI8V
+       AVLUXSEG2EI16V
+       AVLUXSEG3EI16V
+       AVLUXSEG4EI16V
+       AVLUXSEG5EI16V
+       AVLUXSEG6EI16V
+       AVLUXSEG7EI16V
+       AVLUXSEG8EI16V
+       AVLUXSEG2EI32V
+       AVLUXSEG3EI32V
+       AVLUXSEG4EI32V
+       AVLUXSEG5EI32V
+       AVLUXSEG6EI32V
+       AVLUXSEG7EI32V
+       AVLUXSEG8EI32V
+       AVLUXSEG2EI64V
+       AVLUXSEG3EI64V
+       AVLUXSEG4EI64V
+       AVLUXSEG5EI64V
+       AVLUXSEG6EI64V
+       AVLUXSEG7EI64V
+       AVLUXSEG8EI64V
+
+       AVSUXSEG2EI8V
+       AVSUXSEG3EI8V
+       AVSUXSEG4EI8V
+       AVSUXSEG5EI8V
+       AVSUXSEG6EI8V
+       AVSUXSEG7EI8V
+       AVSUXSEG8EI8V
+       AVSUXSEG2EI16V
+       AVSUXSEG3EI16V
+       AVSUXSEG4EI16V
+       AVSUXSEG5EI16V
+       AVSUXSEG6EI16V
+       AVSUXSEG7EI16V
+       AVSUXSEG8EI16V
+       AVSUXSEG2EI32V
+       AVSUXSEG3EI32V
+       AVSUXSEG4EI32V
+       AVSUXSEG5EI32V
+       AVSUXSEG6EI32V
+       AVSUXSEG7EI32V
+       AVSUXSEG8EI32V
+       AVSUXSEG2EI64V
+       AVSUXSEG3EI64V
+       AVSUXSEG4EI64V
+       AVSUXSEG5EI64V
+       AVSUXSEG6EI64V
+       AVSUXSEG7EI64V
+       AVSUXSEG8EI64V
+
        // 31.7.9: Vector Load/Store Whole Register Instructions
        AVL1RE8V
        AVL1RE16V
src/cmd/internal/obj/riscv/inst.go
index 16f2272b03f6f1b29e8dea297c12d9f1f33af658..a6a03dc56523c9c4979fcd5f2668048d6114cbcc 100644
@@ -808,46 +808,326 @@ func encode(a obj.As) *inst {
                return &inst{0x7, 0x0, 0x0, 0x8, -472, 0x71}
        case AVLE16V:
                return &inst{0x7, 0x5, 0x0, 0x0, 0, 0x0}
+       case AVLSEG2E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 512, 0x10}
+       case AVLSEG3E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 1024, 0x20}
+       case AVLSEG4E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 1536, 0x30}
+       case AVLSEG5E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -2048, 0x40}
+       case AVLSEG6E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -1536, 0x50}
+       case AVLSEG7E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -1024, 0x60}
+       case AVLSEG8E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -512, 0x70}
        case AVLE16FFV:
                return &inst{0x7, 0x5, 0x0, 0x10, 16, 0x0}
+       case AVLSEG2E16FFV:
+               return &inst{0x7, 0x5, 0x0, 0x10, 528, 0x10}
+       case AVLSEG3E16FFV:
+               return &inst{0x7, 0x5, 0x0, 0x10, 1040, 0x20}
+       case AVLSEG4E16FFV:
+               return &inst{0x7, 0x5, 0x0, 0x10, 1552, 0x30}
+       case AVLSEG5E16FFV:
+               return &inst{0x7, 0x5, 0x0, 0x10, -2032, 0x40}
+       case AVLSEG6E16FFV:
+               return &inst{0x7, 0x5, 0x0, 0x10, -1520, 0x50}
+       case AVLSEG7E16FFV:
+               return &inst{0x7, 0x5, 0x0, 0x10, -1008, 0x60}
+       case AVLSEG8E16FFV:
+               return &inst{0x7, 0x5, 0x0, 0x10, -496, 0x70}
        case AVLE32V:
                return &inst{0x7, 0x6, 0x0, 0x0, 0, 0x0}
+       case AVLSEG2E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 512, 0x10}
+       case AVLSEG3E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 1024, 0x20}
+       case AVLSEG4E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 1536, 0x30}
+       case AVLSEG5E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -2048, 0x40}
+       case AVLSEG6E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -1536, 0x50}
+       case AVLSEG7E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -1024, 0x60}
+       case AVLSEG8E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -512, 0x70}
        case AVLE32FFV:
                return &inst{0x7, 0x6, 0x0, 0x10, 16, 0x0}
+       case AVLSEG2E32FFV:
+               return &inst{0x7, 0x6, 0x0, 0x10, 528, 0x10}
+       case AVLSEG3E32FFV:
+               return &inst{0x7, 0x6, 0x0, 0x10, 1040, 0x20}
+       case AVLSEG4E32FFV:
+               return &inst{0x7, 0x6, 0x0, 0x10, 1552, 0x30}
+       case AVLSEG5E32FFV:
+               return &inst{0x7, 0x6, 0x0, 0x10, -2032, 0x40}
+       case AVLSEG6E32FFV:
+               return &inst{0x7, 0x6, 0x0, 0x10, -1520, 0x50}
+       case AVLSEG7E32FFV:
+               return &inst{0x7, 0x6, 0x0, 0x10, -1008, 0x60}
+       case AVLSEG8E32FFV:
+               return &inst{0x7, 0x6, 0x0, 0x10, -496, 0x70}
        case AVLE64V:
                return &inst{0x7, 0x7, 0x0, 0x0, 0, 0x0}
+       case AVLSEG2E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 512, 0x10}
+       case AVLSEG3E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 1024, 0x20}
+       case AVLSEG4E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 1536, 0x30}
+       case AVLSEG5E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -2048, 0x40}
+       case AVLSEG6E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -1536, 0x50}
+       case AVLSEG7E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -1024, 0x60}
+       case AVLSEG8E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -512, 0x70}
        case AVLE64FFV:
                return &inst{0x7, 0x7, 0x0, 0x10, 16, 0x0}
+       case AVLSEG2E64FFV:
+               return &inst{0x7, 0x7, 0x0, 0x10, 528, 0x10}
+       case AVLSEG3E64FFV:
+               return &inst{0x7, 0x7, 0x0, 0x10, 1040, 0x20}
+       case AVLSEG4E64FFV:
+               return &inst{0x7, 0x7, 0x0, 0x10, 1552, 0x30}
+       case AVLSEG5E64FFV:
+               return &inst{0x7, 0x7, 0x0, 0x10, -2032, 0x40}
+       case AVLSEG6E64FFV:
+               return &inst{0x7, 0x7, 0x0, 0x10, -1520, 0x50}
+       case AVLSEG7E64FFV:
+               return &inst{0x7, 0x7, 0x0, 0x10, -1008, 0x60}
+       case AVLSEG8E64FFV:
+               return &inst{0x7, 0x7, 0x0, 0x10, -496, 0x70}
        case AVLE8V:
                return &inst{0x7, 0x0, 0x0, 0x0, 0, 0x0}
+       case AVLSEG2E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 512, 0x10}
+       case AVLSEG3E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 1024, 0x20}
+       case AVLSEG4E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 1536, 0x30}
+       case AVLSEG5E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -2048, 0x40}
+       case AVLSEG6E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -1536, 0x50}
+       case AVLSEG7E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -1024, 0x60}
+       case AVLSEG8E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -512, 0x70}
        case AVLE8FFV:
                return &inst{0x7, 0x0, 0x0, 0x10, 16, 0x0}
+       case AVLSEG2E8FFV:
+               return &inst{0x7, 0x0, 0x0, 0x10, 528, 0x10}
+       case AVLSEG3E8FFV:
+               return &inst{0x7, 0x0, 0x0, 0x10, 1040, 0x20}
+       case AVLSEG4E8FFV:
+               return &inst{0x7, 0x0, 0x0, 0x10, 1552, 0x30}
+       case AVLSEG5E8FFV:
+               return &inst{0x7, 0x0, 0x0, 0x10, -2032, 0x40}
+       case AVLSEG6E8FFV:
+               return &inst{0x7, 0x0, 0x0, 0x10, -1520, 0x50}
+       case AVLSEG7E8FFV:
+               return &inst{0x7, 0x0, 0x0, 0x10, -1008, 0x60}
+       case AVLSEG8E8FFV:
+               return &inst{0x7, 0x0, 0x0, 0x10, -496, 0x70}
        case AVLMV:
                return &inst{0x7, 0x0, 0x0, 0xb, 43, 0x1}
        case AVLOXEI16V:
                return &inst{0x7, 0x5, 0x0, 0x0, 192, 0x6}
+       case AVLOXSEG2EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 704, 0x16}
+       case AVLOXSEG3EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 1216, 0x26}
+       case AVLOXSEG4EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 1728, 0x36}
+       case AVLOXSEG5EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -1856, 0x46}
+       case AVLOXSEG6EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -1344, 0x56}
+       case AVLOXSEG7EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -832, 0x66}
+       case AVLOXSEG8EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -320, 0x76}
        case AVLOXEI32V:
                return &inst{0x7, 0x6, 0x0, 0x0, 192, 0x6}
+       case AVLOXSEG2EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 704, 0x16}
+       case AVLOXSEG3EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 1216, 0x26}
+       case AVLOXSEG4EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 1728, 0x36}
+       case AVLOXSEG5EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -1856, 0x46}
+       case AVLOXSEG6EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -1344, 0x56}
+       case AVLOXSEG7EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -832, 0x66}
+       case AVLOXSEG8EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -320, 0x76}
        case AVLOXEI64V:
                return &inst{0x7, 0x7, 0x0, 0x0, 192, 0x6}
+       case AVLOXSEG2EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 704, 0x16}
+       case AVLOXSEG3EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 1216, 0x26}
+       case AVLOXSEG4EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 1728, 0x36}
+       case AVLOXSEG5EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -1856, 0x46}
+       case AVLOXSEG6EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -1344, 0x56}
+       case AVLOXSEG7EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -832, 0x66}
+       case AVLOXSEG8EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -320, 0x76}
        case AVLOXEI8V:
                return &inst{0x7, 0x0, 0x0, 0x0, 192, 0x6}
+       case AVLOXSEG2EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 704, 0x16}
+       case AVLOXSEG3EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 1216, 0x26}
+       case AVLOXSEG4EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 1728, 0x36}
+       case AVLOXSEG5EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -1856, 0x46}
+       case AVLOXSEG6EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -1344, 0x56}
+       case AVLOXSEG7EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -832, 0x66}
+       case AVLOXSEG8EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -320, 0x76}
        case AVLSE16V:
                return &inst{0x7, 0x5, 0x0, 0x0, 128, 0x4}
+       case AVLSSEG2E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 640, 0x14}
+       case AVLSSEG3E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 1152, 0x24}
+       case AVLSSEG4E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 1664, 0x34}
+       case AVLSSEG5E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -1920, 0x44}
+       case AVLSSEG6E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -1408, 0x54}
+       case AVLSSEG7E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -896, 0x64}
+       case AVLSSEG8E16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -384, 0x74}
        case AVLSE32V:
                return &inst{0x7, 0x6, 0x0, 0x0, 128, 0x4}
+       case AVLSSEG2E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 640, 0x14}
+       case AVLSSEG3E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 1152, 0x24}
+       case AVLSSEG4E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 1664, 0x34}
+       case AVLSSEG5E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -1920, 0x44}
+       case AVLSSEG6E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -1408, 0x54}
+       case AVLSSEG7E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -896, 0x64}
+       case AVLSSEG8E32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -384, 0x74}
        case AVLSE64V:
                return &inst{0x7, 0x7, 0x0, 0x0, 128, 0x4}
+       case AVLSSEG2E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 640, 0x14}
+       case AVLSSEG3E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 1152, 0x24}
+       case AVLSSEG4E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 1664, 0x34}
+       case AVLSSEG5E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -1920, 0x44}
+       case AVLSSEG6E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -1408, 0x54}
+       case AVLSSEG7E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -896, 0x64}
+       case AVLSSEG8E64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -384, 0x74}
        case AVLSE8V:
                return &inst{0x7, 0x0, 0x0, 0x0, 128, 0x4}
+       case AVLSSEG2E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 640, 0x14}
+       case AVLSSEG3E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 1152, 0x24}
+       case AVLSSEG4E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 1664, 0x34}
+       case AVLSSEG5E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -1920, 0x44}
+       case AVLSSEG6E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -1408, 0x54}
+       case AVLSSEG7E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -896, 0x64}
+       case AVLSSEG8E8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -384, 0x74}
        case AVLUXEI16V:
                return &inst{0x7, 0x5, 0x0, 0x0, 64, 0x2}
+       case AVLUXSEG2EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 576, 0x12}
+       case AVLUXSEG3EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 1088, 0x22}
+       case AVLUXSEG4EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, 1600, 0x32}
+       case AVLUXSEG5EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -1984, 0x42}
+       case AVLUXSEG6EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -1472, 0x52}
+       case AVLUXSEG7EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -960, 0x62}
+       case AVLUXSEG8EI16V:
+               return &inst{0x7, 0x5, 0x0, 0x0, -448, 0x72}
        case AVLUXEI32V:
                return &inst{0x7, 0x6, 0x0, 0x0, 64, 0x2}
+       case AVLUXSEG2EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 576, 0x12}
+       case AVLUXSEG3EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 1088, 0x22}
+       case AVLUXSEG4EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, 1600, 0x32}
+       case AVLUXSEG5EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -1984, 0x42}
+       case AVLUXSEG6EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -1472, 0x52}
+       case AVLUXSEG7EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -960, 0x62}
+       case AVLUXSEG8EI32V:
+               return &inst{0x7, 0x6, 0x0, 0x0, -448, 0x72}
        case AVLUXEI64V:
                return &inst{0x7, 0x7, 0x0, 0x0, 64, 0x2}
+       case AVLUXSEG2EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 576, 0x12}
+       case AVLUXSEG3EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 1088, 0x22}
+       case AVLUXSEG4EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, 1600, 0x32}
+       case AVLUXSEG5EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -1984, 0x42}
+       case AVLUXSEG6EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -1472, 0x52}
+       case AVLUXSEG7EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -960, 0x62}
+       case AVLUXSEG8EI64V:
+               return &inst{0x7, 0x7, 0x0, 0x0, -448, 0x72}
        case AVLUXEI8V:
                return &inst{0x7, 0x0, 0x0, 0x0, 64, 0x2}
+       case AVLUXSEG2EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 576, 0x12}
+       case AVLUXSEG3EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 1088, 0x22}
+       case AVLUXSEG4EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, 1600, 0x32}
+       case AVLUXSEG5EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -1984, 0x42}
+       case AVLUXSEG6EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -1472, 0x52}
+       case AVLUXSEG7EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -960, 0x62}
+       case AVLUXSEG8EI8V:
+               return &inst{0x7, 0x0, 0x0, 0x0, -448, 0x72}
        case AVMACCVV:
                return &inst{0x57, 0x2, 0x0, 0x0, -1216, 0x5a}
        case AVMACCVX:
@@ -1114,12 +1394,68 @@ func encode(a obj.As) *inst {
                return &inst{0x57, 0x4, 0x0, 0x0, 1152, 0x24}
        case AVSE16V:
                return &inst{0x27, 0x5, 0x0, 0x0, 0, 0x0}
+       case AVSSEG2E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 512, 0x10}
+       case AVSSEG3E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 1024, 0x20}
+       case AVSSEG4E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 1536, 0x30}
+       case AVSSEG5E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -2048, 0x40}
+       case AVSSEG6E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -1536, 0x50}
+       case AVSSEG7E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -1024, 0x60}
+       case AVSSEG8E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -512, 0x70}
        case AVSE32V:
                return &inst{0x27, 0x6, 0x0, 0x0, 0, 0x0}
+       case AVSSEG2E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 512, 0x10}
+       case AVSSEG3E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 1024, 0x20}
+       case AVSSEG4E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 1536, 0x30}
+       case AVSSEG5E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -2048, 0x40}
+       case AVSSEG6E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -1536, 0x50}
+       case AVSSEG7E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -1024, 0x60}
+       case AVSSEG8E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -512, 0x70}
        case AVSE64V:
                return &inst{0x27, 0x7, 0x0, 0x0, 0, 0x0}
+       case AVSSEG2E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 512, 0x10}
+       case AVSSEG3E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 1024, 0x20}
+       case AVSSEG4E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 1536, 0x30}
+       case AVSSEG5E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -2048, 0x40}
+       case AVSSEG6E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -1536, 0x50}
+       case AVSSEG7E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -1024, 0x60}
+       case AVSSEG8E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -512, 0x70}
        case AVSE8V:
                return &inst{0x27, 0x0, 0x0, 0x0, 0, 0x0}
+       case AVSSEG2E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 512, 0x10}
+       case AVSSEG3E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 1024, 0x20}
+       case AVSSEG4E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 1536, 0x30}
+       case AVSSEG5E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -2048, 0x40}
+       case AVSSEG6E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -1536, 0x50}
+       case AVSSEG7E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -1024, 0x60}
+       case AVSSEG8E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -512, 0x70}
        case AVSETIVLI:
                return &inst{0x57, 0x7, 0x0, 0x0, -1024, 0x60}
        case AVSETVL:
@@ -1158,12 +1494,68 @@ func encode(a obj.As) *inst {
                return &inst{0x57, 0x4, 0x0, 0x0, -1600, 0x4e}
        case AVSOXEI16V:
                return &inst{0x27, 0x5, 0x0, 0x0, 192, 0x6}
+       case AVSOXSEG2EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 704, 0x16}
+       case AVSOXSEG3EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 1216, 0x26}
+       case AVSOXSEG4EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 1728, 0x36}
+       case AVSOXSEG5EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -1856, 0x46}
+       case AVSOXSEG6EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -1344, 0x56}
+       case AVSOXSEG7EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -832, 0x66}
+       case AVSOXSEG8EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -320, 0x76}
        case AVSOXEI32V:
                return &inst{0x27, 0x6, 0x0, 0x0, 192, 0x6}
+       case AVSOXSEG2EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 704, 0x16}
+       case AVSOXSEG3EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 1216, 0x26}
+       case AVSOXSEG4EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 1728, 0x36}
+       case AVSOXSEG5EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -1856, 0x46}
+       case AVSOXSEG6EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -1344, 0x56}
+       case AVSOXSEG7EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -832, 0x66}
+       case AVSOXSEG8EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -320, 0x76}
        case AVSOXEI64V:
                return &inst{0x27, 0x7, 0x0, 0x0, 192, 0x6}
+       case AVSOXSEG2EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 704, 0x16}
+       case AVSOXSEG3EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 1216, 0x26}
+       case AVSOXSEG4EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 1728, 0x36}
+       case AVSOXSEG5EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -1856, 0x46}
+       case AVSOXSEG6EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -1344, 0x56}
+       case AVSOXSEG7EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -832, 0x66}
+       case AVSOXSEG8EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -320, 0x76}
        case AVSOXEI8V:
                return &inst{0x27, 0x0, 0x0, 0x0, 192, 0x6}
+       case AVSOXSEG2EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 704, 0x16}
+       case AVSOXSEG3EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 1216, 0x26}
+       case AVSOXSEG4EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 1728, 0x36}
+       case AVSOXSEG5EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -1856, 0x46}
+       case AVSOXSEG6EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -1344, 0x56}
+       case AVSOXSEG7EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -832, 0x66}
+       case AVSOXSEG8EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -320, 0x76}
        case AVSRAVI:
                return &inst{0x57, 0x3, 0x0, 0x0, -1472, 0x52}
        case AVSRAVV:
@@ -1178,12 +1570,68 @@ func encode(a obj.As) *inst {
                return &inst{0x57, 0x4, 0x0, 0x0, -1536, 0x50}
        case AVSSE16V:
                return &inst{0x27, 0x5, 0x0, 0x0, 128, 0x4}
+       case AVSSSEG2E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 640, 0x14}
+       case AVSSSEG3E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 1152, 0x24}
+       case AVSSSEG4E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 1664, 0x34}
+       case AVSSSEG5E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -1920, 0x44}
+       case AVSSSEG6E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -1408, 0x54}
+       case AVSSSEG7E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -896, 0x64}
+       case AVSSSEG8E16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -384, 0x74}
        case AVSSE32V:
                return &inst{0x27, 0x6, 0x0, 0x0, 128, 0x4}
+       case AVSSSEG2E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 640, 0x14}
+       case AVSSSEG3E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 1152, 0x24}
+       case AVSSSEG4E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 1664, 0x34}
+       case AVSSSEG5E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -1920, 0x44}
+       case AVSSSEG6E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -1408, 0x54}
+       case AVSSSEG7E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -896, 0x64}
+       case AVSSSEG8E32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -384, 0x74}
        case AVSSE64V:
                return &inst{0x27, 0x7, 0x0, 0x0, 128, 0x4}
+       case AVSSSEG2E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 640, 0x14}
+       case AVSSSEG3E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 1152, 0x24}
+       case AVSSSEG4E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 1664, 0x34}
+       case AVSSSEG5E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -1920, 0x44}
+       case AVSSSEG6E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -1408, 0x54}
+       case AVSSSEG7E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -896, 0x64}
+       case AVSSSEG8E64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -384, 0x74}
        case AVSSE8V:
                return &inst{0x27, 0x0, 0x0, 0x0, 128, 0x4}
+       case AVSSSEG2E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 640, 0x14}
+       case AVSSSEG3E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 1152, 0x24}
+       case AVSSSEG4E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 1664, 0x34}
+       case AVSSSEG5E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -1920, 0x44}
+       case AVSSSEG6E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -1408, 0x54}
+       case AVSSSEG7E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -896, 0x64}
+       case AVSSSEG8E8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -384, 0x74}
        case AVSSRAVI:
                return &inst{0x57, 0x3, 0x0, 0x0, -1344, 0x56}
        case AVSSRAVV:
@@ -1210,12 +1658,68 @@ func encode(a obj.As) *inst {
                return &inst{0x57, 0x4, 0x0, 0x0, 128, 0x4}
        case AVSUXEI16V:
                return &inst{0x27, 0x5, 0x0, 0x0, 64, 0x2}
+       case AVSUXSEG2EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 576, 0x12}
+       case AVSUXSEG3EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 1088, 0x22}
+       case AVSUXSEG4EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, 1600, 0x32}
+       case AVSUXSEG5EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -1984, 0x42}
+       case AVSUXSEG6EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -1472, 0x52}
+       case AVSUXSEG7EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -960, 0x62}
+       case AVSUXSEG8EI16V:
+               return &inst{0x27, 0x5, 0x0, 0x0, -448, 0x72}
        case AVSUXEI32V:
                return &inst{0x27, 0x6, 0x0, 0x0, 64, 0x2}
+       case AVSUXSEG2EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 576, 0x12}
+       case AVSUXSEG3EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 1088, 0x22}
+       case AVSUXSEG4EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, 1600, 0x32}
+       case AVSUXSEG5EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -1984, 0x42}
+       case AVSUXSEG6EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -1472, 0x52}
+       case AVSUXSEG7EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -960, 0x62}
+       case AVSUXSEG8EI32V:
+               return &inst{0x27, 0x6, 0x0, 0x0, -448, 0x72}
        case AVSUXEI64V:
                return &inst{0x27, 0x7, 0x0, 0x0, 64, 0x2}
+       case AVSUXSEG2EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 576, 0x12}
+       case AVSUXSEG3EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 1088, 0x22}
+       case AVSUXSEG4EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, 1600, 0x32}
+       case AVSUXSEG5EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -1984, 0x42}
+       case AVSUXSEG6EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -1472, 0x52}
+       case AVSUXSEG7EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -960, 0x62}
+       case AVSUXSEG8EI64V:
+               return &inst{0x27, 0x7, 0x0, 0x0, -448, 0x72}
        case AVSUXEI8V:
                return &inst{0x27, 0x0, 0x0, 0x0, 64, 0x2}
+       case AVSUXSEG2EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 576, 0x12}
+       case AVSUXSEG3EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 1088, 0x22}
+       case AVSUXSEG4EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, 1600, 0x32}
+       case AVSUXSEG5EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -1984, 0x42}
+       case AVSUXSEG6EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -1472, 0x52}
+       case AVSUXSEG7EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -960, 0x62}
+       case AVSUXSEG8EI8V:
+               return &inst{0x27, 0x0, 0x0, 0x0, -448, 0x72}
        case AVWADDVV:
                return &inst{0x57, 0x2, 0x0, 0x0, -960, 0x62}
        case AVWADDVX:
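
The encode entries added above differ from their non-segment counterparts only in the NF field (instruction bits 31:29, which select 2 to 8 fields per segment). In the generated inst literals, the fifth value is the 12-bit immediate stored as a signed number and the sixth is funct7 (bits 31:25), so each extra field adds 512 to the immediate and 0x10 to funct7, and immediates of 2048 or more wrap around to negative values such as -2048. A minimal standalone sketch of that arithmetic (illustrative only, not part of this CL; the csr/funct7 field names are assumed from the generated inst type):

	package main

	import "fmt"

	// segEncoding derives the signed 12-bit immediate ("csr") and funct7 values
	// used in the encode table for a segment load/store with the given number of
	// fields, starting from the base (non-segment) instruction's values.
	// NF = fields-1 occupies bits 11:9 of the immediate and bits 6:4 of funct7.
	func segEncoding(baseImm int64, baseFunct7 uint32, fields int) (int64, uint32) {
		nf := int64(fields - 1)
		imm := baseImm + nf<<9
		if imm >= 1<<11 { // the immediate is stored sign-extended to 12 bits
			imm -= 1 << 12
		}
		return imm, baseFunct7 | uint32(nf)<<4
	}

	func main() {
		// VLE8V uses immediate 0 and funct7 0x0; VLSEG5E8V should come out as -2048, 0x40.
		imm, f7 := segEncoding(0, 0x0, 5)
		fmt.Printf("VLSEG5E8V:     imm=%d funct7=%#x\n", imm, f7)
		// VLOXEI16V uses immediate 192 and funct7 0x6; VLOXSEG5EI16V should be -1856, 0x46.
		imm, f7 = segEncoding(192, 0x6, 5)
		fmt.Printf("VLOXSEG5EI16V: imm=%d funct7=%#x\n", imm, f7)
	}
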
index db8d663c5a311518dc943222afb48aebec4fb59f..1538d03179be24a26a0f7b671271eff4ec65c81b 100644 (file)
@@ -2151,6 +2151,260 @@ var instructions = [ALAST & obj.AMask]instructionData{
        AVSOXEI32V & obj.AMask: {enc: sVIVEncoding},
        AVSOXEI64V & obj.AMask: {enc: sVIVEncoding},
 
+       // 31.7.8: Vector Load/Store Segment Instructions
+       AVLSEG2E8V & obj.AMask:     {enc: iVEncoding},
+       AVLSEG3E8V & obj.AMask:     {enc: iVEncoding},
+       AVLSEG4E8V & obj.AMask:     {enc: iVEncoding},
+       AVLSEG5E8V & obj.AMask:     {enc: iVEncoding},
+       AVLSEG6E8V & obj.AMask:     {enc: iVEncoding},
+       AVLSEG7E8V & obj.AMask:     {enc: iVEncoding},
+       AVLSEG8E8V & obj.AMask:     {enc: iVEncoding},
+       AVLSEG2E16V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG3E16V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG4E16V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG5E16V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG6E16V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG7E16V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG8E16V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG2E32V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG3E32V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG4E32V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG5E32V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG6E32V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG7E32V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG8E32V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG2E64V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG3E64V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG4E64V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG5E64V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG6E64V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG7E64V & obj.AMask:    {enc: iVEncoding},
+       AVLSEG8E64V & obj.AMask:    {enc: iVEncoding},
+       AVSSEG2E8V & obj.AMask:     {enc: sVEncoding},
+       AVSSEG3E8V & obj.AMask:     {enc: sVEncoding},
+       AVSSEG4E8V & obj.AMask:     {enc: sVEncoding},
+       AVSSEG5E8V & obj.AMask:     {enc: sVEncoding},
+       AVSSEG6E8V & obj.AMask:     {enc: sVEncoding},
+       AVSSEG7E8V & obj.AMask:     {enc: sVEncoding},
+       AVSSEG8E8V & obj.AMask:     {enc: sVEncoding},
+       AVSSEG2E16V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG3E16V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG4E16V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG5E16V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG6E16V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG7E16V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG8E16V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG2E32V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG3E32V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG4E32V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG5E32V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG6E32V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG7E32V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG8E32V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG2E64V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG3E64V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG4E64V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG5E64V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG6E64V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG7E64V & obj.AMask:    {enc: sVEncoding},
+       AVSSEG8E64V & obj.AMask:    {enc: sVEncoding},
+       AVLSEG2E8FFV & obj.AMask:   {enc: iVEncoding},
+       AVLSEG3E8FFV & obj.AMask:   {enc: iVEncoding},
+       AVLSEG4E8FFV & obj.AMask:   {enc: iVEncoding},
+       AVLSEG5E8FFV & obj.AMask:   {enc: iVEncoding},
+       AVLSEG6E8FFV & obj.AMask:   {enc: iVEncoding},
+       AVLSEG7E8FFV & obj.AMask:   {enc: iVEncoding},
+       AVLSEG8E8FFV & obj.AMask:   {enc: iVEncoding},
+       AVLSEG2E16FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG3E16FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG4E16FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG5E16FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG6E16FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG7E16FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG8E16FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG2E32FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG3E32FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG4E32FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG5E32FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG6E32FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG7E32FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG8E32FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG2E64FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG3E64FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG4E64FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG5E64FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG6E64FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG7E64FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSEG8E64FFV & obj.AMask:  {enc: iVEncoding},
+       AVLSSEG2E8V & obj.AMask:    {enc: iIIVEncoding},
+       AVLSSEG3E8V & obj.AMask:    {enc: iIIVEncoding},
+       AVLSSEG4E8V & obj.AMask:    {enc: iIIVEncoding},
+       AVLSSEG5E8V & obj.AMask:    {enc: iIIVEncoding},
+       AVLSSEG6E8V & obj.AMask:    {enc: iIIVEncoding},
+       AVLSSEG7E8V & obj.AMask:    {enc: iIIVEncoding},
+       AVLSSEG8E8V & obj.AMask:    {enc: iIIVEncoding},
+       AVLSSEG2E16V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG3E16V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG4E16V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG5E16V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG6E16V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG7E16V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG8E16V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG2E32V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG3E32V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG4E32V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG5E32V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG6E32V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG7E32V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG8E32V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG2E64V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG3E64V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG4E64V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG5E64V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG6E64V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG7E64V & obj.AMask:   {enc: iIIVEncoding},
+       AVLSSEG8E64V & obj.AMask:   {enc: iIIVEncoding},
+       AVSSSEG2E8V & obj.AMask:    {enc: sVIIEncoding},
+       AVSSSEG3E8V & obj.AMask:    {enc: sVIIEncoding},
+       AVSSSEG4E8V & obj.AMask:    {enc: sVIIEncoding},
+       AVSSSEG5E8V & obj.AMask:    {enc: sVIIEncoding},
+       AVSSSEG6E8V & obj.AMask:    {enc: sVIIEncoding},
+       AVSSSEG7E8V & obj.AMask:    {enc: sVIIEncoding},
+       AVSSSEG8E8V & obj.AMask:    {enc: sVIIEncoding},
+       AVSSSEG2E16V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG3E16V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG4E16V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG5E16V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG6E16V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG7E16V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG8E16V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG2E32V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG3E32V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG4E32V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG5E32V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG6E32V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG7E32V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG8E32V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG2E64V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG3E64V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG4E64V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG5E64V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG6E64V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG7E64V & obj.AMask:   {enc: sVIIEncoding},
+       AVSSSEG8E64V & obj.AMask:   {enc: sVIIEncoding},
+       AVLOXSEG2EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLOXSEG3EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLOXSEG4EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLOXSEG5EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLOXSEG6EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLOXSEG7EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLOXSEG8EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLOXSEG2EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG3EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG4EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG5EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG6EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG7EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG8EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG2EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG3EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG4EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG5EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG6EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG7EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG8EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG2EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG3EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG4EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG5EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG6EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG7EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLOXSEG8EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVSOXSEG2EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSOXSEG3EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSOXSEG4EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSOXSEG5EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSOXSEG6EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSOXSEG7EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSOXSEG8EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSOXSEG2EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG3EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG4EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG5EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG6EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG7EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG8EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG2EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG3EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG4EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG5EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG6EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG7EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG8EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG2EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG3EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG4EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG5EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG6EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG7EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSOXSEG8EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVLUXSEG2EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLUXSEG3EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLUXSEG4EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLUXSEG5EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLUXSEG6EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLUXSEG7EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLUXSEG8EI8V & obj.AMask:  {enc: iVIVEncoding},
+       AVLUXSEG2EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG3EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG4EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG5EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG6EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG7EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG8EI16V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG2EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG3EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG4EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG5EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG6EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG7EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG8EI32V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG2EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG3EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG4EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG5EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG6EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG7EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVLUXSEG8EI64V & obj.AMask: {enc: iVIVEncoding},
+       AVSUXSEG2EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSUXSEG3EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSUXSEG4EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSUXSEG5EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSUXSEG6EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSUXSEG7EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSUXSEG8EI8V & obj.AMask:  {enc: sVIVEncoding},
+       AVSUXSEG2EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG3EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG4EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG5EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG6EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG7EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG8EI16V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG2EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG3EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG4EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG5EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG6EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG7EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG8EI32V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG2EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG3EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG4EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG5EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG6EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG7EI64V & obj.AMask: {enc: sVIVEncoding},
+       AVSUXSEG8EI64V & obj.AMask: {enc: sVIVEncoding},
+
        // 31.7.9: Vector Load/Store Whole Register Instructions
        AVL1RE8V & obj.AMask:  {enc: iVEncoding},
        AVL1RE16V & obj.AMask: {enc: iVEncoding},
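
The table above gives each segment variant the same operand encoding as its base instruction: iVEncoding/sVEncoding for the unit-stride and fault-only-first forms, iIIVEncoding/sVIIEncoding for the strided forms, and iVIVEncoding/sVIVEncoding for the indexed forms. The instructionsForProg hunks below then send the new opcodes through the existing mask handling, where the vm bit (bit 25, the low bit of funct7) is set when the statement carries no V0 mask operand. A simplified sketch of that convention (the helper is illustrative, not the actual assembler code):

	package main

	import "fmt"

	// setMaskBit applies the RVV masking convention used by these encodings:
	// vm=1 means "unmasked". A load/store written without a V0 operand runs
	// unmasked; supplying V0 leaves vm clear so the operation is masked by V0.
	func setMaskBit(funct7 uint32, hasV0Mask bool) uint32 {
		if !hasV0Mask {
			return funct7 | 1 // vm=1: execute all body elements
		}
		return funct7 // vm=0: masked by V0
	}

	func main() {
		fmt.Printf("%#x %#x\n", setMaskBit(0x10, false), setMaskBit(0x10, true)) // 0x11 0x10
	}
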
@@ -3539,7 +3793,19 @@ func instructionsForProg(p *obj.Prog) []*instruction {
                        ins.rs1 = uint32(p.From.Offset)
                }
 
-       case AVLE8V, AVLE16V, AVLE32V, AVLE64V, AVSE8V, AVSE16V, AVSE32V, AVSE64V, AVLMV, AVSMV:
+       case AVLE8V, AVLE16V, AVLE32V, AVLE64V, AVSE8V, AVSE16V, AVSE32V, AVSE64V, AVLMV, AVSMV,
+               AVLSEG2E8V, AVLSEG3E8V, AVLSEG4E8V, AVLSEG5E8V, AVLSEG6E8V, AVLSEG7E8V, AVLSEG8E8V,
+               AVLSEG2E16V, AVLSEG3E16V, AVLSEG4E16V, AVLSEG5E16V, AVLSEG6E16V, AVLSEG7E16V, AVLSEG8E16V,
+               AVLSEG2E32V, AVLSEG3E32V, AVLSEG4E32V, AVLSEG5E32V, AVLSEG6E32V, AVLSEG7E32V, AVLSEG8E32V,
+               AVLSEG2E64V, AVLSEG3E64V, AVLSEG4E64V, AVLSEG5E64V, AVLSEG6E64V, AVLSEG7E64V, AVLSEG8E64V,
+               AVSSEG2E8V, AVSSEG3E8V, AVSSEG4E8V, AVSSEG5E8V, AVSSEG6E8V, AVSSEG7E8V, AVSSEG8E8V,
+               AVSSEG2E16V, AVSSEG3E16V, AVSSEG4E16V, AVSSEG5E16V, AVSSEG6E16V, AVSSEG7E16V, AVSSEG8E16V,
+               AVSSEG2E32V, AVSSEG3E32V, AVSSEG4E32V, AVSSEG5E32V, AVSSEG6E32V, AVSSEG7E32V, AVSSEG8E32V,
+               AVSSEG2E64V, AVSSEG3E64V, AVSSEG4E64V, AVSSEG5E64V, AVSSEG6E64V, AVSSEG7E64V, AVSSEG8E64V,
+               AVLSEG2E8FFV, AVLSEG3E8FFV, AVLSEG4E8FFV, AVLSEG5E8FFV, AVLSEG6E8FFV, AVLSEG7E8FFV, AVLSEG8E8FFV,
+               AVLSEG2E16FFV, AVLSEG3E16FFV, AVLSEG4E16FFV, AVLSEG5E16FFV, AVLSEG6E16FFV, AVLSEG7E16FFV, AVLSEG8E16FFV,
+               AVLSEG2E32FFV, AVLSEG3E32FFV, AVLSEG4E32FFV, AVLSEG5E32FFV, AVLSEG6E32FFV, AVLSEG7E32FFV, AVLSEG8E32FFV,
+               AVLSEG2E64FFV, AVLSEG3E64FFV, AVLSEG4E64FFV, AVLSEG5E64FFV, AVLSEG6E64FFV, AVLSEG7E64FFV, AVLSEG8E64FFV:
                // Set mask bit
                switch {
                case ins.rs1 == obj.REG_NONE:
@@ -3550,7 +3816,19 @@ func instructionsForProg(p *obj.Prog) []*instruction {
                ins.rd, ins.rs1, ins.rs2 = uint32(p.To.Reg), uint32(p.From.Reg), obj.REG_NONE
 
        case AVLSE8V, AVLSE16V, AVLSE32V, AVLSE64V,
-               AVLUXEI8V, AVLUXEI16V, AVLUXEI32V, AVLUXEI64V, AVLOXEI8V, AVLOXEI16V, AVLOXEI32V, AVLOXEI64V:
+               AVLUXEI8V, AVLUXEI16V, AVLUXEI32V, AVLUXEI64V, AVLOXEI8V, AVLOXEI16V, AVLOXEI32V, AVLOXEI64V,
+               AVLSSEG2E8V, AVLSSEG3E8V, AVLSSEG4E8V, AVLSSEG5E8V, AVLSSEG6E8V, AVLSSEG7E8V, AVLSSEG8E8V,
+               AVLSSEG2E16V, AVLSSEG3E16V, AVLSSEG4E16V, AVLSSEG5E16V, AVLSSEG6E16V, AVLSSEG7E16V, AVLSSEG8E16V,
+               AVLSSEG2E32V, AVLSSEG3E32V, AVLSSEG4E32V, AVLSSEG5E32V, AVLSSEG6E32V, AVLSSEG7E32V, AVLSSEG8E32V,
+               AVLSSEG2E64V, AVLSSEG3E64V, AVLSSEG4E64V, AVLSSEG5E64V, AVLSSEG6E64V, AVLSSEG7E64V, AVLSSEG8E64V,
+               AVLOXSEG2EI8V, AVLOXSEG3EI8V, AVLOXSEG4EI8V, AVLOXSEG5EI8V, AVLOXSEG6EI8V, AVLOXSEG7EI8V, AVLOXSEG8EI8V,
+               AVLOXSEG2EI16V, AVLOXSEG3EI16V, AVLOXSEG4EI16V, AVLOXSEG5EI16V, AVLOXSEG6EI16V, AVLOXSEG7EI16V, AVLOXSEG8EI16V,
+               AVLOXSEG2EI32V, AVLOXSEG3EI32V, AVLOXSEG4EI32V, AVLOXSEG5EI32V, AVLOXSEG6EI32V, AVLOXSEG7EI32V, AVLOXSEG8EI32V,
+               AVLOXSEG2EI64V, AVLOXSEG3EI64V, AVLOXSEG4EI64V, AVLOXSEG5EI64V, AVLOXSEG6EI64V, AVLOXSEG7EI64V, AVLOXSEG8EI64V,
+               AVLUXSEG2EI8V, AVLUXSEG3EI8V, AVLUXSEG4EI8V, AVLUXSEG5EI8V, AVLUXSEG6EI8V, AVLUXSEG7EI8V, AVLUXSEG8EI8V,
+               AVLUXSEG2EI16V, AVLUXSEG3EI16V, AVLUXSEG4EI16V, AVLUXSEG5EI16V, AVLUXSEG6EI16V, AVLUXSEG7EI16V, AVLUXSEG8EI16V,
+               AVLUXSEG2EI32V, AVLUXSEG3EI32V, AVLUXSEG4EI32V, AVLUXSEG5EI32V, AVLUXSEG6EI32V, AVLUXSEG7EI32V, AVLUXSEG8EI32V,
+               AVLUXSEG2EI64V, AVLUXSEG3EI64V, AVLUXSEG4EI64V, AVLUXSEG5EI64V, AVLUXSEG6EI64V, AVLUXSEG7EI64V, AVLUXSEG8EI64V:
                // Set mask bit
                switch {
                case ins.rs3 == obj.REG_NONE:
@@ -3561,7 +3839,19 @@ func instructionsForProg(p *obj.Prog) []*instruction {
                ins.rs1, ins.rs2, ins.rs3 = ins.rs2, ins.rs1, obj.REG_NONE
 
        case AVSSE8V, AVSSE16V, AVSSE32V, AVSSE64V,
-               AVSUXEI8V, AVSUXEI16V, AVSUXEI32V, AVSUXEI64V, AVSOXEI8V, AVSOXEI16V, AVSOXEI32V, AVSOXEI64V:
+               AVSUXEI8V, AVSUXEI16V, AVSUXEI32V, AVSUXEI64V, AVSOXEI8V, AVSOXEI16V, AVSOXEI32V, AVSOXEI64V,
+               AVSSSEG2E8V, AVSSSEG3E8V, AVSSSEG4E8V, AVSSSEG5E8V, AVSSSEG6E8V, AVSSSEG7E8V, AVSSSEG8E8V,
+               AVSSSEG2E16V, AVSSSEG3E16V, AVSSSEG4E16V, AVSSSEG5E16V, AVSSSEG6E16V, AVSSSEG7E16V, AVSSSEG8E16V,
+               AVSSSEG2E32V, AVSSSEG3E32V, AVSSSEG4E32V, AVSSSEG5E32V, AVSSSEG6E32V, AVSSSEG7E32V, AVSSSEG8E32V,
+               AVSSSEG2E64V, AVSSSEG3E64V, AVSSSEG4E64V, AVSSSEG5E64V, AVSSSEG6E64V, AVSSSEG7E64V, AVSSSEG8E64V,
+               AVSOXSEG2EI8V, AVSOXSEG3EI8V, AVSOXSEG4EI8V, AVSOXSEG5EI8V, AVSOXSEG6EI8V, AVSOXSEG7EI8V, AVSOXSEG8EI8V,
+               AVSOXSEG2EI16V, AVSOXSEG3EI16V, AVSOXSEG4EI16V, AVSOXSEG5EI16V, AVSOXSEG6EI16V, AVSOXSEG7EI16V, AVSOXSEG8EI16V,
+               AVSOXSEG2EI32V, AVSOXSEG3EI32V, AVSOXSEG4EI32V, AVSOXSEG5EI32V, AVSOXSEG6EI32V, AVSOXSEG7EI32V, AVSOXSEG8EI32V,
+               AVSOXSEG2EI64V, AVSOXSEG3EI64V, AVSOXSEG4EI64V, AVSOXSEG5EI64V, AVSOXSEG6EI64V, AVSOXSEG7EI64V, AVSOXSEG8EI64V,
+               AVSUXSEG2EI8V, AVSUXSEG3EI8V, AVSUXSEG4EI8V, AVSUXSEG5EI8V, AVSUXSEG6EI8V, AVSUXSEG7EI8V, AVSUXSEG8EI8V,
+               AVSUXSEG2EI16V, AVSUXSEG3EI16V, AVSUXSEG4EI16V, AVSUXSEG5EI16V, AVSUXSEG6EI16V, AVSUXSEG7EI16V, AVSUXSEG8EI16V,
+               AVSUXSEG2EI32V, AVSUXSEG3EI32V, AVSUXSEG4EI32V, AVSUXSEG5EI32V, AVSUXSEG6EI32V, AVSUXSEG7EI32V, AVSUXSEG8EI32V,
+               AVSUXSEG2EI64V, AVSUXSEG3EI64V, AVSUXSEG4EI64V, AVSUXSEG5EI64V, AVSUXSEG6EI64V, AVSUXSEG7EI64V, AVSUXSEG8EI64V:
                // Set mask bit
                switch {
                case ins.rs3 == obj.REG_NONE: