Cypherpunks repositories - gostls13.git/commitdiff
cmd/objdump: updates from golang.org/x/arch/ppc64/ppc64asm
author: Lynn Boger <laboger@linux.vnet.ibm.com>
Tue, 25 Oct 2016 20:13:07 +0000 (15:13 -0500)
committer: Brad Fitzpatrick <bradfitz@golang.org>
Tue, 25 Oct 2016 21:16:33 +0000 (21:16 +0000)
Update the ppc64x disassembly code for use by objdump
from golang.org/x/arch/ppc64/ppc64asm commit fcea5ea.
Enable the objdump testcase for external linking on ppc64le
and make a minor fix to the expected output.

Fixes #17447

Change-Id: I769cc7f8bfade594690a476dfe77ab33677ac03b
Reviewed-on: https://go-review.googlesource.com/32015
Run-TryBot: Brad Fitzpatrick <bradfitz@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Brad Fitzpatrick <bradfitz@golang.org>
src/cmd/objdump/objdump_test.go
src/cmd/vendor/golang.org/x/arch/ppc64/ppc64asm/decode.go
src/cmd/vendor/golang.org/x/arch/ppc64/ppc64asm/ext_test.go
src/cmd/vendor/golang.org/x/arch/ppc64/ppc64asm/inst.go
src/cmd/vendor/golang.org/x/arch/ppc64/ppc64asm/objdump_test.go
src/cmd/vendor/golang.org/x/arch/ppc64/ppc64asm/objdumpext_test.go
src/cmd/vendor/golang.org/x/arch/ppc64/ppc64asm/plan9.go
src/cmd/vendor/golang.org/x/arch/ppc64/ppc64asm/tables.go
src/cmd/vendor/golang.org/x/arch/ppc64/ppc64asm/testdata/decode.txt

index ffd5b3079e26aee8509eb12c60c8d41a750bca0d..cf1b53c2b3cb612b7b488822617400373e3c5e27 100644 (file)
@@ -54,7 +54,7 @@ var ppcNeed = []string{
        "fmthello.go:6",
        "TEXT main.main(SB)",
        "BR main.main(SB)",
-       "BL fmt.Println(SB)",
+       "CALL fmt.Println(SB)",
        "RET",
 }
 
@@ -143,7 +143,7 @@ func TestDisasmExtld(t *testing.T) {
                t.Skipf("skipping on %s", runtime.GOOS)
        }
        switch runtime.GOARCH {
-       case "ppc64", "ppc64le":
+       case "ppc64":
                t.Skipf("skipping on %s, no support for external linking, issue 9038", runtime.GOARCH)
        case "arm64":
                t.Skipf("skipping on %s, issue 10106", runtime.GOARCH)
index a40b8b7854c0438427e1cfc05bc37996c3e325a1..e1518d52e5c112440f99ee6fe2b90a8247d2db23 100644 (file)
@@ -52,6 +52,8 @@ func (a argField) Parse(i uint32) Arg {
                return F0 + Reg(a.BitFields.Parse(i))
        case TypeVecReg:
                return V0 + Reg(a.BitFields.Parse(i))
+       case TypeVecSReg:
+               return VS0 + Reg(a.BitFields.Parse(i))
        case TypeSpReg:
                return SpReg(a.BitFields.Parse(i))
        case TypeImmSigned:
@@ -78,6 +80,7 @@ const (
        TypeCondRegField         // conditional register field (0-7)
        TypeFPReg                // floating point register
        TypeVecReg               // vector register
+       TypeVecSReg              // VSX register
        TypeSpReg                // special register (depends on Op)
        TypeImmSigned            // signed immediate
        TypeImmUnsigned          // unsigned immediate/flag/mask, this is the catch-all type
@@ -101,6 +104,8 @@ func (t ArgType) String() string {
                return "FPReg"
        case TypeVecReg:
                return "VecReg"
+       case TypeVecSReg:
+               return "VecSReg"
        case TypeSpReg:
                return "SpReg"
        case TypeImmSigned:
index 01abefb324f55980ecc155d8e9747c084c937203..b5539844fcd861ad02632fdc304c1f36615490a7 100644 (file)
@@ -176,7 +176,7 @@ func testExtDis(
        t.Logf("%d test cases, %d expected mismatches, %d failures; %.0f cases/second", totalTests, totalSkips, totalErrors, float64(totalTests)/time.Since(start).Seconds())
 
        if err := <-errc; err != nil {
-               t.Fatal("external disassembler: %v", err)
+               t.Fatalf("external disassembler: %v", err)
        }
 
 }
index bf72ac6d0fbc6fbca6b7e93e8e248548990a0855..bd86b9230c40f3abc13420e16d54e15a9dfa78ce 100644 (file)
@@ -156,38 +156,70 @@ const (
        V29
        V30
        V31
-       V32
-       V33
-       V34
-       V35
-       V36
-       V37
-       V38
-       V39
-       V40
-       V41
-       V42
-       V43
-       V44
-       V45
-       V46
-       V47
-       V48
-       V49
-       V50
-       V51
-       V52
-       V53
-       V54
-       V55
-       V56
-       V57
-       V58
-       V59
-       V60
-       V61
-       V62
-       V63
+       VS0
+       VS1
+       VS2
+       VS3
+       VS4
+       VS5
+       VS6
+       VS7
+       VS8
+       VS9
+       VS10
+       VS11
+       VS12
+       VS13
+       VS14
+       VS15
+       VS16
+       VS17
+       VS18
+       VS19
+       VS20
+       VS21
+       VS22
+       VS23
+       VS24
+       VS25
+       VS26
+       VS27
+       VS28
+       VS29
+       VS30
+       VS31
+       VS32
+       VS33
+       VS34
+       VS35
+       VS36
+       VS37
+       VS38
+       VS39
+       VS40
+       VS41
+       VS42
+       VS43
+       VS44
+       VS45
+       VS46
+       VS47
+       VS48
+       VS49
+       VS50
+       VS51
+       VS52
+       VS53
+       VS54
+       VS55
+       VS56
+       VS57
+       VS58
+       VS59
+       VS60
+       VS61
+       VS62
+       VS63
 )
 
 func (Reg) IsArg() {}
@@ -197,10 +229,12 @@ func (r Reg) String() string {
                return fmt.Sprintf("r%d", int(r-R0))
        case F0 <= r && r <= F31:
                return fmt.Sprintf("f%d", int(r-F0))
-       case V0 <= r && r <= V63:
+       case V0 <= r && r <= V31:
                return fmt.Sprintf("v%d", int(r-V0))
+       case VS0 <= r && r <= VS63:
+               return fmt.Sprintf("vs%d", int(r-VS0))
        default:
-               return fmt.Sprint("Reg(%d)", int(r))
+               return fmt.Sprintf("Reg(%d)", int(r))
        }
 }
 
index 101c9dff25453118960611ca80383332a2db6950..ae825fd23c961f4f6e672d1cf21a4ad13a6575fc 100644 (file)
@@ -12,7 +12,10 @@ import (
 
 func TestObjdumpPowerTestdata(t *testing.T) { testObjdump(t, testdataCases(t)) }
 func TestObjdumpPowerManual(t *testing.T)   { testObjdump(t, hexCases(t, objdumpManualTests)) }
-func TestObjdumpPowerRandom(t *testing.T)   { testObjdump(t, randomCases(t)) }
+
+// Disable this for now since generating all possible bit combinations within a word
+// generates lots of ppc64x instructions not possible with golang so not worth supporting..
+//func TestObjdumpPowerRandom(t *testing.T)   { testObjdump(t, randomCases(t)) }
 
 // objdumpManualTests holds test cases that will be run by TestObjdumpARMManual.
 // If you are debugging a few cases that turned up in a longer run, it can be useful
index 64dd27407c11cf90e59427963b21ddb9da558490..55cff31b45e86902edfc2a437d96f58820b9aeba 100644 (file)
@@ -19,7 +19,7 @@ import (
        "testing"
 )
 
-const objdumpPath = "/usr/local/bin/powerpc64-unknown-linux-gnu-objdump"
+const objdumpPath = "/usr/bin/objdump"
 
 func testObjdump(t *testing.T, generate func(func([]byte))) {
        if testing.Short() {
index f9bafb9adc77b7906a4a6989121448e42bce9ff2..57a761e36faa0ef9c01e76f64e7565fbdcf0d488 100644 (file)
@@ -148,16 +148,25 @@ var revCondMap = map[string]string{
 
 // plan9OpMap maps an Op to its Plan 9 mnemonics, if different than its GNU mnemonics.
 var plan9OpMap = map[Op]string{
-       LBZ: "MOVBZ", STB: "MOVB",
+       LWARX: "LWAR", STWCX_: "STWCCC",
+       LDARX: "LDAR", STDCX_: "STDCCC",
+       LHARX: "LHAR", STHCX_: "STHCCC",
+       LBARX: "LBAR", STBCX_: "STBCCC",
+       ADDI: "ADD",
+       ADD_: "ADDCC",
+       LBZ:  "MOVBZ", STB: "MOVB",
        LBZU: "MOVBZU", STBU: "MOVBU", // TODO(minux): indexed forms are not handled
        LHZ: "MOVHZ", LHA: "MOVH", STH: "MOVH",
        LHZU: "MOVHZU", STHU: "MOVHU",
+       LI:  "MOVD",
+       LIS: "ADDIS",
        LWZ: "MOVWZ", LWA: "MOVW", STW: "MOVW",
        LWZU: "MOVWZU", STWU: "MOVWU",
        LD: "MOVD", STD: "MOVD",
        LDU: "MOVDU", STDU: "MOVDU",
-       MTSPR: "MOV", MFSPR: "MOV", // the width is ambiguous for SPRs
+       MTSPR: "MOVD", MFSPR: "MOVD", // the width is ambiguous for SPRs
        B:     "BR",
+       BL:    "CALL",
        CMPLD: "CMPU", CMPLW: "CMPWU",
        CMPD: "CMP", CMPW: "CMPW",
 }
index a9bd796c0d09b23e3ef151173b12efb52ce58782..24c745c8485693bf288ba90470f9f2789ac7d4c9 100644 (file)
@@ -1,3 +1,6 @@
+// DO NOT EDIT
+// generated by: ppc64map -fmt=decoder ../pp64.csv
+
 package ppc64asm
 
 const (
@@ -2686,7 +2689,7 @@ var (
        ap_SpReg_16_20_11_15       = &argField{Type: TypeSpReg, Shift: 0, BitFields: BitFields{{16, 5}, {11, 5}}}
        ap_ImmUnsigned_12_19       = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{12, 8}}}
        ap_ImmUnsigned_10_10       = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{10, 1}}}
-       ap_VecReg_31_31_6_10       = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{31, 1}, {6, 5}}}
+       ap_VecSReg_31_31_6_10      = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{31, 1}, {6, 5}}}
        ap_FPReg_6_10              = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{6, 5}}}
        ap_FPReg_16_20             = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{16, 5}}}
        ap_FPReg_11_15             = &argField{Type: TypeFPReg, Shift: 0, BitFields: BitFields{{11, 5}}}
@@ -2712,10 +2715,10 @@ var (
        ap_ImmUnsigned_21_22       = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{21, 2}}}
        ap_ImmUnsigned_11_12       = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 2}}}
        ap_ImmUnsigned_11_11       = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{11, 1}}}
-       ap_VecReg_30_30_16_20      = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{30, 1}, {16, 5}}}
-       ap_VecReg_29_29_11_15      = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{29, 1}, {11, 5}}}
+       ap_VecSReg_30_30_16_20     = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{30, 1}, {16, 5}}}
+       ap_VecSReg_29_29_11_15     = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{29, 1}, {11, 5}}}
        ap_ImmUnsigned_22_23       = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{22, 2}}}
-       ap_VecReg_28_28_21_25      = &argField{Type: TypeVecReg, Shift: 0, BitFields: BitFields{{28, 1}, {21, 5}}}
+       ap_VecSReg_28_28_21_25     = &argField{Type: TypeVecSReg, Shift: 0, BitFields: BitFields{{28, 1}, {21, 5}}}
        ap_CondRegField_29_31      = &argField{Type: TypeCondRegField, Shift: 0, BitFields: BitFields{{29, 3}}}
        ap_ImmUnsigned_7_10        = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{7, 4}}}
        ap_ImmUnsigned_9_10        = &argField{Type: TypeImmUnsigned, Shift: 0, BitFields: BitFields{{9, 2}}}
@@ -3276,15 +3279,15 @@ var instFormats = [...]instFormat{
        {MTSLE, 0xfc0007fe, 0x7c000126, 0x3dff801, // Move To Split Little Endian X-form (mtsle L)
                [5]*argField{ap_ImmUnsigned_10_10}},
        {MFVSRD, 0xfc0007fe, 0x7c000066, 0xf800, // Move From VSR Doubleword XX1-form (mfvsrd RA,XS)
-               [5]*argField{ap_Reg_11_15, ap_VecReg_31_31_6_10}},
+               [5]*argField{ap_Reg_11_15, ap_VecSReg_31_31_6_10}},
        {MFVSRWZ, 0xfc0007fe, 0x7c0000e6, 0xf800, // Move From VSR Word and Zero XX1-form (mfvsrwz RA,XS)
-               [5]*argField{ap_Reg_11_15, ap_VecReg_31_31_6_10}},
+               [5]*argField{ap_Reg_11_15, ap_VecSReg_31_31_6_10}},
        {MTVSRD, 0xfc0007fe, 0x7c000166, 0xf800, // Move To VSR Doubleword XX1-form (mtvsrd XT,RA)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}},
        {MTVSRWA, 0xfc0007fe, 0x7c0001a6, 0xf800, // Move To VSR Word Algebraic XX1-form (mtvsrwa XT,RA)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}},
        {MTVSRWZ, 0xfc0007fe, 0x7c0001e6, 0xf800, // Move To VSR Word and Zero XX1-form (mtvsrwz XT,RA)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15}},
        {MTOCRF, 0xfc1007fe, 0x7c100120, 0x801, // Move To One Condition Register Field XFX-form (mtocrf FXM,RS)
                [5]*argField{ap_ImmUnsigned_12_19, ap_Reg_6_10}},
        {MFOCRF, 0xfc1007fe, 0x7c100026, 0x801, // Move From One Condition Register Field XFX-form (mfocrf RT,FXM)
@@ -4122,313 +4125,313 @@ var instFormats = [...]instFormat{
        {DSCRI_, 0xfc0003ff, 0xec0000c5, 0x0, // DFP Shift Significand Right Immediate [Quad] Z22-form (dscri. FRT,FRA,SH)
                [5]*argField{ap_FPReg_6_10, ap_FPReg_11_15, ap_ImmUnsigned_16_21}},
        {LXSDX, 0xfc0007fe, 0x7c000498, 0x0, // Load VSX Scalar Doubleword Indexed XX1-form (lxsdx XT,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {LXSIWAX, 0xfc0007fe, 0x7c000098, 0x0, // Load VSX Scalar as Integer Word Algebraic Indexed XX1-form (lxsiwax XT,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {LXSIWZX, 0xfc0007fe, 0x7c000018, 0x0, // Load VSX Scalar as Integer Word and Zero Indexed XX1-form (lxsiwzx XT,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {LXSSPX, 0xfc0007fe, 0x7c000418, 0x0, // Load VSX Scalar Single-Precision Indexed XX1-form (lxsspx XT,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {LXVD2X, 0xfc0007fe, 0x7c000698, 0x0, // Load VSX Vector Doubleword*2 Indexed XX1-form (lxvd2x XT,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {LXVDSX, 0xfc0007fe, 0x7c000298, 0x0, // Load VSX Vector Doubleword & Splat Indexed XX1-form (lxvdsx XT,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {LXVW4X, 0xfc0007fe, 0x7c000618, 0x0, // Load VSX Vector Word*4 Indexed XX1-form (lxvw4x XT,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {STXSDX, 0xfc0007fe, 0x7c000598, 0x0, // Store VSX Scalar Doubleword Indexed XX1-form (stxsdx XS,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {STXSIWX, 0xfc0007fe, 0x7c000118, 0x0, // Store VSX Scalar as Integer Word Indexed XX1-form (stxsiwx XS,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {STXSSPX, 0xfc0007fe, 0x7c000518, 0x0, // Store VSX Scalar Single-Precision Indexed XX1-form (stxsspx XS,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {STXVD2X, 0xfc0007fe, 0x7c000798, 0x0, // Store VSX Vector Doubleword*2 Indexed XX1-form (stxvd2x XS,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {STXVW4X, 0xfc0007fe, 0x7c000718, 0x0, // Store VSX Vector Word*4 Indexed XX1-form (stxvw4x XS,RA,RB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {XSABSDP, 0xfc0007fc, 0xf0000564, 0x1f0000, // VSX Scalar Absolute Value Double-Precision XX2-form (xsabsdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSADDDP, 0xfc0007f8, 0xf0000100, 0x0, // VSX Scalar Add Double-Precision XX3-form (xsadddp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSADDSP, 0xfc0007f8, 0xf0000000, 0x0, // VSX Scalar Add Single-Precision XX3-form (xsaddsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSCMPODP, 0xfc0007f8, 0xf0000158, 0x600001, // VSX Scalar Compare Ordered Double-Precision XX3-form (xscmpodp BF,XA,XB)
-               [5]*argField{ap_CondRegField_6_8, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSCMPUDP, 0xfc0007f8, 0xf0000118, 0x600001, // VSX Scalar Compare Unordered Double-Precision XX3-form (xscmpudp BF,XA,XB)
-               [5]*argField{ap_CondRegField_6_8, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSCPSGNDP, 0xfc0007f8, 0xf0000580, 0x0, // VSX Scalar Copy Sign Double-Precision XX3-form (xscpsgndp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSCVDPSP, 0xfc0007fc, 0xf0000424, 0x1f0000, // VSX Scalar round Double-Precision to single-precision and Convert to Single-Precision format XX2-form (xscvdpsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSCVDPSPN, 0xfc0007fc, 0xf000042c, 0x1f0000, // VSX Scalar Convert Scalar Single-Precision to Vector Single-Precision format Non-signalling XX2-form (xscvdpspn XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSCVDPSXDS, 0xfc0007fc, 0xf0000560, 0x1f0000, // VSX Scalar truncate Double-Precision to integer and Convert to Signed Integer Doubleword format with Saturate XX2-form (xscvdpsxds XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSCVDPSXWS, 0xfc0007fc, 0xf0000160, 0x1f0000, // VSX Scalar truncate Double-Precision to integer and Convert to Signed Integer Word format with Saturate XX2-form (xscvdpsxws XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSCVDPUXDS, 0xfc0007fc, 0xf0000520, 0x1f0000, // VSX Scalar truncate Double-Precision integer and Convert to Unsigned Integer Doubleword format with Saturate XX2-form (xscvdpuxds XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSCVDPUXWS, 0xfc0007fc, 0xf0000120, 0x1f0000, // VSX Scalar truncate Double-Precision to integer and Convert to Unsigned Integer Word format with Saturate XX2-form (xscvdpuxws XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSCVSPDP, 0xfc0007fc, 0xf0000524, 0x1f0000, // VSX Scalar Convert Single-Precision to Double-Precision format XX2-form (xscvspdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSCVSPDPN, 0xfc0007fc, 0xf000052c, 0x1f0000, // VSX Scalar Convert Single-Precision to Double-Precision format Non-signalling XX2-form (xscvspdpn XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSCVSXDDP, 0xfc0007fc, 0xf00005e0, 0x1f0000, // VSX Scalar Convert Signed Integer Doubleword to floating-point format and round to Double-Precision format XX2-form (xscvsxddp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSCVSXDSP, 0xfc0007fc, 0xf00004e0, 0x1f0000, // VSX Scalar Convert Signed Integer Doubleword to floating-point format and round to Single-Precision XX2-form (xscvsxdsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSCVUXDDP, 0xfc0007fc, 0xf00005a0, 0x1f0000, // VSX Scalar Convert Unsigned Integer Doubleword to floating-point format and round to Double-Precision format XX2-form (xscvuxddp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSCVUXDSP, 0xfc0007fc, 0xf00004a0, 0x1f0000, // VSX Scalar Convert Unsigned Integer Doubleword to floating-point format and round to Single-Precision XX2-form (xscvuxdsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSDIVDP, 0xfc0007f8, 0xf00001c0, 0x0, // VSX Scalar Divide Double-Precision XX3-form (xsdivdp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSDIVSP, 0xfc0007f8, 0xf00000c0, 0x0, // VSX Scalar Divide Single-Precision XX3-form (xsdivsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSMADDADP, 0xfc0007f8, 0xf0000108, 0x0, // VSX Scalar Multiply-Add Double-Precision XX3-form (xsmaddadp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSMADDASP, 0xfc0007f8, 0xf0000008, 0x0, // VSX Scalar Multiply-Add Single-Precision XX3-form (xsmaddasp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSMAXDP, 0xfc0007f8, 0xf0000500, 0x0, // VSX Scalar Maximum Double-Precision XX3-form (xsmaxdp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSMINDP, 0xfc0007f8, 0xf0000540, 0x0, // VSX Scalar Minimum Double-Precision XX3-form (xsmindp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSMSUBADP, 0xfc0007f8, 0xf0000188, 0x0, // VSX Scalar Multiply-Subtract Double-Precision XX3-form (xsmsubadp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSMSUBASP, 0xfc0007f8, 0xf0000088, 0x0, // VSX Scalar Multiply-Subtract Single-Precision XX3-form (xsmsubasp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSMULDP, 0xfc0007f8, 0xf0000180, 0x0, // VSX Scalar Multiply Double-Precision XX3-form (xsmuldp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSMULSP, 0xfc0007f8, 0xf0000080, 0x0, // VSX Scalar Multiply Single-Precision XX3-form (xsmulsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSNABSDP, 0xfc0007fc, 0xf00005a4, 0x1f0000, // VSX Scalar Negative Absolute Value Double-Precision XX2-form (xsnabsdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSNEGDP, 0xfc0007fc, 0xf00005e4, 0x1f0000, // VSX Scalar Negate Double-Precision XX2-form (xsnegdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSNMADDADP, 0xfc0007f8, 0xf0000508, 0x0, // VSX Scalar Negative Multiply-Add Double-Precision XX3-form (xsnmaddadp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSNMADDASP, 0xfc0007f8, 0xf0000408, 0x0, // VSX Scalar Negative Multiply-Add Single-Precision XX3-form (xsnmaddasp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSNMSUBADP, 0xfc0007f8, 0xf0000588, 0x0, // VSX Scalar Negative Multiply-Subtract Double-Precision XX3-form (xsnmsubadp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSNMSUBASP, 0xfc0007f8, 0xf0000488, 0x0, // VSX Scalar Negative Multiply-Subtract Single-Precision XX3-form (xsnmsubasp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSRDPI, 0xfc0007fc, 0xf0000124, 0x1f0000, // VSX Scalar Round to Double-Precision Integer using round to Nearest Away XX2-form (xsrdpi XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSRDPIC, 0xfc0007fc, 0xf00001ac, 0x1f0000, // VSX Scalar Round to Double-Precision Integer exact using Current rounding mode XX2-form (xsrdpic XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSRDPIM, 0xfc0007fc, 0xf00001e4, 0x1f0000, // VSX Scalar Round to Double-Precision Integer using round toward -Infinity XX2-form (xsrdpim XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSRDPIP, 0xfc0007fc, 0xf00001a4, 0x1f0000, // VSX Scalar Round to Double-Precision Integer using round toward +Infinity XX2-form (xsrdpip XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSRDPIZ, 0xfc0007fc, 0xf0000164, 0x1f0000, // VSX Scalar Round to Double-Precision Integer using round toward Zero XX2-form (xsrdpiz XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSREDP, 0xfc0007fc, 0xf0000168, 0x1f0000, // VSX Scalar Reciprocal Estimate Double-Precision XX2-form (xsredp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSRESP, 0xfc0007fc, 0xf0000068, 0x1f0000, // VSX Scalar Reciprocal Estimate Single-Precision XX2-form (xsresp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSRSP, 0xfc0007fc, 0xf0000464, 0x1f0000, // VSX Scalar Round to Single-Precision XX2-form (xsrsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSRSQRTEDP, 0xfc0007fc, 0xf0000128, 0x1f0000, // VSX Scalar Reciprocal Square Root Estimate Double-Precision XX2-form (xsrsqrtedp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSRSQRTESP, 0xfc0007fc, 0xf0000028, 0x1f0000, // VSX Scalar Reciprocal Square Root Estimate Single-Precision XX2-form (xsrsqrtesp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSSQRTDP, 0xfc0007fc, 0xf000012c, 0x1f0000, // VSX Scalar Square Root Double-Precision XX2-form (xssqrtdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSSQRTSP, 0xfc0007fc, 0xf000002c, 0x1f0000, // VSX Scalar Square Root Single-Precision XX-form (xssqrtsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XSSUBDP, 0xfc0007f8, 0xf0000140, 0x0, // VSX Scalar Subtract Double-Precision XX3-form (xssubdp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSSUBSP, 0xfc0007f8, 0xf0000040, 0x0, // VSX Scalar Subtract Single-Precision XX3-form (xssubsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSTDIVDP, 0xfc0007f8, 0xf00001e8, 0x600001, // VSX Scalar Test for software Divide Double-Precision XX3-form (xstdivdp BF,XA,XB)
-               [5]*argField{ap_CondRegField_6_8, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XSTSQRTDP, 0xfc0007fc, 0xf00001a8, 0x7f0001, // VSX Scalar Test for software Square Root Double-Precision XX2-form (xstsqrtdp BF,XB)
-               [5]*argField{ap_CondRegField_6_8, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}},
        {XVABSDP, 0xfc0007fc, 0xf0000764, 0x1f0000, // VSX Vector Absolute Value Double-Precision XX2-form (xvabsdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVABSSP, 0xfc0007fc, 0xf0000664, 0x1f0000, // VSX Vector Absolute Value Single-Precision XX2-form (xvabssp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVADDDP, 0xfc0007f8, 0xf0000300, 0x0, // VSX Vector Add Double-Precision XX3-form (xvadddp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVADDSP, 0xfc0007f8, 0xf0000200, 0x0, // VSX Vector Add Single-Precision XX3-form (xvaddsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPEQDP, 0xfc0007f8, 0xf0000318, 0x0, // VSX Vector Compare Equal To Double-Precision [ & Record ] XX3-form (xvcmpeqdp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPEQDP_, 0xfc0007f8, 0xf0000718, 0x0, // VSX Vector Compare Equal To Double-Precision [ & Record ] XX3-form (xvcmpeqdp. XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPEQSP, 0xfc0007f8, 0xf0000218, 0x0, // VSX Vector Compare Equal To Single-Precision [ & Record ] XX3-form (xvcmpeqsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPEQSP_, 0xfc0007f8, 0xf0000618, 0x0, // VSX Vector Compare Equal To Single-Precision [ & Record ] XX3-form (xvcmpeqsp. XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPGEDP, 0xfc0007f8, 0xf0000398, 0x0, // VSX Vector Compare Greater Than or Equal To Double-Precision [ & Record ] XX3-form (xvcmpgedp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPGEDP_, 0xfc0007f8, 0xf0000798, 0x0, // VSX Vector Compare Greater Than or Equal To Double-Precision [ & Record ] XX3-form (xvcmpgedp. XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPGESP, 0xfc0007f8, 0xf0000298, 0x0, // VSX Vector Compare Greater Than or Equal To Single-Precision [ & record CR6 ] XX3-form (xvcmpgesp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPGESP_, 0xfc0007f8, 0xf0000698, 0x0, // VSX Vector Compare Greater Than or Equal To Single-Precision [ & record CR6 ] XX3-form (xvcmpgesp. XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPGTDP, 0xfc0007f8, 0xf0000358, 0x0, // VSX Vector Compare Greater Than Double-Precision [ & record CR6 ] XX3-form (xvcmpgtdp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPGTDP_, 0xfc0007f8, 0xf0000758, 0x0, // VSX Vector Compare Greater Than Double-Precision [ & record CR6 ] XX3-form (xvcmpgtdp. XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPGTSP, 0xfc0007f8, 0xf0000258, 0x0, // VSX Vector Compare Greater Than Single-Precision [ & record CR6 ] XX3-form (xvcmpgtsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCMPGTSP_, 0xfc0007f8, 0xf0000658, 0x0, // VSX Vector Compare Greater Than Single-Precision [ & record CR6 ] XX3-form (xvcmpgtsp. XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCPSGNDP, 0xfc0007f8, 0xf0000780, 0x0, // VSX Vector Copy Sign Double-Precision XX3-form (xvcpsgndp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCPSGNSP, 0xfc0007f8, 0xf0000680, 0x0, // VSX Vector Copy Sign Single-Precision XX3-form (xvcpsgnsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVCVDPSP, 0xfc0007fc, 0xf0000624, 0x1f0000, // VSX Vector round Double-Precision to single-precision and Convert to Single-Precision format XX2-form (xvcvdpsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVDPSXDS, 0xfc0007fc, 0xf0000760, 0x1f0000, // VSX Vector truncate Double-Precision to integer and Convert to Signed Integer Doubleword format with Saturate XX2-form (xvcvdpsxds XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVDPSXWS, 0xfc0007fc, 0xf0000360, 0x1f0000, // VSX Vector truncate Double-Precision to integer and Convert to Signed Integer Word format with Saturate XX2-form (xvcvdpsxws XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVDPUXDS, 0xfc0007fc, 0xf0000720, 0x1f0000, // VSX Vector truncate Double-Precision to integer and Convert to Unsigned Integer Doubleword format with Saturate XX2-form (xvcvdpuxds XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVDPUXWS, 0xfc0007fc, 0xf0000320, 0x1f0000, // VSX Vector truncate Double-Precision to integer and Convert to Unsigned Integer Word format with Saturate XX2-form (xvcvdpuxws XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVSPDP, 0xfc0007fc, 0xf0000724, 0x1f0000, // VSX Vector Convert Single-Precision to Double-Precision format XX2-form (xvcvspdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVSPSXDS, 0xfc0007fc, 0xf0000660, 0x1f0000, // VSX Vector truncate Single-Precision to integer and Convert to Signed Integer Doubleword format with Saturate XX2-form (xvcvspsxds XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVSPSXWS, 0xfc0007fc, 0xf0000260, 0x1f0000, // VSX Vector truncate Single-Precision to integer and Convert to Signed Integer Word format with Saturate XX2-form (xvcvspsxws XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVSPUXDS, 0xfc0007fc, 0xf0000620, 0x1f0000, // VSX Vector truncate Single-Precision to integer and Convert to Unsigned Integer Doubleword format with Saturate XX2-form (xvcvspuxds XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVSPUXWS, 0xfc0007fc, 0xf0000220, 0x1f0000, // VSX Vector truncate Single-Precision to integer and Convert to Unsigned Integer Word format with Saturate XX2-form (xvcvspuxws XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVSXDDP, 0xfc0007fc, 0xf00007e0, 0x1f0000, // VSX Vector Convert and round Signed Integer Doubleword to Double-Precision format XX2-form (xvcvsxddp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVSXDSP, 0xfc0007fc, 0xf00006e0, 0x1f0000, // VSX Vector Convert and round Signed Integer Doubleword to Single-Precision format XX2-form (xvcvsxdsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVSXWDP, 0xfc0007fc, 0xf00003e0, 0x1f0000, // VSX Vector Convert Signed Integer Word to Double-Precision format XX2-form (xvcvsxwdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVSXWSP, 0xfc0007fc, 0xf00002e0, 0x1f0000, // VSX Vector Convert and round Signed Integer Word to Single-Precision format XX2-form (xvcvsxwsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVUXDDP, 0xfc0007fc, 0xf00007a0, 0x1f0000, // VSX Vector Convert and round Unsigned Integer Doubleword to Double-Precision format XX2-form (xvcvuxddp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVUXDSP, 0xfc0007fc, 0xf00006a0, 0x1f0000, // VSX Vector Convert and round Unsigned Integer Doubleword to Single-Precision format XX2-form (xvcvuxdsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVUXWDP, 0xfc0007fc, 0xf00003a0, 0x1f0000, // VSX Vector Convert and round Unsigned Integer Word to Double-Precision format XX2-form (xvcvuxwdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVCVUXWSP, 0xfc0007fc, 0xf00002a0, 0x1f0000, // VSX Vector Convert and round Unsigned Integer Word to Single-Precision format XX2-form (xvcvuxwsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVDIVDP, 0xfc0007f8, 0xf00003c0, 0x0, // VSX Vector Divide Double-Precision XX3-form (xvdivdp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVDIVSP, 0xfc0007f8, 0xf00002c0, 0x0, // VSX Vector Divide Single-Precision XX3-form (xvdivsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVMADDADP, 0xfc0007f8, 0xf0000308, 0x0, // VSX Vector Multiply-Add Double-Precision XX3-form (xvmaddadp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVMADDASP, 0xfc0007f8, 0xf0000208, 0x0, // VSX Vector Multiply-Add Single-Precision XX3-form (xvmaddasp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVMAXDP, 0xfc0007f8, 0xf0000700, 0x0, // VSX Vector Maximum Double-Precision XX3-form (xvmaxdp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVMAXSP, 0xfc0007f8, 0xf0000600, 0x0, // VSX Vector Maximum Single-Precision XX3-form (xvmaxsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVMINDP, 0xfc0007f8, 0xf0000740, 0x0, // VSX Vector Minimum Double-Precision XX3-form (xvmindp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVMINSP, 0xfc0007f8, 0xf0000640, 0x0, // VSX Vector Minimum Single-Precision XX3-form (xvminsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVMSUBADP, 0xfc0007f8, 0xf0000388, 0x0, // VSX Vector Multiply-Subtract Double-Precision XX3-form (xvmsubadp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVMSUBASP, 0xfc0007f8, 0xf0000288, 0x0, // VSX Vector Multiply-Subtract Single-Precision XX3-form (xvmsubasp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVMULDP, 0xfc0007f8, 0xf0000380, 0x0, // VSX Vector Multiply Double-Precision XX3-form (xvmuldp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVMULSP, 0xfc0007f8, 0xf0000280, 0x0, // VSX Vector Multiply Single-Precision XX3-form (xvmulsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVNABSDP, 0xfc0007fc, 0xf00007a4, 0x1f0000, // VSX Vector Negative Absolute Value Double-Precision XX2-form (xvnabsdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVNABSSP, 0xfc0007fc, 0xf00006a4, 0x1f0000, // VSX Vector Negative Absolute Value Single-Precision XX2-form (xvnabssp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVNEGDP, 0xfc0007fc, 0xf00007e4, 0x1f0000, // VSX Vector Negate Double-Precision XX2-form (xvnegdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVNEGSP, 0xfc0007fc, 0xf00006e4, 0x1f0000, // VSX Vector Negate Single-Precision XX2-form (xvnegsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVNMADDADP, 0xfc0007f8, 0xf0000708, 0x0, // VSX Vector Negative Multiply-Add Double-Precision XX3-form (xvnmaddadp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVNMADDASP, 0xfc0007f8, 0xf0000608, 0x0, // VSX Vector Negative Multiply-Add Single-Precision XX3-form (xvnmaddasp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVNMSUBADP, 0xfc0007f8, 0xf0000788, 0x0, // VSX Vector Negative Multiply-Subtract Double-Precision XX3-form (xvnmsubadp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVNMSUBASP, 0xfc0007f8, 0xf0000688, 0x0, // VSX Vector Negative Multiply-Subtract Single-Precision XX3-form (xvnmsubasp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVRDPI, 0xfc0007fc, 0xf0000324, 0x1f0000, // VSX Vector Round to Double-Precision Integer using round to Nearest Away XX2-form (xvrdpi XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRDPIC, 0xfc0007fc, 0xf00003ac, 0x1f0000, // VSX Vector Round to Double-Precision Integer Exact using Current rounding mode XX2-form (xvrdpic XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRDPIM, 0xfc0007fc, 0xf00003e4, 0x1f0000, // VSX Vector Round to Double-Precision Integer using round toward -Infinity XX2-form (xvrdpim XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRDPIP, 0xfc0007fc, 0xf00003a4, 0x1f0000, // VSX Vector Round to Double-Precision Integer using round toward +Infinity XX2-form (xvrdpip XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRDPIZ, 0xfc0007fc, 0xf0000364, 0x1f0000, // VSX Vector Round to Double-Precision Integer using round toward Zero XX2-form (xvrdpiz XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVREDP, 0xfc0007fc, 0xf0000368, 0x1f0000, // VSX Vector Reciprocal Estimate Double-Precision XX2-form (xvredp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRESP, 0xfc0007fc, 0xf0000268, 0x1f0000, // VSX Vector Reciprocal Estimate Single-Precision XX2-form (xvresp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRSPI, 0xfc0007fc, 0xf0000224, 0x1f0000, // VSX Vector Round to Single-Precision Integer using round to Nearest Away XX2-form (xvrspi XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRSPIC, 0xfc0007fc, 0xf00002ac, 0x1f0000, // VSX Vector Round to Single-Precision Integer Exact using Current rounding mode XX2-form (xvrspic XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRSPIM, 0xfc0007fc, 0xf00002e4, 0x1f0000, // VSX Vector Round to Single-Precision Integer using round toward -Infinity XX2-form (xvrspim XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRSPIP, 0xfc0007fc, 0xf00002a4, 0x1f0000, // VSX Vector Round to Single-Precision Integer using round toward +Infinity XX2-form (xvrspip XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRSPIZ, 0xfc0007fc, 0xf0000264, 0x1f0000, // VSX Vector Round to Single-Precision Integer using round toward Zero XX2-form (xvrspiz XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRSQRTEDP, 0xfc0007fc, 0xf0000328, 0x1f0000, // VSX Vector Reciprocal Square Root Estimate Double-Precision XX2-form (xvrsqrtedp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVRSQRTESP, 0xfc0007fc, 0xf0000228, 0x1f0000, // VSX Vector Reciprocal Square Root Estimate Single-Precision XX2-form (xvrsqrtesp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVSQRTDP, 0xfc0007fc, 0xf000032c, 0x1f0000, // VSX Vector Square Root Double-Precision XX2-form (xvsqrtdp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVSQRTSP, 0xfc0007fc, 0xf000022c, 0x1f0000, // VSX Vector Square Root Single-Precision XX2-form (xvsqrtsp XT,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20}},
        {XVSUBDP, 0xfc0007f8, 0xf0000340, 0x0, // VSX Vector Subtract Double-Precision XX3-form (xvsubdp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVSUBSP, 0xfc0007f8, 0xf0000240, 0x0, // VSX Vector Subtract Single-Precision XX3-form (xvsubsp XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVTDIVDP, 0xfc0007f8, 0xf00003e8, 0x600001, // VSX Vector Test for software Divide Double-Precision XX3-form (xvtdivdp BF,XA,XB)
-               [5]*argField{ap_CondRegField_6_8, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVTDIVSP, 0xfc0007f8, 0xf00002e8, 0x600001, // VSX Vector Test for software Divide Single-Precision XX3-form (xvtdivsp BF,XA,XB)
-               [5]*argField{ap_CondRegField_6_8, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_CondRegField_6_8, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XVTSQRTDP, 0xfc0007fc, 0xf00003a8, 0x7f0001, // VSX Vector Test for software Square Root Double-Precision XX2-form (xvtsqrtdp BF,XB)
-               [5]*argField{ap_CondRegField_6_8, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}},
        {XVTSQRTSP, 0xfc0007fc, 0xf00002a8, 0x7f0001, // VSX Vector Test for software Square Root Single-Precision XX2-form (xvtsqrtsp BF,XB)
-               [5]*argField{ap_CondRegField_6_8, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_CondRegField_6_8, ap_VecSReg_30_30_16_20}},
        {XXLAND, 0xfc0007f8, 0xf0000410, 0x0, // VSX Logical AND XX3-form (xxland XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XXLANDC, 0xfc0007f8, 0xf0000450, 0x0, // VSX Logical AND with Complement XX3-form (xxlandc XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XXLEQV, 0xfc0007f8, 0xf00005d0, 0x0, // VSX Logical Equivalence XX3-form (xxleqv XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XXLNAND, 0xfc0007f8, 0xf0000590, 0x0, // VSX Logical NAND XX3-form (xxlnand XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XXLORC, 0xfc0007f8, 0xf0000550, 0x0, // VSX Logical OR with Complement XX3-form (xxlorc XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XXLNOR, 0xfc0007f8, 0xf0000510, 0x0, // VSX Logical NOR XX3-form (xxlnor XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XXLOR, 0xfc0007f8, 0xf0000490, 0x0, // VSX Logical OR XX3-form (xxlor XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XXLXOR, 0xfc0007f8, 0xf00004d0, 0x0, // VSX Logical XOR XX3-form (xxlxor XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XXMRGHW, 0xfc0007f8, 0xf0000090, 0x0, // VSX Merge High Word XX3-form (xxmrghw XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XXMRGLW, 0xfc0007f8, 0xf0000190, 0x0, // VSX Merge Low Word XX3-form (xxmrglw XT,XA,XB)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20}},
        {XXPERMDI, 0xfc0004f8, 0xf0000050, 0x0, // VSX Permute Doubleword Immediate XX3-form (xxpermdi XT,XA,XB,DM)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20, ap_ImmUnsigned_22_23}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_22_23}},
        {XXSEL, 0xfc000030, 0xf0000030, 0x0, // VSX Select XX4-form (xxsel XT,XA,XB,XC)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20, ap_VecReg_28_28_21_25}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20, ap_VecSReg_28_28_21_25}},
        {XXSLDWI, 0xfc0004f8, 0xf0000010, 0x0, // VSX Shift Left Double by Word Immediate XX3-form (xxsldwi XT,XA,XB,SHW)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_29_29_11_15, ap_VecReg_30_30_16_20, ap_ImmUnsigned_22_23}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_29_29_11_15, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_22_23}},
        {XXSPLTW, 0xfc0007fc, 0xf0000290, 0x1c0000, // VSX Splat Word XX2-form (xxspltw XT,XB,UIM)
-               [5]*argField{ap_VecReg_31_31_6_10, ap_VecReg_30_30_16_20, ap_ImmUnsigned_14_15}},
+               [5]*argField{ap_VecSReg_31_31_6_10, ap_VecSReg_30_30_16_20, ap_ImmUnsigned_14_15}},
        {BRINC, 0xfc0007ff, 0x1000020f, 0x0, // Bit Reversed Increment EVX-form (brinc RT,RA,RB)
                [5]*argField{ap_Reg_6_10, ap_Reg_11_15, ap_Reg_16_20}},
        {EVABS, 0xfc0007ff, 0x10000208, 0xf800, // Vector Absolute Value EVX-form (evabs RT,RA)
index 956d5cdb737d8dbcb36c864d48fc878596bde5d5..b4e5db2cb2c3f3121387876bfacea630c152c94f 100644 (file)
@@ -13,7 +13,7 @@ c61bb730|     gnu     lfsu f16,-18640(r27)
 a9a912c1|      gnu     lha r13,4801(r9)
 ebb24fd1|      gnu     ldu r29,20432(r18)
 b1ce0612|      gnu     sth r14,1554(r14)
-f3d74322|      gnu     xvcvdpuxws v30,v40
+f3c04322|      gnu     xvcvdpuxws vs30,vs40
 945c62a2|      gnu     stwu r2,25250(r28)
 9c8156e3|      gnu     stbu r4,22243(r1)
 f91b9c7a|      gnu     stq r8,-25480(r27)