imports: []string{"math/bits"},
tests: linuxMIPSTests,
},
+ {
+ arch: "mips64",
+ os: "linux",
+ tests: linuxMIPS64Tests,
+ },
{
arch: "ppc64le",
os: "linux",
`,
pos: []string{"TEXT\t.*, [$]-8-8"},
},
+ {
+ // check that we don't emit comparisons for constant shifts;
+ // the function is nosplit so the stack-check prologue's CMP
+ // doesn't trip the neg match below
+ fn: `
+//go:nosplit
+ func $(x int) int {
+ return x << 17
+ }
+ `,
+ pos: []string{"LSL\t\\$17"},
+ neg: []string{"CMP"},
+ },
}
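The CMP that this test rules out is the range guard that variable shifts need: Go defines an oversized shift count to produce 0 (or all copies of the sign bit, for signed right shifts), while the ARM64 and MIPS64 shift instructions read only the low six bits of the count. With a constant count such as 17 the guard is provably dead, so the shift should lower to a bare LSL. A small illustration of the semantic gap; the masking below models the hardware behaviour and is not part of this change:

package main

import "fmt"

func main() {
	var x uint64 = 1
	var s uint = 64

	fmt.Println(x << s)        // 0: Go zeroes oversized shifts
	fmt.Println(x << (s & 63)) // 1: what a bare LSL would compute, since
	                           // the instruction reads only the low six
	                           // bits of the shift count
}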
var linuxMIPSTests = []*asmTest{
},
}
+var linuxMIPS64Tests = []*asmTest{
+ {
+ // check that we don't emit comparisons for constant shifts
+ fn: `
+ func $(x int) int {
+ return x << 17
+ }
+ `,
+ pos: []string{"SLLV\t\\$17"},
+ neg: []string{"SGT"},
+ },
+}
+
var linuxPPC64LETests = []*asmTest{
// Fused multiply-add/sub instructions.
{
(NeqB x y) -> (XOR x y)
(Not x) -> (XOR (MOVDconst [1]) x)
-// constant shifts
-(Lsh64x64 x (MOVDconst [c])) && uint64(c) < 64 -> (SLLconst x [c])
-(Rsh64x64 x (MOVDconst [c])) && uint64(c) < 64 -> (SRAconst x [c])
-(Rsh64Ux64 x (MOVDconst [c])) && uint64(c) < 64 -> (SRLconst x [c])
-(Lsh32x64 x (MOVDconst [c])) && uint64(c) < 32 -> (SLLconst x [c])
-(Rsh32x64 x (MOVDconst [c])) && uint64(c) < 32 -> (SRAconst (SignExt32to64 x) [c])
-(Rsh32Ux64 x (MOVDconst [c])) && uint64(c) < 32 -> (SRLconst (ZeroExt32to64 x) [c])
-(Lsh16x64 x (MOVDconst [c])) && uint64(c) < 16 -> (SLLconst x [c])
-(Rsh16x64 x (MOVDconst [c])) && uint64(c) < 16 -> (SRAconst (SignExt16to64 x) [c])
-(Rsh16Ux64 x (MOVDconst [c])) && uint64(c) < 16 -> (SRLconst (ZeroExt16to64 x) [c])
-(Lsh8x64 x (MOVDconst [c])) && uint64(c) < 8 -> (SLLconst x [c])
-(Rsh8x64 x (MOVDconst [c])) && uint64(c) < 8 -> (SRAconst (SignExt8to64 x) [c])
-(Rsh8Ux64 x (MOVDconst [c])) && uint64(c) < 8 -> (SRLconst (ZeroExt8to64 x) [c])
-
-// large constant shifts
-(Lsh64x64 _ (MOVDconst [c])) && uint64(c) >= 64 -> (MOVDconst [0])
-(Rsh64Ux64 _ (MOVDconst [c])) && uint64(c) >= 64 -> (MOVDconst [0])
-(Lsh32x64 _ (MOVDconst [c])) && uint64(c) >= 32 -> (MOVDconst [0])
-(Rsh32Ux64 _ (MOVDconst [c])) && uint64(c) >= 32 -> (MOVDconst [0])
-(Lsh16x64 _ (MOVDconst [c])) && uint64(c) >= 16 -> (MOVDconst [0])
-(Rsh16Ux64 _ (MOVDconst [c])) && uint64(c) >= 16 -> (MOVDconst [0])
-(Lsh8x64 _ (MOVDconst [c])) && uint64(c) >= 8 -> (MOVDconst [0])
-(Rsh8Ux64 _ (MOVDconst [c])) && uint64(c) >= 8 -> (MOVDconst [0])
-
-// large constant signed right shift, we leave the sign bit
-(Rsh64x64 x (MOVDconst [c])) && uint64(c) >= 64 -> (SRAconst x [63])
-(Rsh32x64 x (MOVDconst [c])) && uint64(c) >= 32 -> (SRAconst (SignExt32to64 x) [63])
-(Rsh16x64 x (MOVDconst [c])) && uint64(c) >= 16 -> (SRAconst (SignExt16to64 x) [63])
-(Rsh8x64 x (MOVDconst [c])) && uint64(c) >= 8 -> (SRAconst (SignExt8to64 x) [63])
-
// shifts
// hardware instruction uses only the low 6 bits of the shift
// we compare to 64 to ensure Go semantics for large shifts
-(Lsh64x64 <t> x y) -> (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
-(Lsh64x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
-(Lsh64x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
-(Lsh64x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
-
-(Lsh32x64 <t> x y) -> (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
-(Lsh32x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
-(Lsh32x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
-(Lsh32x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
-
-(Lsh16x64 <t> x y) -> (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
-(Lsh16x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
-(Lsh16x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
-(Lsh16x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
-
-(Lsh8x64 <t> x y) -> (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
-(Lsh8x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
-(Lsh8x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
-(Lsh8x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
-
-(Rsh64Ux64 <t> x y) -> (CSELULT (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
-(Rsh64Ux32 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
-(Rsh64Ux16 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
-(Rsh64Ux8 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
-
-(Rsh32Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
-(Rsh32Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
-(Rsh32Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
-(Rsh32Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
-
-(Rsh16Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
-(Rsh16Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
-(Rsh16Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
-(Rsh16Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
-
-(Rsh8Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
-(Rsh8Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
-(Rsh8Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
-(Rsh8Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
-
-(Rsh64x64 x y) -> (SRA x (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
-(Rsh64x32 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
-(Rsh64x16 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
-(Rsh64x8 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
-
-(Rsh32x64 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
-(Rsh32x32 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
-(Rsh32x16 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
-(Rsh32x8 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
-
-(Rsh16x64 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
-(Rsh16x32 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
-(Rsh16x16 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
-(Rsh16x8 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
-
-(Rsh8x64 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
-(Rsh8x32 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
-(Rsh8x16 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
-(Rsh8x8 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
+(Lsh64x64 <t> x y) -> (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
+(Lsh64x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+(Lsh64x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+(Lsh64x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+
+(Lsh32x64 <t> x y) -> (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
+(Lsh32x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+(Lsh32x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+(Lsh32x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+
+(Lsh16x64 <t> x y) -> (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
+(Lsh16x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+(Lsh16x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+(Lsh16x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+
+(Lsh8x64 <t> x y) -> (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
+(Lsh8x32 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+(Lsh8x16 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+(Lsh8x8 <t> x y) -> (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+
+(Rsh64Ux64 <t> x y) -> (CSELULT (SRL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
+(Rsh64Ux32 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+(Rsh64Ux16 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+(Rsh64Ux8 <t> x y) -> (CSELULT (SRL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+
+(Rsh32Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
+(Rsh32Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+(Rsh32Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+(Rsh32Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+
+(Rsh16Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
+(Rsh16Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+(Rsh16Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+(Rsh16Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+
+(Rsh8Ux64 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
+(Rsh8Ux32 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+(Rsh8Ux16 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+(Rsh8Ux8 <t> x y) -> (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+
+(Rsh64x64 x y) -> (SRA x (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
+(Rsh64x32 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
+(Rsh64x16 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
+(Rsh64x8 x y) -> (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
+
+(Rsh32x64 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
+(Rsh32x32 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
+(Rsh32x16 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
+(Rsh32x8 x y) -> (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
+
+(Rsh16x64 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
+(Rsh16x32 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
+(Rsh16x16 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
+(Rsh16x8 x y) -> (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
+
+(Rsh8x64 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
+(Rsh8x32 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
+(Rsh8x16 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
+(Rsh8x8 x y) -> (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
// constants
(Const64 [val]) -> (MOVDconst [val])
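The rewritten rules above now materialize the zero and the clamp value 63 directly as MOVDconst machine ops rather than generic Const64 values, matching the lowering rule just above; with the dedicated constant-shift rules deleted, a constant count is expected to fold away at the machine level, which is what the new LSL test checks. A minimal Go model of the guarded left shift that (Lsh64x64 <t> x y) builds, assuming CSELULT selects its first operand when the CMPconst flags report unsigned less-than:

// Go model of (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y)).
func lsh64x64(x, y uint64) uint64 {
	shifted := x << (y & 63) // SLL reads only the low 6 bits of y
	if y < 64 {              // CMPconst [64] y, then CSELULT on the flags
		return shifted
	}
	return 0 // MOVDconst <t> [0]
}

The signed right shifts clamp the count to 63 instead of selecting zero, so oversized counts still smear the sign bit across the result.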
// shifts
// hardware instruction uses only the low 6 bits of the shift
// we compare to 64 to ensure Go semantics for large shifts
-(Lsh64x64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
-(Lsh64x32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
-(Lsh64x16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
-(Lsh64x8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
-
-(Lsh32x64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
-(Lsh32x32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
-(Lsh32x16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
-(Lsh32x8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
-
-(Lsh16x64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
-(Lsh16x32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
-(Lsh16x16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
-(Lsh16x8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
-
-(Lsh8x64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
-(Lsh8x32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
-(Lsh8x16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
-(Lsh8x8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
-
-(Rsh64Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> x y))
-(Rsh64Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
-(Rsh64Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
-(Rsh64Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
-
-(Rsh32Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
-(Rsh32Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
-(Rsh32Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
-(Rsh32Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
-
-(Rsh16Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
-(Rsh16Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
-(Rsh16Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
-(Rsh16Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
-
-(Rsh8Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
-(Rsh8Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
-(Rsh8Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
-(Rsh8Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
-
-(Rsh64x64 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
-(Rsh64x32 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
-(Rsh64x16 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
-(Rsh64x8 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
-
-(Rsh32x64 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
-(Rsh32x32 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
-(Rsh32x16 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
-(Rsh32x8 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
-
-(Rsh16x64 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
-(Rsh16x32 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
-(Rsh16x16 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
-(Rsh16x8 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
-
-(Rsh8x64 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
-(Rsh8x32 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
-(Rsh8x16 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
-(Rsh8x8 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
+(Lsh64x64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
+(Lsh64x32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
+(Lsh64x16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
+(Lsh64x8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
+
+(Lsh32x64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
+(Lsh32x32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
+(Lsh32x16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
+(Lsh32x8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
+
+(Lsh16x64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
+(Lsh16x32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
+(Lsh16x16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
+(Lsh16x8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
+
+(Lsh8x64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
+(Lsh8x32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
+(Lsh8x16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
+(Lsh8x8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
+
+(Rsh64Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
+(Rsh64Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
+(Rsh64Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
+(Rsh64Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
+
+(Rsh32Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
+(Rsh32Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
+(Rsh32Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
+(Rsh32Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
+
+(Rsh16Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
+(Rsh16Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
+(Rsh16Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
+(Rsh16Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
+
+(Rsh8Ux64 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
+(Rsh8Ux32 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
+(Rsh8Ux16 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
+(Rsh8Ux8 <t> x y) -> (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
+
+(Rsh64x64 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
+(Rsh64x32 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
+(Rsh64x16 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
+(Rsh64x8 <t> x y) -> (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
+
+(Rsh32x64 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
+(Rsh32x32 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
+(Rsh32x16 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
+(Rsh32x8 <t> x y) -> (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
+
+(Rsh16x64 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
+(Rsh16x32 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
+(Rsh16x16 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
+(Rsh16x8 <t> x y) -> (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
+
+(Rsh8x64 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
+(Rsh8x32 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
+(Rsh8x16 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
+(Rsh8x8 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
// unary ops
(Neg64 x) -> (NEGV x)
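MIPS64 has no conditional select, so the same range guard is built branchlessly: SGTU (MOVVconst [64]) y yields 1 when the count is in range, NEGV turns that into an all-ones mask, and the AND zeroes the shift result exactly when Go semantics require it. This is also why the new mips64 test asserts the absence of SGT for a constant count. A Go model of (Lsh64x64 <t> x y), with an explicit branch standing in for the SGTU result:

// Go model of (AND (NEGV (SGTU (MOVVconst [64]) y)) (SLLV x y)).
func lsh64x64MIPS(x, y uint64) uint64 {
	var inRange uint64
	if 64 > y { // SGTU (MOVVconst [64]) y: 1 when 64 > y, unsigned
		inRange = 1
	}
	mask := -inRange              // NEGV: 0 stays 0, 1 becomes all ones
	return mask & (x << (y & 63)) // SLLV reads only the low 6 bits of y
}

The signed right shifts instead OR the count with the mask, forcing an out-of-range count to all ones (effectively 63 after the hardware truncates it), so the sign bit is preserved.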
_ = typ
// match: (Lsh16x16 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Lsh16x32 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
func rewriteValueARM64_OpLsh16x64_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh16x64 x (MOVDconst [c]))
- // cond: uint64(c) < 16
- // result: (SLLconst x [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 16) {
- break
- }
- v.reset(OpARM64SLLconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Lsh16x64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 16
- // result: (MOVDconst [0])
- for {
- _ = v.Args[1]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 16) {
- break
- }
- v.reset(OpARM64MOVDconst)
- v.AuxInt = 0
- return true
- }
// match: (Lsh16x64 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
+ // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v1.AuxInt = 0
v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Lsh16x8 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Lsh32x16 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Lsh32x32 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
func rewriteValueARM64_OpLsh32x64_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh32x64 x (MOVDconst [c]))
- // cond: uint64(c) < 32
- // result: (SLLconst x [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 32) {
- break
- }
- v.reset(OpARM64SLLconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Lsh32x64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 32
- // result: (MOVDconst [0])
- for {
- _ = v.Args[1]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 32) {
- break
- }
- v.reset(OpARM64MOVDconst)
- v.AuxInt = 0
- return true
- }
// match: (Lsh32x64 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
+ // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v1.AuxInt = 0
v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Lsh32x8 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Lsh64x16 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Lsh64x32 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
func rewriteValueARM64_OpLsh64x64_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh64x64 x (MOVDconst [c]))
- // cond: uint64(c) < 64
- // result: (SLLconst x [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 64) {
- break
- }
- v.reset(OpARM64SLLconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Lsh64x64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 64
- // result: (MOVDconst [0])
- for {
- _ = v.Args[1]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 64) {
- break
- }
- v.reset(OpARM64MOVDconst)
- v.AuxInt = 0
- return true
- }
// match: (Lsh64x64 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
+ // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v1.AuxInt = 0
v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Lsh64x8 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Lsh8x16 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Lsh8x32 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
func rewriteValueARM64_OpLsh8x64_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Lsh8x64 x (MOVDconst [c]))
- // cond: uint64(c) < 8
- // result: (SLLconst x [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 8) {
- break
- }
- v.reset(OpARM64SLLconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Lsh8x64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 8
- // result: (MOVDconst [0])
- for {
- _ = v.Args[1]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 8) {
- break
- }
- v.reset(OpARM64MOVDconst)
- v.AuxInt = 0
- return true
- }
// match: (Lsh8x64 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
+ // result: (CSELULT (SLL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v1.AuxInt = 0
v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Lsh8x8 <t> x y)
// cond:
- // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+ // result: (CSELULT (SLL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh16Ux16 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+ // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
- v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh16Ux32 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+ // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
- v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh16Ux64 x (MOVDconst [c]))
- // cond: uint64(c) < 16
- // result: (SRLconst (ZeroExt16to64 x) [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 16) {
- break
- }
- v.reset(OpARM64SRLconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
- // match: (Rsh16Ux64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 16
- // result: (MOVDconst [0])
- for {
- _ = v.Args[1]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 16) {
- break
- }
- v.reset(OpARM64MOVDconst)
- v.AuxInt = 0
- return true
- }
// match: (Rsh16Ux64 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
+ // result: (CSELULT (SRL <t> (ZeroExt16to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh16Ux8 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+ // result: (CSELULT (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
- v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh16x16 x y)
// cond:
- // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
+ // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
- v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh16x32 x y)
// cond:
- // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
+ // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
- v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh16x64 x (MOVDconst [c]))
- // cond: uint64(c) < 16
- // result: (SRAconst (SignExt16to64 x) [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 16) {
- break
- }
- v.reset(OpARM64SRAconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
- // match: (Rsh16x64 x (MOVDconst [c]))
- // cond: uint64(c) >= 16
- // result: (SRAconst (SignExt16to64 x) [63])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 16) {
- break
- }
- v.reset(OpARM64SRAconst)
- v.AuxInt = 63
- v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
// match: (Rsh16x64 x y)
// cond:
- // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
+ // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
for {
_ = v.Args[1]
x := v.Args[0]
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
v1.AddArg(y)
- v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh16x8 x y)
// cond:
- // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
+ // result: (SRA (SignExt16to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
- v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh32Ux16 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+ // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
- v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh32Ux32 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+ // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
- v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh32Ux64 x (MOVDconst [c]))
- // cond: uint64(c) < 32
- // result: (SRLconst (ZeroExt32to64 x) [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 32) {
- break
- }
- v.reset(OpARM64SRLconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
- // match: (Rsh32Ux64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 32
- // result: (MOVDconst [0])
- for {
- _ = v.Args[1]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 32) {
- break
- }
- v.reset(OpARM64MOVDconst)
- v.AuxInt = 0
- return true
- }
// match: (Rsh32Ux64 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
+ // result: (CSELULT (SRL <t> (ZeroExt32to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh32Ux8 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+ // result: (CSELULT (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
- v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh32x16 x y)
// cond:
- // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
+ // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
- v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh32x32 x y)
// cond:
- // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
+ // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
- v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh32x64 x (MOVDconst [c]))
- // cond: uint64(c) < 32
- // result: (SRAconst (SignExt32to64 x) [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 32) {
- break
- }
- v.reset(OpARM64SRAconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
- // match: (Rsh32x64 x (MOVDconst [c]))
- // cond: uint64(c) >= 32
- // result: (SRAconst (SignExt32to64 x) [63])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 32) {
- break
- }
- v.reset(OpARM64SRAconst)
- v.AuxInt = 63
- v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
// match: (Rsh32x64 x y)
// cond:
- // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
+ // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
for {
_ = v.Args[1]
x := v.Args[0]
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
v1.AddArg(y)
- v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh32x8 x y)
// cond:
- // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
+ // result: (SRA (SignExt32to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
- v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh64Ux16 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+ // result: (CSELULT (SRL <t> x (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh64Ux32 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+ // result: (CSELULT (SRL <t> x (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Rsh64Ux64 x (MOVDconst [c]))
- // cond: uint64(c) < 64
- // result: (SRLconst x [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 64) {
- break
- }
- v.reset(OpARM64SRLconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Rsh64Ux64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 64
- // result: (MOVDconst [0])
- for {
- _ = v.Args[1]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 64) {
- break
- }
- v.reset(OpARM64MOVDconst)
- v.AuxInt = 0
- return true
- }
// match: (Rsh64Ux64 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
+ // result: (CSELULT (SRL <t> x y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
v0.AddArg(x)
v0.AddArg(y)
v.AddArg(v0)
- v1 := b.NewValue0(v.Pos, OpConst64, t)
+ v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v1.AuxInt = 0
v.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh64Ux8 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+ // result: (CSELULT (SRL <t> x (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v1.AddArg(y)
v0.AddArg(v1)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh64x16 x y)
// cond:
- // result: (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
+ // result: (SRA x (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v1.AddArg(y)
v0.AddArg(v1)
- v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v0.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh64x32 x y)
// cond:
- // result: (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
+ // result: (SRA x (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v1.AddArg(y)
v0.AddArg(v1)
- v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v0.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
func rewriteValueARM64_OpRsh64x64_0(v *Value) bool {
b := v.Block
_ = b
- // match: (Rsh64x64 x (MOVDconst [c]))
- // cond: uint64(c) < 64
- // result: (SRAconst x [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 64) {
- break
- }
- v.reset(OpARM64SRAconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Rsh64x64 x (MOVDconst [c]))
- // cond: uint64(c) >= 64
- // result: (SRAconst x [63])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 64) {
- break
- }
- v.reset(OpARM64SRAconst)
- v.AuxInt = 63
- v.AddArg(x)
- return true
- }
// match: (Rsh64x64 x y)
// cond:
- // result: (SRA x (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
+ // result: (SRA x (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
for {
_ = v.Args[1]
x := v.Args[0]
v.AddArg(x)
v0 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
v0.AddArg(y)
- v1 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v1.AuxInt = 63
v0.AddArg(v1)
v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
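For orientation in the generated rewriter, here is a hand-written equivalent of the Rsh64x64 construction above. It uses only helpers that already appear in this file (v.reset, v.AddArg, b.NewValue0, AuxInt); the function name is invented for illustration and is not part of the patch:

// Builds (SRA x (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y))).
func rewriteRsh64x64Sketch(v *Value, b *Block) bool {
	x, y := v.Args[0], v.Args[1]
	v.reset(OpARM64SRA) // the matched value becomes the SRA
	v.AddArg(x)
	sel := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
	sel.AddArg(y) // count used as-is when y < 64
	c63 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
	c63.AuxInt = 63 // clamp for oversized signed shifts
	sel.AddArg(c63)
	cmp := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
	cmp.AuxInt = 64
	cmp.AddArg(y) // flags from comparing y against 64
	sel.AddArg(cmp)
	v.AddArg(sel)
	return true
}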
_ = typ
// match: (Rsh64x8 x y)
// cond:
- // result: (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
+ // result: (SRA x (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v1.AddArg(y)
v0.AddArg(v1)
- v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v0.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh8Ux16 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
+ // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
- v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh8Ux32 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
+ // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
- v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh8Ux64 x (MOVDconst [c]))
- // cond: uint64(c) < 8
- // result: (SRLconst (ZeroExt8to64 x) [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 8) {
- break
- }
- v.reset(OpARM64SRLconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
- // match: (Rsh8Ux64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 8
- // result: (MOVDconst [0])
- for {
- _ = v.Args[1]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 8) {
- break
- }
- v.reset(OpARM64MOVDconst)
- v.AuxInt = 0
- return true
- }
// match: (Rsh8Ux64 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
+ // result: (CSELULT (SRL <t> (ZeroExt8to64 x) y) (MOVDconst <t> [0]) (CMPconst [64] y))
for {
t := v.Type
_ = v.Args[1]
v0.AddArg(v1)
v0.AddArg(y)
v.AddArg(v0)
- v2 := b.NewValue0(v.Pos, OpConst64, t)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v2.AuxInt = 0
v.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh8Ux8 <t> x y)
// cond:
- // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
+ // result: (CSELULT (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVDconst <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v2.AddArg(y)
v0.AddArg(v2)
v.AddArg(v0)
- v3 := b.NewValue0(v.Pos, OpConst64, t)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, t)
v3.AuxInt = 0
v.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh8x16 x y)
// cond:
- // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
+ // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt16to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
- v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh8x32 x y)
// cond:
- // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
+ // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt32to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
- v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = b
typ := &b.Func.Config.Types
_ = typ
- // match: (Rsh8x64 x (MOVDconst [c]))
- // cond: uint64(c) < 8
- // result: (SRAconst (SignExt8to64 x) [c])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 8) {
- break
- }
- v.reset(OpARM64SRAconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
- // match: (Rsh8x64 x (MOVDconst [c]))
- // cond: uint64(c) >= 8
- // result: (SRAconst (SignExt8to64 x) [63])
- for {
- _ = v.Args[1]
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpARM64MOVDconst {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) >= 8) {
- break
- }
- v.reset(OpARM64SRAconst)
- v.AuxInt = 63
- v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
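As with the unsigned cases, the removed Rsh8x64 constant rules fall out of the general rule below once the selector's operands are machine constants: small counts become SRAconst [c], and counts of 8 or more clamp to 63, preserving the sign bit of the sign-extended value. A sketch of the intended semantics (rsh8 is an illustrative name):

func rsh8(x int8, y uint64) int8 {
	if y > 63 { // the CSELULT keeps y below 64, else substitutes 63
		y = 63
	}
	// SRA (SignExt8to64 x): for y >= 8 only the sign bit survives,
	// matching the removed SRAconst [63] case.
	return int8(int64(x) >> y)
}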
// match: (Rsh8x64 x y)
// cond:
- // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
+ // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> y (MOVDconst <y.Type> [63]) (CMPconst [64] y)))
for {
_ = v.Args[1]
x := v.Args[0]
v.AddArg(v0)
v1 := b.NewValue0(v.Pos, OpARM64CSELULT, y.Type)
v1.AddArg(y)
- v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v2.AuxInt = 63
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
// match: (Rsh8x8 x y)
// cond:
- // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
+ // result: (SRA (SignExt8to64 x) (CSELULT <y.Type> (ZeroExt8to64 y) (MOVDconst <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
for {
_ = v.Args[1]
x := v.Args[0]
v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v2.AddArg(y)
v1.AddArg(v2)
- v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
+ v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, y.Type)
v3.AuxInt = 63
v1.AddArg(v3)
v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
_ = typ
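From here the diff switches to the MIPS64 rewrites, where the same generic-constant substitution uses MOVVconst. MIPS64 has no conditional select, so the left-shift rules build an all-ones-or-zero mask from SGTU and NEGV and AND it into the SLLV result. A minimal Go sketch of what the pattern computes (lsh16 is an illustrative name):

func lsh16(x uint16, y uint64) uint16 {
	// SGTU (MOVVconst [64]) y is 1 while y < 64; NEGV of that is an
	// all-ones mask, otherwise zero.
	var mask uint64
	if 64 > y {
		mask = ^uint64(0)
	}
	// SLLV uses only the low 6 bits of y; the AND supplies Go's rule
	// that shifting by the width or more yields zero.
	return uint16((uint64(x) << (y & 63)) & mask)
}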
// match: (Lsh16x16 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
_ = typ
// match: (Lsh16x32 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
_ = typ
// match: (Lsh16x64 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
_ = typ
// match: (Lsh16x8 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
_ = typ
// match: (Lsh32x16 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
_ = typ
// match: (Lsh32x32 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
_ = typ
// match: (Lsh32x64 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
_ = typ
// match: (Lsh32x8 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
_ = typ
// match: (Lsh64x16 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
_ = typ
// match: (Lsh64x32 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
_ = typ
// match: (Lsh64x64 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
_ = typ
// match: (Lsh64x8 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
_ = typ
// match: (Lsh8x16 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
_ = typ
// match: (Lsh8x32 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
_ = typ
// match: (Lsh8x64 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
_ = typ
// match: (Lsh8x8 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
_ = typ
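The unsigned right-shift rules below reuse the same SGTU/NEGV/AND mask, applied to SRLV of the zero-extended operand; a sketch under the same assumptions:

func rsh16u(x uint16, y uint64) uint16 {
	var mask uint64
	if 64 > y { // SGTU (MOVVconst [64]) y
		mask = ^uint64(0) // NEGV
	}
	return uint16((uint64(x) >> (y & 63)) & mask) // SRLV, then AND
}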
// match: (Rsh16Ux16 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
_ = typ
// match: (Rsh16Ux32 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
_ = typ
// match: (Rsh16Ux64 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
_ = typ
// match: (Rsh16Ux8 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
_ = typ
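Signed right shifts cannot be masked to zero, so the Rsh*x* rules instead saturate the shift amount: SGTU y 63 and NEGV produce all ones when y exceeds 63, and ORing that into y leaves 63 in the low six bits that SRAV consumes. A hedged sketch (rsh16 is an illustrative name):

func rsh16(x int16, y uint64) int16 {
	// OR (NEGV (SGTU y (MOVVconst [63]))) y: y unchanged while
	// y <= 63, otherwise all ones, i.e. 63 in SRAV's low 6 bits.
	if y > 63 {
		y = 63
	}
	// SRAV on the sign-extended value: large counts keep only the sign bit.
	return int16(int64(x) >> y)
}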
// match: (Rsh16x16 <t> x y)
// cond:
- // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
+ // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
_ = typ
// match: (Rsh16x32 <t> x y)
// cond:
- // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
+ // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
_ = typ
// match: (Rsh16x64 <t> x y)
// cond:
- // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
+ // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
for {
t := v.Type
_ = v.Args[1]
v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v3.AddArg(y)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v3.AddArg(v4)
v2.AddArg(v3)
_ = typ
// match: (Rsh16x8 <t> x y)
// cond:
- // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
+ // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
_ = typ
// match: (Rsh32Ux16 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
_ = typ
// match: (Rsh32Ux32 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
_ = typ
// match: (Rsh32Ux64 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
_ = typ
// match: (Rsh32Ux8 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
_ = typ
// match: (Rsh32x16 <t> x y)
// cond:
- // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
+ // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
_ = typ
// match: (Rsh32x32 <t> x y)
// cond:
- // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
+ // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
_ = typ
// match: (Rsh32x64 <t> x y)
// cond:
- // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
+ // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
for {
t := v.Type
_ = v.Args[1]
v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v3.AddArg(y)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v3.AddArg(v4)
v2.AddArg(v3)
_ = typ
// match: (Rsh32x8 <t> x y)
// cond:
- // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
+ // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
_ = typ
// match: (Rsh64Ux16 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
_ = typ
// match: (Rsh64Ux32 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
_ = typ
// match: (Rsh64Ux64 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> x y))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
_ = typ
// match: (Rsh64Ux8 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
_ = typ
// match: (Rsh64x16 <t> x y)
// cond:
- // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
+ // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v2.AddArg(v4)
v1.AddArg(v2)
_ = typ
// match: (Rsh64x32 <t> x y)
// cond:
- // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
+ // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v2.AddArg(v4)
v1.AddArg(v2)
_ = typ
// match: (Rsh64x64 <t> x y)
// cond:
- // result: (SRAV x (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
+ // result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
for {
t := v.Type
_ = v.Args[1]
v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v2.AddArg(y)
- v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v3.AuxInt = 63
v2.AddArg(v3)
v1.AddArg(v2)
_ = typ
// match: (Rsh64x8 <t> x y)
// cond:
- // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
+ // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v3.AddArg(y)
v2.AddArg(v3)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v2.AddArg(v4)
v1.AddArg(v2)
_ = typ
// match: (Rsh8Ux16 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
_ = typ
// match: (Rsh8Ux32 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
_ = typ
// match: (Rsh8Ux64 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v1.AddArg(y)
_ = typ
// match: (Rsh8Ux8 <t> x y)
// cond:
- // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
+ // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v.reset(OpMIPS64AND)
v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
- v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v2.AuxInt = 64
v1.AddArg(v2)
v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
_ = typ
// match: (Rsh8x16 <t> x y)
// cond:
- // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
+ // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
for {
t := v.Type
_ = v.Args[1]
v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
_ = typ
// match: (Rsh8x32 <t> x y)
// cond:
- // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
+ // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
for {
t := v.Type
_ = v.Args[1]
v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)
_ = typ
// match: (Rsh8x64 <t> x y)
// cond:
- // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
+ // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
for {
t := v.Type
_ = v.Args[1]
v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
v3.AddArg(y)
- v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v4.AuxInt = 63
v3.AddArg(v4)
v2.AddArg(v3)
_ = typ
// match: (Rsh8x8 <t> x y)
// cond:
- // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
+ // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
for {
t := v.Type
_ = v.Args[1]
v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
v4.AddArg(y)
v3.AddArg(v4)
- v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
+ v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
v5.AuxInt = 63
v3.AddArg(v5)
v2.AddArg(v3)