v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh16x32 x (MOVDconst [c]))
- // cond: uint32(c) < 16
- // result: (SLWconst x [c&15])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 16) {
- break
- }
- v.reset(OpPPC64SLWconst)
- v.AuxInt = int64ToAuxInt(c & 15)
- v.AddArg(x)
- return true
- }
// match: (Lsh16x32 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh16x64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 16
- // result: (MOVDconst [0])
- for {
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint64(c) >= 16) {
- break
- }
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = int64ToAuxInt(0)
- return true
- }
// match: (Lsh16x64 x (MOVDconst [c]))
// cond: uint64(c) < 16
// result: (SLWconst x [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh32x32 x (MOVDconst [c]))
- // cond: uint32(c) < 32
- // result: (SLWconst x [c&31])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 32) {
- break
- }
- v.reset(OpPPC64SLWconst)
- v.AuxInt = int64ToAuxInt(c & 31)
- v.AddArg(x)
- return true
- }
// match: (Lsh32x32 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh32x64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 32
- // result: (MOVDconst [0])
- for {
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint64(c) >= 32) {
- break
- }
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = int64ToAuxInt(0)
- return true
- }
// match: (Lsh32x64 x (MOVDconst [c]))
// cond: uint64(c) < 32
// result: (SLWconst x [c])
v.AddArg2(x, y)
return true
}
- // match: (Lsh32x64 x (AND y (MOVDconst [31])))
- // result: (SLW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
- for {
- x := v_0
- if v_1.Op != OpPPC64AND {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- v_1_1 := v_1.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
- y := v_1_0
- if v_1_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_1.AuxInt) != 31 {
- continue
- }
- v.reset(OpPPC64SLW)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int32)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(31)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
- // match: (Lsh32x64 x (Select0 <typ.Int32> (ANDCCconst [31] y)))
- // result: (SLW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
- for {
- x := v_0
- if v_1.Op != OpSelect0 || v_1.Type != typ.Int32 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64ANDCCconst || auxIntToInt64(v_1_0.AuxInt) != 31 {
- break
- }
- y := v_1_0.Args[0]
- v.reset(OpPPC64SLW)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int32)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(31)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
// match: (Lsh32x64 x y)
// result: (SLW x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [32]))))
for {
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh64x32 x (MOVDconst [c]))
- // cond: uint32(c) < 64
- // result: (SLDconst x [c&63])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 64) {
- break
- }
- v.reset(OpPPC64SLDconst)
- v.AuxInt = int64ToAuxInt(c & 63)
- v.AddArg(x)
- return true
- }
// match: (Lsh64x32 x y)
// cond: shiftIsBounded(v)
// result: (SLD x y)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh64x64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 64
- // result: (MOVDconst [0])
- for {
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint64(c) >= 64) {
- break
- }
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = int64ToAuxInt(0)
- return true
- }
// match: (Lsh64x64 x (MOVDconst [c]))
// cond: uint64(c) < 64
// result: (SLDconst x [c])
v.AddArg2(x, y)
return true
}
- // match: (Lsh64x64 x (AND y (MOVDconst [63])))
- // result: (SLD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
- for {
- x := v_0
- if v_1.Op != OpPPC64AND {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- v_1_1 := v_1.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
- y := v_1_0
- if v_1_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_1.AuxInt) != 63 {
- continue
- }
- v.reset(OpPPC64SLD)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int64)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(63)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
- // match: (Lsh64x64 x (Select0 (ANDCCconst <typ.Int64> [63] y)))
- // result: (SLD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
- for {
- x := v_0
- if v_1.Op != OpSelect0 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64ANDCCconst || v_1_0.Type != typ.Int64 || auxIntToInt64(v_1_0.AuxInt) != 63 {
- break
- }
- y := v_1_0.Args[0]
- v.reset(OpPPC64SLD)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int64)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(63)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
// match: (Lsh64x64 x y)
// result: (SLD x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [64]))))
for {
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh8x32 x (MOVDconst [c]))
- // cond: uint32(c) < 8
- // result: (SLWconst x [c&7])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 8) {
- break
- }
- v.reset(OpPPC64SLWconst)
- v.AuxInt = int64ToAuxInt(c & 7)
- v.AddArg(x)
- return true
- }
// match: (Lsh8x32 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh8x64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 8
- // result: (MOVDconst [0])
- for {
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint64(c) >= 8) {
- break
- }
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = int64ToAuxInt(0)
- return true
- }
// match: (Lsh8x64 x (MOVDconst [c]))
// cond: uint64(c) < 8
// result: (SLWconst x [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh16Ux32 x (MOVDconst [c]))
- // cond: uint32(c) < 16
- // result: (SRWconst (ZeroExt16to32 x) [c&15])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 16) {
- break
- }
- v.reset(OpPPC64SRWconst)
- v.AuxInt = int64ToAuxInt(c & 15)
- v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
// match: (Rsh16Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVHZreg x) y)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh16Ux64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 16
- // result: (MOVDconst [0])
- for {
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint64(c) >= 16) {
- break
- }
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = int64ToAuxInt(0)
- return true
- }
// match: (Rsh16Ux64 x (MOVDconst [c]))
// cond: uint64(c) < 16
// result: (SRWconst (ZeroExt16to32 x) [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh16x32 x (MOVDconst [c]))
- // cond: uint32(c) < 16
- // result: (SRAWconst (SignExt16to32 x) [c&15])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 16) {
- break
- }
- v.reset(OpPPC64SRAWconst)
- v.AuxInt = int64ToAuxInt(c & 15)
- v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
// match: (Rsh16x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVHreg x) y)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh32Ux32 x (MOVDconst [c]))
- // cond: uint32(c) < 32
- // result: (SRWconst x [c&31])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 32) {
- break
- }
- v.reset(OpPPC64SRWconst)
- v.AuxInt = int64ToAuxInt(c & 31)
- v.AddArg(x)
- return true
- }
// match: (Rsh32Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRW x y)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh32Ux64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 32
- // result: (MOVDconst [0])
- for {
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint64(c) >= 32) {
- break
- }
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = int64ToAuxInt(0)
- return true
- }
// match: (Rsh32Ux64 x (MOVDconst [c]))
// cond: uint64(c) < 32
// result: (SRWconst x [c])
v.AddArg2(x, y)
return true
}
- // match: (Rsh32Ux64 x (AND y (MOVDconst [31])))
- // result: (SRW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
+ // match: (Rsh32Ux64 x y)
+ // result: (SRW x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [32]))))
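+ // Note: the ISEL/CMPU pair below clamps the shift amount. CMPU sets LT when
+ // y < 32 (unsigned), so ISEL [0] selects y in that case and -1 otherwise;
+ // SRW uses only the low 6 bits of its amount, and any amount in 32..63
+ // (such as 63, the low bits of -1) yields 0, matching Go's semantics for an
+ // oversized unsigned shift.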
+ for {
+ x := v_0
+ y := v_1
+ v.reset(OpPPC64SRW)
+ v0 := b.NewValue0(v.Pos, OpPPC64ISEL, typ.Int32)
+ v0.AuxInt = int32ToAuxInt(0)
+ v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v1.AuxInt = int64ToAuxInt(-1)
+ v2 := b.NewValue0(v.Pos, OpPPC64CMPU, types.TypeFlags)
+ v3 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
+ v3.AuxInt = int64ToAuxInt(32)
+ v2.AddArg2(y, v3)
+ v0.AddArg3(y, v1, v2)
+ v.AddArg2(x, v0)
+ return true
+ }
+}
+func rewriteValuePPC64_OpRsh32Ux8(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (Rsh32Ux8 x y)
+ // cond: shiftIsBounded(v)
+ // result: (SRW x y)
for {
x := v_0
- if v_1.Op != OpPPC64AND {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- v_1_1 := v_1.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
- y := v_1_0
- if v_1_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_1.AuxInt) != 31 {
- continue
- }
- v.reset(OpPPC64SRW)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int32)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(31)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
- // match: (Rsh32Ux64 x (Select0 (ANDCCconst <typ.UInt> [31] y)))
- // result: (SRW x (Select0 <typ.UInt> (ANDCCconst [31] y)))
- for {
- x := v_0
- if v_1.Op != OpSelect0 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64ANDCCconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 31 {
- break
- }
- y := v_1_0.Args[0]
- v.reset(OpPPC64SRW)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(31)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (Select0 (ANDCCconst <typ.UInt> [31] y))))
- // result: (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0.AuxInt) != 32 {
- break
- }
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpSelect0 {
- break
- }
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpPPC64ANDCCconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 31 {
- break
- }
- y := v_1_1_0.Args[0]
- v.reset(OpPPC64SRW)
- v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
- v1.AuxInt = int64ToAuxInt(32)
- v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v3.AuxInt = int64ToAuxInt(31)
- v3.AddArg(y)
- v2.AddArg(v3)
- v0.AddArg2(v1, v2)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh32Ux64 x (SUBFCconst <typ.UInt> [32] (Select0 (ANDCCconst <typ.UInt> [31] y))))
- // result: (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 32 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpSelect0 {
- break
- }
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpPPC64ANDCCconst || v_1_0_0.Type != typ.UInt || auxIntToInt64(v_1_0_0.AuxInt) != 31 {
- break
- }
- y := v_1_0_0.Args[0]
- v.reset(OpPPC64SRW)
- v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(32)
- v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v2.AuxInt = int64ToAuxInt(31)
- v2.AddArg(y)
- v1.AddArg(v2)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh32Ux64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> y (MOVDconst [31]))))
- // result: (SRW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0.AuxInt) != 32 {
- break
- }
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpPPC64AND || v_1_1.Type != typ.UInt {
- break
- }
- _ = v_1_1.Args[1]
- v_1_1_0 := v_1_1.Args[0]
- v_1_1_1 := v_1_1.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_1_0, v_1_1_1 = _i0+1, v_1_1_1, v_1_1_0 {
- y := v_1_1_0
- if v_1_1_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_1_1.AuxInt) != 31 {
- continue
- }
- v.reset(OpPPC64SRW)
- v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
- v1.AuxInt = int64ToAuxInt(32)
- v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v3.AuxInt = int64ToAuxInt(31)
- v3.AddArg(y)
- v2.AddArg(v3)
- v0.AddArg2(v1, v2)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
- // match: (Rsh32Ux64 x (SUBFCconst <typ.UInt> [32] (AND <typ.UInt> y (MOVDconst [31]))))
- // result: (SRW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 32 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64AND || v_1_0.Type != typ.UInt {
- break
- }
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- v_1_0_1 := v_1_0.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
- y := v_1_0_0
- if v_1_0_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0_1.AuxInt) != 31 {
- continue
- }
- v.reset(OpPPC64SRW)
- v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(32)
- v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v2.AuxInt = int64ToAuxInt(31)
- v2.AddArg(y)
- v1.AddArg(v2)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
- // match: (Rsh32Ux64 x y)
- // result: (SRW x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [32]))))
- for {
- x := v_0
- y := v_1
- v.reset(OpPPC64SRW)
- v0 := b.NewValue0(v.Pos, OpPPC64ISEL, typ.Int32)
- v0.AuxInt = int32ToAuxInt(0)
- v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
- v1.AuxInt = int64ToAuxInt(-1)
- v2 := b.NewValue0(v.Pos, OpPPC64CMPU, types.TypeFlags)
- v3 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
- v3.AuxInt = int64ToAuxInt(32)
- v2.AddArg2(y, v3)
- v0.AddArg3(y, v1, v2)
- v.AddArg2(x, v0)
- return true
- }
-}
-func rewriteValuePPC64_OpRsh32Ux8(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (Rsh32Ux8 x y)
- // cond: shiftIsBounded(v)
- // result: (SRW x y)
- for {
- x := v_0
- y := v_1
- if !(shiftIsBounded(v)) {
+ y := v_1
+ if !(shiftIsBounded(v)) {
break
}
v.reset(OpPPC64SRW)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh32x32 x (MOVDconst [c]))
- // cond: uint32(c) < 32
- // result: (SRAWconst x [c&31])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 32) {
- break
- }
- v.reset(OpPPC64SRAWconst)
- v.AuxInt = int64ToAuxInt(c & 31)
- v.AddArg(x)
- return true
- }
// match: (Rsh32x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAW x y)
v.AddArg2(x, y)
return true
}
- // match: (Rsh32x64 x (AND y (MOVDconst [31])))
- // result: (SRAW x (Select0 <typ.Int32> (ANDCCconst [31] y)))
- for {
- x := v_0
- if v_1.Op != OpPPC64AND {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- v_1_1 := v_1.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
- y := v_1_0
- if v_1_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_1.AuxInt) != 31 {
- continue
- }
- v.reset(OpPPC64SRAW)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int32)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(31)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
- // match: (Rsh32x64 x (Select0 (ANDCCconst <typ.UInt> [31] y)))
- // result: (SRAW x (Select0 <typ.UInt> (ANDCCconst [31] y)))
- for {
- x := v_0
- if v_1.Op != OpSelect0 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64ANDCCconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 31 {
- break
- }
- y := v_1_0.Args[0]
- v.reset(OpPPC64SRAW)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(31)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (Select0 (ANDCCconst <typ.UInt> [31] y))))
- // result: (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0.AuxInt) != 32 {
- break
- }
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpSelect0 {
- break
- }
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpPPC64ANDCCconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 31 {
- break
- }
- y := v_1_1_0.Args[0]
- v.reset(OpPPC64SRAW)
- v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
- v1.AuxInt = int64ToAuxInt(32)
- v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v3.AuxInt = int64ToAuxInt(31)
- v3.AddArg(y)
- v2.AddArg(v3)
- v0.AddArg2(v1, v2)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh32x64 x (SUBFCconst <typ.UInt> [32] (Select0 (ANDCCconst <typ.UInt> [31] y))))
- // result: (SRAW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 32 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpSelect0 {
- break
- }
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpPPC64ANDCCconst || v_1_0_0.Type != typ.UInt || auxIntToInt64(v_1_0_0.AuxInt) != 31 {
- break
- }
- y := v_1_0_0.Args[0]
- v.reset(OpPPC64SRAW)
- v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(32)
- v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v2.AuxInt = int64ToAuxInt(31)
- v2.AddArg(y)
- v1.AddArg(v2)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh32x64 x (SUB <typ.UInt> (MOVDconst [32]) (AND <typ.UInt> y (MOVDconst [31]))))
- // result: (SRAW x (SUB <typ.UInt> (MOVDconst [32]) (Select0 <typ.UInt> (ANDCCconst [31] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0.AuxInt) != 32 {
- break
- }
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpPPC64AND || v_1_1.Type != typ.UInt {
- break
- }
- _ = v_1_1.Args[1]
- v_1_1_0 := v_1_1.Args[0]
- v_1_1_1 := v_1_1.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_1_0, v_1_1_1 = _i0+1, v_1_1_1, v_1_1_0 {
- y := v_1_1_0
- if v_1_1_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_1_1.AuxInt) != 31 {
- continue
- }
- v.reset(OpPPC64SRAW)
- v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
- v1.AuxInt = int64ToAuxInt(32)
- v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v3.AuxInt = int64ToAuxInt(31)
- v3.AddArg(y)
- v2.AddArg(v3)
- v0.AddArg2(v1, v2)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
- // match: (Rsh32x64 x (SUBFCconst <typ.UInt> [32] (AND <typ.UInt> y (MOVDconst [31]))))
- // result: (SRAW x (SUBFCconst <typ.UInt> [32] (Select0 <typ.UInt> (ANDCCconst [31] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 32 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64AND || v_1_0.Type != typ.UInt {
- break
- }
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- v_1_0_1 := v_1_0.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
- y := v_1_0_0
- if v_1_0_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0_1.AuxInt) != 31 {
- continue
- }
- v.reset(OpPPC64SRAW)
- v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(32)
- v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v2.AuxInt = int64ToAuxInt(31)
- v2.AddArg(y)
- v1.AddArg(v2)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
// match: (Rsh32x64 x y)
// result: (SRAW x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [32]))))
for {
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh64Ux32 x (MOVDconst [c]))
- // cond: uint32(c) < 64
- // result: (SRDconst x [c&63])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 64) {
- break
- }
- v.reset(OpPPC64SRDconst)
- v.AuxInt = int64ToAuxInt(c & 63)
- v.AddArg(x)
- return true
- }
// match: (Rsh64Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRD x y)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh64Ux64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 64
- // result: (MOVDconst [0])
- for {
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint64(c) >= 64) {
- break
- }
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = int64ToAuxInt(0)
- return true
- }
// match: (Rsh64Ux64 x (MOVDconst [c]))
// cond: uint64(c) < 64
// result: (SRDconst x [c])
v.AddArg2(x, y)
return true
}
- // match: (Rsh64Ux64 x (AND y (MOVDconst [63])))
- // result: (SRD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
- for {
- x := v_0
- if v_1.Op != OpPPC64AND {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- v_1_1 := v_1.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
- y := v_1_0
- if v_1_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_1.AuxInt) != 63 {
- continue
- }
- v.reset(OpPPC64SRD)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int64)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(63)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
- // match: (Rsh64Ux64 x (Select0 (ANDCCconst <typ.UInt> [63] y)))
- // result: (SRD x (Select0 <typ.UInt> (ANDCCconst [63] y)))
- for {
- x := v_0
- if v_1.Op != OpSelect0 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64ANDCCconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 63 {
- break
- }
- y := v_1_0.Args[0]
- v.reset(OpPPC64SRD)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(63)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (Select0 (ANDCCconst <typ.UInt> [63] y))))
- // result: (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0.AuxInt) != 64 {
- break
- }
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpSelect0 {
- break
- }
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpPPC64ANDCCconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 63 {
- break
- }
- y := v_1_1_0.Args[0]
- v.reset(OpPPC64SRD)
- v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
- v1.AuxInt = int64ToAuxInt(64)
- v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v3.AuxInt = int64ToAuxInt(63)
- v3.AddArg(y)
- v2.AddArg(v3)
- v0.AddArg2(v1, v2)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh64Ux64 x (SUBFCconst <typ.UInt> [64] (Select0 (ANDCCconst <typ.UInt> [63] y))))
- // result: (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 64 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpSelect0 {
- break
- }
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpPPC64ANDCCconst || v_1_0_0.Type != typ.UInt || auxIntToInt64(v_1_0_0.AuxInt) != 63 {
- break
- }
- y := v_1_0_0.Args[0]
- v.reset(OpPPC64SRD)
- v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(64)
- v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v2.AuxInt = int64ToAuxInt(63)
- v2.AddArg(y)
- v1.AddArg(v2)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh64Ux64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> y (MOVDconst [63]))))
- // result: (SRD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0.AuxInt) != 64 {
- break
- }
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpPPC64AND || v_1_1.Type != typ.UInt {
- break
- }
- _ = v_1_1.Args[1]
- v_1_1_0 := v_1_1.Args[0]
- v_1_1_1 := v_1_1.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_1_0, v_1_1_1 = _i0+1, v_1_1_1, v_1_1_0 {
- y := v_1_1_0
- if v_1_1_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_1_1.AuxInt) != 63 {
- continue
- }
- v.reset(OpPPC64SRD)
- v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
- v1.AuxInt = int64ToAuxInt(64)
- v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v3.AuxInt = int64ToAuxInt(63)
- v3.AddArg(y)
- v2.AddArg(v3)
- v0.AddArg2(v1, v2)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
- // match: (Rsh64Ux64 x (SUBFCconst <typ.UInt> [64] (AND <typ.UInt> y (MOVDconst [63]))))
- // result: (SRD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 64 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64AND || v_1_0.Type != typ.UInt {
- break
- }
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- v_1_0_1 := v_1_0.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
- y := v_1_0_0
- if v_1_0_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0_1.AuxInt) != 63 {
- continue
- }
- v.reset(OpPPC64SRD)
- v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(64)
- v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v2.AuxInt = int64ToAuxInt(63)
- v2.AddArg(y)
- v1.AddArg(v2)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
// match: (Rsh64Ux64 x y)
// result: (SRD x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [64]))))
for {
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh64x32 x (MOVDconst [c]))
- // cond: uint32(c) < 64
- // result: (SRADconst x [c&63])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 64) {
- break
- }
- v.reset(OpPPC64SRADconst)
- v.AuxInt = int64ToAuxInt(c & 63)
- v.AddArg(x)
- return true
- }
// match: (Rsh64x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAD x y)
v.AddArg2(x, y)
return true
}
- // match: (Rsh64x64 x (AND y (MOVDconst [63])))
- // result: (SRAD x (Select0 <typ.Int64> (ANDCCconst [63] y)))
- for {
- x := v_0
- if v_1.Op != OpPPC64AND {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- v_1_1 := v_1.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_0, v_1_1 = _i0+1, v_1_1, v_1_0 {
- y := v_1_0
- if v_1_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_1.AuxInt) != 63 {
- continue
- }
- v.reset(OpPPC64SRAD)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.Int64)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(63)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
- // match: (Rsh64x64 x (Select0 (ANDCCconst <typ.UInt> [63] y)))
- // result: (SRAD x (Select0 <typ.UInt> (ANDCCconst [63] y)))
- for {
- x := v_0
- if v_1.Op != OpSelect0 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64ANDCCconst || v_1_0.Type != typ.UInt || auxIntToInt64(v_1_0.AuxInt) != 63 {
- break
- }
- y := v_1_0.Args[0]
- v.reset(OpPPC64SRAD)
- v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v1.AuxInt = int64ToAuxInt(63)
- v1.AddArg(y)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (Select0 (ANDCCconst <typ.UInt> [63] y))))
- // result: (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0.AuxInt) != 64 {
- break
- }
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpSelect0 {
- break
- }
- v_1_1_0 := v_1_1.Args[0]
- if v_1_1_0.Op != OpPPC64ANDCCconst || v_1_1_0.Type != typ.UInt || auxIntToInt64(v_1_1_0.AuxInt) != 63 {
- break
- }
- y := v_1_1_0.Args[0]
- v.reset(OpPPC64SRAD)
- v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
- v1.AuxInt = int64ToAuxInt(64)
- v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v3.AuxInt = int64ToAuxInt(63)
- v3.AddArg(y)
- v2.AddArg(v3)
- v0.AddArg2(v1, v2)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh64x64 x (SUBFCconst <typ.UInt> [64] (Select0 (ANDCCconst <typ.UInt> [63] y))))
- // result: (SRAD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 64 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpSelect0 {
- break
- }
- v_1_0_0 := v_1_0.Args[0]
- if v_1_0_0.Op != OpPPC64ANDCCconst || v_1_0_0.Type != typ.UInt || auxIntToInt64(v_1_0_0.AuxInt) != 63 {
- break
- }
- y := v_1_0_0.Args[0]
- v.reset(OpPPC64SRAD)
- v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(64)
- v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v2.AuxInt = int64ToAuxInt(63)
- v2.AddArg(y)
- v1.AddArg(v2)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- // match: (Rsh64x64 x (SUB <typ.UInt> (MOVDconst [64]) (AND <typ.UInt> y (MOVDconst [63]))))
- // result: (SRAD x (SUB <typ.UInt> (MOVDconst [64]) (Select0 <typ.UInt> (ANDCCconst [63] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUB || v_1.Type != typ.UInt {
- break
- }
- _ = v_1.Args[1]
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0.AuxInt) != 64 {
- break
- }
- v_1_1 := v_1.Args[1]
- if v_1_1.Op != OpPPC64AND || v_1_1.Type != typ.UInt {
- break
- }
- _ = v_1_1.Args[1]
- v_1_1_0 := v_1_1.Args[0]
- v_1_1_1 := v_1_1.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_1_0, v_1_1_1 = _i0+1, v_1_1_1, v_1_1_0 {
- y := v_1_1_0
- if v_1_1_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_1_1.AuxInt) != 63 {
- continue
- }
- v.reset(OpPPC64SRAD)
- v0 := b.NewValue0(v.Pos, OpPPC64SUB, typ.UInt)
- v1 := b.NewValue0(v.Pos, OpPPC64MOVDconst, typ.Int64)
- v1.AuxInt = int64ToAuxInt(64)
- v2 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v3 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v3.AuxInt = int64ToAuxInt(63)
- v3.AddArg(y)
- v2.AddArg(v3)
- v0.AddArg2(v1, v2)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
- // match: (Rsh64x64 x (SUBFCconst <typ.UInt> [64] (AND <typ.UInt> y (MOVDconst [63]))))
- // result: (SRAD x (SUBFCconst <typ.UInt> [64] (Select0 <typ.UInt> (ANDCCconst [63] y))))
- for {
- x := v_0
- if v_1.Op != OpPPC64SUBFCconst || v_1.Type != typ.UInt || auxIntToInt64(v_1.AuxInt) != 64 {
- break
- }
- v_1_0 := v_1.Args[0]
- if v_1_0.Op != OpPPC64AND || v_1_0.Type != typ.UInt {
- break
- }
- _ = v_1_0.Args[1]
- v_1_0_0 := v_1_0.Args[0]
- v_1_0_1 := v_1_0.Args[1]
- for _i0 := 0; _i0 <= 1; _i0, v_1_0_0, v_1_0_1 = _i0+1, v_1_0_1, v_1_0_0 {
- y := v_1_0_0
- if v_1_0_1.Op != OpPPC64MOVDconst || auxIntToInt64(v_1_0_1.AuxInt) != 63 {
- continue
- }
- v.reset(OpPPC64SRAD)
- v0 := b.NewValue0(v.Pos, OpPPC64SUBFCconst, typ.UInt)
- v0.AuxInt = int64ToAuxInt(64)
- v1 := b.NewValue0(v.Pos, OpSelect0, typ.UInt)
- v2 := b.NewValue0(v.Pos, OpPPC64ANDCCconst, types.NewTuple(typ.Int, types.TypeFlags))
- v2.AuxInt = int64ToAuxInt(63)
- v2.AddArg(y)
- v1.AddArg(v2)
- v0.AddArg(v1)
- v.AddArg2(x, v0)
- return true
- }
- break
- }
// match: (Rsh64x64 x y)
// result: (SRAD x (ISEL [0] y (MOVDconst [-1]) (CMPU y (MOVDconst [64]))))
for {
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh8Ux32 x (MOVDconst [c]))
- // cond: uint32(c) < 8
- // result: (SRWconst (ZeroExt8to32 x) [c&7])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 8) {
- break
- }
- v.reset(OpPPC64SRWconst)
- v.AuxInt = int64ToAuxInt(c & 7)
- v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
// match: (Rsh8Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVBZreg x) y)
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh8Ux64 _ (MOVDconst [c]))
- // cond: uint64(c) >= 8
- // result: (MOVDconst [0])
- for {
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint64(c) >= 8) {
- break
- }
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = int64ToAuxInt(0)
- return true
- }
// match: (Rsh8Ux64 x (MOVDconst [c]))
// cond: uint64(c) < 8
// result: (SRWconst (ZeroExt8to32 x) [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh8x32 x (MOVDconst [c]))
- // cond: uint32(c) < 8
- // result: (SRAWconst (SignExt8to32 x) [c&7])
- for {
- x := v_0
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- c := auxIntToInt64(v_1.AuxInt)
- if !(uint32(c) < 8) {
- break
- }
- v.reset(OpPPC64SRAWconst)
- v.AuxInt = int64ToAuxInt(c & 7)
- v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
// match: (Rsh8x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVBreg x) y)