(Sqrt x) -> (FSQRT x)
+// Lowering constants
+(Const8 [val]) -> (MOVDconst [val])
+(Const16 [val]) -> (MOVDconst [val])
+(Const32 [val]) -> (MOVDconst [val])
+(Const64 [val]) -> (MOVDconst [val])
+(Const32F [val]) -> (FMOVSconst [val])
+(Const64F [val]) -> (FMOVDconst [val])
+(ConstNil) -> (MOVDconst [0])
+(ConstBool [b]) -> (MOVDconst [b])
+
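+// Go defines shifts for every count: once the count reaches the width of
+// the shifted operand, x<<s and unsigned x>>s are 0 and signed x>>s is all
+// sign bits. The uint64(c)/uint32(c) guards keep the immediate forms inside
+// that contract; the out-of-range constants are folded further down.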
+(Lsh64x64 x (Const64 [c])) && uint64(c) < 64 -> (SLDconst x [c])
+(Rsh64x64 x (Const64 [c])) && uint64(c) < 64 -> (SRADconst x [c])
+(Rsh64Ux64 x (Const64 [c])) && uint64(c) < 64 -> (SRDconst x [c])
+(Lsh32x64 x (Const64 [c])) && uint64(c) < 32 -> (SLWconst x [c])
+(Rsh32x64 x (Const64 [c])) && uint64(c) < 32 -> (SRAWconst x [c])
+(Rsh32Ux64 x (Const64 [c])) && uint64(c) < 32 -> (SRWconst x [c])
+(Lsh16x64 x (Const64 [c])) && uint64(c) < 16 -> (SLWconst x [c])
+(Rsh16x64 x (Const64 [c])) && uint64(c) < 16 -> (SRAWconst (SignExt16to32 x) [c])
+(Rsh16Ux64 x (Const64 [c])) && uint64(c) < 16 -> (SRWconst (ZeroExt16to32 x) [c])
+(Lsh8x64 x (Const64 [c])) && uint64(c) < 8 -> (SLWconst x [c])
+(Rsh8x64 x (Const64 [c])) && uint64(c) < 8 -> (SRAWconst (SignExt8to32 x) [c])
+(Rsh8Ux64 x (Const64 [c])) && uint64(c) < 8 -> (SRWconst (ZeroExt8to32 x) [c])
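+// e.g. (Rsh16Ux64 x (Const64 [3])) -> (SRWconst (ZeroExt16to32 x) [3]);
+// the sub-word value is zero-extended first so the 32-bit shift pulls in
+// zeros rather than whatever sits in the high half of the register.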
+
+(Lsh64x32 x (Const64 [c])) && uint32(c) < 64 -> (SLDconst x [c])
+(Rsh64x32 x (Const64 [c])) && uint32(c) < 64 -> (SRADconst x [c])
+(Rsh64Ux32 x (Const64 [c])) && uint32(c) < 64 -> (SRDconst x [c])
+(Lsh32x32 x (Const64 [c])) && uint32(c) < 32 -> (SLWconst x [c])
+(Rsh32x32 x (Const64 [c])) && uint32(c) < 32 -> (SRAWconst x [c])
+(Rsh32Ux32 x (Const64 [c])) && uint32(c) < 32 -> (SRWconst x [c])
+(Lsh16x32 x (Const64 [c])) && uint32(c) < 16 -> (SLWconst x [c])
+(Rsh16x32 x (Const64 [c])) && uint32(c) < 16 -> (SRAWconst (SignExt16to32 x) [c])
+(Rsh16Ux32 x (Const64 [c])) && uint32(c) < 16 -> (SRWconst (ZeroExt16to32 x) [c])
+(Lsh8x32 x (Const64 [c])) && uint32(c) < 8 -> (SLWconst x [c])
+(Rsh8x32 x (Const64 [c])) && uint32(c) < 8 -> (SRAWconst (SignExt8to32 x) [c])
+(Rsh8Ux32 x (Const64 [c])) && uint32(c) < 8 -> (SRWconst (ZeroExt8to32 x) [c])
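+// The x32 forms take a 32-bit count, so the guards test uint32(c), the
+// truncated value that actually reaches the shift.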
+
+// large constant shifts
+(Lsh64x64 _ (Const64 [c])) && uint64(c) >= 64 -> (MOVDconst [0])
+(Rsh64Ux64 _ (Const64 [c])) && uint64(c) >= 64 -> (MOVDconst [0])
+(Lsh32x64 _ (Const64 [c])) && uint64(c) >= 32 -> (MOVDconst [0])
+(Rsh32Ux64 _ (Const64 [c])) && uint64(c) >= 32 -> (MOVDconst [0])
+(Lsh16x64 _ (Const64 [c])) && uint64(c) >= 16 -> (MOVDconst [0])
+(Rsh16Ux64 _ (Const64 [c])) && uint64(c) >= 16 -> (MOVDconst [0])
+(Lsh8x64 _ (Const64 [c])) && uint64(c) >= 8 -> (MOVDconst [0])
+(Rsh8Ux64 _ (Const64 [c])) && uint64(c) >= 8 -> (MOVDconst [0])
+
+// large constant signed right shift: only the sign bit remains
+(Rsh64x64 x (Const64 [c])) && uint64(c) >= 64 -> (SRADconst x [63])
+(Rsh32x64 x (Const64 [c])) && uint64(c) >= 32 -> (SRAWconst x [63])
+(Rsh16x64 x (Const64 [c])) && uint64(c) >= 16 -> (SRAWconst (SignExt16to32 x) [63])
+(Rsh8x64 x (Const64 [c])) && uint64(c) >= 8 -> (SRAWconst (SignExt8to32 x) [63])
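+// e.g. int8(-1) >> 100 must still be -1: SRAWconst [63] on the sign-extended
+// value smears the sign bit across the register, yielding 0 or -1.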
+
+// constant shifts: the same patterns, once the count has been lowered to MOVDconst
+(Lsh64x64 x (MOVDconst [c])) && uint64(c) < 64 -> (SLDconst x [c])
+(Rsh64x64 x (MOVDconst [c])) && uint64(c) < 64 -> (SRADconst x [c])
+(Rsh64Ux64 x (MOVDconst [c])) && uint64(c) < 64 -> (SRDconst x [c])
+(Lsh32x64 x (MOVDconst [c])) && uint64(c) < 32 -> (SLWconst x [c])
+(Rsh32x64 x (MOVDconst [c])) && uint64(c) < 32 -> (SRAWconst x [c])
+(Rsh32Ux64 x (MOVDconst [c])) && uint64(c) < 32 -> (SRWconst x [c])
+(Lsh16x64 x (MOVDconst [c])) && uint64(c) < 16 -> (SLWconst x [c])
+(Rsh16x64 x (MOVDconst [c])) && uint64(c) < 16 -> (SRAWconst (SignExt16to32 x) [c])
+(Rsh16Ux64 x (MOVDconst [c])) && uint64(c) < 16 -> (SRWconst (ZeroExt16to32 x) [c])
+(Lsh8x64 x (MOVDconst [c])) && uint64(c) < 8 -> (SLWconst x [c])
+(Rsh8x64 x (MOVDconst [c])) && uint64(c) < 8 -> (SRAWconst (SignExt8to32 x) [c])
+(Rsh8Ux64 x (MOVDconst [c])) && uint64(c) < 8 -> (SRWconst (ZeroExt8to32 x) [c])
+
+(Lsh64x32 x (MOVDconst [c])) && uint32(c) < 64 -> (SLDconst x [c])
+(Rsh64x32 x (MOVDconst [c])) && uint32(c) < 64 -> (SRADconst x [c])
+(Rsh64Ux32 x (MOVDconst [c])) && uint32(c) < 64 -> (SRDconst x [c])
+(Lsh32x32 x (MOVDconst [c])) && uint32(c) < 32 -> (SLWconst x [c])
+(Rsh32x32 x (MOVDconst [c])) && uint32(c) < 32 -> (SRAWconst x [c])
+(Rsh32Ux32 x (MOVDconst [c])) && uint32(c) < 32 -> (SRWconst x [c])
+(Lsh16x32 x (MOVDconst [c])) && uint32(c) < 16 -> (SLWconst x [c])
+(Rsh16x32 x (MOVDconst [c])) && uint32(c) < 16 -> (SRAWconst (SignExt16to32 x) [c])
+(Rsh16Ux32 x (MOVDconst [c])) && uint32(c) < 16 -> (SRWconst (ZeroExt16to32 x) [c])
+(Lsh8x32 x (MOVDconst [c])) && uint32(c) < 8 -> (SLWconst x [c])
+(Rsh8x32 x (MOVDconst [c])) && uint32(c) < 8 -> (SRAWconst (SignExt8to32 x) [c])
+(Rsh8Ux32 x (MOVDconst [c])) && uint32(c) < 8 -> (SRWconst (ZeroExt8to32 x) [c])
+
(Rsh64x64 x y) -> (SRAD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
(Rsh64Ux64 x y) -> (SRD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
(Lsh64x64 x y) -> (SLD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
// (MaskIfNotCarry CarrySet) -> 0
// (MaskIfNotCarry CarryClear) -> -1
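+// For variable counts: ADDconstForCarry [-64] y sets carry exactly when
+// uint64(y) >= 64, MaskIfNotCarry then yields -1 (count in range) or 0
+// (count out of range), and ORN maps that to y itself or to all ones, a
+// count the hardware shift treats as out of range.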
-// Lowering constants
-(Const8 [val]) -> (MOVDconst [val])
-(Const16 [val]) -> (MOVDconst [val])
-(Const32 [val]) -> (MOVDconst [val])
-(Const64 [val]) -> (MOVDconst [val])
-(Const32F [val]) -> (FMOVSconst [val])
-(Const64F [val]) -> (FMOVDconst [val])
-(ConstNil) -> (MOVDconst [0])
-(ConstBool [b]) -> (MOVDconst [b])
-
(Addr {sym} base) -> (MOVDaddr {sym} base)
// (Addr {sym} base) -> (ADDconst {sym} base)
(OffPtr [off] ptr) -> (ADD (MOVDconst <config.Frontend().TypeInt64()> [off]) ptr)
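Taken together (an illustrative sketch, not part of the patch; the function
names are made up): in-range constant shifts now lower to one immediate-form
instruction each, and out-of-range ones fold to a constant or a sign smear,
leaving the ORN/MaskIfNotCarry sequence for variable counts only.

	package main

	import "fmt"

	func shifts(x int16, u uint64) (int16, uint64) {
		// x >> 20 shifts past the width: SRAWconst (SignExt16to32 x) [63].
		// u << 3 is in range: SLDconst u [3].
		return x >> 20, u << 3
	}

	func main() {
		fmt.Println(shifts(-1, 5)) // -1 40
	}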
func rewriteValuePPC64_OpLsh16x32(v *Value, config *Config) bool {
b := v.Block
_ = b
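+	// Each rule compiles to one guarded block: match the op shape, check the
+	// cond, then rewrite v in place; a failed match breaks to the next block.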
+ // match: (Lsh16x32 x (Const64 [c]))
+ // cond: uint32(c) < 16
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Lsh16x32 x (MOVDconst [c]))
+ // cond: uint32(c) < 16
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Lsh16x32 x y)
// cond:
// result: (SLW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
func rewriteValuePPC64_OpLsh16x64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Lsh16x64 x (Const64 [c]))
+ // cond: uint64(c) < 16
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Lsh16x64 _ (Const64 [c]))
+ // cond: uint64(c) >= 16
+ // result: (MOVDconst [0])
+ for {
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 16) {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (Lsh16x64 x (MOVDconst [c]))
+ // cond: uint64(c) < 16
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Lsh16x64 x y)
// cond:
// result: (SLW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
func rewriteValuePPC64_OpLsh32x32(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Lsh32x32 x (Const64 [c]))
+ // cond: uint32(c) < 32
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Lsh32x32 x (MOVDconst [c]))
+ // cond: uint32(c) < 32
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Lsh32x32 x y)
// cond:
// result: (SLW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
func rewriteValuePPC64_OpLsh32x64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Lsh32x64 x (Const64 [c]))
+ // cond: uint64(c) < 32
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Lsh32x64 _ (Const64 [c]))
+ // cond: uint64(c) >= 32
+ // result: (MOVDconst [0])
+ for {
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 32) {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (Lsh32x64 x (MOVDconst [c]))
+ // cond: uint64(c) < 32
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Lsh32x64 x y)
// cond:
// result: (SLW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
func rewriteValuePPC64_OpLsh64x32(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Lsh64x32 x (Const64 [c]))
+ // cond: uint32(c) < 64
+ // result: (SLDconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SLDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Lsh64x32 x (MOVDconst [c]))
+ // cond: uint32(c) < 64
+ // result: (SLDconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SLDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Lsh64x32 x y)
// cond:
// result: (SLD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
func rewriteValuePPC64_OpLsh64x64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Lsh64x64 x (Const64 [c]))
+ // cond: uint64(c) < 64
+ // result: (SLDconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SLDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Lsh64x64 _ (Const64 [c]))
+ // cond: uint64(c) >= 64
+ // result: (MOVDconst [0])
+ for {
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 64) {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (Lsh64x64 x (MOVDconst [c]))
+ // cond: uint64(c) < 64
+ // result: (SLDconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SLDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Lsh64x64 x y)
// cond:
// result: (SLD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
func rewriteValuePPC64_OpLsh8x32(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Lsh8x32 x (Const64 [c]))
+ // cond: uint32(c) < 8
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Lsh8x32 x (MOVDconst [c]))
+ // cond: uint32(c) < 8
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Lsh8x32 x y)
// cond:
// result: (SLW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
func rewriteValuePPC64_OpLsh8x64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Lsh8x64 x (Const64 [c]))
+ // cond: uint64(c) < 8
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Lsh8x64 _ (Const64 [c]))
+ // cond: uint64(c) >= 8
+ // result: (MOVDconst [0])
+ for {
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 8) {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (Lsh8x64 x (MOVDconst [c]))
+ // cond: uint64(c) < 8
+ // result: (SLWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Lsh8x64 x y)
// cond:
// result: (SLW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] y))))
func rewriteValuePPC64_OpRsh16Ux32(v *Value, config *Config) bool {
b := v.Block
_ = b
- // match: (Rsh16Ux32 x y)
- // cond:
- // result: (SRW (ZeroExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
+ // match: (Rsh16Ux32 x (Const64 [c]))
+ // cond: uint32(c) < 16
+ // result: (SRWconst (ZeroExt16to32 x) [c])
for {
x := v.Args[0]
- y := v.Args[1]
- v.reset(OpPPC64SRW)
- v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
- v0.AddArg(x)
- v.AddArg(v0)
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh16Ux32 x (MOVDconst [c]))
+ // cond: uint32(c) < 16
+ // result: (SRWconst (ZeroExt16to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh16Ux32 x y)
+ // cond:
+ // result: (SRW (ZeroExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
+ for {
+ x := v.Args[0]
+ y := v.Args[1]
+ v.reset(OpPPC64SRW)
+ v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
v1 := b.NewValue0(v.Line, OpPPC64ORN, config.fe.TypeInt64())
v1.AddArg(y)
v2 := b.NewValue0(v.Line, OpPPC64MaskIfNotCarry, config.fe.TypeInt64())
func rewriteValuePPC64_OpRsh16Ux64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh16Ux64 x (Const64 [c]))
+ // cond: uint64(c) < 16
+ // result: (SRWconst (ZeroExt16to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh16Ux64 _ (Const64 [c]))
+ // cond: uint64(c) >= 16
+ // result: (MOVDconst [0])
+ for {
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 16) {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (Rsh16Ux64 x (MOVDconst [c]))
+ // cond: uint64(c) < 16
+ // result: (SRWconst (ZeroExt16to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpZeroExt16to32, config.fe.TypeUInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh16Ux64 x y)
// cond:
// result: (SRW (ZeroExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
func rewriteValuePPC64_OpRsh16x32(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh16x32 x (Const64 [c]))
+ // cond: uint32(c) < 16
+ // result: (SRAWconst (SignExt16to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh16x32 x (MOVDconst [c]))
+ // cond: uint32(c) < 16
+ // result: (SRAWconst (SignExt16to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh16x32 x y)
// cond:
// result: (SRAW (SignExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] (ZeroExt32to64 y)))))
func rewriteValuePPC64_OpRsh16x64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh16x64 x (Const64 [c]))
+ // cond: uint64(c) < 16
+ // result: (SRAWconst (SignExt16to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh16x64 x (Const64 [c]))
+ // cond: uint64(c) >= 16
+ // result: (SRAWconst (SignExt16to32 x) [63])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 16) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = 63
+ v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh16x64 x (MOVDconst [c]))
+ // cond: uint64(c) < 16
+ // result: (SRAWconst (SignExt16to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 16) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpSignExt16to32, config.fe.TypeInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh16x64 x y)
// cond:
// result: (SRAW (SignExt16to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-16] y))))
func rewriteValuePPC64_OpRsh32Ux32(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh32Ux32 x (Const64 [c]))
+ // cond: uint32(c) < 32
+ // result: (SRWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh32Ux32 x (MOVDconst [c]))
+ // cond: uint32(c) < 32
+ // result: (SRWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Rsh32Ux32 x y)
// cond:
// result: (SRW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
func rewriteValuePPC64_OpRsh32Ux64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh32Ux64 x (Const64 [c]))
+ // cond: uint64(c) < 32
+ // result: (SRWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh32Ux64 _ (Const64 [c]))
+ // cond: uint64(c) >= 32
+ // result: (MOVDconst [0])
+ for {
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 32) {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (Rsh32Ux64 x (MOVDconst [c]))
+ // cond: uint64(c) < 32
+ // result: (SRWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Rsh32Ux64 x y)
// cond:
// result: (SRW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
func rewriteValuePPC64_OpRsh32x32(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh32x32 x (Const64 [c]))
+ // cond: uint32(c) < 32
+ // result: (SRAWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh32x32 x (MOVDconst [c]))
+ // cond: uint32(c) < 32
+ // result: (SRAWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Rsh32x32 x y)
// cond:
// result: (SRAW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] (ZeroExt32to64 y)))))
func rewriteValuePPC64_OpRsh32x64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh32x64 x (Const64 [c]))
+ // cond: uint64(c) < 32
+ // result: (SRAWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh32x64 x (Const64 [c]))
+ // cond: uint64(c) >= 32
+ // result: (SRAWconst x [63])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 32) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = 63
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh32x64 x (MOVDconst [c]))
+ // cond: uint64(c) < 32
+ // result: (SRAWconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 32) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Rsh32x64 x y)
// cond:
// result: (SRAW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
v.AddArg(v0)
return true
}
}
func rewriteValuePPC64_OpRsh64Ux32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
+ // match: (Rsh64Ux32 x (Const64 [c]))
+ // cond: uint32(c) < 64
+ // result: (SRDconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SRDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh64Ux32 x (MOVDconst [c]))
+ // cond: uint32(c) < 64
+ // result: (SRDconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SRDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Rsh64Ux32 x y)
// cond:
// result: (SRD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
func rewriteValuePPC64_OpRsh64Ux64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh64Ux64 x (Const64 [c]))
+ // cond: uint64(c) < 64
+ // result: (SRDconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SRDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh64Ux64 _ (Const64 [c]))
+ // cond: uint64(c) >= 64
+ // result: (MOVDconst [0])
+ for {
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 64) {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (Rsh64Ux64 x (MOVDconst [c]))
+ // cond: uint64(c) < 64
+ // result: (SRDconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SRDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Rsh64Ux64 x y)
// cond:
// result: (SRD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
func rewriteValuePPC64_OpRsh64x32(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh64x32 x (Const64 [c]))
+ // cond: uint32(c) < 64
+ // result: (SRADconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SRADconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh64x32 x (MOVDconst [c]))
+ // cond: uint32(c) < 64
+ // result: (SRADconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SRADconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Rsh64x32 x y)
// cond:
// result: (SRAD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] (ZeroExt32to64 y)))))
func rewriteValuePPC64_OpRsh64x64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh64x64 x (Const64 [c]))
+ // cond: uint64(c) < 64
+ // result: (SRADconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SRADconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh64x64 x (Const64 [c]))
+ // cond: uint64(c) >= 64
+ // result: (SRADconst x [63])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 64) {
+ break
+ }
+ v.reset(OpPPC64SRADconst)
+ v.AuxInt = 63
+ v.AddArg(x)
+ return true
+ }
+ // match: (Rsh64x64 x (MOVDconst [c]))
+ // cond: uint64(c) < 64
+ // result: (SRADconst x [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 64) {
+ break
+ }
+ v.reset(OpPPC64SRADconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
// match: (Rsh64x64 x y)
// cond:
// result: (SRAD x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
func rewriteValuePPC64_OpRsh8Ux32(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh8Ux32 x (Const64 [c]))
+ // cond: uint32(c) < 8
+ // result: (SRWconst (ZeroExt8to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh8Ux32 x (MOVDconst [c]))
+ // cond: uint32(c) < 8
+ // result: (SRWconst (ZeroExt8to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh8Ux32 x y)
// cond:
// result: (SRW (ZeroExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
func rewriteValuePPC64_OpRsh8Ux64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh8Ux64 x (Const64 [c]))
+ // cond: uint64(c) < 8
+ // result: (SRWconst (ZeroExt8to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh8Ux64 _ (Const64 [c]))
+ // cond: uint64(c) >= 8
+ // result: (MOVDconst [0])
+ for {
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 8) {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (Rsh8Ux64 x (MOVDconst [c]))
+ // cond: uint64(c) < 8
+ // result: (SRWconst (ZeroExt8to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SRWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpZeroExt8to32, config.fe.TypeUInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh8Ux64 x y)
// cond:
// result: (SRW (ZeroExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] y))))
func rewriteValuePPC64_OpRsh8x32(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh8x32 x (Const64 [c]))
+ // cond: uint32(c) < 8
+ // result: (SRAWconst (SignExt8to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh8x32 x (MOVDconst [c]))
+ // cond: uint32(c) < 8
+ // result: (SRAWconst (SignExt8to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint32(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh8x32 x y)
// cond:
// result: (SRAW (SignExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt32to64 y)))))
func rewriteValuePPC64_OpRsh8x64(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (Rsh8x64 x (Const64 [c]))
+ // cond: uint64(c) < 8
+ // result: (SRAWconst (SignExt8to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh8x64 x (Const64 [c]))
+ // cond: uint64(c) >= 8
+ // result: (SRAWconst (SignExt8to32 x) [63])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) >= 8) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = 63
+ v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
+ // match: (Rsh8x64 x (MOVDconst [c]))
+ // cond: uint64(c) < 8
+ // result: (SRAWconst (SignExt8to32 x) [c])
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(uint64(c) < 8) {
+ break
+ }
+ v.reset(OpPPC64SRAWconst)
+ v.AuxInt = c
+ v0 := b.NewValue0(v.Line, OpSignExt8to32, config.fe.TypeInt32())
+ v0.AddArg(x)
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh8x64 x y)
// cond:
// result: (SRAW (SignExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] y))))