(ROTLW x (MOVDconst [c])) -> (ROTLWconst x [c&31])
(ROTL x (MOVDconst [c])) -> (ROTLconst x [c&63])
-(Lsh64x64 x (Const64 [c])) && uint64(c) < 64 -> (SLDconst x [c])
-(Rsh64x64 x (Const64 [c])) && uint64(c) < 64 -> (SRADconst x [c])
-(Rsh64Ux64 x (Const64 [c])) && uint64(c) < 64 -> (SRDconst x [c])
-(Lsh32x64 x (Const64 [c])) && uint64(c) < 32 -> (SLWconst x [c])
-(Rsh32x64 x (Const64 [c])) && uint64(c) < 32 -> (SRAWconst x [c])
-(Rsh32Ux64 x (Const64 [c])) && uint64(c) < 32 -> (SRWconst x [c])
-(Lsh16x64 x (Const64 [c])) && uint64(c) < 16 -> (SLWconst x [c])
-(Rsh16x64 x (Const64 [c])) && uint64(c) < 16 -> (SRAWconst (SignExt16to32 x) [c])
-(Rsh16Ux64 x (Const64 [c])) && uint64(c) < 16 -> (SRWconst (ZeroExt16to32 x) [c])
-(Lsh8x64 x (Const64 [c])) && uint64(c) < 8 -> (SLWconst x [c])
-(Rsh8x64 x (Const64 [c])) && uint64(c) < 8 -> (SRAWconst (SignExt8to32 x) [c])
-(Rsh8Ux64 x (Const64 [c])) && uint64(c) < 8 -> (SRWconst (ZeroExt8to32 x) [c])
-
-(Lsh64x32 x (Const64 [c])) && uint32(c) < 64 -> (SLDconst x [c])
-(Rsh64x32 x (Const64 [c])) && uint32(c) < 64 -> (SRADconst x [c])
-(Rsh64Ux32 x (Const64 [c])) && uint32(c) < 64 -> (SRDconst x [c])
-(Lsh32x32 x (Const64 [c])) && uint32(c) < 32 -> (SLWconst x [c])
-(Rsh32x32 x (Const64 [c])) && uint32(c) < 32 -> (SRAWconst x [c])
-(Rsh32Ux32 x (Const64 [c])) && uint32(c) < 32 -> (SRWconst x [c])
-(Lsh16x32 x (Const64 [c])) && uint32(c) < 16 -> (SLWconst x [c])
-(Rsh16x32 x (Const64 [c])) && uint32(c) < 16 -> (SRAWconst (SignExt16to32 x) [c])
-(Rsh16Ux32 x (Const64 [c])) && uint32(c) < 16 -> (SRWconst (ZeroExt16to32 x) [c])
-(Lsh8x32 x (Const64 [c])) && uint32(c) < 8 -> (SLWconst x [c])
-(Rsh8x32 x (Const64 [c])) && uint32(c) < 8 -> (SRAWconst (SignExt8to32 x) [c])
-(Rsh8Ux32 x (Const64 [c])) && uint32(c) < 8 -> (SRWconst (ZeroExt8to32 x) [c])
// large constant shifts
-(Lsh64x64 _ (Const64 [c])) && uint64(c) >= 64 -> (MOVDconst [0])
-(Rsh64Ux64 _ (Const64 [c])) && uint64(c) >= 64 -> (MOVDconst [0])
-(Lsh32x64 _ (Const64 [c])) && uint64(c) >= 32 -> (MOVDconst [0])
-(Rsh32Ux64 _ (Const64 [c])) && uint64(c) >= 32 -> (MOVDconst [0])
-(Lsh16x64 _ (Const64 [c])) && uint64(c) >= 16 -> (MOVDconst [0])
-(Rsh16Ux64 _ (Const64 [c])) && uint64(c) >= 16 -> (MOVDconst [0])
-(Lsh8x64 _ (Const64 [c])) && uint64(c) >= 8 -> (MOVDconst [0])
-(Rsh8Ux64 _ (Const64 [c])) && uint64(c) >= 8 -> (MOVDconst [0])
+(Lsh64x64 _ (MOVDconst [c])) && uint64(c) >= 64 -> (MOVDconst [0])
+(Rsh64Ux64 _ (MOVDconst [c])) && uint64(c) >= 64 -> (MOVDconst [0])
+(Lsh32x64 _ (MOVDconst [c])) && uint64(c) >= 32 -> (MOVDconst [0])
+(Rsh32Ux64 _ (MOVDconst [c])) && uint64(c) >= 32 -> (MOVDconst [0])
+(Lsh16x64 _ (MOVDconst [c])) && uint64(c) >= 16 -> (MOVDconst [0])
+(Rsh16Ux64 _ (MOVDconst [c])) && uint64(c) >= 16 -> (MOVDconst [0])
+(Lsh8x64 _ (MOVDconst [c])) && uint64(c) >= 8 -> (MOVDconst [0])
+(Rsh8Ux64 _ (MOVDconst [c])) && uint64(c) >= 8 -> (MOVDconst [0])
// large constant signed right shift, we leave the sign bit
-(Rsh64x64 x (Const64 [c])) && uint64(c) >= 64 -> (SRADconst x [63])
-(Rsh32x64 x (Const64 [c])) && uint64(c) >= 32 -> (SRAWconst x [63])
-(Rsh16x64 x (Const64 [c])) && uint64(c) >= 16 -> (SRAWconst (SignExt16to32 x) [63])
-(Rsh8x64 x (Const64 [c])) && uint64(c) >= 8 -> (SRAWconst (SignExt8to32 x) [63])
+(Rsh64x64 x (MOVDconst [c])) && uint64(c) >= 64 -> (SRADconst x [63])
+(Rsh32x64 x (MOVDconst [c])) && uint64(c) >= 32 -> (SRAWconst x [63])
+(Rsh16x64 x (MOVDconst [c])) && uint64(c) >= 16 -> (SRAWconst (SignExt16to32 x) [63])
+(Rsh8x64 x (MOVDconst [c])) && uint64(c) >= 8 -> (SRAWconst (SignExt8to32 x) [63])
// constant shifts
(Lsh64x64 x (MOVDconst [c])) && uint64(c) < 64 -> (SLDconst x [c])
(MaskIfNotCarry (ADDconstForCarry [c] (ANDconst [d] _))) && c < 0 && d > 0 && c + d < 0 -> (MOVDconst [-1])
(ORN x (MOVDconst [-1])) -> x
-// Potentially useful optimizing rewrites.
-// (ADDconstForCarry [k] c), k < 0 && (c < 0 || k+c >= 0) -> CarrySet
-// (ADDconstForCarry [k] c), K < 0 && (c >= 0 && k+c < 0) -> CarryClear
-// (MaskIfNotCarry CarrySet) -> 0
-// (MaskIfNotCarry CarrySet) -> -1
+// Carry-out of (sign-extended 16-bit) c + d: with c < 0, carry is set when
+// d < 0 or c+d >= 0, and clear when d >= 0 and c+d < 0.
+(ADDconstForCarry [c] (MOVDconst [d])) && int64(int16(c)) < 0 && (d < 0 || int64(int16(c)) + d >= 0) -> (FlagCarrySet)
+(ADDconstForCarry [c] (MOVDconst [d])) && int64(int16(c)) < 0 && d >= 0 && int64(int16(c)) + d < 0 -> (FlagCarryClear)
+
+(MaskIfNotCarry (FlagCarrySet)) -> (MOVDconst [0])
+(MaskIfNotCarry (FlagCarryClear)) -> (MOVDconst [-1])
+
+(S(RAD|RAW|RD|RW|LD|LW) x (MOVDconst [c])) -> (S(RAD|RAW|RD|RW|LD|LW)const [c] x)
(Addr ...) -> (MOVDaddr ...)
(LocalAddr {sym} base _) -> (MOVDaddr {sym} base)
(AND (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c&d])
(OR (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c|d])
(XOR (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c^d])
+(ORN (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c|^d])
+(ANDN (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c&^d])
+(NOR (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [^(c|d)])
// Discover consts
(AND x (MOVDconst [c])) && isU16Bit(c) -> (ANDconst [c] x)
return rewriteValuePPC64_OpPPC64ADD(v)
case OpPPC64ADDconst:
return rewriteValuePPC64_OpPPC64ADDconst(v)
+ case OpPPC64ADDconstForCarry:
+ return rewriteValuePPC64_OpPPC64ADDconstForCarry(v)
case OpPPC64AND:
return rewriteValuePPC64_OpPPC64AND(v)
+ case OpPPC64ANDN:
+ return rewriteValuePPC64_OpPPC64ANDN(v)
case OpPPC64ANDconst:
return rewriteValuePPC64_OpPPC64ANDconst(v)
case OpPPC64CMP:
return rewriteValuePPC64_OpPPC64MTVSRD(v)
case OpPPC64MaskIfNotCarry:
return rewriteValuePPC64_OpPPC64MaskIfNotCarry(v)
+ case OpPPC64NOR:
+ return rewriteValuePPC64_OpPPC64NOR(v)
case OpPPC64NotEqual:
return rewriteValuePPC64_OpPPC64NotEqual(v)
case OpPPC64OR:
return rewriteValuePPC64_OpPPC64ROTL(v)
case OpPPC64ROTLW:
return rewriteValuePPC64_OpPPC64ROTLW(v)
+ case OpPPC64SLD:
+ return rewriteValuePPC64_OpPPC64SLD(v)
+ case OpPPC64SLW:
+ return rewriteValuePPC64_OpPPC64SLW(v)
+ case OpPPC64SRAD:
+ return rewriteValuePPC64_OpPPC64SRAD(v)
+ case OpPPC64SRAW:
+ return rewriteValuePPC64_OpPPC64SRAW(v)
+ case OpPPC64SRD:
+ return rewriteValuePPC64_OpPPC64SRD(v)
+ case OpPPC64SRW:
+ return rewriteValuePPC64_OpPPC64SRW(v)
case OpPPC64SUB:
return rewriteValuePPC64_OpPPC64SUB(v)
case OpPPC64XOR:
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh16x32 x (Const64 [c]))
- // cond: uint32(c) < 16
- // result: (SLWconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 16) {
- break
- }
- v.reset(OpPPC64SLWconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
// match: (Lsh16x32 x (MOVDconst [c]))
// cond: uint32(c) < 16
// result: (SLWconst x [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh16x64 x (Const64 [c]))
- // cond: uint64(c) < 16
- // result: (SLWconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 16) {
- break
- }
- v.reset(OpPPC64SLWconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Lsh16x64 _ (Const64 [c]))
+ // match: (Lsh16x64 _ (MOVDconst [c]))
// cond: uint64(c) >= 16
// result: (MOVDconst [0])
for {
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh32x32 x (Const64 [c]))
- // cond: uint32(c) < 32
- // result: (SLWconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 32) {
- break
- }
- v.reset(OpPPC64SLWconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
// match: (Lsh32x32 x (MOVDconst [c]))
// cond: uint32(c) < 32
// result: (SLWconst x [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh32x64 x (Const64 [c]))
- // cond: uint64(c) < 32
- // result: (SLWconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 32) {
- break
- }
- v.reset(OpPPC64SLWconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Lsh32x64 _ (Const64 [c]))
+ // match: (Lsh32x64 _ (MOVDconst [c]))
// cond: uint64(c) >= 32
// result: (MOVDconst [0])
for {
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh64x32 x (Const64 [c]))
- // cond: uint32(c) < 64
- // result: (SLDconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 64) {
- break
- }
- v.reset(OpPPC64SLDconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
// match: (Lsh64x32 x (MOVDconst [c]))
// cond: uint32(c) < 64
// result: (SLDconst x [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh64x64 x (Const64 [c]))
- // cond: uint64(c) < 64
- // result: (SLDconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 64) {
- break
- }
- v.reset(OpPPC64SLDconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Lsh64x64 _ (Const64 [c]))
+ // match: (Lsh64x64 _ (MOVDconst [c]))
// cond: uint64(c) >= 64
// result: (MOVDconst [0])
for {
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh8x32 x (Const64 [c]))
- // cond: uint32(c) < 8
- // result: (SLWconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 8) {
- break
- }
- v.reset(OpPPC64SLWconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
// match: (Lsh8x32 x (MOVDconst [c]))
// cond: uint32(c) < 8
// result: (SLWconst x [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Lsh8x64 x (Const64 [c]))
- // cond: uint64(c) < 8
- // result: (SLWconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 8) {
- break
- }
- v.reset(OpPPC64SLWconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Lsh8x64 _ (Const64 [c]))
+ // match: (Lsh8x64 _ (MOVDconst [c]))
// cond: uint64(c) >= 8
// result: (MOVDconst [0])
for {
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
}
return false
}
+// rewriteValuePPC64_OpPPC64ADDconstForCarry constant-folds the carry produced
+// by (ADDconstForCarry [c] (MOVDconst [d])), where c is a sign-extended 16-bit
+// immediate. Carry is set when c < 0 && (d < 0 || c+d >= 0); carry is clear
+// when c < 0 && d >= 0 && c+d < 0.
+func rewriteValuePPC64_OpPPC64ADDconstForCarry(v *Value) bool {
+	v_0 := v.Args[0]
+	// match: (ADDconstForCarry [c] (MOVDconst [d]))
+	// cond: int64(int16(c)) < 0 && (d < 0 || int64(int16(c)) + d >= 0)
+	// result: (FlagCarrySet)
+	for {
+		c := v.AuxInt
+		if v_0.Op != OpPPC64MOVDconst {
+			break
+		}
+		d := v_0.AuxInt
+		if !(int64(int16(c)) < 0 && (d < 0 || int64(int16(c))+d >= 0)) {
+			break
+		}
+		v.reset(OpPPC64FlagCarrySet)
+		return true
+	}
+	// match: (ADDconstForCarry [c] (MOVDconst [d]))
+	// cond: int64(int16(c)) < 0 && d >= 0 && int64(int16(c)) + d < 0
+	// result: (FlagCarryClear)
+	for {
+		c := v.AuxInt
+		if v_0.Op != OpPPC64MOVDconst {
+			break
+		}
+		d := v_0.AuxInt
+		if !(int64(int16(c)) < 0 && d >= 0 && int64(int16(c))+d < 0) {
+			break
+		}
+		v.reset(OpPPC64FlagCarryClear)
+		return true
+	}
+	return false
+}
func rewriteValuePPC64_OpPPC64AND(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
}
return false
}
+// rewriteValuePPC64_OpPPC64ANDN constant-folds (ANDN (MOVDconst [c]) (MOVDconst [d]))
+// into (MOVDconst [c&^d]). ANDN is not commutative (c &^ d != d &^ c), so only
+// the literal argument order is matched — no operand-swap loop.
+func rewriteValuePPC64_OpPPC64ANDN(v *Value) bool {
+	v_1 := v.Args[1]
+	v_0 := v.Args[0]
+	// match: (ANDN (MOVDconst [c]) (MOVDconst [d]))
+	// result: (MOVDconst [c&^d])
+	for {
+		if v_0.Op != OpPPC64MOVDconst {
+			break
+		}
+		c := v_0.AuxInt
+		if v_1.Op != OpPPC64MOVDconst {
+			break
+		}
+		d := v_1.AuxInt
+		v.reset(OpPPC64MOVDconst)
+		v.AuxInt = c &^ d
+		return true
+	}
+	return false
+}
func rewriteValuePPC64_OpPPC64ANDconst(v *Value) bool {
v_0 := v.Args[0]
// match: (ANDconst [c] (ANDconst [d] x))
v.AuxInt = -1
return true
}
+ // match: (MaskIfNotCarry (FlagCarrySet))
+ // result: (MOVDconst [0])
+ for {
+ if v_0.Op != OpPPC64FlagCarrySet {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = 0
+ return true
+ }
+ // match: (MaskIfNotCarry (FlagCarryClear))
+ // result: (MOVDconst [-1])
+ for {
+ if v_0.Op != OpPPC64FlagCarryClear {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = -1
+ return true
+ }
+ return false
+}
+// rewriteValuePPC64_OpPPC64NOR constant-folds (NOR (MOVDconst [c]) (MOVDconst [d]))
+// into (MOVDconst [^(c|d)]). NOR is commutative, so both operand orders are tried.
+func rewriteValuePPC64_OpPPC64NOR(v *Value) bool {
+	v_1 := v.Args[1]
+	v_0 := v.Args[0]
+	// match: (NOR (MOVDconst [c]) (MOVDconst [d]))
+	// result: (MOVDconst [^(c|d)])
+	for {
+		// Two passes: v_0 and v_1 are swapped on the second iteration.
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpPPC64MOVDconst {
+				continue
+			}
+			c := v_0.AuxInt
+			if v_1.Op != OpPPC64MOVDconst {
+				continue
+			}
+			d := v_1.AuxInt
+			v.reset(OpPPC64MOVDconst)
+			v.AuxInt = ^(c | d)
+			return true
+		}
+		break
+	}
+	return false
+}
func rewriteValuePPC64_OpPPC64NotEqual(v *Value) bool {
v.copyOf(x)
return true
}
+ // match: (ORN (MOVDconst [c]) (MOVDconst [d]))
+ // result: (MOVDconst [c|^d])
+ for {
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = c | ^d
+ return true
+ }
return false
}
func rewriteValuePPC64_OpPPC64ORconst(v *Value) bool {
}
return false
}
-func rewriteValuePPC64_OpPPC64SUB(v *Value) bool {
+func rewriteValuePPC64_OpPPC64SLD(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (SUB x (MOVDconst [c]))
- // cond: is32Bit(-c)
- // result: (ADDconst [-c] x)
+ // match: (SLD x (MOVDconst [c]))
+ // result: (SLDconst [c] x)
for {
x := v_0
if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
- if !(is32Bit(-c)) {
- break
- }
- v.reset(OpPPC64ADDconst)
- v.AuxInt = -c
+ v.reset(OpPPC64SLDconst)
+ v.AuxInt = c
v.AddArg(x)
return true
}
return false
}
-func rewriteValuePPC64_OpPPC64XOR(v *Value) bool {
+func rewriteValuePPC64_OpPPC64SLW(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
- b := v.Block
- typ := &b.Func.Config.Types
- // match: (XOR (SLDconst x [c]) (SRDconst x [d]))
- // cond: d == 64-c
- // result: (ROTLconst [c] x)
+ // match: (SLW x (MOVDconst [c]))
+ // result: (SLWconst [c] x)
for {
- for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
- if v_0.Op != OpPPC64SLDconst {
- continue
- }
- c := v_0.AuxInt
- x := v_0.Args[0]
- if v_1.Op != OpPPC64SRDconst {
- continue
- }
- d := v_1.AuxInt
- if x != v_1.Args[0] || !(d == 64-c) {
- continue
- }
- v.reset(OpPPC64ROTLconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
+ x := v_0
+ if v_1.Op != OpPPC64MOVDconst {
+ break
}
- break
+ c := v_1.AuxInt
+ v.reset(OpPPC64SLWconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
}
- // match: (XOR (SLWconst x [c]) (SRWconst x [d]))
- // cond: d == 32-c
- // result: (ROTLWconst [c] x)
- for {
- for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ return false
+}
+// rewriteValuePPC64_OpPPC64SRAD strength-reduces (SRAD x (MOVDconst [c])) to
+// the immediate form (SRADconst [c] x).
+// NOTE(review): c is copied into AuxInt unmasked — assumes shift amounts
+// reaching SRAD are already in range; confirm against the rules file.
+func rewriteValuePPC64_OpPPC64SRAD(v *Value) bool {
+	v_1 := v.Args[1]
+	v_0 := v.Args[0]
+	// match: (SRAD x (MOVDconst [c]))
+	// result: (SRADconst [c] x)
+	for {
+		x := v_0
+		if v_1.Op != OpPPC64MOVDconst {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpPPC64SRADconst)
+		v.AuxInt = c
+		v.AddArg(x)
+		return true
+	}
+	return false
+}
+// rewriteValuePPC64_OpPPC64SRAW strength-reduces (SRAW x (MOVDconst [c])) to
+// the immediate form (SRAWconst [c] x).
+// NOTE(review): c is copied into AuxInt unmasked — assumes shift amounts
+// reaching SRAW are already in range; confirm against the rules file.
+func rewriteValuePPC64_OpPPC64SRAW(v *Value) bool {
+	v_1 := v.Args[1]
+	v_0 := v.Args[0]
+	// match: (SRAW x (MOVDconst [c]))
+	// result: (SRAWconst [c] x)
+	for {
+		x := v_0
+		if v_1.Op != OpPPC64MOVDconst {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpPPC64SRAWconst)
+		v.AuxInt = c
+		v.AddArg(x)
+		return true
+	}
+	return false
+}
+// rewriteValuePPC64_OpPPC64SRD strength-reduces (SRD x (MOVDconst [c])) to
+// the immediate form (SRDconst [c] x).
+// NOTE(review): c is copied into AuxInt unmasked — assumes shift amounts
+// reaching SRD are already in range; confirm against the rules file.
+func rewriteValuePPC64_OpPPC64SRD(v *Value) bool {
+	v_1 := v.Args[1]
+	v_0 := v.Args[0]
+	// match: (SRD x (MOVDconst [c]))
+	// result: (SRDconst [c] x)
+	for {
+		x := v_0
+		if v_1.Op != OpPPC64MOVDconst {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpPPC64SRDconst)
+		v.AuxInt = c
+		v.AddArg(x)
+		return true
+	}
+	return false
+}
+// rewriteValuePPC64_OpPPC64SRW strength-reduces (SRW x (MOVDconst [c])) to
+// the immediate form (SRWconst [c] x).
+// NOTE(review): c is copied into AuxInt unmasked — assumes shift amounts
+// reaching SRW are already in range; confirm against the rules file.
+func rewriteValuePPC64_OpPPC64SRW(v *Value) bool {
+	v_1 := v.Args[1]
+	v_0 := v.Args[0]
+	// match: (SRW x (MOVDconst [c]))
+	// result: (SRWconst [c] x)
+	for {
+		x := v_0
+		if v_1.Op != OpPPC64MOVDconst {
+			break
+		}
+		c := v_1.AuxInt
+		v.reset(OpPPC64SRWconst)
+		v.AuxInt = c
+		v.AddArg(x)
+		return true
+	}
+	return false
+}
+// rewriteValuePPC64_OpPPC64SUB turns (SUB x (MOVDconst [c])) into
+// (ADDconst [-c] x), but only when -c passes the is32Bit guard, i.e. the
+// negated constant is representable as an ADDconst immediate.
+func rewriteValuePPC64_OpPPC64SUB(v *Value) bool {
+	v_1 := v.Args[1]
+	v_0 := v.Args[0]
+	// match: (SUB x (MOVDconst [c]))
+	// cond: is32Bit(-c)
+	// result: (ADDconst [-c] x)
+	for {
+		x := v_0
+		if v_1.Op != OpPPC64MOVDconst {
+			break
+		}
+		c := v_1.AuxInt
+		if !(is32Bit(-c)) {
+			break
+		}
+		v.reset(OpPPC64ADDconst)
+		v.AuxInt = -c
+		v.AddArg(x)
+		return true
+	}
+	return false
+}
+func rewriteValuePPC64_OpPPC64XOR(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ b := v.Block
+ typ := &b.Func.Config.Types
+ // match: (XOR (SLDconst x [c]) (SRDconst x [d]))
+ // cond: d == 64-c
+ // result: (ROTLconst [c] x)
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+ if v_0.Op != OpPPC64SLDconst {
+ continue
+ }
+ c := v_0.AuxInt
+ x := v_0.Args[0]
+ if v_1.Op != OpPPC64SRDconst {
+ continue
+ }
+ d := v_1.AuxInt
+ if x != v_1.Args[0] || !(d == 64-c) {
+ continue
+ }
+ v.reset(OpPPC64ROTLconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ break
+ }
+ // match: (XOR (SLWconst x [c]) (SRWconst x [d]))
+ // cond: d == 32-c
+ // result: (ROTLWconst [c] x)
+ for {
+ for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
if v_0.Op != OpPPC64SLWconst {
continue
}
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh16Ux32 x (Const64 [c]))
- // cond: uint32(c) < 16
- // result: (SRWconst (ZeroExt16to32 x) [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 16) {
- break
- }
- v.reset(OpPPC64SRWconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
// match: (Rsh16Ux32 x (MOVDconst [c]))
// cond: uint32(c) < 16
// result: (SRWconst (ZeroExt16to32 x) [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh16Ux64 x (Const64 [c]))
- // cond: uint64(c) < 16
- // result: (SRWconst (ZeroExt16to32 x) [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 16) {
- break
- }
- v.reset(OpPPC64SRWconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
- // match: (Rsh16Ux64 _ (Const64 [c]))
+ // match: (Rsh16Ux64 _ (MOVDconst [c]))
// cond: uint64(c) >= 16
// result: (MOVDconst [0])
for {
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh16x32 x (Const64 [c]))
- // cond: uint32(c) < 16
- // result: (SRAWconst (SignExt16to32 x) [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 16) {
- break
- }
- v.reset(OpPPC64SRAWconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
// match: (Rsh16x32 x (MOVDconst [c]))
// cond: uint32(c) < 16
// result: (SRAWconst (SignExt16to32 x) [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh16x64 x (Const64 [c]))
- // cond: uint64(c) < 16
- // result: (SRAWconst (SignExt16to32 x) [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 16) {
- break
- }
- v.reset(OpPPC64SRAWconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
- // match: (Rsh16x64 x (Const64 [c]))
+ // match: (Rsh16x64 x (MOVDconst [c]))
// cond: uint64(c) >= 16
// result: (SRAWconst (SignExt16to32 x) [63])
for {
x := v_0
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh32Ux32 x (Const64 [c]))
- // cond: uint32(c) < 32
- // result: (SRWconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 32) {
- break
- }
- v.reset(OpPPC64SRWconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
// match: (Rsh32Ux32 x (MOVDconst [c]))
// cond: uint32(c) < 32
// result: (SRWconst x [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh32Ux64 x (Const64 [c]))
- // cond: uint64(c) < 32
- // result: (SRWconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 32) {
- break
- }
- v.reset(OpPPC64SRWconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Rsh32Ux64 _ (Const64 [c]))
+ // match: (Rsh32Ux64 _ (MOVDconst [c]))
// cond: uint64(c) >= 32
// result: (MOVDconst [0])
for {
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh32x32 x (Const64 [c]))
- // cond: uint32(c) < 32
- // result: (SRAWconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 32) {
- break
- }
- v.reset(OpPPC64SRAWconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
// match: (Rsh32x32 x (MOVDconst [c]))
// cond: uint32(c) < 32
// result: (SRAWconst x [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh32x64 x (Const64 [c]))
- // cond: uint64(c) < 32
- // result: (SRAWconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 32) {
- break
- }
- v.reset(OpPPC64SRAWconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Rsh32x64 x (Const64 [c]))
+ // match: (Rsh32x64 x (MOVDconst [c]))
// cond: uint64(c) >= 32
// result: (SRAWconst x [63])
for {
x := v_0
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh64Ux32 x (Const64 [c]))
- // cond: uint32(c) < 64
- // result: (SRDconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 64) {
- break
- }
- v.reset(OpPPC64SRDconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
// match: (Rsh64Ux32 x (MOVDconst [c]))
// cond: uint32(c) < 64
// result: (SRDconst x [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh64Ux64 x (Const64 [c]))
- // cond: uint64(c) < 64
- // result: (SRDconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 64) {
- break
- }
- v.reset(OpPPC64SRDconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Rsh64Ux64 _ (Const64 [c]))
+ // match: (Rsh64Ux64 _ (MOVDconst [c]))
// cond: uint64(c) >= 64
// result: (MOVDconst [0])
for {
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh64x32 x (Const64 [c]))
- // cond: uint32(c) < 64
- // result: (SRADconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 64) {
- break
- }
- v.reset(OpPPC64SRADconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
// match: (Rsh64x32 x (MOVDconst [c]))
// cond: uint32(c) < 64
// result: (SRADconst x [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh64x64 x (Const64 [c]))
- // cond: uint64(c) < 64
- // result: (SRADconst x [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 64) {
- break
- }
- v.reset(OpPPC64SRADconst)
- v.AuxInt = c
- v.AddArg(x)
- return true
- }
- // match: (Rsh64x64 x (Const64 [c]))
+ // match: (Rsh64x64 x (MOVDconst [c]))
// cond: uint64(c) >= 64
// result: (SRADconst x [63])
for {
x := v_0
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh8Ux32 x (Const64 [c]))
- // cond: uint32(c) < 8
- // result: (SRWconst (ZeroExt8to32 x) [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 8) {
- break
- }
- v.reset(OpPPC64SRWconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
// match: (Rsh8Ux32 x (MOVDconst [c]))
// cond: uint32(c) < 8
// result: (SRWconst (ZeroExt8to32 x) [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh8Ux64 x (Const64 [c]))
- // cond: uint64(c) < 8
- // result: (SRWconst (ZeroExt8to32 x) [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 8) {
- break
- }
- v.reset(OpPPC64SRWconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
- // match: (Rsh8Ux64 _ (Const64 [c]))
+ // match: (Rsh8Ux64 _ (MOVDconst [c]))
// cond: uint64(c) >= 8
// result: (MOVDconst [0])
for {
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh8x32 x (Const64 [c]))
- // cond: uint32(c) < 8
- // result: (SRAWconst (SignExt8to32 x) [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint32(c) < 8) {
- break
- }
- v.reset(OpPPC64SRAWconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
// match: (Rsh8x32 x (MOVDconst [c]))
// cond: uint32(c) < 8
// result: (SRAWconst (SignExt8to32 x) [c])
v_0 := v.Args[0]
b := v.Block
typ := &b.Func.Config.Types
- // match: (Rsh8x64 x (Const64 [c]))
- // cond: uint64(c) < 8
- // result: (SRAWconst (SignExt8to32 x) [c])
- for {
- x := v_0
- if v_1.Op != OpConst64 {
- break
- }
- c := v_1.AuxInt
- if !(uint64(c) < 8) {
- break
- }
- v.reset(OpPPC64SRAWconst)
- v.AuxInt = c
- v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
- v0.AddArg(x)
- v.AddArg(v0)
- return true
- }
- // match: (Rsh8x64 x (Const64 [c]))
+ // match: (Rsh8x64 x (MOVDconst [c]))
// cond: uint64(c) >= 8
// result: (SRAWconst (SignExt8to32 x) [63])
for {
x := v_0
- if v_1.Op != OpConst64 {
+ if v_1.Op != OpPPC64MOVDconst {
break
}
c := v_1.AuxInt