(Rsh8Ux8 x y) -> (SRW (ZeroExt8to32 x) (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt8to64 y)))))
(Lsh8x8 x y) -> (SLW x (ORN y <config.fe.TypeInt64()> (MaskIfNotCarry (ADDconstForCarry [-8] (ZeroExt8to64 y)))))
+// Clean up shift ops when the shift amount is already masked
+(MaskIfNotCarry (ADDconstForCarry [c] (ANDconst [d] _))) && c < 0 && d > 0 && c + d < 0 -> (MOVDconst [-1])
+(ORN x (MOVDconst [-1])) -> x
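+// For instance (an illustrative walk-through of the two rules above): with a
+// shift amount already masked to [0,7], ADDconstForCarry [-8] (ANDconst [7] y)
+// has c = -8 < 0, d = 7 > 0, c+d = -1 < 0, so by the carry notes below the
+// carry is always clear; MaskIfNotCarry then yields -1, and
+// ORN y (MOVDconst [-1]) = y | ^(-1) = y, leaving the shift amount untouched.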
+
// Potentially useful optimizing rewrites.
// (ADDconstForCarry [k] c), k < 0 && (c < 0 || k+c >= 0) -> CarrySet
// (ADDconstForCarry [k] c), k < 0 && (c >= 0 && k+c < 0) -> CarryClear
// (NE (CMPWconst [0] (FGreaterThan cc)) yes no) -> (FGT cc yes no)
// (NE (CMPWconst [0] (FGreaterEqual cc)) yes no) -> (FGE cc yes no)
+// Elide compares of bit tests.
+// TODO: need to make both CC and the result of ANDCC available.
+(EQ (CMPconst [0] (ANDconst [c] x)) yes no) -> (EQ (ANDCCconst [c] x) yes no)
+(NE (CMPconst [0] (ANDconst [c] x)) yes no) -> (NE (ANDCCconst [c] x) yes no)
+(EQ (CMPWconst [0] (ANDconst [c] x)) yes no) -> (EQ (ANDCCconst [c] x) yes no)
+(NE (CMPWconst [0] (ANDconst [c] x)) yes no) -> (NE (ANDCCconst [c] x) yes no)
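+// An illustrative source shape that produces this pattern (an assumption
+// about typical input, not a specific test):
+//
+//	if x&0x10 == 0 { ... }
+//
+// lowers to (EQ (CMPconst [0] (ANDconst [16] x)) yes no); the rewrite lets
+// ANDCCconst set CC directly and drops the separate compare.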
+
// absorb flag constants into branches
(EQ (FlagEQ) yes no) -> (First nil yes no)
(EQ (FlagLT) yes no) -> (First nil no yes)
(NilCheck ptr mem) -> (LoweredNilCheck ptr mem)
// Optimizations
+// Note that PPC "logical" immediates come in 0:15 and 16:31 unsigned immediate forms,
+// so ORconst, XORconst easily expand into a pair.
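+// A sketch of the expected expansion (assuming ori/oris-style codegen):
+// ORconst [0x12345678] x could become oris with 0x1234 followed by ori with
+// 0x5678, which is sound because OR composes: (x | hi) | lo == x | (hi | lo).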
+
+// Include very-large constants in the const-const case.
+(AND (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c&d])
+(OR (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c|d])
+(XOR (MOVDconst [c]) (MOVDconst [d])) -> (MOVDconst [c^d])
+
+// Discover consts
+(AND x (MOVDconst [c])) && isU16Bit(c) -> (ANDconst [c] x)
+(XOR x (MOVDconst [c])) && isU32Bit(c) -> (XORconst [c] x)
+(OR x (MOVDconst [c])) && isU32Bit(c) -> (ORconst [c] x)
+(AND (MOVDconst [c]) x) && isU16Bit(c) -> (ANDconst [c] x)
+(XOR (MOVDconst [c]) x) && isU32Bit(c) -> (XORconst [c] x)
+(OR (MOVDconst [c]) x) && isU32Bit(c) -> (ORconst [c] x)
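+// AND is presumably limited to isU16Bit because and-immediates do not compose
+// the way OR/XOR immediates do: each one clears every bit outside its own
+// halfword, so a two-instruction pair cannot build an arbitrary 32-bit AND mask.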
+
+// Simplify consts
+(ANDconst [c] (ANDconst [d] x)) -> (ANDconst [c&d] x)
+(ORconst [c] (ORconst [d] x)) -> (ORconst [c|d] x)
+(XORconst [c] (XORconst [d] x)) -> (XORconst [c^d] x)
+(ANDconst [-1] x) -> x
+(ANDconst [0] _) -> (MOVDconst [0])
+(XORconst [0] x) -> x
+(ORconst [-1] _) -> (MOVDconst [-1])
+(ORconst [0] x) -> x
+
+// zero-extend of small and -> small and
+(MOVBZreg y:(ANDconst [c] _)) && uint64(c) <= 0xFF -> y
+(MOVHZreg y:(ANDconst [c] _)) && uint64(c) <= 0xFFFF -> y
+(MOVWZreg y:(ANDconst [c] _)) && uint64(c) <= 0xFFFFFFFF -> y
+(MOVWZreg y:(AND (MOVDconst [c]) _)) && uint64(c) <= 0xFFFFFFFF -> y
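+// e.g. (MOVBZreg (ANDconst [0x3F] x)): the and already clears bits 8..63, so
+// the explicit zero-extension is redundant.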
+
+// sign extend of small-positive and -> small-positive-and
+(MOVBreg y:(ANDconst [c] _)) && uint64(c) <= 0x7F -> y
+(MOVHreg y:(ANDconst [c] _)) && uint64(c) <= 0x7FFF -> y
+(MOVWreg y:(ANDconst [c] _)) && uint64(c) <= 0xFFFF -> y // 0xFFFF is the largest ANDconst immediate; regarded as 32-bit it is still > 0
+(MOVWreg y:(AND (MOVDconst [c]) _)) && uint64(c) <= 0x7FFFFFFF -> y
+
+// small and of zero-extend -> either zero-extend or small and
+ // degenerate-and
+(ANDconst [c] y:(MOVBZreg _)) && c&0xFF == 0xFF -> y
+(ANDconst [c] y:(MOVHZreg _)) && c&0xFFFF == 0xFFFF -> y
+(ANDconst [c] y:(MOVWZreg _)) && c&0xFFFFFFFF == 0xFFFFFFFF -> y
+ // normal case
+(ANDconst [c] (MOVBZreg x)) -> (ANDconst [c&0xFF] x)
+(ANDconst [c] (MOVHZreg x)) -> (ANDconst [c&0xFFFF] x)
+(ANDconst [c] (MOVWZreg x)) -> (ANDconst [c&0xFFFFFFFF] x)
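+// e.g. (ANDconst [0x1FF] (MOVBZreg x)) becomes (ANDconst [0xFF] x): the
+// zero-extension contributes nothing above bit 7, so the mask narrows.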
+
+// Various redundant zero/sign extension combinations.
+(MOVBZreg y:(MOVBZreg _)) -> y // repeat
+(MOVBreg y:(MOVBreg _)) -> y // repeat
+(MOVBreg (MOVBZreg x)) -> (MOVBreg x)
+(MOVBZreg (MOVBreg x)) -> (MOVBZreg x)
+
+// H - there are more combinations than these
+
+(MOVHZreg y:(MOVHZreg _)) -> y // repeat
+(MOVHZreg y:(MOVBZreg _)) -> y // wide of narrow
+
+(MOVHreg y:(MOVHreg _)) -> y // repeat
+(MOVHreg y:(MOVBreg _)) -> y // wide of narrow
+
+(MOVHreg y:(MOVHZreg x)) -> (MOVHreg x)
+(MOVHZreg y:(MOVHreg x)) -> (MOVHZreg x)
+
+// W - there are more combinations than these
+
+(MOVWZreg y:(MOVWZreg _)) -> y // repeat
+(MOVWZreg y:(MOVHZreg _)) -> y // wide of narrow
+(MOVWZreg y:(MOVBZreg _)) -> y // wide of narrow
+
+(MOVWreg y:(MOVWreg _)) -> y // repeat
+(MOVWreg y:(MOVHreg _)) -> y // wide of narrow
+(MOVWreg y:(MOVBreg _)) -> y // wide of narrow
+
+(MOVWreg y:(MOVWZreg x)) -> (MOVWreg x)
+(MOVWZreg y:(MOVWreg x)) -> (MOVWZreg x)
+
+// Arithmetic constant ops
(ADD (MOVDconst [c]) x) && is32Bit(c) -> (ADDconst [c] x)
(ADD x (MOVDconst [c])) && is32Bit(c) -> (ADDconst [c] x)
(ADDconst [c] (ADDconst [d] x)) && is32Bit(c+d) -> (ADDconst [c+d] x)
-(ADDconst [c] (MOVDaddr [d] {sym} x)) -> (MOVDaddr [c+d] {sym} x)
(ADDconst [0] x) -> x
-(ANDconst [-1] x) -> x
-(ANDconst [0] _) -> (MOVDconst [0])
-(XORconst [0] x) -> x
+(SUB x (MOVDconst [c])) && is32Bit(-c) -> (ADDconst [-c] x)
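+// (the guard is is32Bit(-c), not is32Bit(c): negating c must not overflow the
+// ADDconst immediate range, e.g. c = -1<<31 has -c = 1<<31, which is not 32-bit)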
+// TODO deal with subtract-from-const
-(XOR (MOVDconst [0]) x) -> x
-(XOR x (MOVDconst [0])) -> x
-(ADD (MOVDconst [0]) x) -> x
-(ADD x (MOVDconst [0])) -> x
+(ADDconst [c] (MOVDaddr [d] {sym} x)) -> (MOVDaddr [c+d] {sym} x)
// Fold offsets for stores.
(MOVDstore [off1] {sym} (ADDconst [off2] x) val mem) && is16Bit(off1+off2) -> (MOVDstore [off1+off2] {sym} x val mem)
(MOVHZreg (MOVDconst [c])) -> (MOVDconst [int64(uint16(c))])
(MOVHreg (MOVDconst [c])) -> (MOVDconst [int64(int16(c))])
+// Lose widening ops fed to stores
(MOVBstore [off] {sym} ptr (MOVBreg x) mem) -> (MOVBstore [off] {sym} ptr x mem)
(MOVBstore [off] {sym} ptr (MOVBZreg x) mem) -> (MOVBstore [off] {sym} ptr x mem)
(MOVHstore [off] {sym} ptr (MOVHreg x) mem) -> (MOVHstore [off] {sym} ptr x mem)
(MOVHstore [off] {sym} ptr (MOVHZreg x) mem) -> (MOVHstore [off] {sym} ptr x mem)
+(MOVWstore [off] {sym} ptr (MOVWreg x) mem) -> (MOVWstore [off] {sym} ptr x mem)
+(MOVWstore [off] {sym} ptr (MOVWZreg x) mem) -> (MOVWstore [off] {sym} ptr x mem)
+
+// Lose W-widening ops fed to compare-W
+(CMPW x (MOVWreg y)) -> (CMPW x y)
+(CMPW (MOVWreg x) y) -> (CMPW x y)
+(CMPWU x (MOVWZreg y)) -> (CMPWU x y)
+(CMPWU (MOVWZreg x) y) -> (CMPWU x y)
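+// (CMPW/CMPWU only examine the low 32 bits of their operands, so a matching
+// sign- or zero-extension feeding them is redundant.)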
(CMP x (MOVDconst [c])) && is16Bit(c) -> (CMPconst x [c])
(CMP (MOVDconst [c]) y) && is16Bit(c) -> (InvertFlags (CMPconst y [c]))
(CMPU (MOVDconst [c]) y) && isU16Bit(c) -> (InvertFlags (CMPUconst y [c]))
(CMPWU x (MOVDconst [c])) && isU16Bit(c) -> (CMPWUconst x [c])
(CMPWU (MOVDconst [c]) y) && isU16Bit(c) -> (InvertFlags (CMPWUconst y [c]))
+
+// A particular pattern seen in cgo code:
+(AND (MOVDconst [c]) x:(MOVBZload _ _)) -> (ANDconst [c&0xFF] x)
+(AND x:(MOVBZload _ _) (MOVDconst [c])) -> (ANDconst [c&0xFF] x)
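+// An illustrative shape (hypothetical, not taken from the cgo output itself):
+//
+//	b := *(*byte)(unsafe.Pointer(p))
+//	if uint64(b)&^0x0F != 0 { ... }   // the mask is a wide MOVDconst
+//
+// the byte load is already <= 0xFF, so the wide AND narrows to ANDconst [c&0xFF].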
return rewriteValuePPC64_OpPPC64ADD(v, config)
case OpPPC64ADDconst:
return rewriteValuePPC64_OpPPC64ADDconst(v, config)
+ case OpPPC64AND:
+ return rewriteValuePPC64_OpPPC64AND(v, config)
case OpPPC64ANDconst:
return rewriteValuePPC64_OpPPC64ANDconst(v, config)
case OpPPC64CMP:
return rewriteValuePPC64_OpPPC64MOVHstorezero(v, config)
case OpPPC64MOVWZload:
return rewriteValuePPC64_OpPPC64MOVWZload(v, config)
+ case OpPPC64MOVWZreg:
+ return rewriteValuePPC64_OpPPC64MOVWZreg(v, config)
case OpPPC64MOVWload:
return rewriteValuePPC64_OpPPC64MOVWload(v, config)
+ case OpPPC64MOVWreg:
+ return rewriteValuePPC64_OpPPC64MOVWreg(v, config)
case OpPPC64MOVWstore:
return rewriteValuePPC64_OpPPC64MOVWstore(v, config)
case OpPPC64MOVWstorezero:
return rewriteValuePPC64_OpPPC64MOVWstorezero(v, config)
+ case OpPPC64MaskIfNotCarry:
+ return rewriteValuePPC64_OpPPC64MaskIfNotCarry(v, config)
case OpPPC64NotEqual:
return rewriteValuePPC64_OpPPC64NotEqual(v, config)
+ case OpPPC64OR:
+ return rewriteValuePPC64_OpPPC64OR(v, config)
+ case OpPPC64ORN:
+ return rewriteValuePPC64_OpPPC64ORN(v, config)
+ case OpPPC64ORconst:
+ return rewriteValuePPC64_OpPPC64ORconst(v, config)
+ case OpPPC64SUB:
+ return rewriteValuePPC64_OpPPC64SUB(v, config)
case OpPPC64XOR:
return rewriteValuePPC64_OpPPC64XOR(v, config)
case OpPPC64XORconst:
v.AddArg(x)
return true
}
- // match: (ADD (MOVDconst [0]) x)
- // cond:
- // result: x
- for {
- v_0 := v.Args[0]
- if v_0.Op != OpPPC64MOVDconst {
- break
- }
- if v_0.AuxInt != 0 {
- break
- }
- x := v.Args[1]
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
- return true
- }
- // match: (ADD x (MOVDconst [0]))
- // cond:
- // result: x
- for {
- x := v.Args[0]
- v_1 := v.Args[1]
- if v_1.Op != OpPPC64MOVDconst {
- break
- }
- if v_1.AuxInt != 0 {
- break
- }
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
- return true
- }
return false
}
func rewriteValuePPC64_OpPPC64ADDconst(v *Value, config *Config) bool {
v.AddArg(x)
return true
}
+ // match: (ADDconst [0] x)
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 0 {
+ break
+ }
+ x := v.Args[0]
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
// match: (ADDconst [c] (MOVDaddr [d] {sym} x))
// cond:
// result: (MOVDaddr [c+d] {sym} x)
v.AddArg(x)
return true
}
- // match: (ADDconst [0] x)
+ return false
+}
+func rewriteValuePPC64_OpPPC64AND(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (AND (MOVDconst [c]) (MOVDconst [d]))
// cond:
- // result: x
+ // result: (MOVDconst [c&d])
for {
- if v.AuxInt != 0 {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
break
}
+ d := v_1.AuxInt
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = c & d
+ return true
+ }
+ // match: (AND x (MOVDconst [c]))
+ // cond: isU16Bit(c)
+ // result: (ANDconst [c] x)
+ for {
x := v.Args[0]
- v.reset(OpCopy)
- v.Type = x.Type
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(isU16Bit(c)) {
+ break
+ }
+ v.reset(OpPPC64ANDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (AND (MOVDconst [c]) x)
+ // cond: isU16Bit(c)
+ // result: (ANDconst [c] x)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ x := v.Args[1]
+ if !(isU16Bit(c)) {
+ break
+ }
+ v.reset(OpPPC64ANDconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (AND (MOVDconst [c]) x:(MOVBZload _ _))
+ // cond:
+ // result: (ANDconst [c&0xFF] x)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ x := v.Args[1]
+ if x.Op != OpPPC64MOVBZload {
+ break
+ }
+ v.reset(OpPPC64ANDconst)
+ v.AuxInt = c & 0xFF
+ v.AddArg(x)
+ return true
+ }
+ // match: (AND x:(MOVBZload _ _) (MOVDconst [c]))
+ // cond:
+ // result: (ANDconst [c&0xFF] x)
+ for {
+ x := v.Args[0]
+ if x.Op != OpPPC64MOVBZload {
+ break
+ }
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ v.reset(OpPPC64ANDconst)
+ v.AuxInt = c & 0xFF
v.AddArg(x)
return true
}
func rewriteValuePPC64_OpPPC64ANDconst(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (ANDconst [c] (ANDconst [d] x))
+ // cond:
+ // result: (ANDconst [c&d] x)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64ANDconst {
+ break
+ }
+ d := v_0.AuxInt
+ x := v_0.Args[0]
+ v.reset(OpPPC64ANDconst)
+ v.AuxInt = c & d
+ v.AddArg(x)
+ return true
+ }
// match: (ANDconst [-1] x)
// cond:
// result: x
v.AuxInt = 0
return true
}
+ // match: (ANDconst [c] y:(MOVBZreg _))
+ // cond: c&0xFF == 0xFF
+ // result: y
+ for {
+ c := v.AuxInt
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVBZreg {
+ break
+ }
+ if !(c&0xFF == 0xFF) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ANDconst [c] y:(MOVHZreg _))
+ // cond: c&0xFFFF == 0xFFFF
+ // result: y
+ for {
+ c := v.AuxInt
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVHZreg {
+ break
+ }
+ if !(c&0xFFFF == 0xFFFF) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ANDconst [c] y:(MOVWZreg _))
+ // cond: c&0xFFFFFFFF == 0xFFFFFFFF
+ // result: y
+ for {
+ c := v.AuxInt
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVWZreg {
+ break
+ }
+ if !(c&0xFFFFFFFF == 0xFFFFFFFF) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (ANDconst [c] (MOVBZreg x))
+ // cond:
+ // result: (ANDconst [c&0xFF] x)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVBZreg {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpPPC64ANDconst)
+ v.AuxInt = c & 0xFF
+ v.AddArg(x)
+ return true
+ }
+ // match: (ANDconst [c] (MOVHZreg x))
+ // cond:
+ // result: (ANDconst [c&0xFFFF] x)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVHZreg {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpPPC64ANDconst)
+ v.AuxInt = c & 0xFFFF
+ v.AddArg(x)
+ return true
+ }
+ // match: (ANDconst [c] (MOVWZreg x))
+ // cond:
+ // result: (ANDconst [c&0xFFFFFFFF] x)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVWZreg {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpPPC64ANDconst)
+ v.AuxInt = c & 0xFFFFFFFF
+ v.AddArg(x)
+ return true
+ }
return false
}
func rewriteValuePPC64_OpPPC64CMP(v *Value, config *Config) bool {
func rewriteValuePPC64_OpPPC64CMPW(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (CMPW x (MOVWreg y))
+ // cond:
+ // result: (CMPW x y)
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVWreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpPPC64CMPW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (CMPW (MOVWreg x) y)
+ // cond:
+ // result: (CMPW x y)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVWreg {
+ break
+ }
+ x := v_0.Args[0]
+ y := v.Args[1]
+ v.reset(OpPPC64CMPW)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (CMPW x (MOVDconst [c]))
// cond: is16Bit(c)
// result: (CMPWconst x [c])
func rewriteValuePPC64_OpPPC64CMPWU(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (CMPWU x (MOVWZreg y))
+ // cond:
+ // result: (CMPWU x y)
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVWZreg {
+ break
+ }
+ y := v_1.Args[0]
+ v.reset(OpPPC64CMPWU)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
+ // match: (CMPWU (MOVWZreg x) y)
+ // cond:
+ // result: (CMPWU x y)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVWZreg {
+ break
+ }
+ x := v_0.Args[0]
+ y := v.Args[1]
+ v.reset(OpPPC64CMPWU)
+ v.AddArg(x)
+ v.AddArg(y)
+ return true
+ }
// match: (CMPWU x (MOVDconst [c]))
// cond: isU16Bit(c)
// result: (CMPWUconst x [c])
func rewriteValuePPC64_OpPPC64MOVBZreg(v *Value, config *Config) bool {
b := v.Block
_ = b
- // match: (MOVBZreg x:(MOVBZload _ _))
- // cond:
- // result: x
+ // match: (MOVBZreg y:(ANDconst [c] _))
+ // cond: uint64(c) <= 0xFF
+ // result: y
for {
- x := v.Args[0]
- if x.Op != OpPPC64MOVBZload {
+ y := v.Args[0]
+ if y.Op != OpPPC64ANDconst {
+ break
+ }
+ c := y.AuxInt
+ if !(uint64(c) <= 0xFF) {
break
}
v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
+ v.Type = y.Type
+ v.AddArg(y)
return true
}
- // match: (MOVBZreg (MOVDconst [c]))
+ // match: (MOVBZreg y:(MOVBZreg _))
// cond:
- // result: (MOVDconst [int64(uint8(c))])
+ // result: y
for {
- v_0 := v.Args[0]
- if v_0.Op != OpPPC64MOVDconst {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVBZreg {
break
}
- c := v_0.AuxInt
- v.reset(OpPPC64MOVDconst)
- v.AuxInt = int64(uint8(c))
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVBZreg (MOVBreg x))
+ // cond:
+ // result: (MOVBZreg x)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVBreg {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpPPC64MOVBZreg)
+ v.AddArg(x)
+ return true
+ }
+ // match: (MOVBZreg x:(MOVBZload _ _))
+ // cond:
+ // result: x
+ for {
+ x := v.Args[0]
+ if x.Op != OpPPC64MOVBZload {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ // match: (MOVBZreg (MOVDconst [c]))
+ // cond:
+ // result: (MOVDconst [int64(uint8(c))])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = int64(uint8(c))
return true
}
return false
func rewriteValuePPC64_OpPPC64MOVBreg(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVBreg y:(ANDconst [c] _))
+ // cond: uint64(c) <= 0x7F
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64ANDconst {
+ break
+ }
+ c := y.AuxInt
+ if !(uint64(c) <= 0x7F) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVBreg y:(MOVBreg _))
+ // cond:
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVBreg {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVBreg (MOVBZreg x))
+ // cond:
+ // result: (MOVBreg x)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVBZreg {
+ break
+ }
+ x := v_0.Args[0]
+ v.reset(OpPPC64MOVBreg)
+ v.AddArg(x)
+ return true
+ }
// match: (MOVBreg (MOVDconst [c]))
// cond:
// result: (MOVDconst [int64(int8(c))])
func rewriteValuePPC64_OpPPC64MOVHZreg(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVHZreg y:(ANDconst [c] _))
+ // cond: uint64(c) <= 0xFFFF
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64ANDconst {
+ break
+ }
+ c := y.AuxInt
+ if !(uint64(c) <= 0xFFFF) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVHZreg y:(MOVHZreg _))
+ // cond:
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVHZreg {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVHZreg y:(MOVBZreg _))
+ // cond:
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVBZreg {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVHZreg y:(MOVHreg x))
+ // cond:
+ // result: (MOVHZreg x)
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVHreg {
+ break
+ }
+ x := y.Args[0]
+ v.reset(OpPPC64MOVHZreg)
+ v.AddArg(x)
+ return true
+ }
// match: (MOVHZreg x:(MOVHZload _ _))
// cond:
// result: x
func rewriteValuePPC64_OpPPC64MOVHreg(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (MOVHreg y:(ANDconst [c] _))
+ // cond: uint64(c) <= 0x7FFF
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64ANDconst {
+ break
+ }
+ c := y.AuxInt
+ if !(uint64(c) <= 0x7FFF) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVHreg y:(MOVHreg _))
+ // cond:
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVHreg {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVHreg y:(MOVBreg _))
+ // cond:
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVBreg {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVHreg y:(MOVHZreg x))
+ // cond:
+ // result: (MOVHreg x)
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVHZreg {
+ break
+ }
+ x := y.Args[0]
+ v.reset(OpPPC64MOVHreg)
+ v.AddArg(x)
+ return true
+ }
// match: (MOVHreg x:(MOVHload _ _))
// cond:
// result: x
}
return false
}
+func rewriteValuePPC64_OpPPC64MOVWZreg(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (MOVWZreg y:(ANDconst [c] _))
+ // cond: uint64(c) <= 0xFFFFFFFF
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64ANDconst {
+ break
+ }
+ c := y.AuxInt
+ if !(uint64(c) <= 0xFFFFFFFF) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVWZreg y:(AND (MOVDconst [c]) _))
+ // cond: uint64(c) <= 0xFFFFFFFF
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64AND {
+ break
+ }
+ y_0 := y.Args[0]
+ if y_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := y_0.AuxInt
+ if !(uint64(c) <= 0xFFFFFFFF) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVWZreg y:(MOVWZreg _))
+ // cond:
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVWZreg {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVWZreg y:(MOVHZreg _))
+ // cond:
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVHZreg {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVWZreg y:(MOVBZreg _))
+ // cond:
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVBZreg {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVWZreg y:(MOVWreg x))
+ // cond:
+ // result: (MOVWZreg x)
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVWreg {
+ break
+ }
+ x := y.Args[0]
+ v.reset(OpPPC64MOVWZreg)
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
func rewriteValuePPC64_OpPPC64MOVWload(v *Value, config *Config) bool {
b := v.Block
_ = b
}
return false
}
+func rewriteValuePPC64_OpPPC64MOVWreg(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (MOVWreg y:(ANDconst [c] _))
+ // cond: uint64(c) <= 0xFFFF
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64ANDconst {
+ break
+ }
+ c := y.AuxInt
+ if !(uint64(c) <= 0xFFFF) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVWreg y:(AND (MOVDconst [c]) _))
+ // cond: uint64(c) <= 0x7FFFFFFF
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64AND {
+ break
+ }
+ y_0 := y.Args[0]
+ if y_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := y_0.AuxInt
+ if !(uint64(c) <= 0x7FFFFFFF) {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVWreg y:(MOVWreg _))
+ // cond:
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVWreg {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVWreg y:(MOVHreg _))
+ // cond:
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVHreg {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVWreg y:(MOVBreg _))
+ // cond:
+ // result: y
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVBreg {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = y.Type
+ v.AddArg(y)
+ return true
+ }
+ // match: (MOVWreg y:(MOVWZreg x))
+ // cond:
+ // result: (MOVWreg x)
+ for {
+ y := v.Args[0]
+ if y.Op != OpPPC64MOVWZreg {
+ break
+ }
+ x := y.Args[0]
+ v.reset(OpPPC64MOVWreg)
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
func rewriteValuePPC64_OpPPC64MOVWstore(v *Value, config *Config) bool {
b := v.Block
_ = b
v.AddArg(mem)
return true
}
- // match: (MOVWstore [off] {sym} ptr (MOVDconst [c]) mem)
- // cond: c == 0
- // result: (MOVWstorezero [off] {sym} ptr mem)
+ // match: (MOVWstore [off] {sym} ptr (MOVDconst [c]) mem)
+ // cond: c == 0
+ // result: (MOVWstorezero [off] {sym} ptr mem)
+ for {
+ off := v.AuxInt
+ sym := v.Aux
+ ptr := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ mem := v.Args[2]
+ if !(c == 0) {
+ break
+ }
+ v.reset(OpPPC64MOVWstorezero)
+ v.AuxInt = off
+ v.Aux = sym
+ v.AddArg(ptr)
+ v.AddArg(mem)
+ return true
+ }
+ // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
+ // cond:
+ // result: (MOVWstore [off] {sym} ptr x mem)
+ for {
+ off := v.AuxInt
+ sym := v.Aux
+ ptr := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVWreg {
+ break
+ }
+ x := v_1.Args[0]
+ mem := v.Args[2]
+ v.reset(OpPPC64MOVWstore)
+ v.AuxInt = off
+ v.Aux = sym
+ v.AddArg(ptr)
+ v.AddArg(x)
+ v.AddArg(mem)
+ return true
+ }
+ // match: (MOVWstore [off] {sym} ptr (MOVWZreg x) mem)
+ // cond:
+ // result: (MOVWstore [off] {sym} ptr x mem)
for {
off := v.AuxInt
sym := v.Aux
ptr := v.Args[0]
v_1 := v.Args[1]
- if v_1.Op != OpPPC64MOVDconst {
+ if v_1.Op != OpPPC64MOVWZreg {
break
}
- c := v_1.AuxInt
+ x := v_1.Args[0]
mem := v.Args[2]
- if !(c == 0) {
- break
- }
- v.reset(OpPPC64MOVWstorezero)
+ v.reset(OpPPC64MOVWstore)
v.AuxInt = off
v.Aux = sym
v.AddArg(ptr)
+ v.AddArg(x)
v.AddArg(mem)
return true
}
}
return false
}
+func rewriteValuePPC64_OpPPC64MaskIfNotCarry(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (MaskIfNotCarry (ADDconstForCarry [c] (ANDconst [d] _)))
+ // cond: c < 0 && d > 0 && c + d < 0
+ // result: (MOVDconst [-1])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64ADDconstForCarry {
+ break
+ }
+ c := v_0.AuxInt
+ v_0_0 := v_0.Args[0]
+ if v_0_0.Op != OpPPC64ANDconst {
+ break
+ }
+ d := v_0_0.AuxInt
+ if !(c < 0 && d > 0 && c+d < 0) {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = -1
+ return true
+ }
+ return false
+}
func rewriteValuePPC64_OpPPC64NotEqual(v *Value, config *Config) bool {
b := v.Block
_ = b
}
return false
}
-func rewriteValuePPC64_OpPPC64XOR(v *Value, config *Config) bool {
+func rewriteValuePPC64_OpPPC64OR(v *Value, config *Config) bool {
b := v.Block
_ = b
- // match: (XOR (MOVDconst [0]) x)
+ // match: (OR (MOVDconst [c]) (MOVDconst [d]))
// cond:
- // result: x
+ // result: (MOVDconst [c|d])
for {
v_0 := v.Args[0]
if v_0.Op != OpPPC64MOVDconst {
break
}
- if v_0.AuxInt != 0 {
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = c | d
+ return true
+ }
+ // match: (OR x (MOVDconst [c]))
+ // cond: isU32Bit(c)
+ // result: (ORconst [c] x)
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(isU32Bit(c)) {
+ break
+ }
+ v.reset(OpPPC64ORconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (OR (MOVDconst [c]) x)
+ // cond: isU32Bit(c)
+ // result: (ORconst [c] x)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
break
}
+ c := v_0.AuxInt
x := v.Args[1]
- v.reset(OpCopy)
- v.Type = x.Type
+ if !(isU32Bit(c)) {
+ break
+ }
+ v.reset(OpPPC64ORconst)
+ v.AuxInt = c
v.AddArg(x)
return true
}
- // match: (XOR x (MOVDconst [0]))
+ return false
+}
+func rewriteValuePPC64_OpPPC64ORN(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (ORN x (MOVDconst [-1]))
// cond:
// result: x
for {
if v_1.Op != OpPPC64MOVDconst {
break
}
- if v_1.AuxInt != 0 {
+ if v_1.AuxInt != -1 {
+ break
+ }
+ v.reset(OpCopy)
+ v.Type = x.Type
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
+func rewriteValuePPC64_OpPPC64ORconst(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (ORconst [c] (ORconst [d] x))
+ // cond:
+ // result: (ORconst [c|d] x)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64ORconst {
+ break
+ }
+ d := v_0.AuxInt
+ x := v_0.Args[0]
+ v.reset(OpPPC64ORconst)
+ v.AuxInt = c | d
+ v.AddArg(x)
+ return true
+ }
+ // match: (ORconst [-1] _)
+ // cond:
+ // result: (MOVDconst [-1])
+ for {
+ if v.AuxInt != -1 {
+ break
+ }
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = -1
+ return true
+ }
+ // match: (ORconst [0] x)
+ // cond:
+ // result: x
+ for {
+ if v.AuxInt != 0 {
break
}
+ x := v.Args[0]
v.reset(OpCopy)
v.Type = x.Type
v.AddArg(x)
}
return false
}
+func rewriteValuePPC64_OpPPC64SUB(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (SUB x (MOVDconst [c]))
+ // cond: is32Bit(-c)
+ // result: (ADDconst [-c] x)
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(is32Bit(-c)) {
+ break
+ }
+ v.reset(OpPPC64ADDconst)
+ v.AuxInt = -c
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
+func rewriteValuePPC64_OpPPC64XOR(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (XOR (MOVDconst [c]) (MOVDconst [d]))
+ // cond:
+ // result: (MOVDconst [c^d])
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ d := v_1.AuxInt
+ v.reset(OpPPC64MOVDconst)
+ v.AuxInt = c ^ d
+ return true
+ }
+ // match: (XOR x (MOVDconst [c]))
+ // cond: isU32Bit(c)
+ // result: (XORconst [c] x)
+ for {
+ x := v.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_1.AuxInt
+ if !(isU32Bit(c)) {
+ break
+ }
+ v.reset(OpPPC64XORconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ // match: (XOR (MOVDconst [c]) x)
+ // cond: isU32Bit(c)
+ // result: (XORconst [c] x)
+ for {
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64MOVDconst {
+ break
+ }
+ c := v_0.AuxInt
+ x := v.Args[1]
+ if !(isU32Bit(c)) {
+ break
+ }
+ v.reset(OpPPC64XORconst)
+ v.AuxInt = c
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
func rewriteValuePPC64_OpPPC64XORconst(v *Value, config *Config) bool {
b := v.Block
_ = b
+ // match: (XORconst [c] (XORconst [d] x))
+ // cond:
+ // result: (XORconst [c^d] x)
+ for {
+ c := v.AuxInt
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64XORconst {
+ break
+ }
+ d := v_0.AuxInt
+ x := v_0.Args[0]
+ v.reset(OpPPC64XORconst)
+ v.AuxInt = c ^ d
+ v.AddArg(x)
+ return true
+ }
// match: (XORconst [0] x)
// cond:
// result: x
func rewriteBlockPPC64(b *Block, config *Config) bool {
switch b.Kind {
case BlockPPC64EQ:
+ // match: (EQ (CMPconst [0] (ANDconst [c] x)) yes no)
+ // cond:
+ // result: (EQ (ANDCCconst [c] x) yes no)
+ for {
+ v := b.Control
+ if v.Op != OpPPC64CMPconst {
+ break
+ }
+ if v.AuxInt != 0 {
+ break
+ }
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64ANDconst {
+ break
+ }
+ c := v_0.AuxInt
+ x := v_0.Args[0]
+ yes := b.Succs[0]
+ no := b.Succs[1]
+ b.Kind = BlockPPC64EQ
+ v0 := b.NewValue0(v.Line, OpPPC64ANDCCconst, TypeFlags)
+ v0.AuxInt = c
+ v0.AddArg(x)
+ b.SetControl(v0)
+ _ = yes
+ _ = no
+ return true
+ }
+ // match: (EQ (CMPWconst [0] (ANDconst [c] x)) yes no)
+ // cond:
+ // result: (EQ (ANDCCconst [c] x) yes no)
+ for {
+ v := b.Control
+ if v.Op != OpPPC64CMPWconst {
+ break
+ }
+ if v.AuxInt != 0 {
+ break
+ }
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64ANDconst {
+ break
+ }
+ c := v_0.AuxInt
+ x := v_0.Args[0]
+ yes := b.Succs[0]
+ no := b.Succs[1]
+ b.Kind = BlockPPC64EQ
+ v0 := b.NewValue0(v.Line, OpPPC64ANDCCconst, TypeFlags)
+ v0.AuxInt = c
+ v0.AddArg(x)
+ b.SetControl(v0)
+ _ = yes
+ _ = no
+ return true
+ }
// match: (EQ (FlagEQ) yes no)
// cond:
// result: (First nil yes no)
_ = no
return true
}
+ // match: (NE (CMPconst [0] (ANDconst [c] x)) yes no)
+ // cond:
+ // result: (NE (ANDCCconst [c] x) yes no)
+ for {
+ v := b.Control
+ if v.Op != OpPPC64CMPconst {
+ break
+ }
+ if v.AuxInt != 0 {
+ break
+ }
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64ANDconst {
+ break
+ }
+ c := v_0.AuxInt
+ x := v_0.Args[0]
+ yes := b.Succs[0]
+ no := b.Succs[1]
+ b.Kind = BlockPPC64NE
+ v0 := b.NewValue0(v.Line, OpPPC64ANDCCconst, TypeFlags)
+ v0.AuxInt = c
+ v0.AddArg(x)
+ b.SetControl(v0)
+ _ = yes
+ _ = no
+ return true
+ }
+ // match: (NE (CMPWconst [0] (ANDconst [c] x)) yes no)
+ // cond:
+ // result: (NE (ANDCCconst [c] x) yes no)
+ for {
+ v := b.Control
+ if v.Op != OpPPC64CMPWconst {
+ break
+ }
+ if v.AuxInt != 0 {
+ break
+ }
+ v_0 := v.Args[0]
+ if v_0.Op != OpPPC64ANDconst {
+ break
+ }
+ c := v_0.AuxInt
+ x := v_0.Args[0]
+ yes := b.Succs[0]
+ no := b.Succs[1]
+ b.Kind = BlockPPC64NE
+ v0 := b.NewValue0(v.Line, OpPPC64ANDCCconst, TypeFlags)
+ v0.AuxInt = c
+ v0.AddArg(x)
+ b.SetControl(v0)
+ _ = yes
+ _ = no
+ return true
+ }
// match: (NE (FlagEQ) yes no)
// cond:
// result: (First nil no yes)