(LocalAddr <t> {sym} base mem) && t.Elem().HasPointers() => (MOVDaddr {sym} (SPanchored base mem))
(LocalAddr <t> {sym} base _) && !t.Elem().HasPointers() => (MOVDaddr {sym} base)
(OffPtr [off] ptr) => (ADD (MOVDconst <typ.Int64> [off]) ptr)
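+// A MOVDaddr with a nil symbol and zero offset computes ptr+0, i.e. it is
+// the identity on its pointer argument; fold it away when the argument is
+// already an address computation (ADD), an incoming register arg, or a phi.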
+(MOVDaddr {sym} [n] p:(ADD x y)) && sym == nil && n == 0 => p
+(MOVDaddr {sym} [n] ptr) && sym == nil && n == 0 && (ptr.Op == OpArgIntReg || ptr.Op == OpPhi) => ptr
// TODO: optimize these cases?
(Ctz32NonZero ...) => (Ctz32 ...)
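The matcher for these rules lives in rewritePPC64.go, which is generated from the rules file by the _gen rulegen tool; it gains a dispatch case and a new rewrite function: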
return rewriteValuePPC64_OpPPC64MOVBstoreidx(v)
case OpPPC64MOVBstorezero:
return rewriteValuePPC64_OpPPC64MOVBstorezero(v)
+ case OpPPC64MOVDaddr:
+ return rewriteValuePPC64_OpPPC64MOVDaddr(v)
case OpPPC64MOVDload:
return rewriteValuePPC64_OpPPC64MOVDload(v)
case OpPPC64MOVDloadidx:
	return rewriteValuePPC64_OpPPC64MOVDloadidx(v)
}
return false
}
+func rewriteValuePPC64_OpPPC64MOVDaddr(v *Value) bool {
+ v_0 := v.Args[0]
+ // match: (MOVDaddr {sym} [n] p:(ADD x y))
+ // cond: sym == nil && n == 0
+ // result: p
+ for {
+ n := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ p := v_0
+ if p.Op != OpPPC64ADD {
+ break
+ }
+ if !(sym == nil && n == 0) {
+ break
+ }
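+ // Rewrite v in place as a copy of p; later compiler passes remove the resulting OpCopy.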
+ v.copyOf(p)
+ return true
+ }
+ // match: (MOVDaddr {sym} [n] ptr)
+ // cond: sym == nil && n == 0 && (ptr.Op == OpArgIntReg || ptr.Op == OpPhi)
+ // result: ptr
+ for {
+ n := auxIntToInt32(v.AuxInt)
+ sym := auxToSym(v.Aux)
+ ptr := v_0
+ if !(sym == nil && n == 0 && (ptr.Op == OpArgIntReg || ptr.Op == OpPhi)) {
+ break
+ }
+ v.copyOf(ptr)
+ return true
+ }
+ return false
+}
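
To make the effect of the two rules concrete outside the compiler, here is a minimal, self-contained sketch that models the same match conditions on a toy value graph. Op, Value, and simplifyMOVDaddr below are illustrative stand-ins, not the real ssa package API:

package main

import "fmt"

// Op and Value are hypothetical miniatures of the ssa package's value
// graph, defined here only so the sketch is self-contained.
type Op int

const (
	OpMOVDaddr Op = iota
	OpADD
	OpArgIntReg
	OpPhi
)

type Value struct {
	Op     Op
	AuxInt int32    // the [n] offset
	Aux    any      // the {sym} symbol; nil when absent
	Args   []*Value
}

// simplifyMOVDaddr mirrors the two rules: a MOVDaddr carrying no symbol
// and a zero offset is the identity on its pointer argument, so return
// the argument directly when it is an ADD, a register arg, or a phi.
func simplifyMOVDaddr(v *Value) *Value {
	if v.Op != OpMOVDaddr || v.Aux != nil || v.AuxInt != 0 {
		return v
	}
	p := v.Args[0]
	switch p.Op {
	case OpADD, OpArgIntReg, OpPhi:
		return p
	}
	return v
}

func main() {
	sum := &Value{Op: OpADD}
	addr := &Value{Op: OpMOVDaddr, Args: []*Value{sum}}
	fmt.Println(simplifyMOVDaddr(addr) == sum) // true: the MOVDaddr folds away
}
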
func rewriteValuePPC64_OpPPC64MOVDload(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]