// Lowering loads
(Load <t> ptr mem) && (is64BitInt(t) || isPtr(t)) -> (MOVDload ptr mem)
-(Load <t> ptr mem) && is32BitInt(t) -> (MOVWZload ptr mem)
-(Load <t> ptr mem) && is16BitInt(t) -> (MOVHZload ptr mem)
-(Load <t> ptr mem) && (t.IsBoolean() || is8BitInt(t)) -> (MOVBZload ptr mem)
+(Load <t> ptr mem) && is32BitInt(t) && isSigned(t) -> (MOVWload ptr mem)
+(Load <t> ptr mem) && is32BitInt(t) && !isSigned(t) -> (MOVWZload ptr mem)
+(Load <t> ptr mem) && is16BitInt(t) && isSigned(t) -> (MOVHload ptr mem)
+(Load <t> ptr mem) && is16BitInt(t) && !isSigned(t) -> (MOVHZload ptr mem)
+(Load <t> ptr mem) && is8BitInt(t) && isSigned(t) -> (MOVBload ptr mem)
+(Load <t> ptr mem) && (t.IsBoolean() || (is8BitInt(t) && !isSigned(t))) -> (MOVBZload ptr mem)
(Load <t> ptr mem) && is32BitFloat(t) -> (FMOVSload ptr mem)
(Load <t> ptr mem) && is64BitFloat(t) -> (FMOVDload ptr mem)
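+// Note the pattern: signed integer types lower to the sign-extending loads
+// (MOVBload, MOVHload, MOVWload) while unsigned and boolean types lower to
+// the zero-extending variants (MOVBZload, MOVHZload, MOVWZload); 64-bit
+// values need no extension either way.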
// ***************************
// TODO: Should the optimizations be a separate pass?
+// If a register move has only one use, just use the same register without emitting an instruction.
+// MOVDnop doesn't emit an instruction; it is only there to ensure the type is correct.
+(MOVDreg x) && x.Uses == 1 -> (MOVDnop x)
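+// Together these rules let redundant extensions vanish: an extension of an
+// already-extended value is rewritten below to (MOVDreg x), which keeps the
+// correct 64-bit result type (rewriting straight to x would retype the value
+// as x's narrower type), and the rule above then drops the move whenever the
+// MOVDreg is the sole user of its input, since the two can share a register.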
+
// Fold extensions into conditional moves of constants.
// Designed to remove the MOVBZreg inserted by the If lowering.
-(MOVBZreg x:(MOVDLT (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> x
-(MOVBZreg x:(MOVDLE (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> x
-(MOVBZreg x:(MOVDGT (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> x
-(MOVBZreg x:(MOVDGE (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> x
-(MOVBZreg x:(MOVDEQ (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> x
-(MOVBZreg x:(MOVDNE (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> x
-(MOVBZreg x:(MOVDGTnoinv (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> x
-(MOVBZreg x:(MOVDGEnoinv (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> x
+(MOVBZreg x:(MOVDLT (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> (MOVDreg x)
+(MOVBZreg x:(MOVDLE (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> (MOVDreg x)
+(MOVBZreg x:(MOVDGT (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> (MOVDreg x)
+(MOVBZreg x:(MOVDGE (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> (MOVDreg x)
+(MOVBZreg x:(MOVDEQ (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> (MOVDreg x)
+(MOVBZreg x:(MOVDNE (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> (MOVDreg x)
+(MOVBZreg x:(MOVDGTnoinv (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> (MOVDreg x)
+(MOVBZreg x:(MOVDGEnoinv (MOVDconst [c]) (MOVDconst [d]) _)) && int64(uint8(c)) == c && int64(uint8(d)) == d -> (MOVDreg x)
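+// For example (illustrative), materializing the bool in `return x == 0`
+// produces (MOVBZreg (MOVDEQ (MOVDconst [0]) (MOVDconst [1]) cmp)). Both
+// constants survive the round trip through uint8 (int64(uint8(c)) == c holds
+// exactly when 0 <= c <= 255), so the MOVBZreg becomes a MOVDreg, which the
+// MOVDnop rule above then eliminates.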
// Fold boolean tests into blocks.
(NE (CMPWconst [0] (MOVDLT (MOVDconst [0]) (MOVDconst [1]) cmp)) yes no) -> (LT cmp yes no)
(MOVDNE x y (InvertFlags cmp)) -> (MOVDNE x y cmp)
// don't extend a value that a load has already extended
-(MOVBreg x:(MOVBload _ _)) -> x
-(MOVBZreg x:(MOVBZload _ _)) -> x
-(MOVHreg x:(MOVBload _ _)) -> x
-(MOVHreg x:(MOVBZload _ _)) -> x
-(MOVHreg x:(MOVHload _ _)) -> x
-(MOVHZreg x:(MOVBZload _ _)) -> x
-(MOVHZreg x:(MOVHZload _ _)) -> x
-(MOVWreg x:(MOVBload _ _)) -> x
-(MOVWreg x:(MOVBZload _ _)) -> x
-(MOVWreg x:(MOVHload _ _)) -> x
-(MOVWreg x:(MOVHZload _ _)) -> x
-(MOVWreg x:(MOVWload _ _)) -> x
-(MOVWZreg x:(MOVBZload _ _)) -> x
-(MOVWZreg x:(MOVHZload _ _)) -> x
-(MOVWZreg x:(MOVWZload _ _)) -> x
+(MOVBreg x:(MOVBload _ _)) -> (MOVDreg x)
+(MOVBZreg x:(MOVBZload _ _)) -> (MOVDreg x)
+(MOVHreg x:(MOVBload _ _)) -> (MOVDreg x)
+(MOVHreg x:(MOVBZload _ _)) -> (MOVDreg x)
+(MOVHreg x:(MOVHload _ _)) -> (MOVDreg x)
+(MOVHZreg x:(MOVBZload _ _)) -> (MOVDreg x)
+(MOVHZreg x:(MOVHZload _ _)) -> (MOVDreg x)
+(MOVWreg x:(MOVBload _ _)) -> (MOVDreg x)
+(MOVWreg x:(MOVBZload _ _)) -> (MOVDreg x)
+(MOVWreg x:(MOVHload _ _)) -> (MOVDreg x)
+(MOVWreg x:(MOVHZload _ _)) -> (MOVDreg x)
+(MOVWreg x:(MOVWload _ _)) -> (MOVDreg x)
+(MOVWZreg x:(MOVBZload _ _)) -> (MOVDreg x)
+(MOVWZreg x:(MOVHZload _ _)) -> (MOVDreg x)
+(MOVWZreg x:(MOVWZload _ _)) -> (MOVDreg x)
// don't extend if argument is already extended
-(MOVBreg x:(Arg <t>)) && is8BitInt(t) && isSigned(t) -> x
-(MOVBZreg x:(Arg <t>)) && is8BitInt(t) && !isSigned(t) -> x
-(MOVHreg x:(Arg <t>)) && (is8BitInt(t) || is16BitInt(t)) && isSigned(t) -> x
-(MOVHZreg x:(Arg <t>)) && (is8BitInt(t) || is16BitInt(t)) && !isSigned(t) -> x
-(MOVWreg x:(Arg <t>)) && (is8BitInt(t) || is16BitInt(t) || is32BitInt(t)) && isSigned(t) -> x
-(MOVWZreg x:(Arg <t>)) && (is8BitInt(t) || is16BitInt(t) || is32BitInt(t)) && !isSigned(t) -> x
+(MOVBreg x:(Arg <t>)) && is8BitInt(t) && isSigned(t) -> (MOVDreg x)
+(MOVBZreg x:(Arg <t>)) && is8BitInt(t) && !isSigned(t) -> (MOVDreg x)
+(MOVHreg x:(Arg <t>)) && (is8BitInt(t) || is16BitInt(t)) && isSigned(t) -> (MOVDreg x)
+(MOVHZreg x:(Arg <t>)) && (is8BitInt(t) || is16BitInt(t)) && !isSigned(t) -> (MOVDreg x)
+(MOVWreg x:(Arg <t>)) && (is8BitInt(t) || is16BitInt(t) || is32BitInt(t)) && isSigned(t) -> (MOVDreg x)
+(MOVWZreg x:(Arg <t>)) && (is8BitInt(t) || is16BitInt(t) || is32BitInt(t)) && !isSigned(t) -> (MOVDreg x)
// fold double extensions
-(MOVBreg x:(MOVBreg _)) -> x
-(MOVBZreg x:(MOVBZreg _)) -> x
-(MOVHreg x:(MOVBreg _)) -> x
-(MOVHreg x:(MOVBZreg _)) -> x
-(MOVHreg x:(MOVHreg _)) -> x
-(MOVHZreg x:(MOVBZreg _)) -> x
-(MOVHZreg x:(MOVHZreg _)) -> x
-(MOVWreg x:(MOVBreg _)) -> x
-(MOVWreg x:(MOVBZreg _)) -> x
-(MOVWreg x:(MOVHreg _)) -> x
-(MOVWreg x:(MOVHZreg _)) -> x
-(MOVWreg x:(MOVWreg _)) -> x
-(MOVWZreg x:(MOVBZreg _)) -> x
-(MOVWZreg x:(MOVHZreg _)) -> x
-(MOVWZreg x:(MOVWZreg _)) -> x
+(MOVBreg x:(MOVBreg _)) -> (MOVDreg x)
+(MOVBZreg x:(MOVBZreg _)) -> (MOVDreg x)
+(MOVHreg x:(MOVBreg _)) -> (MOVDreg x)
+(MOVHreg x:(MOVBZreg _)) -> (MOVDreg x)
+(MOVHreg x:(MOVHreg _)) -> (MOVDreg x)
+(MOVHZreg x:(MOVBZreg _)) -> (MOVDreg x)
+(MOVHZreg x:(MOVHZreg _)) -> (MOVDreg x)
+(MOVWreg x:(MOVBreg _)) -> (MOVDreg x)
+(MOVWreg x:(MOVBZreg _)) -> (MOVDreg x)
+(MOVWreg x:(MOVHreg _)) -> (MOVDreg x)
+(MOVWreg x:(MOVHZreg _)) -> (MOVDreg x)
+(MOVWreg x:(MOVWreg _)) -> (MOVDreg x)
+(MOVWZreg x:(MOVBZreg _)) -> (MOVDreg x)
+(MOVWZreg x:(MOVHZreg _)) -> (MOVDreg x)
+(MOVWZreg x:(MOVWZreg _)) -> (MOVDreg x)
// fold extensions into constants
(MOVBreg (MOVDconst [c])) -> (MOVDconst [int64(int8(c))])
(MOVWZreg x:(MOVWZloadidx [off] {sym} ptr idx mem)) && x.Uses == 1 && clobber(x) -> @x.Block (MOVWZloadidx <v.Type> [off] {sym} ptr idx mem)
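+// (@x.Block emits the replacement value in the load's original block;
+// clobber(x) invalidates the old load and always returns true, which is why
+// it can sit in the condition.)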
// replace load from same location as preceding store with copy
-(MOVBZload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) -> x
-(MOVHZload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) -> x
-(MOVWZload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) -> x
-(MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) -> x
+(MOVBZload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) -> (MOVDreg x)
+(MOVHZload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) -> (MOVDreg x)
+(MOVWZload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) -> (MOVDreg x)
+(MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _)) && sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) -> (MOVDreg x)
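+// This is store-to-load forwarding: in code like *p = x; y := *p, the second
+// access reuses x instead of reloading it from memory.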
// Don't extend before storing
(MOVWstore [off] {sym} ptr (MOVWreg x) mem) -> (MOVWstore [off] {sym} ptr x mem)
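
(The hunks below make the matching change in the machine-generated
rewriteS390X.go, which is produced from the rules above; each rule compiles
into a match/cond/result stanza.)
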
return rewriteValueS390X_OpS390XMOVDload(v, config)
case OpS390XMOVDloadidx:
return rewriteValueS390X_OpS390XMOVDloadidx(v, config)
+ case OpS390XMOVDreg:
+ return rewriteValueS390X_OpS390XMOVDreg(v, config)
case OpS390XMOVDstore:
return rewriteValueS390X_OpS390XMOVDstore(v, config)
case OpS390XMOVDstoreconst:
return true
}
// match: (Load <t> ptr mem)
- // cond: is32BitInt(t)
+ // cond: is32BitInt(t) && isSigned(t)
+ // result: (MOVWload ptr mem)
+ for {
+ t := v.Type
+ ptr := v.Args[0]
+ mem := v.Args[1]
+ if !(is32BitInt(t) && isSigned(t)) {
+ break
+ }
+ v.reset(OpS390XMOVWload)
+ v.AddArg(ptr)
+ v.AddArg(mem)
+ return true
+ }
+ // match: (Load <t> ptr mem)
+ // cond: is32BitInt(t) && !isSigned(t)
// result: (MOVWZload ptr mem)
for {
t := v.Type
ptr := v.Args[0]
mem := v.Args[1]
- if !(is32BitInt(t)) {
+ if !(is32BitInt(t) && !isSigned(t)) {
break
}
v.reset(OpS390XMOVWZload)
return true
}
// match: (Load <t> ptr mem)
- // cond: is16BitInt(t)
+ // cond: is16BitInt(t) && isSigned(t)
+ // result: (MOVHload ptr mem)
+ for {
+ t := v.Type
+ ptr := v.Args[0]
+ mem := v.Args[1]
+ if !(is16BitInt(t) && isSigned(t)) {
+ break
+ }
+ v.reset(OpS390XMOVHload)
+ v.AddArg(ptr)
+ v.AddArg(mem)
+ return true
+ }
+ // match: (Load <t> ptr mem)
+ // cond: is16BitInt(t) && !isSigned(t)
// result: (MOVHZload ptr mem)
for {
t := v.Type
ptr := v.Args[0]
mem := v.Args[1]
- if !(is16BitInt(t)) {
+ if !(is16BitInt(t) && !isSigned(t)) {
break
}
v.reset(OpS390XMOVHZload)
return true
}
// match: (Load <t> ptr mem)
- // cond: (t.IsBoolean() || is8BitInt(t))
+ // cond: is8BitInt(t) && isSigned(t)
+ // result: (MOVBload ptr mem)
+ for {
+ t := v.Type
+ ptr := v.Args[0]
+ mem := v.Args[1]
+ if !(is8BitInt(t) && isSigned(t)) {
+ break
+ }
+ v.reset(OpS390XMOVBload)
+ v.AddArg(ptr)
+ v.AddArg(mem)
+ return true
+ }
+ // match: (Load <t> ptr mem)
+ // cond: (t.IsBoolean() || (is8BitInt(t) && !isSigned(t)))
// result: (MOVBZload ptr mem)
for {
t := v.Type
ptr := v.Args[0]
mem := v.Args[1]
- if !(t.IsBoolean() || is8BitInt(t)) {
+ if !(t.IsBoolean() || (is8BitInt(t) && !isSigned(t))) {
break
}
v.reset(OpS390XMOVBZload)
_ = b
// match: (MOVBZload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
- // result: x
+ // result: (MOVDreg x)
for {
off := v.AuxInt
sym := v.Aux
if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
_ = b
// match: (MOVBZreg x:(MOVDLT (MOVDconst [c]) (MOVDconst [d]) _))
// cond: int64(uint8(c)) == c && int64(uint8(d)) == d
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVDLT {
if !(int64(uint8(c)) == c && int64(uint8(d)) == d) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBZreg x:(MOVDLE (MOVDconst [c]) (MOVDconst [d]) _))
// cond: int64(uint8(c)) == c && int64(uint8(d)) == d
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVDLE {
if !(int64(uint8(c)) == c && int64(uint8(d)) == d) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBZreg x:(MOVDGT (MOVDconst [c]) (MOVDconst [d]) _))
// cond: int64(uint8(c)) == c && int64(uint8(d)) == d
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVDGT {
if !(int64(uint8(c)) == c && int64(uint8(d)) == d) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBZreg x:(MOVDGE (MOVDconst [c]) (MOVDconst [d]) _))
// cond: int64(uint8(c)) == c && int64(uint8(d)) == d
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVDGE {
if !(int64(uint8(c)) == c && int64(uint8(d)) == d) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBZreg x:(MOVDEQ (MOVDconst [c]) (MOVDconst [d]) _))
// cond: int64(uint8(c)) == c && int64(uint8(d)) == d
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVDEQ {
if !(int64(uint8(c)) == c && int64(uint8(d)) == d) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBZreg x:(MOVDNE (MOVDconst [c]) (MOVDconst [d]) _))
// cond: int64(uint8(c)) == c && int64(uint8(d)) == d
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVDNE {
if !(int64(uint8(c)) == c && int64(uint8(d)) == d) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBZreg x:(MOVDGTnoinv (MOVDconst [c]) (MOVDconst [d]) _))
// cond: int64(uint8(c)) == c && int64(uint8(d)) == d
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVDGTnoinv {
if !(int64(uint8(c)) == c && int64(uint8(d)) == d) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBZreg x:(MOVDGEnoinv (MOVDconst [c]) (MOVDconst [d]) _))
// cond: int64(uint8(c)) == c && int64(uint8(d)) == d
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVDGEnoinv {
if !(int64(uint8(c)) == c && int64(uint8(d)) == d) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBZreg x:(MOVBZload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBZload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBZreg x:(Arg <t>))
// cond: is8BitInt(t) && !isSigned(t)
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpArg {
if !(is8BitInt(t) && !isSigned(t)) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBZreg x:(MOVBZreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBZreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
_ = b
// match: (MOVBreg x:(MOVBload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBreg x:(Arg <t>))
// cond: is8BitInt(t) && isSigned(t)
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpArg {
if !(is8BitInt(t) && isSigned(t)) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVBreg x:(MOVBreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
_ = b
// match: (MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _))
// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
- // result: x
+ // result: (MOVDreg x)
for {
off := v.AuxInt
sym := v.Aux
if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
}
return false
}
+func rewriteValueS390X_OpS390XMOVDreg(v *Value, config *Config) bool {
+ b := v.Block
+ _ = b
+ // match: (MOVDreg x)
+ // cond: x.Uses == 1
+ // result: (MOVDnop x)
+ for {
+ x := v.Args[0]
+ if !(x.Uses == 1) {
+ break
+ }
+ v.reset(OpS390XMOVDnop)
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
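
(A sketch of how the backend might emit these two ops at code-generation time;
this is an assumption for illustration, not part of this diff, and the helper
opregreg and the switch shape are borrowed from the conventions of the other
SSA backends.)

	case ssa.OpS390XMOVDreg:
		// A real register-to-register move is needed only when the
		// register allocator placed the input and output differently.
		if v.Reg() != v.Args[0].Reg() {
			opregreg(s390x.AMOVD, v.Reg(), v.Args[0].Reg())
		}
	case ssa.OpS390XMOVDnop:
		// Emits nothing: the op is defined with its result tied to its
		// input register, so the value is already in place.
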
func rewriteValueS390X_OpS390XMOVDstore(v *Value, config *Config) bool {
b := v.Block
_ = b
_ = b
// match: (MOVHZload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _))
// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
- // result: x
+ // result: (MOVDreg x)
for {
off := v.AuxInt
sym := v.Aux
if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
_ = b
// match: (MOVHZreg x:(MOVBZload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBZload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVHZreg x:(MOVHZload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVHZload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVHZreg x:(Arg <t>))
// cond: (is8BitInt(t) || is16BitInt(t)) && !isSigned(t)
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpArg {
if !((is8BitInt(t) || is16BitInt(t)) && !isSigned(t)) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVHZreg x:(MOVBZreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBZreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVHZreg x:(MOVHZreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVHZreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
_ = b
// match: (MOVHreg x:(MOVBload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVHreg x:(MOVBZload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBZload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVHreg x:(MOVHload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVHload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVHreg x:(Arg <t>))
// cond: (is8BitInt(t) || is16BitInt(t)) && isSigned(t)
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpArg {
if !((is8BitInt(t) || is16BitInt(t)) && isSigned(t)) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVHreg x:(MOVBreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVHreg x:(MOVBZreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBZreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVHreg x:(MOVHreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVHreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
_ = b
// match: (MOVWZload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
- // result: x
+ // result: (MOVDreg x)
for {
off := v.AuxInt
sym := v.Aux
if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
_ = b
// match: (MOVWZreg x:(MOVBZload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBZload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWZreg x:(MOVHZload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVHZload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWZreg x:(MOVWZload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVWZload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWZreg x:(Arg <t>))
// cond: (is8BitInt(t) || is16BitInt(t) || is32BitInt(t)) && !isSigned(t)
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpArg {
if !((is8BitInt(t) || is16BitInt(t) || is32BitInt(t)) && !isSigned(t)) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWZreg x:(MOVBZreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBZreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWZreg x:(MOVHZreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVHZreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWZreg x:(MOVWZreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVWZreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
_ = b
// match: (MOVWreg x:(MOVBload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWreg x:(MOVBZload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBZload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWreg x:(MOVHload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVHload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWreg x:(MOVHZload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVHZload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWreg x:(MOVWload _ _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVWload {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWreg x:(Arg <t>))
// cond: (is8BitInt(t) || is16BitInt(t) || is32BitInt(t)) && isSigned(t)
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpArg {
if !((is8BitInt(t) || is16BitInt(t) || is32BitInt(t)) && isSigned(t)) {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWreg x:(MOVBreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWreg x:(MOVBZreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVBZreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWreg x:(MOVHreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVHreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
	// match: (MOVWreg x:(MOVHZreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
		if x.Op != OpS390XMOVHZreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}
// match: (MOVWreg x:(MOVWreg _))
// cond:
- // result: x
+ // result: (MOVDreg x)
for {
x := v.Args[0]
if x.Op != OpS390XMOVWreg {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ v.reset(OpS390XMOVDreg)
v.AddArg(x)
return true
}