(BSFQ (ORQconst <t> [1<<16] (MOVWQZX x))) -> (BSFQ (ORQconst <t> [1<<16] x))
// Redundant sign/zero extensions
-(MOVLQSX x:(MOVLQSX _)) -> x
-(MOVLQSX x:(MOVWQSX _)) -> x
-(MOVLQSX x:(MOVBQSX _)) -> x
-(MOVWQSX x:(MOVWQSX _)) -> x
-(MOVWQSX x:(MOVBQSX _)) -> x
-(MOVBQSX x:(MOVBQSX _)) -> x
-(MOVLQZX x:(MOVLQZX _)) -> x
-(MOVLQZX x:(MOVWQZX _)) -> x
-(MOVLQZX x:(MOVBQZX _)) -> x
-(MOVWQZX x:(MOVWQZX _)) -> x
-(MOVWQZX x:(MOVBQZX _)) -> x
-(MOVBQZX x:(MOVBQZX _)) -> x
+// Note: see issue 21963. We have to make sure we use the right type on
+// the resulting extension (the outer type, not the inner type).
+(MOVLQSX (MOVLQSX x)) -> (MOVLQSX x)
+(MOVLQSX (MOVWQSX x)) -> (MOVWQSX x)
+(MOVLQSX (MOVBQSX x)) -> (MOVBQSX x)
+(MOVWQSX (MOVWQSX x)) -> (MOVWQSX x)
+(MOVWQSX (MOVBQSX x)) -> (MOVBQSX x)
+(MOVBQSX (MOVBQSX x)) -> (MOVBQSX x)
+(MOVLQZX (MOVLQZX x)) -> (MOVLQZX x)
+(MOVLQZX (MOVWQZX x)) -> (MOVWQZX x)
+(MOVLQZX (MOVBQZX x)) -> (MOVBQZX x)
+(MOVWQZX (MOVWQZX x)) -> (MOVWQZX x)
+(MOVWQZX (MOVBQZX x)) -> (MOVBQZX x)
+(MOVBQZX (MOVBQZX x)) -> (MOVBQZX x)
(MOVQstore [off] {sym} ptr a:(ADDQconst [c] l:(MOVQload [off] {sym} ptr2 mem)) mem)
&& isSamePtr(ptr, ptr2) && a.Uses == 1 && l.Uses == 1 && validValAndOff(c,off) ->
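
For context, a minimal Go sketch of the kind of nested conversion these rules target (illustrative only; the function name `widen` is made up here, and this is not the reproducer from issue 21963). Converting int8 -> int16 -> int64 can lower on amd64 to roughly (MOVWQSX (MOVBQSX x)); the rewrite has to rebuild the surviving extension with the outer value's type rather than substitute the inner value, whose type is narrower.

package main

import "fmt"

//go:noinline
func widen(b int8) int64 {
	// int8 -> int16 -> int64: the middle sign extension is redundant at
	// the machine level, but whatever replaces the outer MOVWQSX must
	// keep the outer (int64) type. The new rules do that by resetting v
	// in place instead of returning the inner value x, as the generated
	// code below shows.
	return int64(int16(b))
}

func main() {
	fmt.Println(widen(-1), widen(127)) // -1 127
}
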
v.AddArg(x)
return true
}
- // match: (MOVBQSX x:(MOVBQSX _))
+ // match: (MOVBQSX (MOVBQSX x))
// cond:
- // result: x
+ // result: (MOVBQSX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVBQSX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVBQSX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVBQSX)
v.AddArg(x)
return true
}
v.AddArg(x)
return true
}
- // match: (MOVBQZX x:(MOVBQZX _))
+ // match: (MOVBQZX (MOVBQZX x))
// cond:
- // result: x
+ // result: (MOVBQZX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVBQZX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVBQZX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVBQZX)
v.AddArg(x)
return true
}
v.AddArg(x)
return true
}
- // match: (MOVLQSX x:(MOVLQSX _))
+ // match: (MOVLQSX (MOVLQSX x))
// cond:
- // result: x
+ // result: (MOVLQSX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVLQSX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVLQSX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVLQSX)
v.AddArg(x)
return true
}
- // match: (MOVLQSX x:(MOVWQSX _))
+ // match: (MOVLQSX (MOVWQSX x))
// cond:
- // result: x
+ // result: (MOVWQSX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVWQSX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVWQSX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVWQSX)
v.AddArg(x)
return true
}
- // match: (MOVLQSX x:(MOVBQSX _))
+ // match: (MOVLQSX (MOVBQSX x))
// cond:
- // result: x
+ // result: (MOVBQSX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVBQSX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVBQSX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVBQSX)
v.AddArg(x)
return true
}
v.AddArg(x)
return true
}
- // match: (MOVLQZX x:(MOVLQZX _))
+ // match: (MOVLQZX (MOVLQZX x))
// cond:
- // result: x
+ // result: (MOVLQZX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVLQZX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVLQZX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVLQZX)
v.AddArg(x)
return true
}
- // match: (MOVLQZX x:(MOVWQZX _))
+ // match: (MOVLQZX (MOVWQZX x))
// cond:
- // result: x
+ // result: (MOVWQZX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVWQZX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVWQZX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVWQZX)
v.AddArg(x)
return true
}
- // match: (MOVLQZX x:(MOVBQZX _))
+ // match: (MOVLQZX (MOVBQZX x))
// cond:
- // result: x
+ // result: (MOVBQZX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVBQZX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVBQZX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVBQZX)
v.AddArg(x)
return true
}
v.AddArg(x)
return true
}
- // match: (MOVWQSX x:(MOVWQSX _))
+ // match: (MOVWQSX (MOVWQSX x))
// cond:
- // result: x
+ // result: (MOVWQSX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVWQSX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVWQSX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVWQSX)
v.AddArg(x)
return true
}
- // match: (MOVWQSX x:(MOVBQSX _))
+ // match: (MOVWQSX (MOVBQSX x))
// cond:
- // result: x
+ // result: (MOVBQSX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVBQSX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVBQSX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVBQSX)
v.AddArg(x)
return true
}
v.AddArg(x)
return true
}
- // match: (MOVWQZX x:(MOVWQZX _))
+ // match: (MOVWQZX (MOVWQZX x))
// cond:
- // result: x
+ // result: (MOVWQZX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVWQZX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVWQZX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVWQZX)
v.AddArg(x)
return true
}
- // match: (MOVWQZX x:(MOVBQZX _))
+ // match: (MOVWQZX (MOVBQZX x))
// cond:
- // result: x
+ // result: (MOVBQZX x)
for {
- x := v.Args[0]
- if x.Op != OpAMD64MOVBQZX {
+ v_0 := v.Args[0]
+ if v_0.Op != OpAMD64MOVBQZX {
break
}
- v.reset(OpCopy)
- v.Type = x.Type
+ x := v_0.Args[0]
+ v.reset(OpAMD64MOVBQZX)
v.AddArg(x)
return true
}
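
The zero-extension rules above have the same shape. As a parallel, hedged sketch (again with made-up names, not code from the Go tree), uint8 -> uint16 -> uint64 can lower to roughly (MOVWQZX (MOVBQZX x)), which the new rule collapses to a single MOVBQZX carrying the outer value's type.

package main

import "fmt"

//go:noinline
func widenU(b uint8) uint64 {
	// uint8 -> uint16 -> uint64: only the redundant middle zero extension
	// is dropped; the result value, and with it the uint64 type, stays.
	return uint64(uint16(b))
}

func main() {
	fmt.Println(widenU(0xff)) // 255
}
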