(MOVBZreg (SRDconst [c] x)) && c>=56 -> (SRDconst [c] x)
(MOVBreg (SRDconst [c] x)) && c>56 -> (SRDconst [c] x)
(MOVBreg (SRDconst [c] x)) && c==56 -> (SRADconst [c] x)
+(MOVBreg (SRADconst [c] x)) && c>=56 -> (SRADconst [c] x)
(MOVBZreg (SRWconst [c] x)) && c>=24 -> (SRWconst [c] x)
(MOVBreg (SRWconst [c] x)) && c>24 -> (SRWconst [c] x)
(MOVBreg (SRWconst [c] x)) && c==24 -> (SRAWconst [c] x)
+(MOVBreg (SRAWconst [c] x)) && c>=24 -> (SRAWconst [c] x)
(MOVHZreg (SRDconst [c] x)) && c>=48 -> (SRDconst [c] x)
(MOVHreg (SRDconst [c] x)) && c>48 -> (SRDconst [c] x)
(MOVHreg (SRDconst [c] x)) && c==48 -> (SRADconst [c] x)
+(MOVHreg (SRADconst [c] x)) && c>=48 -> (SRADconst [c] x)
(MOVHZreg (SRWconst [c] x)) && c>=16 -> (SRWconst [c] x)
(MOVHreg (SRWconst [c] x)) && c>16 -> (SRWconst [c] x)
+(MOVHreg (SRAWconst [c] x)) && c>=16 -> (SRAWconst [c] x)
(MOVHreg (SRWconst [c] x)) && c==16 -> (SRAWconst [c] x)
(MOVWZreg (SRDconst [c] x)) && c>=32 -> (SRDconst [c] x)
(MOVWreg (SRDconst [c] x)) && c>32 -> (SRDconst [c] x)
+(MOVWreg (SRADconst [c] x)) && c>=32 -> (SRADconst [c] x)
(MOVWreg (SRDconst [c] x)) && c==32 -> (SRADconst [c] x)
// Various redundant zero/sign extension combinations.
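These additions are sound because an arithmetic right shift already replicates the sign bit: after (SRADconst [c] x) with c>=56 the value fits in a sign-extended byte, so a following MOVBreg is a no-op, and the 48-bit and 32-bit doubleword cases and the 24/16-bit word-shift (SRAWconst) cases follow the same arithmetic. A stand-alone sketch, not part of this change, checking the underlying identities:

package main

import "fmt"

func main() {
	// An arithmetic shift that leaves at most n significant bits produces a
	// value that n-bit sign extension maps to itself, so the MOVBreg/MOVHreg/
	// MOVWreg matched by the new rules can be dropped.
	for _, x := range []int64{0, 1, -1, 1 << 62, -1 << 62, 0x7fffffffffffffff} {
		fmt.Println(
			int64(int8(x>>56)) == x>>56,  // MOVBreg (SRADconst [56] x)
			int64(int16(x>>48)) == x>>48, // MOVHreg (SRADconst [48] x)
			int64(int32(x>>32)) == x>>32, // MOVWreg (SRADconst [32] x)
		)
	}
}

The hunks below make the corresponding additions to the generated matcher (rewritePPC64.go). Each one opens with the tail of the unchanged match block that precedes the new rule; the first picks up after the (MOVBreg (SRDconst [c] x)) && c==56 case.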
		v.AddArg(x)
		return true
	}
+	// match: (MOVBreg (SRADconst [c] x))
+	// cond: c>=56
+	// result: (SRADconst [c] x)
+	for {
+		if v_0.Op != OpPPC64SRADconst {
+			break
+		}
+		c := v_0.AuxInt
+		x := v_0.Args[0]
+		if !(c >= 56) {
+			break
+		}
+		v.reset(OpPPC64SRADconst)
+		v.AuxInt = c
+		v.AddArg(x)
+		return true
+	}
	// match: (MOVBreg (SRWconst [c] x))
	// cond: c>24
	// result: (SRWconst [c] x)
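For illustration only (this function is not part of the change), a sign extension back from a shifted byte should lower to the shape the new case matches, so the MOVBreg, an extsb on ppc64, is rewritten away:

// Hypothetical trigger: int64(int8(x >> 56)) lowers to
// (MOVBreg (SRADconst [56] x)), which now folds to the bare shift.
func ext8(x int64) int64 { return int64(int8(x >> 56)) }

The next hunk picks up after the unchanged (MOVBreg (SRWconst [c] x)) && c==24 case and adds the 32-bit variant.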
		v.AddArg(x)
		return true
	}
+	// match: (MOVBreg (SRAWconst [c] x))
+	// cond: c>=24
+	// result: (SRAWconst [c] x)
+	for {
+		if v_0.Op != OpPPC64SRAWconst {
+			break
+		}
+		c := v_0.AuxInt
+		x := v_0.Args[0]
+		if !(c >= 24) {
+			break
+		}
+		v.reset(OpPPC64SRAWconst)
+		v.AuxInt = c
+		v.AddArg(x)
+		return true
+	}
	// match: (MOVBreg y:(MOVBreg _))
	// result: y
	for {
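The word-shift case leaves 32-24 = 8 significant bits, so the same argument applies. Another hypothetical trigger, not from the change:

// Hypothetical: (MOVBreg (SRAWconst [24] x)) reduces to the SRAWconst alone.
func ext8w(x int32) int64 { return int64(int8(x >> 24)) }

The next hunk moves to the MOVHreg matcher, picking up after the unchanged (MOVHreg (SRDconst [c] x)) && c==48 case.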
		v.AddArg(x)
		return true
	}
+	// match: (MOVHreg (SRADconst [c] x))
+	// cond: c>=48
+	// result: (SRADconst [c] x)
+	for {
+		if v_0.Op != OpPPC64SRADconst {
+			break
+		}
+		c := v_0.AuxInt
+		x := v_0.Args[0]
+		if !(c >= 48) {
+			break
+		}
+		v.reset(OpPPC64SRADconst)
+		v.AuxInt = c
+		v.AddArg(x)
+		return true
+	}
	// match: (MOVHreg (SRWconst [c] x))
	// cond: c>16
	// result: (SRWconst [c] x)
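The halfword analogue, shown with another hypothetical trigger:

// Hypothetical: (MOVHreg (SRADconst [48] x)) keeps only the shift.
func ext16(x int64) int64 { return int64(int16(x >> 48)) }

The next hunk picks up after the unchanged (MOVHreg (SRWconst [c] x)) && c>16 case.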
		v.AddArg(x)
		return true
	}
+	// match: (MOVHreg (SRAWconst [c] x))
+	// cond: c>=16
+	// result: (SRAWconst [c] x)
+	for {
+		if v_0.Op != OpPPC64SRAWconst {
+			break
+		}
+		c := v_0.AuxInt
+		x := v_0.Args[0]
+		if !(c >= 16) {
+			break
+		}
+		v.reset(OpPPC64SRAWconst)
+		v.AuxInt = c
+		v.AddArg(x)
+		return true
+	}
	// match: (MOVHreg (SRWconst [c] x))
	// cond: c==16
	// result: (SRAWconst [c] x)
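Mirroring the rule order above, this SRAWconst case lands between the c>16 and c==16 SRWconst cases; since SRAWconst and SRWconst are distinct ops the patterns are disjoint, so the placement does not affect which rule fires. A hypothetical trigger:

// Hypothetical: (MOVHreg (SRAWconst [16] x)) drops the sign extension.
func ext16w(x int32) int64 { return int64(int16(x >> 16)) }

The final hunk handles the word case, picking up after the unchanged (MOVWreg (SRDconst [c] x)) && c>32 case.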
		v.AddArg(x)
		return true
	}
+	// match: (MOVWreg (SRADconst [c] x))
+	// cond: c>=32
+	// result: (SRADconst [c] x)
+	for {
+		if v_0.Op != OpPPC64SRADconst {
+			break
+		}
+		c := v_0.AuxInt
+		x := v_0.Args[0]
+		if !(c >= 32) {
+			break
+		}
+		v.reset(OpPPC64SRADconst)
+		v.AuxInt = c
+		v.AddArg(x)
+		return true
+	}
	// match: (MOVWreg (SRDconst [c] x))
	// cond: c==32
	// result: (SRADconst [c] x)
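With this last rule in place, a conversion like the hypothetical one below should compile down to a single sradi rather than sradi followed by extsw:

// Hypothetical: (MOVWreg (SRADconst [32] x)) becomes just the shift.
func ext32(x int64) int64 { return int64(int32(x >> 32)) }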