(Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh16Ux64 x (Const64 <t> [c+d]))
(Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh8Ux64 x (Const64 <t> [c+d]))
+// Remove signed right shift before an unsigned right shift that extracts the sign bit.
+(Rsh8Ux64  (Rsh8x64  x _) (Const64 <t> [7] )) -> (Rsh8Ux64  x (Const64 <t> [7] ))
+(Rsh16Ux64 (Rsh16x64 x _) (Const64 <t> [15])) -> (Rsh16Ux64 x (Const64 <t> [15]))
+(Rsh32Ux64 (Rsh32x64 x _) (Const64 <t> [31])) -> (Rsh32Ux64 x (Const64 <t> [31]))
+(Rsh64Ux64 (Rsh64x64 x _) (Const64 <t> [63])) -> (Rsh64Ux64 x (Const64 <t> [63]))
+
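For illustration only (my sketch, not part of this change): Go source whose SSA hits the new rules in the 32-bit case. The function name and the inner shift amount are arbitrary.

// For an int32 x, the arithmetic shift x>>k keeps the sign of x in bit 31,
// so uint32(x>>k)>>31 extracts the sign bit of x for any 0 <= k <= 31.
// The rules above let the compiler drop the inner signed shift and compile
// this the same as uint32(x)>>31.
func signBit32(x int32) uint32 {
	return uint32(x>>3) >> 31
}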
// ((x >> c1) << c2) >> c3
(Rsh(64|32|16|8)Ux64 (Lsh(64|32|16|8)x64 (Rsh(64|32|16|8)Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
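A quick sanity check (my own sketch, not from this change) of the pre-existing rule above, for one choice of constants meeting its condition: with c1=5, c2=3, c3=4 we have c1 >= c2 and c3 >= c2, and indeed ((x>>5)<<3)>>4 equals x>>(5-3+4) = x>>6 for every x.

// shiftCombineHolds reports whether the ((x >> c1) << c2) >> c3 identity
// holds for x with the arbitrarily chosen constants c1=5, c2=3, c3=4.
func shiftCombineHolds(x uint64) bool {
	return ((x>>5)<<3)>>4 == x>>6
}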
v.AddArg(v0)
return true
}
+ // match: (Rsh16Ux64 (Rsh16x64 x _) (Const64 <t> [15]))
+ // cond:
+ // result: (Rsh16Ux64 x (Const64 <t> [15]))
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpRsh16x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ t := v_1.Type
+ if v_1.AuxInt != 15 {
+ break
+ }
+ v.reset(OpRsh16Ux64)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = 15
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh16Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
v.AddArg(v0)
return true
}
+ // match: (Rsh32Ux64 (Rsh32x64 x _) (Const64 <t> [31]))
+ // cond:
+ // result: (Rsh32Ux64 x (Const64 <t> [31]))
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpRsh32x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ t := v_1.Type
+ if v_1.AuxInt != 31 {
+ break
+ }
+ v.reset(OpRsh32Ux64)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = 31
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh32Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
v.AddArg(v0)
return true
}
+ // match: (Rsh64Ux64 (Rsh64x64 x _) (Const64 <t> [63]))
+ // cond:
+ // result: (Rsh64Ux64 x (Const64 <t> [63]))
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpRsh64x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ t := v_1.Type
+ if v_1.AuxInt != 63 {
+ break
+ }
+ v.reset(OpRsh64Ux64)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = 63
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh64Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))
v.AddArg(v0)
return true
}
+ // match: (Rsh8Ux64 (Rsh8x64 x _) (Const64 <t> [7]))
+ // cond:
+ // result: (Rsh8Ux64 x (Const64 <t> [7] ))
+ for {
+ _ = v.Args[1]
+ v_0 := v.Args[0]
+ if v_0.Op != OpRsh8x64 {
+ break
+ }
+ _ = v_0.Args[1]
+ x := v_0.Args[0]
+ v_1 := v.Args[1]
+ if v_1.Op != OpConst64 {
+ break
+ }
+ t := v_1.Type
+ if v_1.AuxInt != 7 {
+ break
+ }
+ v.reset(OpRsh8Ux64)
+ v.AddArg(x)
+ v0 := b.NewValue0(v.Pos, OpConst64, t)
+ v0.AuxInt = 7
+ v.AddArg(v0)
+ return true
+ }
// match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh8Ux64 x (Const64 <typ.UInt64> [c1-c2+c3]))