From: Martin Möhrmann
Date: Sat, 2 Jun 2018 18:55:20 +0000 (+0200)
Subject: cmd/compile: remove superfluous signed right shift used for signed division by 2
X-Git-Tag: go1.12beta1~1291
X-Git-Url: http://www.git.cypherpunks.su/?a=commitdiff_plain;h=379d2dea72e475288da97ad6c665105fb731a34d;p=gostls13.git

cmd/compile: remove superfluous signed right shift used for signed division by 2

A signed right shift before an unsigned right shift by register width-1
(which extracts the sign bit) is superfluous.

trigger counts during ./make.bash:

    0 (Rsh8U  (Rsh8  x _) 7 ) -> (Rsh8U  x 7 )
    0 (Rsh16U (Rsh16 x _) 15) -> (Rsh16U x 15)
    2 (Rsh32U (Rsh32 x _) 31) -> (Rsh32U x 31)
  251 (Rsh64U (Rsh64 x _) 63) -> (Rsh64U x 63)

Changes the instructions generated on AMD64 for x / 2, where x is a
signed integer, from:

	MOVQ AX, CX
	SARQ $63, AX
	SHRQ $63, AX
	ADDQ CX, AX
	SARQ $1, AX

to:

	MOVQ AX, CX
	SHRQ $63, AX
	ADDQ CX, AX
	SARQ $1, AX

Change-Id: I86321ae8fc9dc24b8fa9eb80aa5c7299eff8c9dc
Reviewed-on: https://go-review.googlesource.com/115956
Run-TryBot: Martin Möhrmann
TryBot-Result: Gobot Gobot
Reviewed-by: Keith Randall
---

diff --git a/src/cmd/compile/internal/ssa/gen/generic.rules b/src/cmd/compile/internal/ssa/gen/generic.rules
index b1a0775e4a..96051414dc 100644
--- a/src/cmd/compile/internal/ssa/gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/gen/generic.rules
@@ -371,6 +371,12 @@
 (Rsh16Ux64 (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh16Ux64 x (Const64 [c+d]))
 (Rsh8Ux64  (Rsh8Ux64  x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh8Ux64  x (Const64 [c+d]))
 
+// Remove signed right shift before an unsigned right shift that extracts the sign bit.
+(Rsh8Ux64  (Rsh8x64  x _) (Const64 [7] )) -> (Rsh8Ux64  x (Const64 [7] ))
+(Rsh16Ux64 (Rsh16x64 x _) (Const64 [15])) -> (Rsh16Ux64 x (Const64 [15]))
+(Rsh32Ux64 (Rsh32x64 x _) (Const64 [31])) -> (Rsh32Ux64 x (Const64 [31]))
+(Rsh64Ux64 (Rsh64x64 x _) (Const64 [63])) -> (Rsh64Ux64 x (Const64 [63]))
+
 // ((x >> c1) << c2) >> c3
 (Rsh(64|32|16|8)Ux64 (Lsh(64|32|16|8)x64 (Rsh(64|32|16|8)Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 	&& uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 5ad53dd0b6..343b3581c1 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -24905,6 +24905,32 @@ func rewriteValuegeneric_OpRsh16Ux64_0(v *Value) bool {
 		v.AddArg(v0)
 		return true
 	}
+	// match: (Rsh16Ux64 (Rsh16x64 x _) (Const64 [15]))
+	// cond:
+	// result: (Rsh16Ux64 x (Const64 [15]))
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpRsh16x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		x := v_0.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst64 {
+			break
+		}
+		t := v_1.Type
+		if v_1.AuxInt != 15 {
+			break
+		}
+		v.reset(OpRsh16Ux64)
+		v.AddArg(x)
+		v0 := b.NewValue0(v.Pos, OpConst64, t)
+		v0.AuxInt = 15
+		v.AddArg(v0)
+		return true
+	}
 	// match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 	// result: (Rsh16Ux64 x (Const64 [c1-c2+c3]))
@@ -25449,6 +25475,32 @@ func rewriteValuegeneric_OpRsh32Ux64_0(v *Value) bool {
 		v.AddArg(v0)
 		return true
 	}
+	// match: (Rsh32Ux64 (Rsh32x64 x _) (Const64 [31]))
+	// cond:
+	// result: (Rsh32Ux64 x (Const64 [31]))
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpRsh32x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		x := v_0.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst64 {
+			break
+		}
+		t := v_1.Type
+		if v_1.AuxInt != 31 {
+			break
+		}
+		v.reset(OpRsh32Ux64)
+		v.AddArg(x)
+		v0 := b.NewValue0(v.Pos, OpConst64, t)
+		v0.AuxInt = 31
+		v.AddArg(v0)
+		return true
+	}
 	// match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 	// result: (Rsh32Ux64 x (Const64 [c1-c2+c3]))
@@ -26055,6 +26107,32 @@ func rewriteValuegeneric_OpRsh64Ux64_0(v *Value) bool {
 		v.AddArg(v0)
 		return true
 	}
+	// match: (Rsh64Ux64 (Rsh64x64 x _) (Const64 [63]))
+	// cond:
+	// result: (Rsh64Ux64 x (Const64 [63]))
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpRsh64x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		x := v_0.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst64 {
+			break
+		}
+		t := v_1.Type
+		if v_1.AuxInt != 63 {
+			break
+		}
+		v.reset(OpRsh64Ux64)
+		v.AddArg(x)
+		v0 := b.NewValue0(v.Pos, OpConst64, t)
+		v0.AuxInt = 63
+		v.AddArg(v0)
+		return true
+	}
 	// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 	// result: (Rsh64Ux64 x (Const64 [c1-c2+c3]))
@@ -26723,6 +26801,32 @@ func rewriteValuegeneric_OpRsh8Ux64_0(v *Value) bool {
 		v.AddArg(v0)
 		return true
 	}
+	// match: (Rsh8Ux64 (Rsh8x64 x _) (Const64 [7]))
+	// cond:
+	// result: (Rsh8Ux64 x (Const64 [7] ))
+	for {
+		_ = v.Args[1]
+		v_0 := v.Args[0]
+		if v_0.Op != OpRsh8x64 {
+			break
+		}
+		_ = v_0.Args[1]
+		x := v_0.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpConst64 {
+			break
+		}
+		t := v_1.Type
+		if v_1.AuxInt != 7 {
+			break
+		}
+		v.reset(OpRsh8Ux64)
+		v.AddArg(x)
+		v0 := b.NewValue0(v.Pos, OpConst64, t)
+		v0.AuxInt = 7
+		v.AddArg(v0)
+		return true
+	}
 	// match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
 	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
 	// result: (Rsh8Ux64 x (Const64 [c1-c2+c3]))
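
Why the rewrite is safe: an arithmetic right shift never changes the sign
bit, so for any shift amount s the top bit of x >> s equals the top bit of
x, and the unsigned shift by width-1 extracts the same value with or
without the inner signed shift. The standalone Go program below is a
minimal sketch (not part of this CL; the file layout and names are
illustrative) that checks this identity for int64, together with the
sign-bit adjustment behind the x / 2 instruction sequence shown above.

// A minimal sketch, not part of this CL; names are illustrative.
package main

import (
	"fmt"
	"math"
)

func main() {
	cases := []int64{math.MinInt64, -7, -2, -1, 0, 1, 2, 7, math.MaxInt64}
	for _, x := range cases {
		// An arithmetic right shift keeps the sign bit, so the inner
		// Rsh64x64 in (Rsh64Ux64 (Rsh64x64 x _) (Const64 [63])) is
		// redundant: the unsigned shift by 63 sees the same top bit.
		for s := uint(0); s < 64; s++ {
			if uint64(x>>s)>>63 != uint64(x)>>63 {
				panic("sign bit changed by arithmetic shift")
			}
		}
		// The x / 2 lowering: add the sign bit (0 or 1) so negative odd
		// values round toward zero, then arithmetic-shift right by 1.
		// This mirrors the SHRQ/ADDQ/SARQ sequence in the commit message.
		q := (x + int64(uint64(x)>>63)) >> 1
		if q != x/2 {
			panic("lowering disagrees with x / 2")
		}
		fmt.Printf("x = %20d  x/2 = %20d\n", x, q)
	}
}

The same reasoning carries over to the 8-, 16-, and 32-bit rules with the
constants 7, 15, and 31.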