From bc1fb32e9d4e5dd239907839f5ffcbe524ac7e25 Mon Sep 17 00:00:00 2001
From: Alexandru Moșoi
Date: Wed, 17 Feb 2016 14:08:36 +0100
Subject: [PATCH] [dev.ssa] cmd/compile/internal/ssa: fix the type of constant shift folding.

Also throw in a few more shift constant foldings.

Change-Id: Iabe00596987f594e0686fbac3d76376d94612340
Reviewed-on: https://go-review.googlesource.com/19543
Run-TryBot: David Chase
TryBot-Result: Gobot Gobot
Reviewed-by: David Chase
---
 .../compile/internal/ssa/gen/generic.rules    |  25 ++-
 .../compile/internal/ssa/rewritegeneric.go    | 192 ++++++++++++++++--
 2 files changed, 199 insertions(+), 18 deletions(-)

diff --git a/src/cmd/compile/internal/ssa/gen/generic.rules b/src/cmd/compile/internal/ssa/gen/generic.rules
index 3971794d1a..09ab918787 100644
--- a/src/cmd/compile/internal/ssa/gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/gen/generic.rules
@@ -53,6 +53,19 @@
 (Rsh8x64 (Const8 [c]) (Const64 [d])) -> (Const8 [int64(int8(c) >> uint64(d))])
 (Rsh8Ux64 (Const8 [c]) (Const64 [d])) -> (Const8 [int64(uint8(c) >> uint64(d))])
 
+(Lsh64x64 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64x64 (Const64 [0]) _) -> (Const64 [0])
+(Rsh64Ux64 (Const64 [0]) _) -> (Const64 [0])
+(Lsh32x64 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32x64 (Const32 [0]) _) -> (Const32 [0])
+(Rsh32Ux64 (Const32 [0]) _) -> (Const32 [0])
+(Lsh16x64 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16x64 (Const16 [0]) _) -> (Const16 [0])
+(Rsh16Ux64 (Const16 [0]) _) -> (Const16 [0])
+(Lsh8x64 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8x64 (Const8 [0]) _) -> (Const8 [0])
+(Rsh8Ux64 (Const8 [0]) _) -> (Const8 [0])
+
 (IsInBounds (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(inBounds32(c,d))])
 (IsInBounds (Const64 [c]) (Const64 [d])) -> (ConstBool [b2i(inBounds64(c,d))])
 (IsSliceInBounds (Const32 [c]) (Const32 [d])) -> (ConstBool [b2i(sliceInBounds32(c,d))])
@@ -191,12 +204,12 @@
 // large left shifts of all values, and right shifts of unsigned values
 (Lsh64x64 _ (Const64 [c])) && uint64(c) >= 64 -> (Const64 [0])
 (Rsh64Ux64 _ (Const64 [c])) && uint64(c) >= 64 -> (Const64 [0])
-(Lsh32x64 _ (Const64 [c])) && uint64(c) >= 32 -> (Const64 [0])
-(Rsh32Ux64 _ (Const64 [c])) && uint64(c) >= 32 -> (Const64 [0])
-(Lsh16x64 _ (Const64 [c])) && uint64(c) >= 16 -> (Const64 [0])
-(Rsh16Ux64 _ (Const64 [c])) && uint64(c) >= 16 -> (Const64 [0])
-(Lsh8x64 _ (Const64 [c])) && uint64(c) >= 8 -> (Const64 [0])
-(Rsh8Ux64 _ (Const64 [c])) && uint64(c) >= 8 -> (Const64 [0])
+(Lsh32x64 _ (Const64 [c])) && uint64(c) >= 32 -> (Const32 [0])
+(Rsh32Ux64 _ (Const64 [c])) && uint64(c) >= 32 -> (Const32 [0])
+(Lsh16x64 _ (Const64 [c])) && uint64(c) >= 16 -> (Const16 [0])
+(Rsh16Ux64 _ (Const64 [c])) && uint64(c) >= 16 -> (Const16 [0])
+(Lsh8x64 _ (Const64 [c])) && uint64(c) >= 8 -> (Const8 [0])
+(Rsh8Ux64 _ (Const64 [c])) && uint64(c) >= 8 -> (Const8 [0])
 
 // combine const shifts
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 72b3553c30..c6fcb22565 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -3135,6 +3135,20 @@ func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool {
 		v.AuxInt = int64(int16(c) << uint64(d))
 		return true
 	}
+	// match: (Lsh16x64 (Const16 [0]) _)
+	// cond:
+	// result: (Const16 [0])
+	for {
+		if v.Args[0].Op != OpConst16 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst16)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Lsh16x64 x (Const64 [0]))
 	// cond:
 	// result: x
@@ -3153,7 +3167,7 @@ func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool {
 	}
 	// match: (Lsh16x64 _ (Const64 [c]))
 	// cond: uint64(c) >= 16
-	// result: (Const64 [0])
+	// result: (Const16 [0])
 	for {
 		if v.Args[1].Op != OpConst64 {
 			break
 		}
@@ -3162,7 +3176,7 @@ func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool {
 		if !(uint64(c) >= 16) {
 			break
 		}
-		v.reset(OpConst64)
+		v.reset(OpConst16)
 		v.AuxInt = 0
 		return true
 	}
@@ -3280,6 +3294,20 @@ func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool {
 		v.AuxInt = int64(int32(c) << uint64(d))
 		return true
 	}
+	// match: (Lsh32x64 (Const32 [0]) _)
+	// cond:
+	// result: (Const32 [0])
+	for {
+		if v.Args[0].Op != OpConst32 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst32)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Lsh32x64 x (Const64 [0]))
 	// cond:
 	// result: x
@@ -3298,7 +3326,7 @@ func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool {
 	}
 	// match: (Lsh32x64 _ (Const64 [c]))
 	// cond: uint64(c) >= 32
-	// result: (Const64 [0])
+	// result: (Const32 [0])
 	for {
 		if v.Args[1].Op != OpConst64 {
 			break
 		}
@@ -3307,7 +3335,7 @@ func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool {
 		if !(uint64(c) >= 32) {
 			break
 		}
-		v.reset(OpConst64)
+		v.reset(OpConst32)
 		v.AuxInt = 0
 		return true
 	}
@@ -3453,6 +3481,20 @@ func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool {
 		v.AuxInt = c << uint64(d)
 		return true
 	}
+	// match: (Lsh64x64 (Const64 [0]) _)
+	// cond:
+	// result: (Const64 [0])
+	for {
+		if v.Args[0].Op != OpConst64 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst64)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Lsh64x64 x (Const64 [0]))
 	// cond:
 	// result: x
@@ -3626,6 +3668,20 @@ func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool {
 		v.AuxInt = int64(int8(c) << uint64(d))
 		return true
 	}
+	// match: (Lsh8x64 (Const8 [0]) _)
+	// cond:
+	// result: (Const8 [0])
+	for {
+		if v.Args[0].Op != OpConst8 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst8)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Lsh8x64 x (Const64 [0]))
 	// cond:
 	// result: x
@@ -3644,7 +3700,7 @@ func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool {
 	}
 	// match: (Lsh8x64 _ (Const64 [c]))
 	// cond: uint64(c) >= 8
-	// result: (Const64 [0])
+	// result: (Const8 [0])
 	for {
 		if v.Args[1].Op != OpConst64 {
 			break
 		}
@@ -3653,7 +3709,7 @@ func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool {
 		if !(uint64(c) >= 8) {
 			break
 		}
-		v.reset(OpConst64)
+		v.reset(OpConst8)
 		v.AuxInt = 0
 		return true
 	}
@@ -4979,6 +5035,20 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool {
 		v.AuxInt = int64(uint16(c) >> uint64(d))
 		return true
 	}
+	// match: (Rsh16Ux64 (Const16 [0]) _)
+	// cond:
+	// result: (Const16 [0])
+	for {
+		if v.Args[0].Op != OpConst16 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst16)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Rsh16Ux64 x (Const64 [0]))
 	// cond:
 	// result: x
@@ -4997,7 +5067,7 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool {
 	}
 	// match: (Rsh16Ux64 _ (Const64 [c]))
 	// cond: uint64(c) >= 16
-	// result: (Const64 [0])
+	// result: (Const16 [0])
 	for {
 		if v.Args[1].Op != OpConst64 {
 			break
 		}
@@ -5006,7 +5076,7 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool {
 		if !(uint64(c) >= 16) {
 			break
 		}
-		v.reset(OpConst64)
+		v.reset(OpConst16)
 		v.AuxInt = 0
 		return true
 	}
@@ -5124,6 +5194,20 @@ func rewriteValuegeneric_OpRsh16x64(v *Value, config *Config) bool {
 		v.AuxInt = int64(int16(c) >> uint64(d))
 		return true
 	}
+	// match: (Rsh16x64 (Const16 [0]) _)
+	// cond:
+	// result: (Const16 [0])
+	for {
+		if v.Args[0].Op != OpConst16 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst16)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Rsh16x64 x (Const64 [0]))
 	// cond:
 	// result: x
@@ -5254,6 +5338,20 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool {
 		v.AuxInt = int64(uint32(c) >> uint64(d))
 		return true
 	}
+	// match: (Rsh32Ux64 (Const32 [0]) _)
+	// cond:
+	// result: (Const32 [0])
+	for {
+		if v.Args[0].Op != OpConst32 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst32)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Rsh32Ux64 x (Const64 [0]))
 	// cond:
 	// result: x
@@ -5272,7 +5370,7 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool {
 	}
 	// match: (Rsh32Ux64 _ (Const64 [c]))
 	// cond: uint64(c) >= 32
-	// result: (Const64 [0])
+	// result: (Const32 [0])
 	for {
 		if v.Args[1].Op != OpConst64 {
 			break
 		}
@@ -5281,7 +5379,7 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool {
 		if !(uint64(c) >= 32) {
 			break
 		}
-		v.reset(OpConst64)
+		v.reset(OpConst32)
 		v.AuxInt = 0
 		return true
 	}
@@ -5399,6 +5497,20 @@ func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool {
 		v.AuxInt = int64(int32(c) >> uint64(d))
 		return true
 	}
+	// match: (Rsh32x64 (Const32 [0]) _)
+	// cond:
+	// result: (Const32 [0])
+	for {
+		if v.Args[0].Op != OpConst32 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst32)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Rsh32x64 x (Const64 [0]))
 	// cond:
 	// result: x
@@ -5557,6 +5669,20 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool {
 		v.AuxInt = int64(uint64(c) >> uint64(d))
 		return true
 	}
+	// match: (Rsh64Ux64 (Const64 [0]) _)
+	// cond:
+	// result: (Const64 [0])
+	for {
+		if v.Args[0].Op != OpConst64 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst64)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Rsh64Ux64 x (Const64 [0]))
 	// cond:
 	// result: x
@@ -5758,6 +5884,20 @@ func rewriteValuegeneric_OpRsh64x64(v *Value, config *Config) bool {
 		v.AuxInt = c >> uint64(d)
 		return true
 	}
+	// match: (Rsh64x64 (Const64 [0]) _)
+	// cond:
+	// result: (Const64 [0])
+	for {
+		if v.Args[0].Op != OpConst64 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst64)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Rsh64x64 x (Const64 [0]))
 	// cond:
 	// result: x
@@ -5916,6 +6056,20 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool {
 		v.AuxInt = int64(uint8(c) >> uint64(d))
 		return true
 	}
+	// match: (Rsh8Ux64 (Const8 [0]) _)
+	// cond:
+	// result: (Const8 [0])
+	for {
+		if v.Args[0].Op != OpConst8 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst8)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Rsh8Ux64 x (Const64 [0]))
 	// cond:
 	// result: x
@@ -5934,7 +6088,7 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool {
 	}
 	// match: (Rsh8Ux64 _ (Const64 [c]))
 	// cond: uint64(c) >= 8
-	// result: (Const64 [0])
+	// result: (Const8 [0])
 	for {
 		if v.Args[1].Op != OpConst64 {
 			break
 		}
@@ -5943,7 +6097,7 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool {
 		if !(uint64(c) >= 8) {
 			break
 		}
-		v.reset(OpConst64)
+		v.reset(OpConst8)
 		v.AuxInt = 0
 		return true
 	}
@@ -6061,6 +6215,20 @@ func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool {
 		v.AuxInt = int64(int8(c) >> uint64(d))
 		return true
 	}
+	// match: (Rsh8x64 (Const8 [0]) _)
+	// cond:
+	// result: (Const8 [0])
+	for {
+		if v.Args[0].Op != OpConst8 {
+			break
+		}
+		if v.Args[0].AuxInt != 0 {
+			break
+		}
+		v.reset(OpConst8)
+		v.AuxInt = 0
+		return true
+	}
 	// match: (Rsh8x64 x (Const64 [0]))
 	// cond:
 	// result: x
-- 
2.48.1
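
Editor's note (not part of the patch): the short Go program below is a standalone sketch of the shift semantics these rules fold at compile time; the program and its names are illustrative only and are not compiler code.

// Standalone sketch: plain Go demonstrating the semantics the rules encode.
package main

import "fmt"

func main() {
	// A constant zero shifted by anything stays zero and keeps its width,
	// mirroring rules like (Lsh32x64 (Const32 [0]) _) -> (Const32 [0]).
	var zero int32 = 0
	fmt.Printf("%T %v\n", zero<<13, zero<<13) // int32 0

	// Go defines shifts by counts >= the operand width: a left shift or an
	// unsigned right shift of an N-bit value by N or more bits yields 0.
	// The folded constant must keep the operand's width, which is the bug
	// this patch fixes: (Rsh32Ux64 _ (Const64 [c])) && uint64(c) >= 32 now
	// produces (Const32 [0]) rather than (Const64 [0]).
	var x uint32 = 0xFFFFFFFF
	var s uint = 32
	fmt.Printf("%T %v\n", x>>s, x>>s) // uint32 0
	fmt.Printf("%T %v\n", x<<s, x<<s) // uint32 0
}

Note that the signed right shifts (Rsh8x64, Rsh16x64, Rsh32x64, Rsh64x64) only gain the zero-operand folding in this patch: an arithmetic shift by N or more bits sign-extends rather than producing zero, so no large-count rule applies to them.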