From: Michael Munday Date: Fri, 7 Nov 2025 00:00:50 +0000 (+0000) Subject: cmd/compile: remove unnecessary casts and types from riscv64 rules X-Git-Tag: go1.26rc1~288 X-Git-Url: http://www.git.cypherpunks.su/?a=commitdiff_plain;h=1e5e6663e958dcc9579fb38ffcd8a1999d75128d;p=gostls13.git cmd/compile: remove unnecessary casts and types from riscv64 rules This change shouldn't have any impact on codegen. It removes some unnecessary int8 and int64 casts from the riscv64 rewrite rules. It also removes explicit typing where the types already match: `(OldOp <t>) => (NewOp <t>)` is the same as `(OldOp) => (NewOp)`. Change-Id: Ic02b65da8f548c8b9ad9ccb6627a03b7bf6f050f Reviewed-on: https://go-review.googlesource.com/c/go/+/719220 Reviewed-by: Junyang Shao Auto-Submit: Keith Randall LUCI-TryBot-Result: Go LUCI Reviewed-by: Keith Randall Reviewed-by: Keith Randall Commit-Queue: Junyang Shao --- diff --git a/src/cmd/compile/internal/ssa/_gen/RISCV64.rules b/src/cmd/compile/internal/ssa/_gen/RISCV64.rules index 646948f2df..6166bd5584 100644 --- a/src/cmd/compile/internal/ssa/_gen/RISCV64.rules +++ b/src/cmd/compile/internal/ssa/_gen/RISCV64.rules @@ -689,36 +689,36 @@ (MOVDnop (MOVDconst [c])) => (MOVDconst [c]) // Avoid unnecessary zero and sign extension when right shifting. -(SRAI <t> [x] (MOVWreg y)) && x >= 0 && x <= 31 => (SRAIW <t> [int64(x)] y) -(SRLI <t> [x] (MOVWUreg y)) && x >= 0 && x <= 31 => (SRLIW <t> [int64(x)] y) +(SRAI [x] (MOVWreg y)) && x >= 0 && x <= 31 => (SRAIW [x] y) +(SRLI [x] (MOVWUreg y)) && x >= 0 && x <= 31 => (SRLIW [x] y) // Replace right shifts that exceed size of signed type. (SRAI [x] (MOVBreg y)) && x >= 8 => (SRAI [63] (SLLI [56] y)) (SRAI [x] (MOVHreg y)) && x >= 16 => (SRAI [63] (SLLI [48] y)) -(SRAI <t> [x] (MOVWreg y)) && x >= 32 => (SRAIW [31] y) +(SRAI [x] (MOVWreg y)) && x >= 32 => (SRAIW [31] y) // Eliminate right shifts that exceed size of unsigned type. 
-(SRLI <t> [x] (MOVBUreg y)) && x >= 8 => (MOVDconst <t> [0]) -(SRLI <t> [x] (MOVHUreg y)) && x >= 16 => (MOVDconst <t> [0]) -(SRLI <t> [x] (MOVWUreg y)) && x >= 32 => (MOVDconst <t> [0]) +(SRLI [x] (MOVBUreg y)) && x >= 8 => (MOVDconst [0]) +(SRLI [x] (MOVHUreg y)) && x >= 16 => (MOVDconst [0]) +(SRLI [x] (MOVWUreg y)) && x >= 32 => (MOVDconst [0]) // Fold constant into immediate instructions where possible. (ADD (MOVDconst <t> [val]) x) && is32Bit(val) && !t.IsPtr() => (ADDI [val] x) (AND (MOVDconst [val]) x) && is32Bit(val) => (ANDI [val] x) (OR (MOVDconst [val]) x) && is32Bit(val) => (ORI [val] x) (XOR (MOVDconst [val]) x) && is32Bit(val) => (XORI [val] x) -(ROL x (MOVDconst [val])) => (RORI [int64(int8(-val)&63)] x) -(ROLW x (MOVDconst [val])) => (RORIW [int64(int8(-val)&31)] x) -(ROR x (MOVDconst [val])) => (RORI [int64(val&63)] x) -(RORW x (MOVDconst [val])) => (RORIW [int64(val&31)] x) -(SLL x (MOVDconst [val])) => (SLLI [int64(val&63)] x) -(SRL x (MOVDconst [val])) => (SRLI [int64(val&63)] x) -(SLLW x (MOVDconst [val])) => (SLLIW [int64(val&31)] x) -(SRLW x (MOVDconst [val])) => (SRLIW [int64(val&31)] x) -(SRA x (MOVDconst [val])) => (SRAI [int64(val&63)] x) -(SRAW x (MOVDconst [val])) => (SRAIW [int64(val&31)] x) -(SLT x (MOVDconst [val])) && val >= -2048 && val <= 2047 => (SLTI [val] x) -(SLTU x (MOVDconst [val])) && val >= -2048 && val <= 2047 => (SLTIU [val] x) +(ROL x (MOVDconst [val])) => (RORI [-val&63] x) +(ROLW x (MOVDconst [val])) => (RORIW [-val&31] x) +(ROR x (MOVDconst [val])) => (RORI [val&63] x) +(RORW x (MOVDconst [val])) => (RORIW [val&31] x) +(SLL x (MOVDconst [val])) => (SLLI [val&63] x) +(SLLW x (MOVDconst [val])) => (SLLIW [val&31] x) +(SRL x (MOVDconst [val])) => (SRLI [val&63] x) +(SRLW x (MOVDconst [val])) => (SRLIW [val&31] x) +(SRA x (MOVDconst [val])) => (SRAI [val&63] x) +(SRAW x (MOVDconst [val])) => (SRAIW [val&31] x) +(SLT x (MOVDconst [val])) && is12Bit(val) => (SLTI [val] x) +(SLTU x (MOVDconst [val])) && is12Bit(val) => (SLTIU [val] x) // Replace 
negated left rotation with right rotation. (ROL x (NEG y)) => (ROR x y) @@ -782,7 +782,7 @@ (SRAI [x] (MOVDconst [y])) => (MOVDconst [int64(y) >> uint32(x)]) // Combine doubling via addition with shift. -(SLLI <t> [c] (ADD x x)) && c < t.Size() * 8 - 1 => (SLLI <t> [c+1] x) +(SLLI <t> [c] (ADD x x)) && c < t.Size() * 8 - 1 => (SLLI [c+1] x) (SLLI <t> [c] (ADD x x)) && c >= t.Size() * 8 - 1 => (MOVDconst [0]) // SLTI/SLTIU with constants. diff --git a/src/cmd/compile/internal/ssa/rewriteRISCV64.go b/src/cmd/compile/internal/ssa/rewriteRISCV64.go index 191c7b3d48..24fef3fe72 100644 --- a/src/cmd/compile/internal/ssa/rewriteRISCV64.go +++ b/src/cmd/compile/internal/ssa/rewriteRISCV64.go @@ -7027,7 +7027,7 @@ func rewriteValueRISCV64_OpRISCV64ROL(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (ROL x (MOVDconst [val])) - // result: (RORI [int64(int8(-val)&63)] x) + // result: (RORI [-val&63] x) for { x := v_0 if v_1.Op != OpRISCV64MOVDconst { @@ -7035,7 +7035,7 @@ func rewriteValueRISCV64_OpRISCV64ROL(v *Value) bool { } val := auxIntToInt64(v_1.AuxInt) v.reset(OpRISCV64RORI) - v.AuxInt = int64ToAuxInt(int64(int8(-val) & 63)) + v.AuxInt = int64ToAuxInt(-val & 63) v.AddArg(x) return true } @@ -7057,7 +7057,7 @@ func rewriteValueRISCV64_OpRISCV64ROLW(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (ROLW x (MOVDconst [val])) - // result: (RORIW [int64(int8(-val)&31)] x) + // result: (RORIW [-val&31] x) for { x := v_0 if v_1.Op != OpRISCV64MOVDconst { @@ -7065,7 +7065,7 @@ func rewriteValueRISCV64_OpRISCV64ROLW(v *Value) bool { } val := auxIntToInt64(v_1.AuxInt) v.reset(OpRISCV64RORIW) - v.AuxInt = int64ToAuxInt(int64(int8(-val) & 31)) + v.AuxInt = int64ToAuxInt(-val & 31) v.AddArg(x) return true } @@ -7087,7 +7087,7 @@ func rewriteValueRISCV64_OpRISCV64ROR(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (ROR x (MOVDconst [val])) - // result: (RORI [int64(val&63)] x) + // result: (RORI [val&63] x) for { x := v_0 if v_1.Op != OpRISCV64MOVDconst { @@ 
-7095,7 +7095,7 @@ func rewriteValueRISCV64_OpRISCV64ROR(v *Value) bool { } val := auxIntToInt64(v_1.AuxInt) v.reset(OpRISCV64RORI) - v.AuxInt = int64ToAuxInt(int64(val & 63)) + v.AuxInt = int64ToAuxInt(val & 63) v.AddArg(x) return true } @@ -7105,7 +7105,7 @@ func rewriteValueRISCV64_OpRISCV64RORW(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (RORW x (MOVDconst [val])) - // result: (RORIW [int64(val&31)] x) + // result: (RORIW [val&31] x) for { x := v_0 if v_1.Op != OpRISCV64MOVDconst { @@ -7113,7 +7113,7 @@ func rewriteValueRISCV64_OpRISCV64RORW(v *Value) bool { } val := auxIntToInt64(v_1.AuxInt) v.reset(OpRISCV64RORIW) - v.AuxInt = int64ToAuxInt(int64(val & 31)) + v.AuxInt = int64ToAuxInt(val & 31) v.AddArg(x) return true } @@ -7212,7 +7212,7 @@ func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (SLL x (MOVDconst [val])) - // result: (SLLI [int64(val&63)] x) + // result: (SLLI [val&63] x) for { x := v_0 if v_1.Op != OpRISCV64MOVDconst { @@ -7220,7 +7220,7 @@ func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool { } val := auxIntToInt64(v_1.AuxInt) v.reset(OpRISCV64SLLI) - v.AuxInt = int64ToAuxInt(int64(val & 63)) + v.AuxInt = int64ToAuxInt(val & 63) v.AddArg(x) return true } @@ -7246,7 +7246,7 @@ func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool { } // match: (SLLI [c] (ADD x x)) // cond: c < t.Size() * 8 - 1 - // result: (SLLI [c+1] x) + // result: (SLLI [c+1] x) for { t := v.Type c := auxIntToInt64(v.AuxInt) @@ -7258,7 +7258,6 @@ func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool { break } v.reset(OpRISCV64SLLI) - v.Type = t v.AuxInt = int64ToAuxInt(c + 1) v.AddArg(x) return true @@ -7286,7 +7285,7 @@ func rewriteValueRISCV64_OpRISCV64SLLW(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (SLLW x (MOVDconst [val])) - // result: (SLLIW [int64(val&31)] x) + // result: (SLLIW [val&31] x) for { x := v_0 if v_1.Op != OpRISCV64MOVDconst { @@ -7294,7 +7293,7 @@ func 
rewriteValueRISCV64_OpRISCV64SLLW(v *Value) bool { } val := auxIntToInt64(v_1.AuxInt) v.reset(OpRISCV64SLLIW) - v.AuxInt = int64ToAuxInt(int64(val & 31)) + v.AuxInt = int64ToAuxInt(val & 31) v.AddArg(x) return true } @@ -7304,7 +7303,7 @@ func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (SLT x (MOVDconst [val])) - // cond: val >= -2048 && val <= 2047 + // cond: is12Bit(val) // result: (SLTI [val] x) for { x := v_0 @@ -7312,7 +7311,7 @@ func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool { break } val := auxIntToInt64(v_1.AuxInt) - if !(val >= -2048 && val <= 2047) { + if !(is12Bit(val)) { break } v.reset(OpRISCV64SLTI) @@ -7433,7 +7432,7 @@ func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (SLTU x (MOVDconst [val])) - // cond: val >= -2048 && val <= 2047 + // cond: is12Bit(val) // result: (SLTIU [val] x) for { x := v_0 @@ -7441,7 +7440,7 @@ func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool { break } val := auxIntToInt64(v_1.AuxInt) - if !(val >= -2048 && val <= 2047) { + if !(is12Bit(val)) { break } v.reset(OpRISCV64SLTIU) @@ -7555,7 +7554,7 @@ func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (SRA x (MOVDconst [val])) - // result: (SRAI [int64(val&63)] x) + // result: (SRAI [val&63] x) for { x := v_0 if v_1.Op != OpRISCV64MOVDconst { @@ -7563,7 +7562,7 @@ func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool { } val := auxIntToInt64(v_1.AuxInt) v.reset(OpRISCV64SRAI) - v.AuxInt = int64ToAuxInt(int64(val & 63)) + v.AuxInt = int64ToAuxInt(val & 63) v.AddArg(x) return true } @@ -7572,11 +7571,10 @@ func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool { func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool { v_0 := v.Args[0] b := v.Block - // match: (SRAI [x] (MOVWreg y)) + // match: (SRAI [x] (MOVWreg y)) // cond: x >= 0 && x <= 31 - // result: (SRAIW [int64(x)] y) + // result: (SRAIW [x] y) for { - t := v.Type x := 
auxIntToInt64(v.AuxInt) if v_0.Op != OpRISCV64MOVWreg { break @@ -7586,8 +7584,7 @@ func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool { break } v.reset(OpRISCV64SRAIW) - v.Type = t - v.AuxInt = int64ToAuxInt(int64(x)) + v.AuxInt = int64ToAuxInt(x) v.AddArg(y) return true } @@ -7633,7 +7630,7 @@ func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool { v.AddArg(v0) return true } - // match: (SRAI [x] (MOVWreg y)) + // match: (SRAI [x] (MOVWreg y)) // cond: x >= 32 // result: (SRAIW [31] y) for { @@ -7668,7 +7665,7 @@ func rewriteValueRISCV64_OpRISCV64SRAW(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (SRAW x (MOVDconst [val])) - // result: (SRAIW [int64(val&31)] x) + // result: (SRAIW [val&31] x) for { x := v_0 if v_1.Op != OpRISCV64MOVDconst { @@ -7676,7 +7673,7 @@ func rewriteValueRISCV64_OpRISCV64SRAW(v *Value) bool { } val := auxIntToInt64(v_1.AuxInt) v.reset(OpRISCV64SRAIW) - v.AuxInt = int64ToAuxInt(int64(val & 31)) + v.AuxInt = int64ToAuxInt(val & 31) v.AddArg(x) return true } @@ -7686,7 +7683,7 @@ func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (SRL x (MOVDconst [val])) - // result: (SRLI [int64(val&63)] x) + // result: (SRLI [val&63] x) for { x := v_0 if v_1.Op != OpRISCV64MOVDconst { @@ -7694,7 +7691,7 @@ func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool { } val := auxIntToInt64(v_1.AuxInt) v.reset(OpRISCV64SRLI) - v.AuxInt = int64ToAuxInt(int64(val & 63)) + v.AuxInt = int64ToAuxInt(val & 63) v.AddArg(x) return true } @@ -7702,11 +7699,10 @@ func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool { } func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool { v_0 := v.Args[0] - // match: (SRLI [x] (MOVWUreg y)) + // match: (SRLI [x] (MOVWUreg y)) // cond: x >= 0 && x <= 31 - // result: (SRLIW [int64(x)] y) + // result: (SRLIW [x] y) for { - t := v.Type x := auxIntToInt64(v.AuxInt) if v_0.Op != OpRISCV64MOVWUreg { break @@ -7716,16 +7712,14 @@ func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) 
bool { break } v.reset(OpRISCV64SRLIW) - v.Type = t - v.AuxInt = int64ToAuxInt(int64(x)) + v.AuxInt = int64ToAuxInt(x) v.AddArg(y) return true } - // match: (SRLI [x] (MOVBUreg y)) + // match: (SRLI [x] (MOVBUreg y)) // cond: x >= 8 - // result: (MOVDconst [0]) + // result: (MOVDconst [0]) for { - t := v.Type x := auxIntToInt64(v.AuxInt) if v_0.Op != OpRISCV64MOVBUreg { break @@ -7734,15 +7728,13 @@ func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool { break } v.reset(OpRISCV64MOVDconst) - v.Type = t v.AuxInt = int64ToAuxInt(0) return true } - // match: (SRLI [x] (MOVHUreg y)) + // match: (SRLI [x] (MOVHUreg y)) // cond: x >= 16 - // result: (MOVDconst [0]) + // result: (MOVDconst [0]) for { - t := v.Type x := auxIntToInt64(v.AuxInt) if v_0.Op != OpRISCV64MOVHUreg { break @@ -7751,15 +7743,13 @@ func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool { break } v.reset(OpRISCV64MOVDconst) - v.Type = t v.AuxInt = int64ToAuxInt(0) return true } - // match: (SRLI [x] (MOVWUreg y)) + // match: (SRLI [x] (MOVWUreg y)) // cond: x >= 32 - // result: (MOVDconst [0]) + // result: (MOVDconst [0]) for { - t := v.Type x := auxIntToInt64(v.AuxInt) if v_0.Op != OpRISCV64MOVWUreg { break @@ -7768,7 +7758,6 @@ func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool { break } v.reset(OpRISCV64MOVDconst) - v.Type = t v.AuxInt = int64ToAuxInt(0) return true } @@ -7790,7 +7779,7 @@ func rewriteValueRISCV64_OpRISCV64SRLW(v *Value) bool { v_1 := v.Args[1] v_0 := v.Args[0] // match: (SRLW x (MOVDconst [val])) - // result: (SRLIW [int64(val&31)] x) + // result: (SRLIW [val&31] x) for { x := v_0 if v_1.Op != OpRISCV64MOVDconst { @@ -7798,7 +7787,7 @@ func rewriteValueRISCV64_OpRISCV64SRLW(v *Value) bool { } val := auxIntToInt64(v_1.AuxInt) v.reset(OpRISCV64SRLIW) - v.AuxInt = int64ToAuxInt(int64(val & 31)) + v.AuxInt = int64ToAuxInt(val & 31) v.AddArg(x) return true }