// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Optimizations TODO:
-// * Use SLTI and SLTIU for comparisons to constants, instead of SLT/SLTU with constants in registers
// * Use the zero register instead of moving 0 into a register.
// * Add rules to avoid generating a temp bool value for (If (SLT[U] ...) ...).
// * Arrange for non-trivial Zero and Move lowerings to use aligned loads and stores.
// * Avoid using Neq32 for writeBarrier.enabled checks.

// Lowering arithmetic
(Add64 ...) => (ADD ...)
(AddPtr ...) => (ADD ...)
(BNEZ (SLT x y) yes no) => (BLT x y yes no)
(BEQZ (SLTU x y) yes no) => (BGEU x y yes no)
(BNEZ (SLTU x y) yes no) => (BLTU x y yes no)
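+// Absorb SLTI/SLTIU into branch. The conditional branches compare two
+// registers, so the immediate operand is rematerialized as a MOVDconst.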
+(BEQZ (SLTI [x] y) yes no) => (BGE y (MOVDconst [x]) yes no)
+(BNEZ (SLTI [x] y) yes no) => (BLT y (MOVDconst [x]) yes no)
+(BEQZ (SLTIU [x] y) yes no) => (BGEU y (MOVDconst [x]) yes no)
+(BNEZ (SLTIU [x] y) yes no) => (BLTU y (MOVDconst [x]) yes no)
// Convert branch with zero to more optimal branch zero.
(BEQ (MOVDconst [0]) cond yes no) => (BEQZ cond yes no)
(AND (MOVDconst [val]) x) && is32Bit(val) => (ANDI [val] x)
(OR (MOVDconst [val]) x) && is32Bit(val) => (ORI [val] x)
(XOR (MOVDconst [val]) x) && is32Bit(val) => (XORI [val] x)
(SLL x (MOVDconst [val])) => (SLLI [int64(val&63)] x)
(SRL x (MOVDconst [val])) => (SRLI [int64(val&63)] x)
(SRA x (MOVDconst [val])) => (SRAI [int64(val&63)] x)
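+// Use the immediate form of SLT/SLTU when the constant fits in a signed
+// 12-bit immediate (-2048 to 2047).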
+(SLT x (MOVDconst [val])) && val >= -2048 && val <= 2047 => (SLTI [val] x)
+(SLTU x (MOVDconst [val])) && val >= -2048 && val <= 2047 => (SLTIU [val] x)
// Convert const subtraction into ADDI with negative immediate, where possible.
(SUB x (MOVDconst [val])) && is32Bit(-val) => (ADDI [-val] x)
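
For illustration, a sketch (not part of the change above) of Go source that
should now hit the new rules on riscv64, assuming the constant fits in the
signed 12-bit immediate range (-2048 to 2047):

	// lessThan100 compares against a small constant. The comparison lowers as
	// Less64 -> (SLT x (MOVDconst [100])) -> (SLTI [100] x), so the constant
	// no longer needs to be materialized into a register before the compare.
	func lessThan100(x int64) bool {
		return x < 100
	}

When such a comparison only controls a branch, the BEQZ/BNEZ rules above fold
the SLTI/SLTIU into a BGE/BLT (or BGEU/BLTU for the unsigned case) whose second
operand is the constant rematerialized as a MOVDconst.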
func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
+	// match: (SLT x (MOVDconst [val]))
+	// cond: val >= -2048 && val <= 2047
+	// result: (SLTI [val] x)
+	for {
+		x := v_0
+		if v_1.Op != OpRISCV64MOVDconst {
+			break
+		}
+		val := auxIntToInt64(v_1.AuxInt)
+		if !(val >= -2048 && val <= 2047) {
+			break
+		}
+		v.reset(OpRISCV64SLTI)
+		v.AuxInt = int64ToAuxInt(val)
+		v.AddArg(x)
+		return true
+	}
	// match: (SLT x x)
	// result: (MOVDconst [0])
	for {
func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
+	// match: (SLTU x (MOVDconst [val]))
+	// cond: val >= -2048 && val <= 2047
+	// result: (SLTIU [val] x)
+	for {
+		x := v_0
+		if v_1.Op != OpRISCV64MOVDconst {
+			break
+		}
+		val := auxIntToInt64(v_1.AuxInt)
+		if !(val >= -2048 && val <= 2047) {
+			break
+		}
+		v.reset(OpRISCV64SLTIU)
+		v.AuxInt = int64ToAuxInt(val)
+		v.AddArg(x)
+		return true
+	}
	// match: (SLTU x x)
	// result: (MOVDconst [0])
	for {
			b.resetWithControl2(BlockRISCV64BGEU, x, y)
			return true
		}
+		// match: (BEQZ (SLTI [x] y) yes no)
+		// result: (BGE y (MOVDconst [x]) yes no)
+		for b.Controls[0].Op == OpRISCV64SLTI {
+			v_0 := b.Controls[0]
+			x := auxIntToInt64(v_0.AuxInt)
+			y := v_0.Args[0]
+			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
+			v0.AuxInt = int64ToAuxInt(x)
+			b.resetWithControl2(BlockRISCV64BGE, y, v0)
+			return true
+		}
+		// match: (BEQZ (SLTIU [x] y) yes no)
+		// result: (BGEU y (MOVDconst [x]) yes no)
+		for b.Controls[0].Op == OpRISCV64SLTIU {
+			v_0 := b.Controls[0]
+			x := auxIntToInt64(v_0.AuxInt)
+			y := v_0.Args[0]
+			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
+			v0.AuxInt = int64ToAuxInt(x)
+			b.resetWithControl2(BlockRISCV64BGEU, y, v0)
+			return true
+		}
	case BlockRISCV64BGE:
		// match: (BGE (MOVDconst [0]) cond yes no)
		// result: (BLEZ cond yes no)
			b.resetWithControl2(BlockRISCV64BLTU, x, y)
			return true
		}
+		// match: (BNEZ (SLTI [x] y) yes no)
+		// result: (BLT y (MOVDconst [x]) yes no)
+		for b.Controls[0].Op == OpRISCV64SLTI {
+			v_0 := b.Controls[0]
+			x := auxIntToInt64(v_0.AuxInt)
+			y := v_0.Args[0]
+			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
+			v0.AuxInt = int64ToAuxInt(x)
+			b.resetWithControl2(BlockRISCV64BLT, y, v0)
+			return true
+		}
+		// match: (BNEZ (SLTIU [x] y) yes no)
+		// result: (BLTU y (MOVDconst [x]) yes no)
+		for b.Controls[0].Op == OpRISCV64SLTIU {
+			v_0 := b.Controls[0]
+			x := auxIntToInt64(v_0.AuxInt)
+			y := v_0.Args[0]
+			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
+			v0.AuxInt = int64ToAuxInt(x)
+			b.resetWithControl2(BlockRISCV64BLTU, y, v0)
+			return true
+		}
	case BlockIf:
		// match: (If cond yes no)
		// result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)