(MOVDnop (MOVDconst [c])) => (MOVDconst [c])
// Avoid unnecessary zero and sign extension when right shifting.
-(SRAI <t> [x] (MOVWreg y)) && x >= 0 && x <= 31 => (SRAIW <t> [int64(x)] y)
-(SRLI <t> [x] (MOVWUreg y)) && x >= 0 && x <= 31 => (SRLIW <t> [int64(x)] y)
+(SRAI [x] (MOVWreg y)) && x >= 0 && x <= 31 => (SRAIW [x] y)
+(SRLI [x] (MOVWUreg y)) && x >= 0 && x <= 31 => (SRLIW [x] y)
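A quick equivalence check for this first pair of rules (my own sanity test, not part of the change): once y has been sign-extended from 32 bits, a 64-bit arithmetic right shift by x in [0,31] agrees with the 32-bit SRAIW form, so the MOVWreg is redundant.

```go
package main

import "fmt"

func main() {
	// SRAI [x] (MOVWreg y) vs. SRAIW [x] y for every x in [0, 31].
	for _, y := range []int32{-1 << 31, -1, 0, 1, 1<<31 - 1} {
		for x := 0; x <= 31; x++ {
			srai := int64(y) >> x  // shift the sign-extended value
			sraiw := int64(y >> x) // 32-bit shift, sign-extend the result
			if srai != sraiw {
				fmt.Printf("mismatch: y=%d x=%d\n", y, x)
			}
		}
	}
	fmt.Println("ok")
}
```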
// Replace right shifts that exceed size of signed type.
(SRAI <t> [x] (MOVBreg y)) && x >= 8 => (SRAI [63] (SLLI <t> [56] y))
(SRAI <t> [x] (MOVHreg y)) && x >= 16 => (SRAI [63] (SLLI <t> [48] y))
-(SRAI <t> [x] (MOVWreg y)) && x >= 32 => (SRAIW [31] y)
+(SRAI [x] (MOVWreg y)) && x >= 32 => (SRAIW [31] y)
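The MOVBreg/MOVHreg rewrites rely on the standard sign-smearing trick: an arithmetic right shift of a sign-extended int8 by 8 or more can only yield 0 or -1, and SLLI [56] followed by SRAI [63] computes exactly that. A minimal illustration (example values are arbitrary):

```go
package main

import "fmt"

func main() {
	for _, y := range []int8{-128, -5, 0, 127} {
		// (SRAI [63] (SLLI [56] y)): move the sign bit to bit 63,
		// then smear it across the whole register.
		got := (int64(y) << 56) >> 63
		want := int64(0)
		if y < 0 {
			want = -1 // every shifted-in bit is a copy of the sign bit
		}
		fmt.Println(got == want) // true for every y
	}
}
```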
// Eliminate right shifts that exceed size of unsigned type.
-(SRLI <t> [x] (MOVBUreg y)) && x >= 8 => (MOVDconst <t> [0])
-(SRLI <t> [x] (MOVHUreg y)) && x >= 16 => (MOVDconst <t> [0])
-(SRLI <t> [x] (MOVWUreg y)) && x >= 32 => (MOVDconst <t> [0])
+(SRLI [x] (MOVBUreg y)) && x >= 8 => (MOVDconst [0])
+(SRLI [x] (MOVHUreg y)) && x >= 16 => (MOVDconst [0])
+(SRLI [x] (MOVWUreg y)) && x >= 32 => (MOVDconst [0])
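The unsigned counterparts need no shift at all: a zero-extended uint8/uint16/uint32 has no bits above bit 7/15/31, so any right shift by the type's width or more is zero. For instance:

```go
package main

import "fmt"

func main() {
	fmt.Println(uint64(uint8(0xff)) >> 8)         // 0
	fmt.Println(uint64(uint16(0xffff)) >> 16)     // 0
	fmt.Println(uint64(uint32(0xffffffff)) >> 32) // 0
}
```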
// Fold constant into immediate instructions where possible.
(ADD (MOVDconst <t> [val]) x) && is32Bit(val) && !t.IsPtr() => (ADDI [val] x)
(AND (MOVDconst [val]) x) && is32Bit(val) => (ANDI [val] x)
(OR (MOVDconst [val]) x) && is32Bit(val) => (ORI [val] x)
(XOR (MOVDconst [val]) x) && is32Bit(val) => (XORI [val] x)
-(ROL x (MOVDconst [val])) => (RORI [int64(int8(-val)&63)] x)
-(ROLW x (MOVDconst [val])) => (RORIW [int64(int8(-val)&31)] x)
-(ROR x (MOVDconst [val])) => (RORI [int64(val&63)] x)
-(RORW x (MOVDconst [val])) => (RORIW [int64(val&31)] x)
-(SLL x (MOVDconst [val])) => (SLLI [int64(val&63)] x)
-(SRL x (MOVDconst [val])) => (SRLI [int64(val&63)] x)
-(SLLW x (MOVDconst [val])) => (SLLIW [int64(val&31)] x)
-(SRLW x (MOVDconst [val])) => (SRLIW [int64(val&31)] x)
-(SRA x (MOVDconst [val])) => (SRAI [int64(val&63)] x)
-(SRAW x (MOVDconst [val])) => (SRAIW [int64(val&31)] x)
-(SLT x (MOVDconst [val])) && val >= -2048 && val <= 2047 => (SLTI [val] x)
-(SLTU x (MOVDconst [val])) && val >= -2048 && val <= 2047 => (SLTIU [val] x)
+(ROL x (MOVDconst [val])) => (RORI [-val&63] x)
+(ROLW x (MOVDconst [val])) => (RORIW [-val&31] x)
+(ROR x (MOVDconst [val])) => (RORI [val&63] x)
+(RORW x (MOVDconst [val])) => (RORIW [val&31] x)
+(SLL x (MOVDconst [val])) => (SLLI [val&63] x)
+(SLLW x (MOVDconst [val])) => (SLLIW [val&31] x)
+(SRL x (MOVDconst [val])) => (SRLI [val&63] x)
+(SRLW x (MOVDconst [val])) => (SRLIW [val&31] x)
+(SRA x (MOVDconst [val])) => (SRAI [val&63] x)
+(SRAW x (MOVDconst [val])) => (SRAIW [val&31] x)
+(SLT x (MOVDconst [val])) && is12Bit(val) => (SLTI [val] x)
+(SLTU x (MOVDconst [val])) && is12Bit(val) => (SLTIU [val] x)
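The &63/&31 masks match the hardware semantics of the register-shift forms, which consume only the low six bits of the shift amount (five for the *W forms), so the fold is safe for any constant. is12Bit replaces the spelled-out -2048..2047 bounds; a sketch of such a predicate, assuming it sits with the other is*Bit helpers in the ssa package (the bounds come straight from the old conditions):

```go
package ssa

// is12Bit reports whether n fits in a signed 12-bit immediate,
// the I-type immediate width on RISC-V: -2048 <= n <= 2047.
func is12Bit(n int64) bool {
	return -(1 << 11) <= n && n < (1 << 11)
}
```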
// Replace negated left rotation with right rotation.
(ROL x (NEG y)) => (ROR x y)
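Rotating left by a negated amount is a right rotation by that amount, which is all this rule encodes. math/bits documents the same identity (RotateLeft64 with a negative count rotates right); example values are arbitrary:

```go
package main

import (
	"fmt"
	"math/bits"
)

func main() {
	x := uint64(0x0123456789abcdef)
	for y := 0; y < 64; y++ {
		rol := bits.RotateLeft64(x, -y)        // ROL x (NEG y)
		ror := x>>uint(y) | x<<uint((64-y)%64) // ROR x y
		if rol != ror {
			fmt.Println("mismatch at", y)
		}
	}
	fmt.Println("ok")
}
```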
(SRAI [x] (MOVDconst [y])) => (MOVDconst [int64(y) >> uint32(x)])
// Combine doubling via addition with shift.
-(SLLI <t> [c] (ADD x x)) && c < t.Size() * 8 - 1 => (SLLI <t> [c+1] x)
+(SLLI <t> [c] (ADD x x)) && c < t.Size() * 8 - 1 => (SLLI [c+1] x)
(SLLI <t> [c] (ADD x x)) && c >= t.Size() * 8 - 1 => (MOVDconst [0])
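Since x + x == x << 1, a left shift of a self-add can absorb the doubling into the shift amount; once c+1 reaches the register width, every bit has been shifted out and the shift is no longer encodable, hence the MOVDconst [0] case. A quick check of the identity:

```go
package main

import "fmt"

func main() {
	x := int64(0x123456789)
	for c := 0; c < 63; c++ {
		if (x+x)<<c != x<<(c+1) {
			fmt.Println("mismatch at", c)
		}
	}
	// At c = 63 (t.Size()*8 - 1), the rewrite would need SLLI [64],
	// which does not exist; the result is all zeros anyway.
	fmt.Println("ok")
}
```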
// SLTI/SLTIU with constants.
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (ROL x (MOVDconst [val]))
- // result: (RORI [int64(int8(-val)&63)] x)
+ // result: (RORI [-val&63] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
v.reset(OpRISCV64RORI)
- v.AuxInt = int64ToAuxInt(int64(int8(-val) & 63))
+ v.AuxInt = int64ToAuxInt(-val & 63)
v.AddArg(x)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (ROLW x (MOVDconst [val]))
- // result: (RORIW [int64(int8(-val)&31)] x)
+ // result: (RORIW [-val&31] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
v.reset(OpRISCV64RORIW)
- v.AuxInt = int64ToAuxInt(int64(int8(-val) & 31))
+ v.AuxInt = int64ToAuxInt(-val & 31)
v.AddArg(x)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (ROR x (MOVDconst [val]))
- // result: (RORI [int64(val&63)] x)
+ // result: (RORI [val&63] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
v.reset(OpRISCV64RORI)
- v.AuxInt = int64ToAuxInt(int64(val & 63))
+ v.AuxInt = int64ToAuxInt(val & 63)
v.AddArg(x)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (RORW x (MOVDconst [val]))
- // result: (RORIW [int64(val&31)] x)
+ // result: (RORIW [val&31] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
v.reset(OpRISCV64RORIW)
- v.AuxInt = int64ToAuxInt(int64(val & 31))
+ v.AuxInt = int64ToAuxInt(val & 31)
v.AddArg(x)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (SLL x (MOVDconst [val]))
- // result: (SLLI [int64(val&63)] x)
+ // result: (SLLI [val&63] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
v.reset(OpRISCV64SLLI)
- v.AuxInt = int64ToAuxInt(int64(val & 63))
+ v.AuxInt = int64ToAuxInt(val & 63)
v.AddArg(x)
return true
}
}
// match: (SLLI <t> [c] (ADD x x))
// cond: c < t.Size() * 8 - 1
- // result: (SLLI <t> [c+1] x)
+ // result: (SLLI [c+1] x)
for {
t := v.Type
c := auxIntToInt64(v.AuxInt)
if v_0.Op != OpRISCV64ADD {
break
}
x := v_0.Args[1]
if x != v_0.Args[0] {
break
}
if !(c < t.Size()*8-1) {
break
}
v.reset(OpRISCV64SLLI)
- v.Type = t
v.AuxInt = int64ToAuxInt(c + 1)
v.AddArg(x)
return true
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (SLLW x (MOVDconst [val]))
- // result: (SLLIW [int64(val&31)] x)
+ // result: (SLLIW [val&31] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
v.reset(OpRISCV64SLLIW)
- v.AuxInt = int64ToAuxInt(int64(val & 31))
+ v.AuxInt = int64ToAuxInt(val & 31)
v.AddArg(x)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (SLT x (MOVDconst [val]))
- // cond: val >= -2048 && val <= 2047
+ // cond: is12Bit(val)
// result: (SLTI [val] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
- if !(val >= -2048 && val <= 2047) {
+ if !(is12Bit(val)) {
break
}
v.reset(OpRISCV64SLTI)
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (SLTU x (MOVDconst [val]))
- // cond: val >= -2048 && val <= 2047
+ // cond: is12Bit(val)
// result: (SLTIU [val] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
- if !(val >= -2048 && val <= 2047) {
+ if !(is12Bit(val)) {
break
}
v.reset(OpRISCV64SLTIU)
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (SRA x (MOVDconst [val]))
- // result: (SRAI [int64(val&63)] x)
+ // result: (SRAI [val&63] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
v.reset(OpRISCV64SRAI)
- v.AuxInt = int64ToAuxInt(int64(val & 63))
+ v.AuxInt = int64ToAuxInt(val & 63)
v.AddArg(x)
return true
}
func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
- // match: (SRAI <t> [x] (MOVWreg y))
+ // match: (SRAI [x] (MOVWreg y))
// cond: x >= 0 && x <= 31
- // result: (SRAIW <t> [int64(x)] y)
+ // result: (SRAIW [x] y)
for {
- t := v.Type
x := auxIntToInt64(v.AuxInt)
if v_0.Op != OpRISCV64MOVWreg {
break
}
y := v_0.Args[0]
if !(x >= 0 && x <= 31) {
break
}
v.reset(OpRISCV64SRAIW)
- v.Type = t
- v.AuxInt = int64ToAuxInt(int64(x))
+ v.AuxInt = int64ToAuxInt(x)
v.AddArg(y)
return true
}
v.AddArg(v0)
return true
}
- // match: (SRAI <t> [x] (MOVWreg y))
+ // match: (SRAI [x] (MOVWreg y))
// cond: x >= 32
// result: (SRAIW [31] y)
for {
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (SRAW x (MOVDconst [val]))
- // result: (SRAIW [int64(val&31)] x)
+ // result: (SRAIW [val&31] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
v.reset(OpRISCV64SRAIW)
- v.AuxInt = int64ToAuxInt(int64(val & 31))
+ v.AuxInt = int64ToAuxInt(val & 31)
v.AddArg(x)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (SRL x (MOVDconst [val]))
- // result: (SRLI [int64(val&63)] x)
+ // result: (SRLI [val&63] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
v.reset(OpRISCV64SRLI)
- v.AuxInt = int64ToAuxInt(int64(val & 63))
+ v.AuxInt = int64ToAuxInt(val & 63)
v.AddArg(x)
return true
}
}
func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
v_0 := v.Args[0]
- // match: (SRLI <t> [x] (MOVWUreg y))
+ // match: (SRLI [x] (MOVWUreg y))
// cond: x >= 0 && x <= 31
- // result: (SRLIW <t> [int64(x)] y)
+ // result: (SRLIW [x] y)
for {
- t := v.Type
x := auxIntToInt64(v.AuxInt)
if v_0.Op != OpRISCV64MOVWUreg {
break
}
y := v_0.Args[0]
if !(x >= 0 && x <= 31) {
break
}
v.reset(OpRISCV64SRLIW)
- v.Type = t
- v.AuxInt = int64ToAuxInt(int64(x))
+ v.AuxInt = int64ToAuxInt(x)
v.AddArg(y)
return true
}
- // match: (SRLI <t> [x] (MOVBUreg y))
+ // match: (SRLI [x] (MOVBUreg y))
// cond: x >= 8
- // result: (MOVDconst <t> [0])
+ // result: (MOVDconst [0])
for {
- t := v.Type
x := auxIntToInt64(v.AuxInt)
if v_0.Op != OpRISCV64MOVBUreg {
break
}
if !(x >= 8) {
break
}
v.reset(OpRISCV64MOVDconst)
- v.Type = t
v.AuxInt = int64ToAuxInt(0)
return true
}
- // match: (SRLI <t> [x] (MOVHUreg y))
+ // match: (SRLI [x] (MOVHUreg y))
// cond: x >= 16
- // result: (MOVDconst <t> [0])
+ // result: (MOVDconst [0])
for {
- t := v.Type
x := auxIntToInt64(v.AuxInt)
if v_0.Op != OpRISCV64MOVHUreg {
break
}
if !(x >= 16) {
break
}
v.reset(OpRISCV64MOVDconst)
- v.Type = t
v.AuxInt = int64ToAuxInt(0)
return true
}
- // match: (SRLI <t> [x] (MOVWUreg y))
+ // match: (SRLI [x] (MOVWUreg y))
// cond: x >= 32
- // result: (MOVDconst <t> [0])
+ // result: (MOVDconst [0])
for {
- t := v.Type
x := auxIntToInt64(v.AuxInt)
if v_0.Op != OpRISCV64MOVWUreg {
break
}
if !(x >= 32) {
break
}
v.reset(OpRISCV64MOVDconst)
- v.Type = t
v.AuxInt = int64ToAuxInt(0)
return true
}
v_1 := v.Args[1]
v_0 := v.Args[0]
// match: (SRLW x (MOVDconst [val]))
- // result: (SRLIW [int64(val&31)] x)
+ // result: (SRLIW [val&31] x)
for {
x := v_0
if v_1.Op != OpRISCV64MOVDconst {
break
}
val := auxIntToInt64(v_1.AuxInt)
v.reset(OpRISCV64SRLIW)
- v.AuxInt = int64ToAuxInt(int64(val & 31))
+ v.AuxInt = int64ToAuxInt(val & 31)
v.AddArg(x)
return true
}