(MOVDconst <t> [c]) && !is32Bit(c) && int32(c) < 0 -> (ADD (SLLI <t> [32] (MOVDconst [c>>32+1])) (MOVDconst [int64(int32(c))]))
(MOVDconst <t> [c]) && !is32Bit(c) && int32(c) >= 0 -> (ADD (SLLI <t> [32] (MOVDconst [c>>32+0])) (MOVDconst [int64(int32(c))]))
-// Fold ADD+MOVDconst into ADDI where possible.
-(ADD (MOVDconst [off]) ptr) && is32Bit(off) -> (ADDI [off] ptr)
-
(Addr ...) -> (MOVaddr ...)
(LocalAddr {sym} base _) -> (MOVaddr {sym} base)
(ClosureCall ...) -> (CALLclosure ...)
(InterCall ...) -> (CALLinter ...)
+// Optimizations
+
+// Fold ADD+MOVDconst into ADDI where possible.
+(ADD (MOVDconst [off]) ptr) && is32Bit(off) -> (ADDI [off] ptr)
+
+// Convert subtraction of a const into ADDI with negative immediate, where possible.
+// The is32Bit(-val) guard also rejects val == MinInt64, whose negation would wrap
+// back to MinInt64, and MOVWconst MinInt32, whose negation does not fit in 32 bits.
+// NOTE(review): -val can still exceed the 12-bit ADDI immediate range; presumably
+// a later lowering/assembler pass materializes large immediates — confirm.
+(SUB x (MOVBconst [val])) && is32Bit(-val) -> (ADDI [-val] x)
+(SUB x (MOVHconst [val])) && is32Bit(-val) -> (ADDI [-val] x)
+(SUB x (MOVWconst [val])) && is32Bit(-val) -> (ADDI [-val] x)
+(SUB x (MOVDconst [val])) && is32Bit(-val) -> (ADDI [-val] x)
+
// remove redundant *const ops
(ADDI [0] x) -> x
return rewriteValueRISCV64_OpRISCV64MOVWload(v)
case OpRISCV64MOVWstore:
return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
+ case OpRISCV64SUB:
+ return rewriteValueRISCV64_OpRISCV64SUB(v)
case OpRotateLeft16:
return rewriteValueRISCV64_OpRotateLeft16(v)
case OpRotateLeft32:
}
return false
}
+// rewriteValueRISCV64_OpRISCV64SUB rewrites (SUB x (MOV{B,H,W,D}const [val]))
+// into (ADDI [-val] x) when the negated constant passes is32Bit, mirroring the
+// SUB rules in the .rules file. The guard also excludes val == MinInt64, whose
+// negation would wrap.
+// NOTE(review): this file is rulegen output — regenerate it from the .rules
+// change rather than editing by hand, so the two stay in sync.
+func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
+ v_1 := v.Args[1]
+ v_0 := v.Args[0]
+ // match: (SUB x (MOVBconst [val]))
+ // cond: is32Bit(-val)
+ // result: (ADDI [-val] x)
+ for {
+ x := v_0
+ if v_1.Op != OpRISCV64MOVBconst {
+ break
+ }
+ val := v_1.AuxInt
+ if !(is32Bit(-val)) {
+ break
+ }
+ v.reset(OpRISCV64ADDI)
+ v.AuxInt = -val
+ v.AddArg(x)
+ return true
+ }
+ // match: (SUB x (MOVHconst [val]))
+ // cond: is32Bit(-val)
+ // result: (ADDI [-val] x)
+ for {
+ x := v_0
+ if v_1.Op != OpRISCV64MOVHconst {
+ break
+ }
+ val := v_1.AuxInt
+ if !(is32Bit(-val)) {
+ break
+ }
+ v.reset(OpRISCV64ADDI)
+ v.AuxInt = -val
+ v.AddArg(x)
+ return true
+ }
+ // match: (SUB x (MOVWconst [val]))
+ // cond: is32Bit(-val)
+ // result: (ADDI [-val] x)
+ for {
+ x := v_0
+ if v_1.Op != OpRISCV64MOVWconst {
+ break
+ }
+ val := v_1.AuxInt
+ if !(is32Bit(-val)) {
+ break
+ }
+ v.reset(OpRISCV64ADDI)
+ v.AuxInt = -val
+ v.AddArg(x)
+ return true
+ }
+ // match: (SUB x (MOVDconst [val]))
+ // cond: is32Bit(-val)
+ // result: (ADDI [-val] x)
+ for {
+ x := v_0
+ if v_1.Op != OpRISCV64MOVDconst {
+ break
+ }
+ val := v_1.AuxInt
+ if !(is32Bit(-val)) {
+ break
+ }
+ v.reset(OpRISCV64ADDI)
+ v.AuxInt = -val
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]