(SRLVconst [rc] (MOVHUreg x)) && rc >= 16 => (MOVVconst [0])
(SRLVconst [rc] (MOVBUreg x)) && rc >= 8 => (MOVVconst [0])
+// (x + x) << c -> x << c+1
+((SLLV|SLL)const [c] (ADDV x x)) => ((SLLV|SLL)const [c+1] x)
+
// mul by constant
(MULV _ (MOVVconst [0])) => (MOVVconst [0])
(MULV x (MOVVconst [1])) => x
return rewriteValueLOONG64_OpLOONG64SLLV(v)
case OpLOONG64SLLVconst:
return rewriteValueLOONG64_OpLOONG64SLLVconst(v)
+ case OpLOONG64SLLconst:
+ return rewriteValueLOONG64_OpLOONG64SLLconst(v)
case OpLOONG64SRA:
return rewriteValueLOONG64_OpLOONG64SRA(v)
case OpLOONG64SRAV:
}
func rewriteValueLOONG64_OpLOONG64SLLVconst(v *Value) bool {
v_0 := v.Args[0]
+ // match: (SLLVconst [c] (ADDV x x))
+ // result: (SLLVconst [c+1] x)
+ for {
+ c := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpLOONG64ADDV {
+ break
+ }
+ x := v_0.Args[1]
+ if x != v_0.Args[0] {
+ break
+ }
+ v.reset(OpLOONG64SLLVconst)
+ v.AuxInt = int64ToAuxInt(c + 1)
+ v.AddArg(x)
+ return true
+ }
// match: (SLLVconst [c] (MOVVconst [d]))
// result: (MOVVconst [d<<uint64(c)])
for {
}
return false
}
+func rewriteValueLOONG64_OpLOONG64SLLconst(v *Value) bool {
+ v_0 := v.Args[0]
+ // match: (SLLconst [c] (ADDV x x))
+ // result: (SLLconst [c+1] x)
+ for {
+ c := auxIntToInt64(v.AuxInt)
+ if v_0.Op != OpLOONG64ADDV {
+ break
+ }
+ x := v_0.Args[1]
+ if x != v_0.Args[0] {
+ break
+ }
+ v.reset(OpLOONG64SLLconst)
+ v.AuxInt = int64ToAuxInt(c + 1)
+ v.AddArg(x)
+ return true
+ }
+ return false
+}
func rewriteValueLOONG64_OpLOONG64SRA(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
func lshConst32x1Add(x int32) int32 {
// amd64:"SHLL\t[$]2"
+ // loong64:"SLL\t[$]2"
return (x + x) << 1
}
func lshConst64x1Add(x int64) int64 {
// amd64:"SHLQ\t[$]2"
+ // loong64:"SLLV\t[$]2"
return (x + x) << 1
}
func lshConst32x2Add(x int32) int32 {
// amd64:"SHLL\t[$]3"
+ // loong64:"SLL\t[$]3"
return (x + x) << 2
}
func lshConst64x2Add(x int64) int64 {
// amd64:"SHLQ\t[$]3"
+ // loong64:"SLLV\t[$]3"
return (x + x) << 2
}