// * Avoid using Neq32 for writeBarrier.enabled checks.
// Lowering arithmetic
-(Add64 x y) -> (ADD x y)
-(AddPtr x y) -> (ADD x y)
-(Add32 x y) -> (ADD x y)
-(Add16 x y) -> (ADD x y)
-(Add8 x y) -> (ADD x y)
-(Add32F x y) -> (FADDS x y)
-(Add64F x y) -> (FADDD x y)
-
-(Sub64 x y) -> (SUB x y)
-(SubPtr x y) -> (SUB x y)
-(Sub32 x y) -> (SUB x y)
-(Sub16 x y) -> (SUB x y)
-(Sub8 x y) -> (SUB x y)
-(Sub32F x y) -> (FSUBS x y)
-(Sub64F x y) -> (FSUBD x y)
-
-(Mul64 x y) -> (MUL x y)
-(Mul32 x y) -> (MULW x y)
+(Add64 ...) -> (ADD ...)
+(AddPtr ...) -> (ADD ...)
+(Add32 ...) -> (ADD ...)
+(Add16 ...) -> (ADD ...)
+(Add8 ...) -> (ADD ...)
+(Add32F ...) -> (FADDS ...)
+(Add64F ...) -> (FADDD ...)
+
+(Sub64 ...) -> (SUB ...)
+(SubPtr ...) -> (SUB ...)
+(Sub32 ...) -> (SUB ...)
+(Sub16 ...) -> (SUB ...)
+(Sub8 ...) -> (SUB ...)
+(Sub32F ...) -> (FSUBS ...)
+(Sub64F ...) -> (FSUBD ...)
+
+(Mul64 ...) -> (MUL ...)
+(Mul32 ...) -> (MULW ...)
(Mul16 x y) -> (MULW (SignExt16to32 x) (SignExt16to32 y))
(Mul8 x y) -> (MULW (SignExt8to32 x) (SignExt8to32 y))
-(Mul32F x y) -> (FMULS x y)
-(Mul64F x y) -> (FMULD x y)
+(Mul32F ...) -> (FMULS ...)
+(Mul64F ...) -> (FMULD ...)
-(Div32F x y) -> (FDIVS x y)
-(Div64F x y) -> (FDIVD x y)
+(Div32F ...) -> (FDIVS ...)
+(Div64F ...) -> (FDIVD ...)
-(Div64 x y) -> (DIV x y)
-(Div64u x y) -> (DIVU x y)
-(Div32 x y) -> (DIVW x y)
-(Div32u x y) -> (DIVUW x y)
+(Div64 [a] x y) -> (DIV x y)
+(Div64u ...) -> (DIVU ...)
+(Div32 [a] x y) -> (DIVW x y)
+(Div32u ...) -> (DIVUW ...)
(Div16 x y) -> (DIVW (SignExt16to32 x) (SignExt16to32 y))
(Div16u x y) -> (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
(Div8 x y) -> (DIVW (SignExt8to32 x) (SignExt8to32 y))
(Div8u x y) -> (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
-(Hmul64 x y) -> (MULH x y)
-(Hmul64u x y) -> (MULHU x y)
+(Hmul64 ...) -> (MULH ...)
+(Hmul64u ...) -> (MULHU ...)
(Hmul32 x y) -> (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
(Hmul32u x y) -> (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
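// Note (illustrative, not a rule): Hmul32/Hmul32u want the high 32 bits of the
// 64-bit product of two 32-bit inputs. With a full 64x64 MUL available, sign- or
// zero-extending both operands, multiplying, and shifting the product right by 32
// (arithmetically for the signed case, logically for the unsigned one) yields
// exactly those bits.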
// (x + y) / 2 -> (x / 2) + (y / 2) + (x & y & 1)
(Avg64u <t> x y) -> (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
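// Quick check of the identity above with x = 7, y = 4 (illustrative only):
// (7 + 4) / 2 = 5 and (7 >> 1) + (4 >> 1) + (7 & 4 & 1) = 3 + 2 + 0 = 5.
// Computing the halves first avoids the intermediate sum x + y, so the unsigned
// average cannot overflow 64 bits.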
-(Mod64 x y) -> (REM x y)
-(Mod64u x y) -> (REMU x y)
-(Mod32 x y) -> (REMW x y)
-(Mod32u x y) -> (REMUW x y)
+(Mod64 [a] x y) -> (REM x y)
+(Mod64u ...) -> (REMU ...)
+(Mod32 [a] x y) -> (REMW x y)
+(Mod32u ...) -> (REMUW ...)
(Mod16 x y) -> (REMW (SignExt16to32 x) (SignExt16to32 y))
(Mod16u x y) -> (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
(Mod8 x y) -> (REMW (SignExt8to32 x) (SignExt8to32 y))
(Mod8u x y) -> (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
-(And64 x y) -> (AND x y)
-(And32 x y) -> (AND x y)
-(And16 x y) -> (AND x y)
-(And8 x y) -> (AND x y)
+(And64 ...) -> (AND ...)
+(And32 ...) -> (AND ...)
+(And16 ...) -> (AND ...)
+(And8 ...) -> (AND ...)
-(Or64 x y) -> (OR x y)
-(Or32 x y) -> (OR x y)
-(Or16 x y) -> (OR x y)
-(Or8 x y) -> (OR x y)
+(Or64 ...) -> (OR ...)
+(Or32 ...) -> (OR ...)
+(Or16 ...) -> (OR ...)
+(Or8 ...) -> (OR ...)
-(Xor64 x y) -> (XOR x y)
-(Xor32 x y) -> (XOR x y)
-(Xor16 x y) -> (XOR x y)
-(Xor8 x y) -> (XOR x y)
+(Xor64 ...) -> (XOR ...)
+(Xor32 ...) -> (XOR ...)
+(Xor16 ...) -> (XOR ...)
+(Xor8 ...) -> (XOR ...)
(Neg64 x) -> (SUB (MOVDconst) x)
(Neg32 x) -> (SUB (MOVWconst) x)
(Neg16 x) -> (SUB (MOVHconst) x)
(Neg8 x) -> (SUB (MOVBconst) x)
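// Note: negation is lowered as 0 - x; the bare (MOV*const) operand carries the
// default zero AuxInt, i.e. the constant 0 of the appropriate width.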
-(Neg32F x) -> (FNEGS x)
-(Neg64F x) -> (FNEGD x)
+(Neg32F ...) -> (FNEGS ...)
+(Neg64F ...) -> (FNEGD ...)
(Com64 x) -> (XORI [int64(-1)] x)
(Com32 x) -> (XORI [int64(-1)] x)
(Com16 x) -> (XORI [int64(-1)] x)
(Com8 x) -> (XORI [int64(-1)] x)
-(Sqrt x) -> (FSQRTD x)
+(Sqrt ...) -> (FSQRTD ...)
// Zero and sign extension
// Shift left until the bits we want are at the top of the register.
(ZeroExt16to64 <t> x) -> (SRLI [48] (SLLI <t> [48] x))
(ZeroExt32to64 <t> x) -> (SRLI [32] (SLLI <t> [32] x))
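// For example, ZeroExt16to64 of a register whose low 16 bits are 0xabcd (with
// arbitrary junk above them): SLLI [48] moves those bits up to bits 63..48,
// discarding the junk, and SRLI [48] brings them back down with zeros shifted in
// from the left. The sign-extending variants use SRAI for the second shift, so
// copies of the sign bit are shifted in instead. (Illustrative note.)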
-(Cvt32to32F x) -> (FCVTSW x)
-(Cvt32to64F x) -> (FCVTDW x)
-(Cvt64to32F x) -> (FCVTSL x)
-(Cvt64to64F x) -> (FCVTDL x)
+(Cvt32to32F ...) -> (FCVTSW ...)
+(Cvt32to64F ...) -> (FCVTDW ...)
+(Cvt64to32F ...) -> (FCVTSL ...)
+(Cvt64to64F ...) -> (FCVTDL ...)
-(Cvt32Fto32 x) -> (FCVTWS x)
-(Cvt32Fto64 x) -> (FCVTLS x)
-(Cvt64Fto32 x) -> (FCVTWD x)
-(Cvt64Fto64 x) -> (FCVTLD x)
+(Cvt32Fto32 ...) -> (FCVTWS ...)
+(Cvt32Fto64 ...) -> (FCVTLS ...)
+(Cvt64Fto32 ...) -> (FCVTWD ...)
+(Cvt64Fto64 ...) -> (FCVTLD ...)
-(Cvt32Fto64F x) -> (FCVTDS x)
-(Cvt64Fto32F x) -> (FCVTSD x)
+(Cvt32Fto64F ...) -> (FCVTDS ...)
+(Cvt64Fto32F ...) -> (FCVTSD ...)
-(Round32F x) -> x
-(Round64F x) -> x
+(Round32F ...) -> (Copy ...)
+(Round64F ...) -> (Copy ...)
// From genericOps.go:
// "0 if arg0 == 0, -1 if arg0 > 0, undef if arg0<0"
// Truncations
// We ignore the unused high parts of registers, so truncates are just copies.
-(Trunc16to8 x) -> x
-(Trunc32to8 x) -> x
-(Trunc32to16 x) -> x
-(Trunc64to8 x) -> x
-(Trunc64to16 x) -> x
-(Trunc64to32 x) -> x
+(Trunc16to8 ...) -> (Copy ...)
+(Trunc32to8 ...) -> (Copy ...)
+(Trunc32to16 ...) -> (Copy ...)
+(Trunc64to8 ...) -> (Copy ...)
+(Trunc64to16 ...) -> (Copy ...)
+(Trunc64to32 ...) -> (Copy ...)
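// Note: identity rules of the form (Op x) -> x cannot use the "..." shorthand,
// which needs an op on both sides, so the Round and Trunc rules above target
// (Copy ...) instead and rely on the later copy-elimination pass to remove the
// resulting OpCopy values.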
// Shifts
(RotateLeft32 <t> x (MOVWconst [c])) -> (Or32 (Lsh32x64 <t> x (MOVWconst [c&31])) (Rsh32Ux64 <t> x (MOVWconst [-c&31])))
(RotateLeft64 <t> x (MOVDconst [c])) -> (Or64 (Lsh64x64 <t> x (MOVDconst [c&63])) (Rsh64Ux64 <t> x (MOVDconst [-c&63])))
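// Quick check for RotateLeft32 with c = 8 (illustrative only): the rule emits
// (x << (8 & 31)) | (x >> (-8 & 31)) = (x << 8) | (x >> 24), a left rotation by 8
// of a 32-bit value. Masking with 31 (or 63) keeps both shift amounts in range
// even when c is negative or at least the word size.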
-(Less64 x y) -> (SLT x y)
+(Less64 ...) -> (SLT ...)
(Less32 x y) -> (SLT (SignExt32to64 x) (SignExt32to64 y))
(Less16 x y) -> (SLT (SignExt16to64 x) (SignExt16to64 y))
(Less8 x y) -> (SLT (SignExt8to64 x) (SignExt8to64 y))
-(Less64U x y) -> (SLTU x y)
+(Less64U ...) -> (SLTU ...)
(Less32U x y) -> (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
(Less16U x y) -> (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Less8U x y) -> (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
-(Less64F x y) -> (FLTD x y)
-(Less32F x y) -> (FLTS x y)
+(Less64F ...) -> (FLTD ...)
+(Less32F ...) -> (FLTS ...)
// Convert x <= y to !(y < x).
(Leq64 x y) -> (Not (Less64 y x))
(Leq32U x y) -> (Not (Less32U y x))
(Leq16U x y) -> (Not (Less16U y x))
(Leq8U x y) -> (Not (Less8U y x))
-(Leq64F x y) -> (FLED x y)
-(Leq32F x y) -> (FLES x y)
+(Leq64F ...) -> (FLED ...)
+(Leq32F ...) -> (FLES ...)
// Convert x > y to y < x.
(Greater64 x y) -> (Less64 y x)
(Eq32 x y) -> (SEQZ (ZeroExt32to64 (SUB <x.Type> x y)))
(Eq16 x y) -> (SEQZ (ZeroExt16to64 (SUB <x.Type> x y)))
(Eq8 x y) -> (SEQZ (ZeroExt8to64 (SUB <x.Type> x y)))
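// Note: for the sub-word equality tests the inputs may carry junk in their unused
// high bits, so the difference is narrowed with ZeroExt{8,16,32}to64 before the
// SEQZ ("set if equal to zero") test; only the low bits of the relevant width
// decide equality.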
-(Eq64F x y) -> (FEQD x y)
-(Eq32F x y) -> (FEQS x y)
+(Eq64F ...) -> (FEQD ...)
+(Eq32F ...) -> (FEQS ...)
(NeqPtr x y) -> (SNEZ (SUB <x.Type> x y))
(Neq64 x y) -> (SNEZ (SUB <x.Type> x y))
(Neq32 x y) -> (SNEZ (ZeroExt32to64 (SUB <x.Type> x y)))
(Neq16 x y) -> (SNEZ (ZeroExt16to64 (SUB <x.Type> x y)))
(Neq8 x y) -> (SNEZ (ZeroExt8to64 (SUB <x.Type> x y)))
-(Neq64F x y) -> (FNED x y)
-(Neq32F x y) -> (FNES x y)
+(Neq64F ...) -> (FNED ...)
+(Neq32F ...) -> (FNES ...)
// Loads
(Load <t> ptr mem) && t.IsBoolean() -> (MOVBUload ptr mem)
(ADD <ptr.Type> ptr (MOVDconst [s-moveSize(t.(*types.Type).Alignment(), config)]))
mem)
-(Convert x mem) -> (MOVconvert x mem)
+(Convert ...) -> (MOVconvert ...)
// Checks
(IsNonNil p) -> (NeqPtr (MOVDconst) p)
-(IsInBounds idx len) -> (Less64U idx len)
-(IsSliceInBounds idx len) -> (Leq64U idx len)
+(IsInBounds ...) -> (Less64U ...)
+(IsSliceInBounds ...) -> (Leq64U ...)
// Trivial lowering
-(NilCheck ptr mem) -> (LoweredNilCheck ptr mem)
-(GetClosurePtr) -> (LoweredGetClosurePtr)
-(GetCallerSP) -> (LoweredGetCallerSP)
-(GetCallerPC) -> (LoweredGetCallerPC)
+(NilCheck ...) -> (LoweredNilCheck ...)
+(GetClosurePtr ...) -> (LoweredGetClosurePtr ...)
+(GetCallerSP ...) -> (LoweredGetCallerSP ...)
+(GetCallerPC ...) -> (LoweredGetCallerPC ...)
// Write barrier.
-(WB {fn} destptr srcptr mem) -> (LoweredWB {fn} destptr srcptr mem)
+(WB ...) -> (LoweredWB ...)
(PanicBounds [kind] x y mem) && boundsABI(kind) == 0 -> (LoweredPanicBoundsA [kind] x y mem)
(PanicBounds [kind] x y mem) && boundsABI(kind) == 1 -> (LoweredPanicBoundsB [kind] x y mem)
(PanicBounds [kind] x y mem) && boundsABI(kind) == 2 -> (LoweredPanicBoundsC [kind] x y mem)
// Boolean ops; 0=false, 1=true
-(AndB x y) -> (AND x y)
-(OrB x y) -> (OR x y)
+(AndB ...) -> (AND ...)
+(OrB ...) -> (OR ...)
(EqB x y) -> (XORI [1] (XOR <typ.Bool> x y))
-(NeqB x y) -> (XOR x y)
+(NeqB ...) -> (XOR ...)
(Not x) -> (XORI [1] x)
// Lowering pointer arithmetic
(OffPtr [off] ptr) && is32Bit(off) -> (ADDI [off] ptr)
(OffPtr [off] ptr) -> (ADD (MOVDconst [off]) ptr)
-(Const8 [val]) -> (MOVBconst [val])
-(Const16 [val]) -> (MOVHconst [val])
-(Const32 [val]) -> (MOVWconst [val])
-(Const64 [val]) -> (MOVDconst [val])
+(Const8 ...) -> (MOVBconst ...)
+(Const16 ...) -> (MOVHconst ...)
+(Const32 ...) -> (MOVWconst ...)
+(Const64 ...) -> (MOVDconst ...)
(Const32F [val]) -> (FMVSX (MOVWconst [int64(int32(math.Float32bits(float32(math.Float64frombits(uint64(val))))))]))
(Const64F [val]) -> (FMVDX (MOVDconst [val]))
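// Note: the AuxInt of a Const32F holds the value as float64 bits, so the rule
// narrows it to its float32 bit pattern, materializes that in an integer register
// (MOVWconst) and moves it to a float register with FMVSX; Const64F does the same
// via MOVDconst and FMVDX.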
(ConstNil) -> (MOVDconst [0])
-(ConstBool [b]) -> (MOVBconst [b])
+(ConstBool ...) -> (MOVBconst ...)
// Convert 64 bit immediate to two 32 bit immediates, combine with add and shift.
// The lower 32 bit immediate will be treated as signed, so the upper immediate is
// incremented by one when the lower half is negative to compensate.
// Fold ADD+MOVDconst into ADDI where possible.
(ADD (MOVDconst [off]) ptr) && is32Bit(off) -> (ADDI [off] ptr)
-(Addr {sym} base) -> (MOVaddr {sym} base)
+(Addr ...) -> (MOVaddr ...)
(LocalAddr {sym} base _) -> (MOVaddr {sym} base)
// Conditional branches
(If cond yes no) -> (BNE cond yes no)
// Calls
-(StaticCall [argwid] {target} mem) -> (CALLstatic [argwid] {target} mem)
-(ClosureCall [argwid] entry closure mem) -> (CALLclosure [argwid] entry closure mem)
-(InterCall [argwid] entry mem) -> (CALLinter [argwid] entry mem)
+(StaticCall ...) -> (CALLstatic ...)
+(ClosureCall ...) -> (CALLclosure ...)
+(InterCall ...) -> (CALLinter ...)
// remove redundant *const ops
(ADDI [0] x) -> x
func rewriteValueRISCV64(v *Value) bool {
switch v.Op {
case OpAdd16:
- return rewriteValueRISCV64_OpAdd16(v)
+ v.Op = OpRISCV64ADD
+ return true
case OpAdd32:
- return rewriteValueRISCV64_OpAdd32(v)
+ v.Op = OpRISCV64ADD
+ return true
case OpAdd32F:
- return rewriteValueRISCV64_OpAdd32F(v)
+ v.Op = OpRISCV64FADDS
+ return true
case OpAdd64:
- return rewriteValueRISCV64_OpAdd64(v)
+ v.Op = OpRISCV64ADD
+ return true
case OpAdd64F:
- return rewriteValueRISCV64_OpAdd64F(v)
+ v.Op = OpRISCV64FADDD
+ return true
case OpAdd8:
- return rewriteValueRISCV64_OpAdd8(v)
+ v.Op = OpRISCV64ADD
+ return true
case OpAddPtr:
- return rewriteValueRISCV64_OpAddPtr(v)
+ v.Op = OpRISCV64ADD
+ return true
case OpAddr:
- return rewriteValueRISCV64_OpAddr(v)
+ v.Op = OpRISCV64MOVaddr
+ return true
case OpAnd16:
- return rewriteValueRISCV64_OpAnd16(v)
+ v.Op = OpRISCV64AND
+ return true
case OpAnd32:
- return rewriteValueRISCV64_OpAnd32(v)
+ v.Op = OpRISCV64AND
+ return true
case OpAnd64:
- return rewriteValueRISCV64_OpAnd64(v)
+ v.Op = OpRISCV64AND
+ return true
case OpAnd8:
- return rewriteValueRISCV64_OpAnd8(v)
+ v.Op = OpRISCV64AND
+ return true
case OpAndB:
- return rewriteValueRISCV64_OpAndB(v)
+ v.Op = OpRISCV64AND
+ return true
case OpAvg64u:
return rewriteValueRISCV64_OpAvg64u(v)
case OpClosureCall:
- return rewriteValueRISCV64_OpClosureCall(v)
+ v.Op = OpRISCV64CALLclosure
+ return true
case OpCom16:
return rewriteValueRISCV64_OpCom16(v)
case OpCom32:
case OpCom8:
return rewriteValueRISCV64_OpCom8(v)
case OpConst16:
- return rewriteValueRISCV64_OpConst16(v)
+ v.Op = OpRISCV64MOVHconst
+ return true
case OpConst32:
- return rewriteValueRISCV64_OpConst32(v)
+ v.Op = OpRISCV64MOVWconst
+ return true
case OpConst32F:
return rewriteValueRISCV64_OpConst32F(v)
case OpConst64:
- return rewriteValueRISCV64_OpConst64(v)
+ v.Op = OpRISCV64MOVDconst
+ return true
case OpConst64F:
return rewriteValueRISCV64_OpConst64F(v)
case OpConst8:
- return rewriteValueRISCV64_OpConst8(v)
+ v.Op = OpRISCV64MOVBconst
+ return true
case OpConstBool:
- return rewriteValueRISCV64_OpConstBool(v)
+ v.Op = OpRISCV64MOVBconst
+ return true
case OpConstNil:
return rewriteValueRISCV64_OpConstNil(v)
case OpConvert:
- return rewriteValueRISCV64_OpConvert(v)
+ v.Op = OpRISCV64MOVconvert
+ return true
case OpCvt32Fto32:
- return rewriteValueRISCV64_OpCvt32Fto32(v)
+ v.Op = OpRISCV64FCVTWS
+ return true
case OpCvt32Fto64:
- return rewriteValueRISCV64_OpCvt32Fto64(v)
+ v.Op = OpRISCV64FCVTLS
+ return true
case OpCvt32Fto64F:
- return rewriteValueRISCV64_OpCvt32Fto64F(v)
+ v.Op = OpRISCV64FCVTDS
+ return true
case OpCvt32to32F:
- return rewriteValueRISCV64_OpCvt32to32F(v)
+ v.Op = OpRISCV64FCVTSW
+ return true
case OpCvt32to64F:
- return rewriteValueRISCV64_OpCvt32to64F(v)
+ v.Op = OpRISCV64FCVTDW
+ return true
case OpCvt64Fto32:
- return rewriteValueRISCV64_OpCvt64Fto32(v)
+ v.Op = OpRISCV64FCVTWD
+ return true
case OpCvt64Fto32F:
- return rewriteValueRISCV64_OpCvt64Fto32F(v)
+ v.Op = OpRISCV64FCVTSD
+ return true
case OpCvt64Fto64:
- return rewriteValueRISCV64_OpCvt64Fto64(v)
+ v.Op = OpRISCV64FCVTLD
+ return true
case OpCvt64to32F:
- return rewriteValueRISCV64_OpCvt64to32F(v)
+ v.Op = OpRISCV64FCVTSL
+ return true
case OpCvt64to64F:
- return rewriteValueRISCV64_OpCvt64to64F(v)
+ v.Op = OpRISCV64FCVTDL
+ return true
case OpDiv16:
return rewriteValueRISCV64_OpDiv16(v)
case OpDiv16u:
case OpDiv32:
return rewriteValueRISCV64_OpDiv32(v)
case OpDiv32F:
- return rewriteValueRISCV64_OpDiv32F(v)
+ v.Op = OpRISCV64FDIVS
+ return true
case OpDiv32u:
- return rewriteValueRISCV64_OpDiv32u(v)
+ v.Op = OpRISCV64DIVUW
+ return true
case OpDiv64:
return rewriteValueRISCV64_OpDiv64(v)
case OpDiv64F:
- return rewriteValueRISCV64_OpDiv64F(v)
+ v.Op = OpRISCV64FDIVD
+ return true
case OpDiv64u:
- return rewriteValueRISCV64_OpDiv64u(v)
+ v.Op = OpRISCV64DIVU
+ return true
case OpDiv8:
return rewriteValueRISCV64_OpDiv8(v)
case OpDiv8u:
case OpEq32:
return rewriteValueRISCV64_OpEq32(v)
case OpEq32F:
- return rewriteValueRISCV64_OpEq32F(v)
+ v.Op = OpRISCV64FEQS
+ return true
case OpEq64:
return rewriteValueRISCV64_OpEq64(v)
case OpEq64F:
- return rewriteValueRISCV64_OpEq64F(v)
+ v.Op = OpRISCV64FEQD
+ return true
case OpEq8:
return rewriteValueRISCV64_OpEq8(v)
case OpEqB:
case OpGeq8U:
return rewriteValueRISCV64_OpGeq8U(v)
case OpGetCallerPC:
- return rewriteValueRISCV64_OpGetCallerPC(v)
+ v.Op = OpRISCV64LoweredGetCallerPC
+ return true
case OpGetCallerSP:
- return rewriteValueRISCV64_OpGetCallerSP(v)
+ v.Op = OpRISCV64LoweredGetCallerSP
+ return true
case OpGetClosurePtr:
- return rewriteValueRISCV64_OpGetClosurePtr(v)
+ v.Op = OpRISCV64LoweredGetClosurePtr
+ return true
case OpGreater16:
return rewriteValueRISCV64_OpGreater16(v)
case OpGreater16U:
case OpHmul32u:
return rewriteValueRISCV64_OpHmul32u(v)
case OpHmul64:
- return rewriteValueRISCV64_OpHmul64(v)
+ v.Op = OpRISCV64MULH
+ return true
case OpHmul64u:
- return rewriteValueRISCV64_OpHmul64u(v)
+ v.Op = OpRISCV64MULHU
+ return true
case OpInterCall:
- return rewriteValueRISCV64_OpInterCall(v)
+ v.Op = OpRISCV64CALLinter
+ return true
case OpIsInBounds:
- return rewriteValueRISCV64_OpIsInBounds(v)
+ v.Op = OpLess64U
+ return true
case OpIsNonNil:
return rewriteValueRISCV64_OpIsNonNil(v)
case OpIsSliceInBounds:
- return rewriteValueRISCV64_OpIsSliceInBounds(v)
+ v.Op = OpLeq64U
+ return true
case OpLeq16:
return rewriteValueRISCV64_OpLeq16(v)
case OpLeq16U:
case OpLeq32:
return rewriteValueRISCV64_OpLeq32(v)
case OpLeq32F:
- return rewriteValueRISCV64_OpLeq32F(v)
+ v.Op = OpRISCV64FLES
+ return true
case OpLeq32U:
return rewriteValueRISCV64_OpLeq32U(v)
case OpLeq64:
return rewriteValueRISCV64_OpLeq64(v)
case OpLeq64F:
- return rewriteValueRISCV64_OpLeq64F(v)
+ v.Op = OpRISCV64FLED
+ return true
case OpLeq64U:
return rewriteValueRISCV64_OpLeq64U(v)
case OpLeq8:
case OpLess32:
return rewriteValueRISCV64_OpLess32(v)
case OpLess32F:
- return rewriteValueRISCV64_OpLess32F(v)
+ v.Op = OpRISCV64FLTS
+ return true
case OpLess32U:
return rewriteValueRISCV64_OpLess32U(v)
case OpLess64:
- return rewriteValueRISCV64_OpLess64(v)
+ v.Op = OpRISCV64SLT
+ return true
case OpLess64F:
- return rewriteValueRISCV64_OpLess64F(v)
+ v.Op = OpRISCV64FLTD
+ return true
case OpLess64U:
- return rewriteValueRISCV64_OpLess64U(v)
+ v.Op = OpRISCV64SLTU
+ return true
case OpLess8:
return rewriteValueRISCV64_OpLess8(v)
case OpLess8U:
case OpMod32:
return rewriteValueRISCV64_OpMod32(v)
case OpMod32u:
- return rewriteValueRISCV64_OpMod32u(v)
+ v.Op = OpRISCV64REMUW
+ return true
case OpMod64:
return rewriteValueRISCV64_OpMod64(v)
case OpMod64u:
- return rewriteValueRISCV64_OpMod64u(v)
+ v.Op = OpRISCV64REMU
+ return true
case OpMod8:
return rewriteValueRISCV64_OpMod8(v)
case OpMod8u:
case OpMul16:
return rewriteValueRISCV64_OpMul16(v)
case OpMul32:
- return rewriteValueRISCV64_OpMul32(v)
+ v.Op = OpRISCV64MULW
+ return true
case OpMul32F:
- return rewriteValueRISCV64_OpMul32F(v)
+ v.Op = OpRISCV64FMULS
+ return true
case OpMul64:
- return rewriteValueRISCV64_OpMul64(v)
+ v.Op = OpRISCV64MUL
+ return true
case OpMul64F:
- return rewriteValueRISCV64_OpMul64F(v)
+ v.Op = OpRISCV64FMULD
+ return true
case OpMul8:
return rewriteValueRISCV64_OpMul8(v)
case OpNeg16:
case OpNeg32:
return rewriteValueRISCV64_OpNeg32(v)
case OpNeg32F:
- return rewriteValueRISCV64_OpNeg32F(v)
+ v.Op = OpRISCV64FNEGS
+ return true
case OpNeg64:
return rewriteValueRISCV64_OpNeg64(v)
case OpNeg64F:
- return rewriteValueRISCV64_OpNeg64F(v)
+ v.Op = OpRISCV64FNEGD
+ return true
case OpNeg8:
return rewriteValueRISCV64_OpNeg8(v)
case OpNeq16:
case OpNeq32:
return rewriteValueRISCV64_OpNeq32(v)
case OpNeq32F:
- return rewriteValueRISCV64_OpNeq32F(v)
+ v.Op = OpRISCV64FNES
+ return true
case OpNeq64:
return rewriteValueRISCV64_OpNeq64(v)
case OpNeq64F:
- return rewriteValueRISCV64_OpNeq64F(v)
+ v.Op = OpRISCV64FNED
+ return true
case OpNeq8:
return rewriteValueRISCV64_OpNeq8(v)
case OpNeqB:
- return rewriteValueRISCV64_OpNeqB(v)
+ v.Op = OpRISCV64XOR
+ return true
case OpNeqPtr:
return rewriteValueRISCV64_OpNeqPtr(v)
case OpNilCheck:
- return rewriteValueRISCV64_OpNilCheck(v)
+ v.Op = OpRISCV64LoweredNilCheck
+ return true
case OpNot:
return rewriteValueRISCV64_OpNot(v)
case OpOffPtr:
return rewriteValueRISCV64_OpOffPtr(v)
case OpOr16:
- return rewriteValueRISCV64_OpOr16(v)
+ v.Op = OpRISCV64OR
+ return true
case OpOr32:
- return rewriteValueRISCV64_OpOr32(v)
+ v.Op = OpRISCV64OR
+ return true
case OpOr64:
- return rewriteValueRISCV64_OpOr64(v)
+ v.Op = OpRISCV64OR
+ return true
case OpOr8:
- return rewriteValueRISCV64_OpOr8(v)
+ v.Op = OpRISCV64OR
+ return true
case OpOrB:
- return rewriteValueRISCV64_OpOrB(v)
+ v.Op = OpRISCV64OR
+ return true
case OpPanicBounds:
return rewriteValueRISCV64_OpPanicBounds(v)
case OpRISCV64ADD:
case OpRotateLeft8:
return rewriteValueRISCV64_OpRotateLeft8(v)
case OpRound32F:
- return rewriteValueRISCV64_OpRound32F(v)
+ v.Op = OpCopy
+ return true
case OpRound64F:
- return rewriteValueRISCV64_OpRound64F(v)
+ v.Op = OpCopy
+ return true
case OpRsh16Ux16:
return rewriteValueRISCV64_OpRsh16Ux16(v)
case OpRsh16Ux32:
case OpSlicemask:
return rewriteValueRISCV64_OpSlicemask(v)
case OpSqrt:
- return rewriteValueRISCV64_OpSqrt(v)
+ v.Op = OpRISCV64FSQRTD
+ return true
case OpStaticCall:
- return rewriteValueRISCV64_OpStaticCall(v)
+ v.Op = OpRISCV64CALLstatic
+ return true
case OpStore:
return rewriteValueRISCV64_OpStore(v)
case OpSub16:
- return rewriteValueRISCV64_OpSub16(v)
+ v.Op = OpRISCV64SUB
+ return true
case OpSub32:
- return rewriteValueRISCV64_OpSub32(v)
+ v.Op = OpRISCV64SUB
+ return true
case OpSub32F:
- return rewriteValueRISCV64_OpSub32F(v)
+ v.Op = OpRISCV64FSUBS
+ return true
case OpSub64:
- return rewriteValueRISCV64_OpSub64(v)
+ v.Op = OpRISCV64SUB
+ return true
case OpSub64F:
- return rewriteValueRISCV64_OpSub64F(v)
+ v.Op = OpRISCV64FSUBD
+ return true
case OpSub8:
- return rewriteValueRISCV64_OpSub8(v)
+ v.Op = OpRISCV64SUB
+ return true
case OpSubPtr:
- return rewriteValueRISCV64_OpSubPtr(v)
+ v.Op = OpRISCV64SUB
+ return true
case OpTrunc16to8:
- return rewriteValueRISCV64_OpTrunc16to8(v)
+ v.Op = OpCopy
+ return true
case OpTrunc32to16:
- return rewriteValueRISCV64_OpTrunc32to16(v)
+ v.Op = OpCopy
+ return true
case OpTrunc32to8:
- return rewriteValueRISCV64_OpTrunc32to8(v)
+ v.Op = OpCopy
+ return true
case OpTrunc64to16:
- return rewriteValueRISCV64_OpTrunc64to16(v)
+ v.Op = OpCopy
+ return true
case OpTrunc64to32:
- return rewriteValueRISCV64_OpTrunc64to32(v)
+ v.Op = OpCopy
+ return true
case OpTrunc64to8:
- return rewriteValueRISCV64_OpTrunc64to8(v)
+ v.Op = OpCopy
+ return true
case OpWB:
- return rewriteValueRISCV64_OpWB(v)
+ v.Op = OpRISCV64LoweredWB
+ return true
case OpXor16:
- return rewriteValueRISCV64_OpXor16(v)
+ v.Op = OpRISCV64XOR
+ return true
case OpXor32:
- return rewriteValueRISCV64_OpXor32(v)
+ v.Op = OpRISCV64XOR
+ return true
case OpXor64:
- return rewriteValueRISCV64_OpXor64(v)
+ v.Op = OpRISCV64XOR
+ return true
case OpXor8:
- return rewriteValueRISCV64_OpXor8(v)
+ v.Op = OpRISCV64XOR
+ return true
case OpZero:
return rewriteValueRISCV64_OpZero(v)
case OpZeroExt16to32:
}
return false
}
-func rewriteValueRISCV64_OpAdd16(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Add16 x y)
- // result: (ADD x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64ADD)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpAdd32(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Add32 x y)
- // result: (ADD x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64ADD)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpAdd32F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Add32F x y)
- // result: (FADDS x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FADDS)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpAdd64(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Add64 x y)
- // result: (ADD x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64ADD)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpAdd64F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Add64F x y)
- // result: (FADDD x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FADDD)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpAdd8(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Add8 x y)
- // result: (ADD x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64ADD)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpAddPtr(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (AddPtr x y)
- // result: (ADD x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64ADD)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpAddr(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Addr {sym} base)
- // result: (MOVaddr {sym} base)
- for {
- sym := v.Aux
- base := v_0
- v.reset(OpRISCV64MOVaddr)
- v.Aux = sym
- v.AddArg(base)
- return true
- }
-}
-func rewriteValueRISCV64_OpAnd16(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (And16 x y)
- // result: (AND x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64AND)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpAnd32(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (And32 x y)
- // result: (AND x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64AND)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpAnd64(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (And64 x y)
- // result: (AND x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64AND)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpAnd8(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (And8 x y)
- // result: (AND x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64AND)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpAndB(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (AndB x y)
- // result: (AND x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64AND)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpClosureCall(v *Value) bool {
- v_2 := v.Args[2]
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (ClosureCall [argwid] entry closure mem)
- // result: (CALLclosure [argwid] entry closure mem)
- for {
- argwid := v.AuxInt
- entry := v_0
- closure := v_1
- mem := v_2
- v.reset(OpRISCV64CALLclosure)
- v.AuxInt = argwid
- v.AddArg(entry)
- v.AddArg(closure)
- v.AddArg(mem)
- return true
- }
-}
func rewriteValueRISCV64_OpCom16(v *Value) bool {
v_0 := v.Args[0]
// match: (Com16 x)
return true
}
}
-func rewriteValueRISCV64_OpConst16(v *Value) bool {
- // match: (Const16 [val])
- // result: (MOVHconst [val])
- for {
- val := v.AuxInt
- v.reset(OpRISCV64MOVHconst)
- v.AuxInt = val
- return true
- }
-}
-func rewriteValueRISCV64_OpConst32(v *Value) bool {
- // match: (Const32 [val])
- // result: (MOVWconst [val])
- for {
- val := v.AuxInt
- v.reset(OpRISCV64MOVWconst)
- v.AuxInt = val
- return true
- }
-}
func rewriteValueRISCV64_OpConst32F(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
return true
}
}
-func rewriteValueRISCV64_OpConst64(v *Value) bool {
- // match: (Const64 [val])
- // result: (MOVDconst [val])
- for {
- val := v.AuxInt
- v.reset(OpRISCV64MOVDconst)
- v.AuxInt = val
- return true
- }
-}
func rewriteValueRISCV64_OpConst64F(v *Value) bool {
b := v.Block
typ := &b.Func.Config.Types
return true
}
}
-func rewriteValueRISCV64_OpConst8(v *Value) bool {
- // match: (Const8 [val])
- // result: (MOVBconst [val])
- for {
- val := v.AuxInt
- v.reset(OpRISCV64MOVBconst)
- v.AuxInt = val
- return true
- }
-}
-func rewriteValueRISCV64_OpConstBool(v *Value) bool {
- // match: (ConstBool [b])
- // result: (MOVBconst [b])
- for {
- b := v.AuxInt
- v.reset(OpRISCV64MOVBconst)
- v.AuxInt = b
- return true
- }
-}
func rewriteValueRISCV64_OpConstNil(v *Value) bool {
// match: (ConstNil)
// result: (MOVDconst [0])
return true
}
}
-func rewriteValueRISCV64_OpConvert(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Convert x mem)
- // result: (MOVconvert x mem)
- for {
- x := v_0
- mem := v_1
- v.reset(OpRISCV64MOVconvert)
- v.AddArg(x)
- v.AddArg(mem)
- return true
- }
-}
-func rewriteValueRISCV64_OpCvt32Fto32(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Cvt32Fto32 x)
- // result: (FCVTWS x)
- for {
- x := v_0
- v.reset(OpRISCV64FCVTWS)
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpCvt32Fto64(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Cvt32Fto64 x)
- // result: (FCVTLS x)
- for {
- x := v_0
- v.reset(OpRISCV64FCVTLS)
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpCvt32Fto64F(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Cvt32Fto64F x)
- // result: (FCVTDS x)
- for {
- x := v_0
- v.reset(OpRISCV64FCVTDS)
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpCvt32to32F(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Cvt32to32F x)
- // result: (FCVTSW x)
- for {
- x := v_0
- v.reset(OpRISCV64FCVTSW)
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpCvt32to64F(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Cvt32to64F x)
- // result: (FCVTDW x)
- for {
- x := v_0
- v.reset(OpRISCV64FCVTDW)
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpCvt64Fto32(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Cvt64Fto32 x)
- // result: (FCVTWD x)
- for {
- x := v_0
- v.reset(OpRISCV64FCVTWD)
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpCvt64Fto32F(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Cvt64Fto32F x)
- // result: (FCVTSD x)
- for {
- x := v_0
- v.reset(OpRISCV64FCVTSD)
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpCvt64Fto64(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Cvt64Fto64 x)
- // result: (FCVTLD x)
- for {
- x := v_0
- v.reset(OpRISCV64FCVTLD)
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpCvt64to32F(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Cvt64to32F x)
- // result: (FCVTSL x)
- for {
- x := v_0
- v.reset(OpRISCV64FCVTSL)
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpCvt64to64F(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Cvt64to64F x)
- // result: (FCVTDL x)
- for {
- x := v_0
- v.reset(OpRISCV64FCVTDL)
- v.AddArg(x)
- return true
- }
-}
func rewriteValueRISCV64_OpDiv16(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
func rewriteValueRISCV64_OpDiv32(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (Div32 x y)
+ // match: (Div32 [a] x y)
// result: (DIVW x y)
for {
x := v_0
return true
}
}
-func rewriteValueRISCV64_OpDiv32F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Div32F x y)
- // result: (FDIVS x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FDIVS)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpDiv32u(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Div32u x y)
- // result: (DIVUW x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64DIVUW)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpDiv64(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (Div64 x y)
+ // match: (Div64 [a] x y)
// result: (DIV x y)
for {
x := v_0
return true
}
}
-func rewriteValueRISCV64_OpDiv64F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Div64F x y)
- // result: (FDIVD x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FDIVD)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpDiv64u(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Div64u x y)
- // result: (DIVU x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64DIVU)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpDiv8(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpEq32F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Eq32F x y)
- // result: (FEQS x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FEQS)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpEq64(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpEq64F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Eq64F x y)
- // result: (FEQD x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FEQD)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpEq8(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpGetCallerPC(v *Value) bool {
- // match: (GetCallerPC)
- // result: (LoweredGetCallerPC)
- for {
- v.reset(OpRISCV64LoweredGetCallerPC)
- return true
- }
-}
-func rewriteValueRISCV64_OpGetCallerSP(v *Value) bool {
- // match: (GetCallerSP)
- // result: (LoweredGetCallerSP)
- for {
- v.reset(OpRISCV64LoweredGetCallerSP)
- return true
- }
-}
-func rewriteValueRISCV64_OpGetClosurePtr(v *Value) bool {
- // match: (GetClosurePtr)
- // result: (LoweredGetClosurePtr)
- for {
- v.reset(OpRISCV64LoweredGetClosurePtr)
- return true
- }
-}
func rewriteValueRISCV64_OpGreater16(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
for {
x := v_0
y := v_1
- v.reset(OpRISCV64SRLI)
- v.AuxInt = 32
- v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
- v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
- v1.AddArg(x)
- v0.AddArg(v1)
- v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
- v2.AddArg(y)
- v0.AddArg(v2)
- v.AddArg(v0)
- return true
- }
-}
-func rewriteValueRISCV64_OpHmul64(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Hmul64 x y)
- // result: (MULH x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64MULH)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpHmul64u(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Hmul64u x y)
- // result: (MULHU x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64MULHU)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpInterCall(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (InterCall [argwid] entry mem)
- // result: (CALLinter [argwid] entry mem)
- for {
- argwid := v.AuxInt
- entry := v_0
- mem := v_1
- v.reset(OpRISCV64CALLinter)
- v.AuxInt = argwid
- v.AddArg(entry)
- v.AddArg(mem)
- return true
- }
-}
-func rewriteValueRISCV64_OpIsInBounds(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (IsInBounds idx len)
- // result: (Less64U idx len)
- for {
- idx := v_0
- len := v_1
- v.reset(OpLess64U)
- v.AddArg(idx)
- v.AddArg(len)
+ v.reset(OpRISCV64SRLI)
+ v.AuxInt = 32
+ v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
+ v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+ v1.AddArg(x)
+ v0.AddArg(v1)
+ v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
+ v2.AddArg(y)
+ v0.AddArg(v2)
+ v.AddArg(v0)
return true
}
}
return true
}
}
-func rewriteValueRISCV64_OpIsSliceInBounds(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (IsSliceInBounds idx len)
- // result: (Leq64U idx len)
- for {
- idx := v_0
- len := v_1
- v.reset(OpLeq64U)
- v.AddArg(idx)
- v.AddArg(len)
- return true
- }
-}
func rewriteValueRISCV64_OpLeq16(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpLeq32F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Leq32F x y)
- // result: (FLES x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FLES)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpLeq64F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Leq64F x y)
- // result: (FLED x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FLED)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpLess32F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Less32F x y)
- // result: (FLTS x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FLTS)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpLess32U(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpLess64(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Less64 x y)
- // result: (SLT x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64SLT)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpLess64F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Less64F x y)
- // result: (FLTD x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FLTD)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpLess64U(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Less64U x y)
- // result: (SLTU x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64SLTU)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpLess8(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
func rewriteValueRISCV64_OpMod32(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (Mod32 x y)
+ // match: (Mod32 [a] x y)
// result: (REMW x y)
for {
x := v_0
return true
}
}
-func rewriteValueRISCV64_OpMod32u(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Mod32u x y)
- // result: (REMUW x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64REMUW)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpMod64(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
- // match: (Mod64 x y)
+ // match: (Mod64 [a] x y)
// result: (REM x y)
for {
x := v_0
return true
}
}
-func rewriteValueRISCV64_OpMod64u(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Mod64u x y)
- // result: (REMU x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64REMU)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpMod8(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpMul32(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Mul32 x y)
- // result: (MULW x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64MULW)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpMul32F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Mul32F x y)
- // result: (FMULS x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FMULS)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpMul64(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Mul64 x y)
- // result: (MUL x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64MUL)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpMul64F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Mul64F x y)
- // result: (FMULD x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FMULD)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpMul8(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpNeg32F(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Neg32F x)
- // result: (FNEGS x)
- for {
- x := v_0
- v.reset(OpRISCV64FNEGS)
- v.AddArg(x)
- return true
- }
-}
func rewriteValueRISCV64_OpNeg64(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
return true
}
}
-func rewriteValueRISCV64_OpNeg64F(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Neg64F x)
- // result: (FNEGD x)
- for {
- x := v_0
- v.reset(OpRISCV64FNEGD)
- v.AddArg(x)
- return true
- }
-}
func rewriteValueRISCV64_OpNeg8(v *Value) bool {
v_0 := v.Args[0]
b := v.Block
return true
}
}
-func rewriteValueRISCV64_OpNeq32F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Neq32F x y)
- // result: (FNES x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FNES)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpNeq64(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpNeq64F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Neq64F x y)
- // result: (FNED x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FNED)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpNeq8(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpNeqB(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (NeqB x y)
- // result: (XOR x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64XOR)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpNilCheck(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (NilCheck ptr mem)
- // result: (LoweredNilCheck ptr mem)
- for {
- ptr := v_0
- mem := v_1
- v.reset(OpRISCV64LoweredNilCheck)
- v.AddArg(ptr)
- v.AddArg(mem)
- return true
- }
-}
func rewriteValueRISCV64_OpNot(v *Value) bool {
v_0 := v.Args[0]
// match: (Not x)
}
v.reset(OpRISCV64MOVaddr)
v.AuxInt = off
- v.AddArg(ptr)
- return true
- }
- // match: (OffPtr [off] ptr)
- // cond: is32Bit(off)
- // result: (ADDI [off] ptr)
- for {
- off := v.AuxInt
- ptr := v_0
- if !(is32Bit(off)) {
- break
- }
- v.reset(OpRISCV64ADDI)
- v.AuxInt = off
- v.AddArg(ptr)
- return true
- }
- // match: (OffPtr [off] ptr)
- // result: (ADD (MOVDconst [off]) ptr)
- for {
- off := v.AuxInt
- ptr := v_0
- v.reset(OpRISCV64ADD)
- v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
- v0.AuxInt = off
- v.AddArg(v0)
- v.AddArg(ptr)
- return true
- }
-}
-func rewriteValueRISCV64_OpOr16(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Or16 x y)
- // result: (OR x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64OR)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpOr32(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Or32 x y)
- // result: (OR x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64OR)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpOr64(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Or64 x y)
- // result: (OR x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64OR)
- v.AddArg(x)
- v.AddArg(y)
+ v.AddArg(ptr)
return true
}
-}
-func rewriteValueRISCV64_OpOr8(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Or8 x y)
- // result: (OR x y)
+ // match: (OffPtr [off] ptr)
+ // cond: is32Bit(off)
+ // result: (ADDI [off] ptr)
for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64OR)
- v.AddArg(x)
- v.AddArg(y)
+ off := v.AuxInt
+ ptr := v_0
+ if !(is32Bit(off)) {
+ break
+ }
+ v.reset(OpRISCV64ADDI)
+ v.AuxInt = off
+ v.AddArg(ptr)
return true
}
-}
-func rewriteValueRISCV64_OpOrB(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (OrB x y)
- // result: (OR x y)
+ // match: (OffPtr [off] ptr)
+ // result: (ADD (MOVDconst [off]) ptr)
for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64OR)
- v.AddArg(x)
- v.AddArg(y)
+ off := v.AuxInt
+ ptr := v_0
+ v.reset(OpRISCV64ADD)
+ v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
+ v0.AuxInt = off
+ v.AddArg(v0)
+ v.AddArg(ptr)
return true
}
}
}
return false
}
-func rewriteValueRISCV64_OpRound32F(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Round32F x)
- // result: x
- for {
- x := v_0
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpRound64F(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Round64F x)
- // result: x
- for {
- x := v_0
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
- return true
- }
-}
func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]
return true
}
}
-func rewriteValueRISCV64_OpSqrt(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Sqrt x)
- // result: (FSQRTD x)
- for {
- x := v_0
- v.reset(OpRISCV64FSQRTD)
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpStaticCall(v *Value) bool {
- v_0 := v.Args[0]
- // match: (StaticCall [argwid] {target} mem)
- // result: (CALLstatic [argwid] {target} mem)
- for {
- argwid := v.AuxInt
- target := v.Aux
- mem := v_0
- v.reset(OpRISCV64CALLstatic)
- v.AuxInt = argwid
- v.Aux = target
- v.AddArg(mem)
- return true
- }
-}
func rewriteValueRISCV64_OpStore(v *Value) bool {
v_2 := v.Args[2]
v_1 := v.Args[1]
}
return false
}
-func rewriteValueRISCV64_OpSub16(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Sub16 x y)
- // result: (SUB x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64SUB)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpSub32(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Sub32 x y)
- // result: (SUB x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64SUB)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpSub32F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Sub32F x y)
- // result: (FSUBS x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FSUBS)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpSub64(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Sub64 x y)
- // result: (SUB x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64SUB)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpSub64F(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Sub64F x y)
- // result: (FSUBD x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64FSUBD)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpSub8(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Sub8 x y)
- // result: (SUB x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64SUB)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpSubPtr(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (SubPtr x y)
- // result: (SUB x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64SUB)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpTrunc16to8(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Trunc16to8 x)
- // result: x
- for {
- x := v_0
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpTrunc32to16(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Trunc32to16 x)
- // result: x
- for {
- x := v_0
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpTrunc32to8(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Trunc32to8 x)
- // result: x
- for {
- x := v_0
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpTrunc64to16(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Trunc64to16 x)
- // result: x
- for {
- x := v_0
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpTrunc64to32(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Trunc64to32 x)
- // result: x
- for {
- x := v_0
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpTrunc64to8(v *Value) bool {
- v_0 := v.Args[0]
- // match: (Trunc64to8 x)
- // result: x
- for {
- x := v_0
- v.reset(OpCopy)
- v.Type = x.Type
- v.AddArg(x)
- return true
- }
-}
-func rewriteValueRISCV64_OpWB(v *Value) bool {
- v_2 := v.Args[2]
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (WB {fn} destptr srcptr mem)
- // result: (LoweredWB {fn} destptr srcptr mem)
- for {
- fn := v.Aux
- destptr := v_0
- srcptr := v_1
- mem := v_2
- v.reset(OpRISCV64LoweredWB)
- v.Aux = fn
- v.AddArg(destptr)
- v.AddArg(srcptr)
- v.AddArg(mem)
- return true
- }
-}
-func rewriteValueRISCV64_OpXor16(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Xor16 x y)
- // result: (XOR x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64XOR)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpXor32(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Xor32 x y)
- // result: (XOR x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64XOR)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpXor64(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Xor64 x y)
- // result: (XOR x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64XOR)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
-func rewriteValueRISCV64_OpXor8(v *Value) bool {
- v_1 := v.Args[1]
- v_0 := v.Args[0]
- // match: (Xor8 x y)
- // result: (XOR x y)
- for {
- x := v_0
- y := v_1
- v.reset(OpRISCV64XOR)
- v.AddArg(x)
- v.AddArg(y)
- return true
- }
-}
func rewriteValueRISCV64_OpZero(v *Value) bool {
v_1 := v.Args[1]
v_0 := v.Args[0]