// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-(AddPtr x y) -> (ADDV x y)
-(Add64 x y) -> (ADDV x y)
-(Add32 x y) -> (ADDV x y)
-(Add16 x y) -> (ADDV x y)
-(Add8 x y) -> (ADDV x y)
-(Add32F x y) -> (ADDF x y)
-(Add64F x y) -> (ADDD x y)
-
-(SubPtr x y) -> (SUBV x y)
-(Sub64 x y) -> (SUBV x y)
-(Sub32 x y) -> (SUBV x y)
-(Sub16 x y) -> (SUBV x y)
-(Sub8 x y) -> (SUBV x y)
-(Sub32F x y) -> (SUBF x y)
-(Sub64F x y) -> (SUBD x y)
-
-(Mul64 x y) -> (Select1 (MULVU x y))
-(Mul32 x y) -> (Select1 (MULVU x y))
-(Mul16 x y) -> (Select1 (MULVU x y))
-(Mul8 x y) -> (Select1 (MULVU x y))
-(Mul32F x y) -> (MULF x y)
-(Mul64F x y) -> (MULD x y)
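+// (Op(A|B|...) ...) below is shorthand: the rule generator expands each
+// alternative into its own rule, pairing alternations on the left- and
+// right-hand sides positionally.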
+(Add(Ptr|64|32|16|8) x y) -> (ADDV x y)
+(Add(32|64)F x y) -> (ADD(F|D) x y)
+
+(Sub(Ptr|64|32|16|8) x y) -> (SUBV x y)
+(Sub(32|64)F x y) -> (SUB(F|D) x y)
+
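+// MULV/MULVU return a (hi, lo) tuple: Select1 picks the low 64 bits of the
+// product (Mul), Select0 the high 64 bits (Hmul).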
+(Mul(64|32|16|8) x y) -> (Select1 (MULVU x y))
+(Mul(32|64)F x y) -> (MUL(F|D) x y)
(Hmul64 x y) -> (Select0 (MULV x y))
(Hmul64u x y) -> (Select0 (MULVU x y))
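+// DIVV/DIVVU likewise return a tuple: Select1 is the quotient, Select0 the
+// remainder (Mod).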
(Div16u x y) -> (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
(Div8 x y) -> (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
(Div8u x y) -> (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
-(Div32F x y) -> (DIVF x y)
-(Div64F x y) -> (DIVD x y)
+(Div(32|64)F x y) -> (DIV(F|D) x y)
(Mod64 x y) -> (Select0 (DIVV x y))
(Mod64u x y) -> (Select0 (DIVVU x y))
// (x + y) / 2 with x>=y -> (x - y) / 2 + y
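+// The direct sum x+y could overflow 64 bits; since x >= y, the subtraction
+// cannot underflow and (x-y)/2 + y yields the same truncated average.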
(Avg64u <t> x y) -> (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
-(And64 x y) -> (AND x y)
-(And32 x y) -> (AND x y)
-(And16 x y) -> (AND x y)
-(And8 x y) -> (AND x y)
-
-(Or64 x y) -> (OR x y)
-(Or32 x y) -> (OR x y)
-(Or16 x y) -> (OR x y)
-(Or8 x y) -> (OR x y)
-
-(Xor64 x y) -> (XOR x y)
-(Xor32 x y) -> (XOR x y)
-(Xor16 x y) -> (XOR x y)
-(Xor8 x y) -> (XOR x y)
+(And(64|32|16|8) x y) -> (AND x y)
+(Or(64|32|16|8) x y) -> (OR x y)
+(Xor(64|32|16|8) x y) -> (XOR x y)
// shifts
// hardware instruction uses only the low 6 bits of the shift
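+// SGTU(y, 63) is 1 when the shift amount is >= 64; NEGV turns that into an
+// all-ones mask, so the OR forces the amount to 63 after the hardware's
+// 6-bit masking and the result is just sign bits, as Go's shifts require.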
(Rsh8x8 <t> x y) -> (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
// unary ops
-(Neg64 x) -> (NEGV x)
-(Neg32 x) -> (NEGV x)
-(Neg16 x) -> (NEGV x)
-(Neg8 x) -> (NEGV x)
-(Neg32F x) -> (NEGF x)
-(Neg64F x) -> (NEGD x)
-
-(Com64 x) -> (NOR (MOVVconst [0]) x)
-(Com32 x) -> (NOR (MOVVconst [0]) x)
-(Com16 x) -> (NOR (MOVVconst [0]) x)
-(Com8 x) -> (NOR (MOVVconst [0]) x)
+(Neg(64|32|16|8) x) -> (NEGV x)
+(Neg(32|64)F x) -> (NEG(F|D) x)
+
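+// Bitwise complement via NOR with zero: NOR(0, x) = ^(0|x) = ^x.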
+(Com(64|32|16|8) x) -> (NOR (MOVVconst [0]) x)
// boolean ops -- booleans are represented with 0=false, 1=true
(AndB x y) -> (AND x y)
(Not x) -> (XORconst [1] x)
// constants
-(Const64 [val]) -> (MOVVconst [val])
-(Const32 [val]) -> (MOVVconst [val])
-(Const16 [val]) -> (MOVVconst [val])
-(Const8 [val]) -> (MOVVconst [val])
-(Const32F [val]) -> (MOVFconst [val])
-(Const64F [val]) -> (MOVDconst [val])
+(Const(64|32|16|8) [val]) -> (MOVVconst [val])
+(Const(32|64)F [val]) -> (MOV(F|D)const [val])
(ConstNil) -> (MOVVconst [0])
(ConstBool [b]) -> (MOVVconst [b])
(Cvt32Fto64F x) -> (MOVFD x)
(Cvt64Fto32F x) -> (MOVDF x)
-(Round32F x) -> x
-(Round64F x) -> x
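+// Floating-point values are already kept at their declared precision, so
+// rounding a value to its own type is a no-op.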
+(Round(32|64)F x) -> x
// comparisons
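+// Integer equality goes through XOR: x == y exactly when x^y == 0, i.e. when
+// 1 >u (x^y); x != y exactly when (x^y) >u 0.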
(Eq8 x y) -> (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
(Eq32 x y) -> (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
(Eq64 x y) -> (SGTU (MOVVconst [1]) (XOR x y))
(EqPtr x y) -> (SGTU (MOVVconst [1]) (XOR x y))
-(Eq32F x y) -> (FPFlagTrue (CMPEQF x y))
-(Eq64F x y) -> (FPFlagTrue (CMPEQD x y))
+(Eq(32|64)F x y) -> (FPFlagTrue (CMPEQ(F|D) x y))
(Neq8 x y) -> (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
(Neq16 x y) -> (SGTU (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)) (MOVVconst [0]))
(Neq32 x y) -> (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
(Neq64 x y) -> (SGTU (XOR x y) (MOVVconst [0]))
(NeqPtr x y) -> (SGTU (XOR x y) (MOVVconst [0]))
-(Neq32F x y) -> (FPFlagFalse (CMPEQF x y))
-(Neq64F x y) -> (FPFlagFalse (CMPEQD x y))
+(Neq(32|64)F x y) -> (FPFlagFalse (CMPEQ(F|D) x y))
(Less8 x y) -> (SGT (SignExt8to64 y) (SignExt8to64 x))
(Less16 x y) -> (SGT (SignExt16to64 y) (SignExt16to64 x))
(Less32 x y) -> (SGT (SignExt32to64 y) (SignExt32to64 x))
(Less64 x y) -> (SGT y x)
-(Less32F x y) -> (FPFlagTrue (CMPGTF y x)) // reverse operands to work around NaN
-(Less64F x y) -> (FPFlagTrue (CMPGTD y x)) // reverse operands to work around NaN
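+// The FP comparisons report false whenever an operand is NaN, so Less (and
+// Leq below) is expressed as the same greater-than/greater-or-equal compare
+// with the operands swapped, rather than by negating the opposite
+// comparison, which would wrongly yield true for NaN operands.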
+(Less(32|64)F x y) -> (FPFlagTrue (CMPGT(F|D) y x)) // reverse operands to work around NaN
(Less8U x y) -> (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
(Less16U x y) -> (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
(Leq16 x y) -> (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
(Leq32 x y) -> (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
(Leq64 x y) -> (XOR (MOVVconst [1]) (SGT x y))
-(Leq32F x y) -> (FPFlagTrue (CMPGEF y x)) // reverse operands to work around NaN
-(Leq64F x y) -> (FPFlagTrue (CMPGED y x)) // reverse operands to work around NaN
+(Leq(32|64)F x y) -> (FPFlagTrue (CMPGE(F|D) y x)) // reverse operands to work around NaN
(Leq8U x y) -> (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
(Leq16U x y) -> (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
(Greater16 x y) -> (SGT (SignExt16to64 x) (SignExt16to64 y))
(Greater32 x y) -> (SGT (SignExt32to64 x) (SignExt32to64 y))
(Greater64 x y) -> (SGT x y)
-(Greater32F x y) -> (FPFlagTrue (CMPGTF x y))
-(Greater64F x y) -> (FPFlagTrue (CMPGTD x y))
+(Greater(32|64)F x y) -> (FPFlagTrue (CMPGT(F|D) x y))
(Greater8U x y) -> (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))
(Greater16U x y) -> (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))
(Geq16 x y) -> (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x)))
(Geq32 x y) -> (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x)))
(Geq64 x y) -> (XOR (MOVVconst [1]) (SGT y x))
-(Geq32F x y) -> (FPFlagTrue (CMPGEF x y))
-(Geq64F x y) -> (FPFlagTrue (CMPGED x y))
+(Geq(32|64)F x y) -> (FPFlagTrue (CMPGE(F|D) x y))
(Geq8U x y) -> (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)))
(Geq16U x y) -> (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)))