(GEZ (MOVWconst [c]) yes no) && int32(c) < 0 -> (First nil no yes)
// conditional move
-(CMOVZ _ b (MOVWconst [0])) -> b
+(CMOVZ _ f (MOVWconst [0])) -> f
(CMOVZ a _ (MOVWconst [c])) && c!=0 -> a
(CMOVZzero _ (MOVWconst [0])) -> (MOVWconst [0])
(CMOVZzero a (MOVWconst [c])) && c!=0 -> a
}
body := buf.String()
- // Do a rough match to predict whether we need b, config, fe, and/or types.
- // It's not precise--thus the blank assignments--but it's good enough
- // to avoid generating needless code and doing pointless nil checks.
- hasb := strings.Contains(body, "b.")
+ // Figure out whether we need b, config, fe, and/or types; provide them if so.
+ hasb := strings.Contains(body, " b.")
hasconfig := strings.Contains(body, "config.") || strings.Contains(body, "config)")
hasfe := strings.Contains(body, "fe.")
hastyps := strings.Contains(body, "typ.")
fmt.Fprintf(w, "func rewriteValue%s_%s_%d(v *Value) bool {\n", arch.name, op, chunk)
if hasb || hasconfig || hasfe || hastyps {
fmt.Fprintln(w, "b := v.Block")
- fmt.Fprintln(w, "_ = b")
}
if hasconfig {
fmt.Fprintln(w, "config := b.Func.Config")
- fmt.Fprintln(w, "_ = config")
}
if hasfe {
fmt.Fprintln(w, "fe := b.Func.fe")
- fmt.Fprintln(w, "_ = fe")
}
if hastyps {
fmt.Fprintln(w, "typ := &b.Func.Config.Types")
- fmt.Fprintln(w, "_ = typ")
}
fmt.Fprint(w, body)
fmt.Fprintf(w, "}\n")
// autogenerated name
argname = fmt.Sprintf("%s_%d", v, i)
}
+ if argname == "b" {
+ log.Fatalf("don't name args 'b', it is ambiguous with blocks")
+ }
fmt.Fprintf(w, "%s := %s.Args[%d]\n", argname, v, i)
argPos, argCanFail := genMatch0(w, arch, arg, argname, m, false, loc)
if argPos != "" {
}
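
A minimal, standalone sketch of the prologue logic above (the sample body, the use of os.Stdout, and main() are illustrative assumptions, not rulegen itself): declarations are emitted only when the generated body actually references them, so the blank assignments are no longer needed. The leading space in " b." avoids matching identifiers that merely end in b, which could otherwise produce an unused (and now uncompilable) b declaration, and the new log.Fatalf guard together with the b-to-f rename in the MIPS CMOVZ rules keeps rule argument names from colliding with the block variable. The regenerated files below only drop the now-redundant blank assignments.

package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	// A body shaped like rulegen's output; illustrative only.
	body := "v.reset(OpAMD64BSWAPQ)\n" +
		"v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)\n" +
		"v.AddArg(v0)\n" +
		"return true\n"
	w := os.Stdout

	hasb := strings.Contains(body, " b.")
	hasconfig := strings.Contains(body, "config.") || strings.Contains(body, "config)")
	hasfe := strings.Contains(body, "fe.")
	hastyps := strings.Contains(body, "typ.")

	// Emit only the declarations the body will use.
	if hasb || hasconfig || hasfe || hastyps {
		fmt.Fprintln(w, "b := v.Block")
	}
	if hasconfig {
		fmt.Fprintln(w, "config := b.Func.Config")
	}
	if hasfe {
		fmt.Fprintln(w, "fe := b.Func.fe")
	}
	if hastyps {
		fmt.Fprintln(w, "typ := &b.Func.Config.Types")
	}
	fmt.Fprint(w, body)
	// Prints "b := v.Block" and "typ := &b.Func.Config.Types" followed by the body;
	// no config, fe, or blank-assignment lines are emitted for this sample.
}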
func rewriteValue386_Op386ADDLconstmodify_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ADDLconstmodify [valoff1] {sym} (ADDLconst [off2] base) mem)
// cond: ValAndOff(valoff1).canAdd(off2)
// result: (ADDLconstmodify [ValAndOff(valoff1).add(off2)] {sym} base mem)
}
func rewriteValue386_Op386ADDLconstmodifyidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ADDLconstmodifyidx4 [valoff1] {sym} (ADDLconst [off2] base) idx mem)
// cond: ValAndOff(valoff1).canAdd(off2)
// result: (ADDLconstmodifyidx4 [ValAndOff(valoff1).add(off2)] {sym} base idx mem)
}
func rewriteValue386_Op386ADDLload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ADDLload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ADDLload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386ADDLloadidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ADDLloadidx4 [off1] {sym} val (ADDLconst [off2] base) idx mem)
// cond: is32Bit(off1+off2)
// result: (ADDLloadidx4 [off1+off2] {sym} val base idx mem)
}
func rewriteValue386_Op386ADDLmodify_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ADDLmodify [off1] {sym} (ADDLconst [off2] base) val mem)
// cond: is32Bit(off1+off2)
// result: (ADDLmodify [off1+off2] {sym} base val mem)
}
func rewriteValue386_Op386ADDLmodifyidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ADDLmodifyidx4 [off1] {sym} (ADDLconst [off2] base) idx val mem)
// cond: is32Bit(off1+off2)
// result: (ADDLmodifyidx4 [off1+off2] {sym} base idx val mem)
}
func rewriteValue386_Op386ADDSD_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ADDSD x l:(MOVSDload [off] {sym} ptr mem))
// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
// result: (ADDSDload x [off] {sym} ptr mem)
}
func rewriteValue386_Op386ADDSDload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ADDSDload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ADDSDload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386ADDSS_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ADDSS x l:(MOVSSload [off] {sym} ptr mem))
// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
// result: (ADDSSload x [off] {sym} ptr mem)
}
func rewriteValue386_Op386ADDSSload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ADDSSload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ADDSSload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386ANDLconstmodify_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ANDLconstmodify [valoff1] {sym} (ADDLconst [off2] base) mem)
// cond: ValAndOff(valoff1).canAdd(off2)
// result: (ANDLconstmodify [ValAndOff(valoff1).add(off2)] {sym} base mem)
}
func rewriteValue386_Op386ANDLconstmodifyidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ANDLconstmodifyidx4 [valoff1] {sym} (ADDLconst [off2] base) idx mem)
// cond: ValAndOff(valoff1).canAdd(off2)
// result: (ANDLconstmodifyidx4 [ValAndOff(valoff1).add(off2)] {sym} base idx mem)
}
func rewriteValue386_Op386ANDLload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ANDLload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ANDLload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386ANDLloadidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ANDLloadidx4 [off1] {sym} val (ADDLconst [off2] base) idx mem)
// cond: is32Bit(off1+off2)
// result: (ANDLloadidx4 [off1+off2] {sym} val base idx mem)
}
func rewriteValue386_Op386ANDLmodify_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ANDLmodify [off1] {sym} (ADDLconst [off2] base) val mem)
// cond: is32Bit(off1+off2)
// result: (ANDLmodify [off1+off2] {sym} base val mem)
}
func rewriteValue386_Op386ANDLmodifyidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ANDLmodifyidx4 [off1] {sym} (ADDLconst [off2] base) idx val mem)
// cond: is32Bit(off1+off2)
// result: (ANDLmodifyidx4 [off1+off2] {sym} base idx val mem)
}
func rewriteValue386_Op386CMPB_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPB x (MOVLconst [c]))
// cond:
// result: (CMPBconst x [int64(int8(c))])
}
func rewriteValue386_Op386CMPBconst_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPBconst (MOVLconst [x]) [y])
// cond: int8(x)==int8(y)
// result: (FlagEQ)
}
func rewriteValue386_Op386CMPL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPL x (MOVLconst [c]))
// cond:
// result: (CMPLconst x [c])
}
func rewriteValue386_Op386CMPLconst_10(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPLconst l:(MOVLload {sym} [off] ptr mem) [c])
// cond: l.Uses == 1 && validValAndOff(c, off) && clobber(l)
// result: @l.Block (CMPLconstload {sym} [makeValAndOff(c,off)] ptr mem)
}
func rewriteValue386_Op386CMPW_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPW x (MOVLconst [c]))
// cond:
// result: (CMPWconst x [int64(int16(c))])
}
func rewriteValue386_Op386CMPWconst_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPWconst (MOVLconst [x]) [y])
// cond: int16(x)==int16(y)
// result: (FlagEQ)
}
func rewriteValue386_Op386DIVSD_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (DIVSD x l:(MOVSDload [off] {sym} ptr mem))
// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
// result: (DIVSDload x [off] {sym} ptr mem)
}
func rewriteValue386_Op386DIVSDload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (DIVSDload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (DIVSDload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386DIVSS_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (DIVSS x l:(MOVSSload [off] {sym} ptr mem))
// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
// result: (DIVSSload x [off] {sym} ptr mem)
}
func rewriteValue386_Op386DIVSSload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (DIVSSload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (DIVSSload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386MOVBLSX_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBLSX x:(MOVBload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVBLSXload <v.Type> [off] {sym} ptr mem)
}
func rewriteValue386_Op386MOVBLSXload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVBLSXload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
// result: (MOVBLSX x)
}
func rewriteValue386_Op386MOVBLZX_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBLZX x:(MOVBload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVBload <v.Type> [off] {sym} ptr mem)
}
func rewriteValue386_Op386MOVBload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _))
// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
// result: (MOVBLZX x)
}
func rewriteValue386_Op386MOVBstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVBstore [off] {sym} ptr (MOVBLSX x) mem)
// cond:
// result: (MOVBstore [off] {sym} ptr x mem)
}
func rewriteValue386_Op386MOVBstoreconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVBstoreconst [sc] {s} (ADDLconst [off] ptr) mem)
// cond: ValAndOff(sc).canAdd(off)
// result: (MOVBstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
}
func rewriteValue386_Op386MOVLload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVLload [off] {sym} ptr (MOVLstore [off2] {sym2} ptr2 x _))
// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
// result: x
}
func rewriteValue386_Op386MOVLstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVLstore [off1] {sym} (ADDLconst [off2] ptr) val mem)
// cond: is32Bit(off1+off2)
// result: (MOVLstore [off1+off2] {sym} ptr val mem)
}
func rewriteValue386_Op386MOVLstoreconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVLstoreconst [sc] {s} (ADDLconst [off] ptr) mem)
// cond: ValAndOff(sc).canAdd(off)
// result: (MOVLstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
}
func rewriteValue386_Op386MOVSDconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVSDconst [c])
// cond: config.ctxt.Flag_shared
// result: (MOVSDconst2 (MOVSDconst1 [c]))
}
func rewriteValue386_Op386MOVSDload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVSDload [off1] {sym} (ADDLconst [off2] ptr) mem)
// cond: is32Bit(off1+off2)
// result: (MOVSDload [off1+off2] {sym} ptr mem)
}
func rewriteValue386_Op386MOVSDstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVSDstore [off1] {sym} (ADDLconst [off2] ptr) val mem)
// cond: is32Bit(off1+off2)
// result: (MOVSDstore [off1+off2] {sym} ptr val mem)
}
func rewriteValue386_Op386MOVSSconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVSSconst [c])
// cond: config.ctxt.Flag_shared
// result: (MOVSSconst2 (MOVSSconst1 [c]))
}
func rewriteValue386_Op386MOVSSload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVSSload [off1] {sym} (ADDLconst [off2] ptr) mem)
// cond: is32Bit(off1+off2)
// result: (MOVSSload [off1+off2] {sym} ptr mem)
}
func rewriteValue386_Op386MOVSSstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVSSstore [off1] {sym} (ADDLconst [off2] ptr) val mem)
// cond: is32Bit(off1+off2)
// result: (MOVSSstore [off1+off2] {sym} ptr val mem)
}
func rewriteValue386_Op386MOVWLSX_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWLSX x:(MOVWload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVWLSXload <v.Type> [off] {sym} ptr mem)
}
func rewriteValue386_Op386MOVWLSXload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVWLSXload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
// result: (MOVWLSX x)
}
func rewriteValue386_Op386MOVWLZX_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWLZX x:(MOVWload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVWload <v.Type> [off] {sym} ptr mem)
}
func rewriteValue386_Op386MOVWload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
// result: (MOVWLZX x)
}
func rewriteValue386_Op386MOVWstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVWstore [off] {sym} ptr (MOVWLSX x) mem)
// cond:
// result: (MOVWstore [off] {sym} ptr x mem)
}
func rewriteValue386_Op386MOVWstoreconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVWstoreconst [sc] {s} (ADDLconst [off] ptr) mem)
// cond: ValAndOff(sc).canAdd(off)
// result: (MOVWstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
}
func rewriteValue386_Op386MOVWstoreconstidx2_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWstoreconstidx2 [x] {sym} (ADDLconst [c] ptr) idx mem)
// cond:
// result: (MOVWstoreconstidx2 [ValAndOff(x).add(c)] {sym} ptr idx mem)
}
func rewriteValue386_Op386MOVWstoreidx2_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWstoreidx2 [c] {sym} (ADDLconst [d] ptr) idx val mem)
// cond:
// result: (MOVWstoreidx2 [int64(int32(c+d))] {sym} ptr idx val mem)
}
func rewriteValue386_Op386MULLconst_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MULLconst [c] (MULLconst [d] x))
// cond:
// result: (MULLconst [int64(int32(c * d))] x)
}
func rewriteValue386_Op386MULLconst_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MULLconst [9] x)
// cond:
// result: (LEAL8 x x)
}
func rewriteValue386_Op386MULLconst_20(v *Value) bool {
b := v.Block
- _ = b
// match: (MULLconst [73] x)
// cond:
// result: (LEAL8 x (LEAL8 <v.Type> x x))
}
func rewriteValue386_Op386MULLload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MULLload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (MULLload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386MULLloadidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MULLloadidx4 [off1] {sym} val (ADDLconst [off2] base) idx mem)
// cond: is32Bit(off1+off2)
// result: (MULLloadidx4 [off1+off2] {sym} val base idx mem)
}
func rewriteValue386_Op386MULSD_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MULSD x l:(MOVSDload [off] {sym} ptr mem))
// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
// result: (MULSDload x [off] {sym} ptr mem)
}
func rewriteValue386_Op386MULSDload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MULSDload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (MULSDload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386MULSS_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MULSS x l:(MOVSSload [off] {sym} ptr mem))
// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
// result: (MULSSload x [off] {sym} ptr mem)
}
func rewriteValue386_Op386MULSSload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MULSSload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (MULSSload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386ORL_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORL x l:(MOVLloadidx4 [off] {sym} ptr idx mem))
// cond: canMergeLoadClobber(v, l, x) && clobber(l)
// result: (ORLloadidx4 x [off] {sym} ptr idx mem)
}
func rewriteValue386_Op386ORL_20(v *Value) bool {
b := v.Block
- _ = b
// match: (ORL x0:(MOVBloadidx1 [i0] {s} idx p mem) s0:(SHLLconst [8] x1:(MOVBloadidx1 [i1] {s} p idx mem)))
// cond: i1==i0+1 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0)
// result: @mergePoint(b,x0,x1) (MOVWloadidx1 <v.Type> [i0] {s} p idx mem)
}
func rewriteValue386_Op386ORL_30(v *Value) bool {
b := v.Block
- _ = b
// match: (ORL o0:(ORL x0:(MOVWloadidx1 [i0] {s} idx p mem) s0:(SHLLconst [16] x1:(MOVBloadidx1 [i2] {s} idx p mem))) s1:(SHLLconst [24] x2:(MOVBloadidx1 [i3] {s} p idx mem)))
// cond: i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(s0) && clobber(s1) && clobber(o0)
// result: @mergePoint(b,x0,x1,x2) (MOVLloadidx1 <v.Type> [i0] {s} p idx mem)
}
func rewriteValue386_Op386ORL_40(v *Value) bool {
b := v.Block
- _ = b
// match: (ORL o0:(ORL s0:(SHLLconst [16] x1:(MOVBloadidx1 [i2] {s} idx p mem)) x0:(MOVWloadidx1 [i0] {s} p idx mem)) s1:(SHLLconst [24] x2:(MOVBloadidx1 [i3] {s} idx p mem)))
// cond: i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(s0) && clobber(s1) && clobber(o0)
// result: @mergePoint(b,x0,x1,x2) (MOVLloadidx1 <v.Type> [i0] {s} p idx mem)
}
func rewriteValue386_Op386ORL_50(v *Value) bool {
b := v.Block
- _ = b
// match: (ORL s1:(SHLLconst [24] x2:(MOVBloadidx1 [i3] {s} idx p mem)) o0:(ORL x0:(MOVWloadidx1 [i0] {s} idx p mem) s0:(SHLLconst [16] x1:(MOVBloadidx1 [i2] {s} idx p mem))))
// cond: i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(s0) && clobber(s1) && clobber(o0)
// result: @mergePoint(b,x0,x1,x2) (MOVLloadidx1 <v.Type> [i0] {s} p idx mem)
}
func rewriteValue386_Op386ORLconstmodify_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ORLconstmodify [valoff1] {sym} (ADDLconst [off2] base) mem)
// cond: ValAndOff(valoff1).canAdd(off2)
// result: (ORLconstmodify [ValAndOff(valoff1).add(off2)] {sym} base mem)
}
func rewriteValue386_Op386ORLconstmodifyidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ORLconstmodifyidx4 [valoff1] {sym} (ADDLconst [off2] base) idx mem)
// cond: ValAndOff(valoff1).canAdd(off2)
// result: (ORLconstmodifyidx4 [ValAndOff(valoff1).add(off2)] {sym} base idx mem)
}
func rewriteValue386_Op386ORLload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ORLload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ORLload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386ORLloadidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ORLloadidx4 [off1] {sym} val (ADDLconst [off2] base) idx mem)
// cond: is32Bit(off1+off2)
// result: (ORLloadidx4 [off1+off2] {sym} val base idx mem)
}
func rewriteValue386_Op386ORLmodify_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ORLmodify [off1] {sym} (ADDLconst [off2] base) val mem)
// cond: is32Bit(off1+off2)
// result: (ORLmodify [off1+off2] {sym} base val mem)
}
func rewriteValue386_Op386ORLmodifyidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ORLmodifyidx4 [off1] {sym} (ADDLconst [off2] base) idx val mem)
// cond: is32Bit(off1+off2)
// result: (ORLmodifyidx4 [off1+off2] {sym} base idx val mem)
}
func rewriteValue386_Op386SUBL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBL x (MOVLconst [c]))
// cond:
// result: (SUBLconst x [c])
}
func rewriteValue386_Op386SUBLload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SUBLload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (SUBLload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386SUBLloadidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SUBLloadidx4 [off1] {sym} val (ADDLconst [off2] base) idx mem)
// cond: is32Bit(off1+off2)
// result: (SUBLloadidx4 [off1+off2] {sym} val base idx mem)
}
func rewriteValue386_Op386SUBLmodify_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SUBLmodify [off1] {sym} (ADDLconst [off2] base) val mem)
// cond: is32Bit(off1+off2)
// result: (SUBLmodify [off1+off2] {sym} base val mem)
}
func rewriteValue386_Op386SUBLmodifyidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SUBLmodifyidx4 [off1] {sym} (ADDLconst [off2] base) idx val mem)
// cond: is32Bit(off1+off2)
// result: (SUBLmodifyidx4 [off1+off2] {sym} base idx val mem)
}
func rewriteValue386_Op386SUBSD_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SUBSD x l:(MOVSDload [off] {sym} ptr mem))
// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
// result: (SUBSDload x [off] {sym} ptr mem)
}
func rewriteValue386_Op386SUBSDload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SUBSDload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (SUBSDload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386SUBSS_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SUBSS x l:(MOVSSload [off] {sym} ptr mem))
// cond: canMergeLoadClobber(v, l, x) && !config.use387 && clobber(l)
// result: (SUBSSload x [off] {sym} ptr mem)
}
func rewriteValue386_Op386SUBSSload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SUBSSload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (SUBSSload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386XORLconstmodify_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (XORLconstmodify [valoff1] {sym} (ADDLconst [off2] base) mem)
// cond: ValAndOff(valoff1).canAdd(off2)
// result: (XORLconstmodify [ValAndOff(valoff1).add(off2)] {sym} base mem)
}
func rewriteValue386_Op386XORLconstmodifyidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (XORLconstmodifyidx4 [valoff1] {sym} (ADDLconst [off2] base) idx mem)
// cond: ValAndOff(valoff1).canAdd(off2)
// result: (XORLconstmodifyidx4 [ValAndOff(valoff1).add(off2)] {sym} base idx mem)
}
func rewriteValue386_Op386XORLload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (XORLload [off1] {sym} val (ADDLconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (XORLload [off1+off2] {sym} val base mem)
}
func rewriteValue386_Op386XORLloadidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (XORLloadidx4 [off1] {sym} val (ADDLconst [off2] base) idx mem)
// cond: is32Bit(off1+off2)
// result: (XORLloadidx4 [off1+off2] {sym} val base idx mem)
}
func rewriteValue386_Op386XORLmodify_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (XORLmodify [off1] {sym} (ADDLconst [off2] base) val mem)
// cond: is32Bit(off1+off2)
// result: (XORLmodify [off1+off2] {sym} base val mem)
}
func rewriteValue386_Op386XORLmodifyidx4_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (XORLmodifyidx4 [off1] {sym} (ADDLconst [off2] base) idx val mem)
// cond: is32Bit(off1+off2)
// result: (XORLmodifyidx4 [off1+off2] {sym} base idx val mem)
}
func rewriteValue386_OpDiv8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8 x y)
// cond:
// result: (DIVW (SignExt8to16 x) (SignExt8to16 y))
}
func rewriteValue386_OpDiv8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8u x y)
// cond:
// result: (DIVWU (ZeroExt8to16 x) (ZeroExt8to16 y))
}
func rewriteValue386_OpEq16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq16 x y)
// cond:
// result: (SETEQ (CMPW x y))
}
func rewriteValue386_OpEq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32 x y)
// cond:
// result: (SETEQ (CMPL x y))
}
func rewriteValue386_OpEq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32F x y)
// cond:
// result: (SETEQF (UCOMISS x y))
}
func rewriteValue386_OpEq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq64F x y)
// cond:
// result: (SETEQF (UCOMISD x y))
}
func rewriteValue386_OpEq8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq8 x y)
// cond:
// result: (SETEQ (CMPB x y))
}
func rewriteValue386_OpEqB_0(v *Value) bool {
b := v.Block
- _ = b
// match: (EqB x y)
// cond:
// result: (SETEQ (CMPB x y))
}
func rewriteValue386_OpEqPtr_0(v *Value) bool {
b := v.Block
- _ = b
// match: (EqPtr x y)
// cond:
// result: (SETEQ (CMPL x y))
}
func rewriteValue386_OpGeq16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq16 x y)
// cond:
// result: (SETGE (CMPW x y))
}
func rewriteValue386_OpGeq16U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq16U x y)
// cond:
// result: (SETAE (CMPW x y))
}
func rewriteValue386_OpGeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32 x y)
// cond:
// result: (SETGE (CMPL x y))
}
func rewriteValue386_OpGeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32F x y)
// cond:
// result: (SETGEF (UCOMISS x y))
}
func rewriteValue386_OpGeq32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32U x y)
// cond:
// result: (SETAE (CMPL x y))
}
func rewriteValue386_OpGeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64F x y)
// cond:
// result: (SETGEF (UCOMISD x y))
}
func rewriteValue386_OpGeq8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq8 x y)
// cond:
// result: (SETGE (CMPB x y))
}
func rewriteValue386_OpGeq8U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq8U x y)
// cond:
// result: (SETAE (CMPB x y))
}
func rewriteValue386_OpGreater16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater16 x y)
// cond:
// result: (SETG (CMPW x y))
}
func rewriteValue386_OpGreater16U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater16U x y)
// cond:
// result: (SETA (CMPW x y))
}
func rewriteValue386_OpGreater32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32 x y)
// cond:
// result: (SETG (CMPL x y))
}
func rewriteValue386_OpGreater32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32F x y)
// cond:
// result: (SETGF (UCOMISS x y))
}
func rewriteValue386_OpGreater32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32U x y)
// cond:
// result: (SETA (CMPL x y))
}
func rewriteValue386_OpGreater64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64F x y)
// cond:
// result: (SETGF (UCOMISD x y))
}
func rewriteValue386_OpGreater8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater8 x y)
// cond:
// result: (SETG (CMPB x y))
}
func rewriteValue386_OpGreater8U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater8U x y)
// cond:
// result: (SETA (CMPB x y))
}
func rewriteValue386_OpIsInBounds_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsInBounds idx len)
// cond:
// result: (SETB (CMPL idx len))
}
func rewriteValue386_OpIsNonNil_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsNonNil p)
// cond:
// result: (SETNE (TESTL p p))
}
func rewriteValue386_OpIsSliceInBounds_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsSliceInBounds idx len)
// cond:
// result: (SETBE (CMPL idx len))
}
func rewriteValue386_OpLeq16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq16 x y)
// cond:
// result: (SETLE (CMPW x y))
}
func rewriteValue386_OpLeq16U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq16U x y)
// cond:
// result: (SETBE (CMPW x y))
}
func rewriteValue386_OpLeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32 x y)
// cond:
// result: (SETLE (CMPL x y))
}
func rewriteValue386_OpLeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32F x y)
// cond:
// result: (SETGEF (UCOMISS y x))
}
func rewriteValue386_OpLeq32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32U x y)
// cond:
// result: (SETBE (CMPL x y))
}
func rewriteValue386_OpLeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64F x y)
// cond:
// result: (SETGEF (UCOMISD y x))
}
func rewriteValue386_OpLeq8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq8 x y)
// cond:
// result: (SETLE (CMPB x y))
}
func rewriteValue386_OpLeq8U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq8U x y)
// cond:
// result: (SETBE (CMPB x y))
}
func rewriteValue386_OpLess16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less16 x y)
// cond:
// result: (SETL (CMPW x y))
}
func rewriteValue386_OpLess16U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less16U x y)
// cond:
// result: (SETB (CMPW x y))
}
func rewriteValue386_OpLess32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32 x y)
// cond:
// result: (SETL (CMPL x y))
}
func rewriteValue386_OpLess32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32F x y)
// cond:
// result: (SETGF (UCOMISS y x))
}
func rewriteValue386_OpLess32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32U x y)
// cond:
// result: (SETB (CMPL x y))
}
func rewriteValue386_OpLess64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64F x y)
// cond:
// result: (SETGF (UCOMISD y x))
}
func rewriteValue386_OpLess8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less8 x y)
// cond:
// result: (SETL (CMPB x y))
}
func rewriteValue386_OpLess8U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less8U x y)
// cond:
// result: (SETB (CMPB x y))
}
func rewriteValue386_OpLsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x16 <t> x y)
// cond:
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPWconst y [32])))
}
func rewriteValue386_OpLsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x32 <t> x y)
// cond:
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPLconst y [32])))
}
func rewriteValue386_OpLsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x8 <t> x y)
// cond:
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPBconst y [32])))
}
func rewriteValue386_OpLsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x16 <t> x y)
// cond:
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPWconst y [32])))
}
func rewriteValue386_OpLsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x32 <t> x y)
// cond:
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPLconst y [32])))
}
func rewriteValue386_OpLsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x8 <t> x y)
// cond:
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPBconst y [32])))
}
func rewriteValue386_OpLsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x16 <t> x y)
// cond:
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPWconst y [32])))
}
func rewriteValue386_OpLsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x32 <t> x y)
// cond:
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPLconst y [32])))
}
func rewriteValue386_OpLsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x8 <t> x y)
// cond:
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPBconst y [32])))
}
func rewriteValue386_OpMod8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8 x y)
// cond:
// result: (MODW (SignExt8to16 x) (SignExt8to16 y))
}
func rewriteValue386_OpMod8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8u x y)
// cond:
// result: (MODWU (ZeroExt8to16 x) (ZeroExt8to16 y))
}
func rewriteValue386_OpMove_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [0] _ _ mem)
// cond:
// result: mem
}
func rewriteValue386_OpMove_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [s] dst src mem)
// cond: s > 8 && s <= 4*128 && s%4 == 0 && !config.noDuffDevice
// result: (DUFFCOPY [10*(128-s/4)] dst src mem)
}
func rewriteValue386_OpNeg32F_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Neg32F x)
// cond: !config.use387
// result: (PXOR x (MOVSSconst <typ.Float32> [auxFrom32F(float32(math.Copysign(0, -1)))]))
}
func rewriteValue386_OpNeg64F_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Neg64F x)
// cond: !config.use387
// result: (PXOR x (MOVSDconst <typ.Float64> [auxFrom64F(math.Copysign(0, -1))]))
}
func rewriteValue386_OpNeq16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq16 x y)
// cond:
// result: (SETNE (CMPW x y))
}
func rewriteValue386_OpNeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32 x y)
// cond:
// result: (SETNE (CMPL x y))
}
func rewriteValue386_OpNeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32F x y)
// cond:
// result: (SETNEF (UCOMISS x y))
}
func rewriteValue386_OpNeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq64F x y)
// cond:
// result: (SETNEF (UCOMISD x y))
}
func rewriteValue386_OpNeq8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq8 x y)
// cond:
// result: (SETNE (CMPB x y))
}
func rewriteValue386_OpNeqB_0(v *Value) bool {
b := v.Block
- _ = b
// match: (NeqB x y)
// cond:
// result: (SETNE (CMPB x y))
}
func rewriteValue386_OpNeqPtr_0(v *Value) bool {
b := v.Block
- _ = b
// match: (NeqPtr x y)
// cond:
// result: (SETNE (CMPL x y))
}
func rewriteValue386_OpRsh16Ux16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16Ux16 <t> x y)
// cond:
// result: (ANDL (SHRW <t> x y) (SBBLcarrymask <t> (CMPWconst y [16])))
}
func rewriteValue386_OpRsh16Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16Ux32 <t> x y)
// cond:
// result: (ANDL (SHRW <t> x y) (SBBLcarrymask <t> (CMPLconst y [16])))
}
func rewriteValue386_OpRsh16Ux8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16Ux8 <t> x y)
// cond:
// result: (ANDL (SHRW <t> x y) (SBBLcarrymask <t> (CMPBconst y [16])))
}
func rewriteValue386_OpRsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16x16 <t> x y)
// cond:
// result: (SARW <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPWconst y [16])))))
}
func rewriteValue386_OpRsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16x32 <t> x y)
// cond:
// result: (SARW <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPLconst y [16])))))
}
func rewriteValue386_OpRsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16x8 <t> x y)
// cond:
// result: (SARW <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPBconst y [16])))))
}
func rewriteValue386_OpRsh32Ux16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32Ux16 <t> x y)
// cond:
// result: (ANDL (SHRL <t> x y) (SBBLcarrymask <t> (CMPWconst y [32])))
}
func rewriteValue386_OpRsh32Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32Ux32 <t> x y)
// cond:
// result: (ANDL (SHRL <t> x y) (SBBLcarrymask <t> (CMPLconst y [32])))
}
func rewriteValue386_OpRsh32Ux8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32Ux8 <t> x y)
// cond:
// result: (ANDL (SHRL <t> x y) (SBBLcarrymask <t> (CMPBconst y [32])))
}
func rewriteValue386_OpRsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x16 <t> x y)
// cond:
// result: (SARL <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPWconst y [32])))))
}
func rewriteValue386_OpRsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x32 <t> x y)
// cond:
// result: (SARL <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPLconst y [32])))))
}
func rewriteValue386_OpRsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x8 <t> x y)
// cond:
// result: (SARL <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPBconst y [32])))))
}
func rewriteValue386_OpRsh8Ux16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8Ux16 <t> x y)
// cond:
// result: (ANDL (SHRB <t> x y) (SBBLcarrymask <t> (CMPWconst y [8])))
}
func rewriteValue386_OpRsh8Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8Ux32 <t> x y)
// cond:
// result: (ANDL (SHRB <t> x y) (SBBLcarrymask <t> (CMPLconst y [8])))
}
func rewriteValue386_OpRsh8Ux8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8Ux8 <t> x y)
// cond:
// result: (ANDL (SHRB <t> x y) (SBBLcarrymask <t> (CMPBconst y [8])))
}
func rewriteValue386_OpRsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8x16 <t> x y)
// cond:
// result: (SARB <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPWconst y [8])))))
}
func rewriteValue386_OpRsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8x32 <t> x y)
// cond:
// result: (SARB <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPLconst y [8])))))
}
func rewriteValue386_OpRsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8x8 <t> x y)
// cond:
// result: (SARB <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPBconst y [8])))))
}
func rewriteValue386_OpSelect0_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Select0 (Mul32uover x y))
// cond:
// result: (Select0 <typ.UInt32> (MULLU x y))
}
func rewriteValue386_OpSelect1_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Select1 (Mul32uover x y))
// cond:
// result: (SETO (Select1 <types.TypeFlags> (MULLU x y)))
}
func rewriteValue386_OpSlicemask_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Slicemask <t> x)
// cond:
// result: (SARLconst (NEGL <t> x) [31])
}
func rewriteValue386_OpZero_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [0] _ mem)
// cond:
// result: mem
}
func rewriteValue386_OpZero_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [12] destptr mem)
// cond:
// result: (MOVLstoreconst [makeValAndOff(0,8)] destptr (MOVLstoreconst [makeValAndOff(0,4)] destptr (MOVLstoreconst [0] destptr mem)))
}
func rewriteValue386_OpZeromask_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Zeromask <t> x)
// cond:
// result: (XORLconst [-1] (SBBLcarrymask <t> (CMPLconst x [1])))
}
func rewriteValueAMD64_OpAMD64ADDLload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ADDLload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ADDLload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64ADDQload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ADDQload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ADDQload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64ADDSDload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ADDSDload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ADDSDload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64ADDSSload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ADDSSload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ADDSSload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64ANDL_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ANDL (NOTL (SHLL (MOVLconst [1]) y)) x)
// cond: !config.nacl
// result: (BTRL x y)
}
func rewriteValueAMD64_OpAMD64ANDLconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ANDLconst [c] x)
// cond: isUint32PowerOfTwo(^c) && uint64(^c) >= 128 && !config.nacl
// result: (BTRLconst [log2uint32(^c)] x)
}
func rewriteValueAMD64_OpAMD64ANDLload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ANDLload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ANDLload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64ANDQ_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ANDQ (NOTQ (SHLQ (MOVQconst [1]) y)) x)
// cond: !config.nacl
// result: (BTRQ x y)
}
func rewriteValueAMD64_OpAMD64ANDQconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ANDQconst [c] x)
// cond: isUint64PowerOfTwo(^c) && uint64(^c) >= 128 && !config.nacl
// result: (BTRQconst [log2(^c)] x)
}
func rewriteValueAMD64_OpAMD64ANDQload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ANDQload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ANDQload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64BSFQ_0(v *Value) bool {
b := v.Block
- _ = b
// match: (BSFQ (ORQconst <t> [1<<8] (MOVBQZX x)))
// cond:
// result: (BSFQ (ORQconst <t> [1<<8] x))
}
func rewriteValueAMD64_OpAMD64CMPB_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPB x (MOVLconst [c]))
// cond:
// result: (CMPBconst x [int64(int8(c))])
}
func rewriteValueAMD64_OpAMD64CMPBconst_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPBconst (MOVLconst [x]) [y])
// cond: int8(x)==int8(y)
// result: (FlagEQ)
}
func rewriteValueAMD64_OpAMD64CMPL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPL x (MOVLconst [c]))
// cond:
// result: (CMPLconst x [c])
}
func rewriteValueAMD64_OpAMD64CMPLconst_10(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPLconst l:(MOVLload {sym} [off] ptr mem) [c])
// cond: l.Uses == 1 && validValAndOff(c, off) && clobber(l)
// result: @l.Block (CMPLconstload {sym} [makeValAndOff(c,off)] ptr mem)
}
func rewriteValueAMD64_OpAMD64CMPQ_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPQ x (MOVQconst [c]))
// cond: is32Bit(c)
// result: (CMPQconst x [c])
}
func rewriteValueAMD64_OpAMD64CMPQconst_10(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPQconst (SHRQconst _ [c]) [n])
// cond: 0 <= n && 0 < c && c <= 64 && (1<<uint64(64-c)) <= uint64(n)
// result: (FlagLT_ULT)
}
func rewriteValueAMD64_OpAMD64CMPW_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPW x (MOVLconst [c]))
// cond:
// result: (CMPWconst x [int64(int16(c))])
}
func rewriteValueAMD64_OpAMD64CMPWconst_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPWconst (MOVLconst [x]) [y])
// cond: int16(x)==int16(y)
// result: (FlagEQ)
}
func rewriteValueAMD64_OpAMD64MOVBQSX_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBQSX x:(MOVBload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVBQSXload <v.Type> [off] {sym} ptr mem)
}
func rewriteValueAMD64_OpAMD64MOVBQZX_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBQZX x:(MOVBload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVBload <v.Type> [off] {sym} ptr mem)
}
func rewriteValueAMD64_OpAMD64MOVBstore_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBstore [off] {sym} ptr (MOVBQSX x) mem)
// cond:
// result: (MOVBstore [off] {sym} ptr x mem)
}
func rewriteValueAMD64_OpAMD64MOVBstore_20(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVBstore [i] {s} p w x6:(MOVBstore [i-1] {s} p (SHRQconst [8] w) x5:(MOVBstore [i-2] {s} p (SHRQconst [16] w) x4:(MOVBstore [i-3] {s} p (SHRQconst [24] w) x3:(MOVBstore [i-4] {s} p (SHRQconst [32] w) x2:(MOVBstore [i-5] {s} p (SHRQconst [40] w) x1:(MOVBstore [i-6] {s} p (SHRQconst [48] w) x0:(MOVBstore [i-7] {s} p (SHRQconst [56] w) mem))))))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)
// result: (MOVQstore [i-7] {s} p (BSWAPQ <w.Type> w) mem)
}
func rewriteValueAMD64_OpAMD64MOVBstoreidx1_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBstoreidx1 [c] {sym} (ADDQconst [d] ptr) idx val mem)
// cond: is32Bit(c+d)
// result: (MOVBstoreidx1 [c+d] {sym} ptr idx val mem)
}
func rewriteValueAMD64_OpAMD64MOVLQSX_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVLQSX x:(MOVLload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVLQSXload <v.Type> [off] {sym} ptr mem)
}
func rewriteValueAMD64_OpAMD64MOVLQZX_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVLQZX x:(MOVLload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVLload <v.Type> [off] {sym} ptr mem)
}
func rewriteValueAMD64_OpAMD64MOVLf2i_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVLf2i <t> (Arg <u> [off] {sym}))
// cond: t.Size() == u.Size()
// result: @b.Func.Entry (Arg <t> [off] {sym})
}
func rewriteValueAMD64_OpAMD64MOVLi2f_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVLi2f <t> (Arg <u> [off] {sym}))
// cond: t.Size() == u.Size()
// result: @b.Func.Entry (Arg <t> [off] {sym})
}
func rewriteValueAMD64_OpAMD64MOVLload_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVLload [off] {sym} (SB) _)
// cond: symIsRO(sym)
// result: (MOVQconst [int64(read32(sym, off, config.BigEndian))])
}
func rewriteValueAMD64_OpAMD64MOVLstore_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVLstore [i] {s} p (SHRQconst [32] w) x:(MOVLstore [i-4] {s} p w mem))
// cond: x.Uses == 1 && clobber(x)
// result: (MOVQstore [i-4] {s} p w mem)
}
func rewriteValueAMD64_OpAMD64MOVLstoreconst_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVLstoreconst [sc] {s} (ADDQconst [off] ptr) mem)
// cond: ValAndOff(sc).canAdd(off)
// result: (MOVLstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
}
func rewriteValueAMD64_OpAMD64MOVLstoreconstidx1_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVLstoreconstidx1 [c] {sym} ptr (SHLQconst [2] idx) mem)
// cond:
// result: (MOVLstoreconstidx4 [c] {sym} ptr idx mem)
}
func rewriteValueAMD64_OpAMD64MOVLstoreconstidx4_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVLstoreconstidx4 [x] {sym} (ADDQconst [c] ptr) idx mem)
// cond: ValAndOff(x).canAdd(c)
// result: (MOVLstoreconstidx4 [ValAndOff(x).add(c)] {sym} ptr idx mem)
}
func rewriteValueAMD64_OpAMD64MOVLstoreidx4_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVLstoreidx4 [c] {sym} (ADDQconst [d] ptr) idx val mem)
// cond: is32Bit(c+d)
// result: (MOVLstoreidx4 [c+d] {sym} ptr idx val mem)
}
func rewriteValueAMD64_OpAMD64MOVQf2i_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVQf2i <t> (Arg <u> [off] {sym}))
// cond: t.Size() == u.Size()
// result: @b.Func.Entry (Arg <t> [off] {sym})
}
func rewriteValueAMD64_OpAMD64MOVQi2f_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVQi2f <t> (Arg <u> [off] {sym}))
// cond: t.Size() == u.Size()
// result: @b.Func.Entry (Arg <t> [off] {sym})
}
func rewriteValueAMD64_OpAMD64MOVQload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVQload [off] {sym} ptr (MOVQstore [off2] {sym2} ptr2 x _))
// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
// result: x
}
func rewriteValueAMD64_OpAMD64MOVQstoreconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVQstoreconst [sc] {s} (ADDQconst [off] ptr) mem)
// cond: ValAndOff(sc).canAdd(off)
// result: (MOVQstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
}
func rewriteValueAMD64_OpAMD64MOVWQSX_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWQSX x:(MOVWload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVWQSXload <v.Type> [off] {sym} ptr mem)
}
func rewriteValueAMD64_OpAMD64MOVWQZX_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWQZX x:(MOVWload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVWload <v.Type> [off] {sym} ptr mem)
}
func rewriteValueAMD64_OpAMD64MOVWload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _))
// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
// result: (MOVWQZX x)
}
func rewriteValueAMD64_OpAMD64MOVWstore_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVWstore [i] {s} p (SHRQconst [16] w) x:(MOVWstore [i-2] {s} p w mem))
// cond: x.Uses == 1 && clobber(x)
// result: (MOVLstore [i-2] {s} p w mem)
}
func rewriteValueAMD64_OpAMD64MOVWstoreconstidx2_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWstoreconstidx2 [x] {sym} (ADDQconst [c] ptr) idx mem)
// cond: ValAndOff(x).canAdd(c)
// result: (MOVWstoreconstidx2 [ValAndOff(x).add(c)] {sym} ptr idx mem)
}
func rewriteValueAMD64_OpAMD64MOVWstoreidx2_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWstoreidx2 [c] {sym} (ADDQconst [d] ptr) idx val mem)
// cond: is32Bit(c+d)
// result: (MOVWstoreidx2 [c+d] {sym} ptr idx val mem)
}
func rewriteValueAMD64_OpAMD64MULLconst_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MULLconst [c] (MULLconst [d] x))
// cond:
// result: (MULLconst [int64(int32(c * d))] x)
}
func rewriteValueAMD64_OpAMD64MULLconst_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MULLconst [ 9] x)
// cond:
// result: (LEAL8 x x)
}
func rewriteValueAMD64_OpAMD64MULLconst_20(v *Value) bool {
b := v.Block
- _ = b
// match: (MULLconst [73] x)
// cond:
// result: (LEAL8 x (LEAL8 <v.Type> x x))
}
func rewriteValueAMD64_OpAMD64MULQconst_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MULQconst [c] (MULQconst [d] x))
// cond: is32Bit(c*d)
// result: (MULQconst [c * d] x)
}
func rewriteValueAMD64_OpAMD64MULQconst_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MULQconst [ 9] x)
// cond:
// result: (LEAQ8 x x)
}
func rewriteValueAMD64_OpAMD64MULQconst_20(v *Value) bool {
b := v.Block
- _ = b
// match: (MULQconst [73] x)
// cond:
// result: (LEAQ8 x (LEAQ8 <v.Type> x x))
}
func rewriteValueAMD64_OpAMD64MULSDload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MULSDload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (MULSDload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64MULSSload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MULSSload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (MULSSload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64ORL_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ORL (SHLL (MOVLconst [1]) y) x)
// cond: !config.nacl
// result: (BTSL x y)
}
func rewriteValueAMD64_OpAMD64ORL_50(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORL (SHRB x (ANDLconst y [ 7])) (SHLL x (NEGL (ADDLconst (ANDLconst y [ 7]) [ -8]))))
// cond: v.Type.Size() == 1
// result: (RORB x y)
}
func rewriteValueAMD64_OpAMD64ORL_60(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORL or:(ORL y s0:(SHLLconst [j0] x0:(MOVBload [i0] {s} p mem))) s1:(SHLLconst [j1] x1:(MOVBload [i1] {s} p mem)))
// cond: i1 == i0+1 && j1 == j0+8 && j0 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORL <v.Type> (SHLLconst <v.Type> [j0] (MOVWload [i0] {s} p mem)) y)
}
func rewriteValueAMD64_OpAMD64ORL_70(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORL x0:(MOVWloadidx1 [i0] {s} idx p mem) sh:(SHLLconst [16] x1:(MOVWloadidx1 [i1] {s} p idx mem)))
// cond: i1 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVLloadidx1 [i0] {s} p idx mem)
}
func rewriteValueAMD64_OpAMD64ORL_80(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORL s1:(SHLLconst [j1] x1:(MOVBloadidx1 [i1] {s} idx p mem)) or:(ORL s0:(SHLLconst [j0] x0:(MOVBloadidx1 [i0] {s} idx p mem)) y))
// cond: i1 == i0+1 && j1 == j0+8 && j0 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORL <v.Type> (SHLLconst <v.Type> [j0] (MOVWloadidx1 [i0] {s} p idx mem)) y)
}
func rewriteValueAMD64_OpAMD64ORL_90(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORL or:(ORL s0:(SHLLconst [j0] x0:(MOVBloadidx1 [i0] {s} idx p mem)) y) s1:(SHLLconst [j1] x1:(MOVBloadidx1 [i1] {s} idx p mem)))
// cond: i1 == i0+1 && j1 == j0+8 && j0 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORL <v.Type> (SHLLconst <v.Type> [j0] (MOVWloadidx1 [i0] {s} p idx mem)) y)
}
func rewriteValueAMD64_OpAMD64ORL_100(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORL or:(ORL y s1:(SHLLconst [j1] x1:(MOVBload [i1] {s} p mem))) s0:(SHLLconst [j0] x0:(MOVBload [i0] {s} p mem)))
// cond: i1 == i0+1 && j1 == j0-8 && j1 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORL <v.Type> (SHLLconst <v.Type> [j1] (ROLWconst <typ.UInt16> [8] (MOVWload [i0] {s} p mem))) y)
}
func rewriteValueAMD64_OpAMD64ORL_110(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORL r1:(ROLWconst [8] x1:(MOVWloadidx1 [i1] {s} idx p mem)) sh:(SHLLconst [16] r0:(ROLWconst [8] x0:(MOVWloadidx1 [i0] {s} p idx mem))))
// cond: i1 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (BSWAPL <v.Type> (MOVLloadidx1 [i0] {s} p idx mem))
}
func rewriteValueAMD64_OpAMD64ORL_120(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORL s0:(SHLLconst [j0] x0:(MOVBloadidx1 [i0] {s} idx p mem)) or:(ORL s1:(SHLLconst [j1] x1:(MOVBloadidx1 [i1] {s} idx p mem)) y))
// cond: i1 == i0+1 && j1 == j0-8 && j1 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORL <v.Type> (SHLLconst <v.Type> [j1] (ROLWconst <typ.UInt16> [8] (MOVWloadidx1 [i0] {s} p idx mem))) y)
}
func rewriteValueAMD64_OpAMD64ORL_130(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORL or:(ORL s1:(SHLLconst [j1] x1:(MOVBloadidx1 [i1] {s} idx p mem)) y) s0:(SHLLconst [j0] x0:(MOVBloadidx1 [i0] {s} idx p mem)))
// cond: i1 == i0+1 && j1 == j0-8 && j1 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORL <v.Type> (SHLLconst <v.Type> [j1] (ROLWconst <typ.UInt16> [8] (MOVWloadidx1 [i0] {s} p idx mem))) y)
}
func rewriteValueAMD64_OpAMD64ORLconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ORLconst [c] x)
// cond: isUint32PowerOfTwo(c) && uint64(c) >= 128 && !config.nacl
// result: (BTSLconst [log2uint32(c)] x)
}
func rewriteValueAMD64_OpAMD64ORLload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORLload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ORLload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64ORQ_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ORQ (SHLQ (MOVQconst [1]) y) x)
// cond: !config.nacl
// result: (BTSQ x y)
}
func rewriteValueAMD64_OpAMD64ORQ_20(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ (SHRQ x y) (ANDQ (SHLQ x (NEGL y)) (SBBQcarrymask (CMPLconst (NEGL (ADDLconst (ANDLconst y [63]) [-64])) [64]))))
// cond:
// result: (RORQ x y)
}
func rewriteValueAMD64_OpAMD64ORQ_30(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ sh:(SHLQconst [32] x1:(MOVLload [i1] {s} p mem)) x0:(MOVLload [i0] {s} p mem))
// cond: i1 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVQload [i0] {s} p mem)
}
func rewriteValueAMD64_OpAMD64ORQ_40(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ x0:(MOVBloadidx1 [i0] {s} idx p mem) sh:(SHLQconst [8] x1:(MOVBloadidx1 [i1] {s} p idx mem)))
// cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVWloadidx1 <v.Type> [i0] {s} p idx mem)
}
func rewriteValueAMD64_OpAMD64ORQ_50(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ x0:(MOVWloadidx1 [i0] {s} idx p mem) sh:(SHLQconst [16] x1:(MOVWloadidx1 [i1] {s} idx p mem)))
// cond: i1 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVLloadidx1 [i0] {s} p idx mem)
}
func rewriteValueAMD64_OpAMD64ORQ_60(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ sh:(SHLQconst [32] x1:(MOVLloadidx1 [i1] {s} idx p mem)) x0:(MOVLloadidx1 [i0] {s} p idx mem))
// cond: i1 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVQloadidx1 [i0] {s} p idx mem)
}
func rewriteValueAMD64_OpAMD64ORQ_70(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ s1:(SHLQconst [j1] x1:(MOVBloadidx1 [i1] {s} idx p mem)) or:(ORQ y s0:(SHLQconst [j0] x0:(MOVBloadidx1 [i0] {s} idx p mem))))
// cond: i1 == i0+1 && j1 == j0+8 && j0 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORQ <v.Type> (SHLQconst <v.Type> [j0] (MOVWloadidx1 [i0] {s} p idx mem)) y)
}
func rewriteValueAMD64_OpAMD64ORQ_80(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ s1:(SHLQconst [j1] x1:(MOVWloadidx1 [i1] {s} idx p mem)) or:(ORQ s0:(SHLQconst [j0] x0:(MOVWloadidx1 [i0] {s} p idx mem)) y))
// cond: i1 == i0+2 && j1 == j0+16 && j0 % 32 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORQ <v.Type> (SHLQconst <v.Type> [j0] (MOVLloadidx1 [i0] {s} p idx mem)) y)
}
func rewriteValueAMD64_OpAMD64ORQ_90(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ or:(ORQ y s0:(SHLQconst [j0] x0:(MOVWloadidx1 [i0] {s} idx p mem))) s1:(SHLQconst [j1] x1:(MOVWloadidx1 [i1] {s} p idx mem)))
// cond: i1 == i0+2 && j1 == j0+16 && j0 % 32 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORQ <v.Type> (SHLQconst <v.Type> [j0] (MOVLloadidx1 [i0] {s} p idx mem)) y)
}
func rewriteValueAMD64_OpAMD64ORQ_100(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ sh:(SHLQconst [32] r0:(BSWAPL x0:(MOVLload [i0] {s} p mem))) r1:(BSWAPL x1:(MOVLload [i1] {s} p mem)))
// cond: i1 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (BSWAPQ <v.Type> (MOVQload [i0] {s} p mem))
}
func rewriteValueAMD64_OpAMD64ORQ_110(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ x1:(MOVBloadidx1 [i1] {s} idx p mem) sh:(SHLQconst [8] x0:(MOVBloadidx1 [i0] {s} p idx mem)))
// cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (ROLWconst <v.Type> [8] (MOVWloadidx1 [i0] {s} p idx mem))
}
func rewriteValueAMD64_OpAMD64ORQ_120(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ r1:(ROLWconst [8] x1:(MOVWloadidx1 [i1] {s} idx p mem)) sh:(SHLQconst [16] r0:(ROLWconst [8] x0:(MOVWloadidx1 [i0] {s} idx p mem))))
// cond: i1 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (BSWAPL <v.Type> (MOVLloadidx1 [i0] {s} p idx mem))
}
func rewriteValueAMD64_OpAMD64ORQ_130(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ sh:(SHLQconst [32] r0:(BSWAPL x0:(MOVLloadidx1 [i0] {s} idx p mem))) r1:(BSWAPL x1:(MOVLloadidx1 [i1] {s} p idx mem)))
// cond: i1 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (BSWAPQ <v.Type> (MOVQloadidx1 [i0] {s} p idx mem))
}
func rewriteValueAMD64_OpAMD64ORQ_140(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ s0:(SHLQconst [j0] x0:(MOVBloadidx1 [i0] {s} idx p mem)) or:(ORQ y s1:(SHLQconst [j1] x1:(MOVBloadidx1 [i1] {s} idx p mem))))
// cond: i1 == i0+1 && j1 == j0-8 && j1 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORQ <v.Type> (SHLQconst <v.Type> [j1] (ROLWconst <typ.UInt16> [8] (MOVWloadidx1 [i0] {s} p idx mem))) y)
}
func rewriteValueAMD64_OpAMD64ORQ_150(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ s0:(SHLQconst [j0] r0:(ROLWconst [8] x0:(MOVWloadidx1 [i0] {s} idx p mem))) or:(ORQ s1:(SHLQconst [j1] r1:(ROLWconst [8] x1:(MOVWloadidx1 [i1] {s} p idx mem))) y))
// cond: i1 == i0+2 && j1 == j0-16 && j1 % 32 == 0 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORQ <v.Type> (SHLQconst <v.Type> [j1] (BSWAPL <typ.UInt32> (MOVLloadidx1 [i0] {s} p idx mem))) y)
}
func rewriteValueAMD64_OpAMD64ORQ_160(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQ or:(ORQ y s1:(SHLQconst [j1] r1:(ROLWconst [8] x1:(MOVWloadidx1 [i1] {s} idx p mem)))) s0:(SHLQconst [j0] r0:(ROLWconst [8] x0:(MOVWloadidx1 [i0] {s} p idx mem))))
// cond: i1 == i0+2 && j1 == j0-16 && j1 % 32 == 0 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORQ <v.Type> (SHLQconst <v.Type> [j1] (BSWAPL <typ.UInt32> (MOVLloadidx1 [i0] {s} p idx mem))) y)
}
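// Illustrative sketch, not part of the generated file: the ORL/ORQ chunks above
// implement load merging. When single-use byte or word loads from consecutive
// offsets are OR-ed together at matching shift positions (the x.Uses/s.Uses and
// clobber conditions), the whole tree is replaced at mergePoint(b,x0,x1) by one
// wider load; the ROLWconst/BSWAP variants cover the byte-reversed order. A
// hypothetical source shape these rules target:
func load32le(p []byte) uint32 {
	_ = p[3] // single bounds check
	return uint32(p[0]) | uint32(p[1])<<8 | uint32(p[2])<<16 | uint32(p[3])<<24
}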
func rewriteValueAMD64_OpAMD64ORQconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ORQconst [c] x)
// cond: isUint64PowerOfTwo(c) && uint64(c) >= 128 && !config.nacl
// result: (BTSQconst [log2(c)] x)
}
func rewriteValueAMD64_OpAMD64ORQload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORQload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (ORQload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64SARL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SARL x (MOVQconst [c]))
// cond:
// result: (SARLconst [c&31] x)
}
func rewriteValueAMD64_OpAMD64SARQ_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SARQ x (MOVQconst [c]))
// cond:
// result: (SARQconst [c&63] x)
}
func rewriteValueAMD64_OpAMD64SETAEstore_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SETAEstore [off] {sym} ptr (InvertFlags x) mem)
// cond:
// result: (SETBEstore [off] {sym} ptr x mem)
}
func rewriteValueAMD64_OpAMD64SETAstore_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SETAstore [off] {sym} ptr (InvertFlags x) mem)
// cond:
// result: (SETBstore [off] {sym} ptr x mem)
}
func rewriteValueAMD64_OpAMD64SETBEstore_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SETBEstore [off] {sym} ptr (InvertFlags x) mem)
// cond:
// result: (SETAEstore [off] {sym} ptr x mem)
}
func rewriteValueAMD64_OpAMD64SETBstore_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SETBstore [off] {sym} ptr (InvertFlags x) mem)
// cond:
// result: (SETAstore [off] {sym} ptr x mem)
}
func rewriteValueAMD64_OpAMD64SETEQ_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETEQ (TESTL (SHLL (MOVLconst [1]) x) y))
// cond: !config.nacl
// result: (SETAE (BTL x y))
}
func rewriteValueAMD64_OpAMD64SETEQ_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETEQ (TESTQ z1:(SHLQconst [63] (SHRQconst [63] x)) z2))
// cond: z1==z2 && !config.nacl
// result: (SETAE (BTQconst [63] x))
}
func rewriteValueAMD64_OpAMD64SETEQ_20(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETEQ (TESTL z1:(SHRLconst [31] x) z2))
// cond: z1==z2 && !config.nacl
// result: (SETAE (BTLconst [31] x))
}
func rewriteValueAMD64_OpAMD64SETEQstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETEQstore [off] {sym} ptr (TESTL (SHLL (MOVLconst [1]) x) y) mem)
// cond: !config.nacl
// result: (SETAEstore [off] {sym} ptr (BTL x y) mem)
}
func rewriteValueAMD64_OpAMD64SETEQstore_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETEQstore [off] {sym} ptr (TESTQ z1:(SHLQconst [63] (SHRQconst [63] x)) z2) mem)
// cond: z1==z2 && !config.nacl
// result: (SETAEstore [off] {sym} ptr (BTQconst [63] x) mem)
}
func rewriteValueAMD64_OpAMD64SETEQstore_20(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETEQstore [off] {sym} ptr (TESTL z1:(SHRLconst [31] x) z2) mem)
// cond: z1==z2 && !config.nacl
// result: (SETAEstore [off] {sym} ptr (BTLconst [31] x) mem)
}
func rewriteValueAMD64_OpAMD64SETGEstore_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SETGEstore [off] {sym} ptr (InvertFlags x) mem)
// cond:
// result: (SETLEstore [off] {sym} ptr x mem)
}
func rewriteValueAMD64_OpAMD64SETGstore_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SETGstore [off] {sym} ptr (InvertFlags x) mem)
// cond:
// result: (SETLstore [off] {sym} ptr x mem)
}
func rewriteValueAMD64_OpAMD64SETLEstore_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SETLEstore [off] {sym} ptr (InvertFlags x) mem)
// cond:
// result: (SETGEstore [off] {sym} ptr x mem)
}
func rewriteValueAMD64_OpAMD64SETLstore_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SETLstore [off] {sym} ptr (InvertFlags x) mem)
// cond:
// result: (SETGstore [off] {sym} ptr x mem)
}
func rewriteValueAMD64_OpAMD64SETNE_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETNE (TESTL (SHLL (MOVLconst [1]) x) y))
// cond: !config.nacl
// result: (SETB (BTL x y))
}
func rewriteValueAMD64_OpAMD64SETNE_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETNE (TESTQ z1:(SHLQconst [63] (SHRQconst [63] x)) z2))
// cond: z1==z2 && !config.nacl
// result: (SETB (BTQconst [63] x))
}
func rewriteValueAMD64_OpAMD64SETNE_20(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETNE (TESTL z1:(SHRLconst [31] x) z2))
// cond: z1==z2 && !config.nacl
// result: (SETB (BTLconst [31] x))
}
func rewriteValueAMD64_OpAMD64SETNEstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETNEstore [off] {sym} ptr (TESTL (SHLL (MOVLconst [1]) x) y) mem)
// cond: !config.nacl
// result: (SETBstore [off] {sym} ptr (BTL x y) mem)
}
func rewriteValueAMD64_OpAMD64SETNEstore_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETNEstore [off] {sym} ptr (TESTQ z1:(SHLQconst [63] (SHRQconst [63] x)) z2) mem)
// cond: z1==z2 && !config.nacl
// result: (SETBstore [off] {sym} ptr (BTQconst [63] x) mem)
}
func rewriteValueAMD64_OpAMD64SETNEstore_20(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SETNEstore [off] {sym} ptr (TESTL z1:(SHRLconst [31] x) z2) mem)
// cond: z1==z2 && !config.nacl
// result: (SETBstore [off] {sym} ptr (BTLconst [31] x) mem)
}
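// Illustrative sketch, not part of the generated file: the SETEQ/SETNE rules
// above (and their *store forms) recognize a test of one shifted bit and, when
// not targeting nacl, lower it to a BT-family instruction, reading the answer
// from the carry flag: SETAE for "bit clear", SETB for "bit set". A hypothetical
// source shape they target:
func hasBit(x uint64, n uint) bool {
	return x&(1<<n) != 0
}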
func rewriteValueAMD64_OpAMD64SHLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SHLL x (MOVQconst [c]))
// cond:
// result: (SHLLconst [c&31] x)
}
func rewriteValueAMD64_OpAMD64SHLLconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SHLLconst [1] (SHRLconst [1] x))
// cond: !config.nacl
// result: (BTRLconst [0] x)
}
func rewriteValueAMD64_OpAMD64SHLQ_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SHLQ x (MOVQconst [c]))
// cond:
// result: (SHLQconst [c&63] x)
}
func rewriteValueAMD64_OpAMD64SHLQconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SHLQconst [1] (SHRQconst [1] x))
// cond: !config.nacl
// result: (BTRQconst [0] x)
}
func rewriteValueAMD64_OpAMD64SHRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SHRL x (MOVQconst [c]))
// cond:
// result: (SHRLconst [c&31] x)
}
func rewriteValueAMD64_OpAMD64SHRLconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SHRLconst [1] (SHLLconst [1] x))
// cond: !config.nacl
// result: (BTRLconst [31] x)
}
func rewriteValueAMD64_OpAMD64SHRQ_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SHRQ x (MOVQconst [c]))
// cond:
// result: (SHRQconst [c&63] x)
}
func rewriteValueAMD64_OpAMD64SHRQconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SHRQconst [1] (SHLQconst [1] x))
// cond: !config.nacl
// result: (BTRQconst [63] x)
}
func rewriteValueAMD64_OpAMD64SUBL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBL x (MOVLconst [c]))
// cond:
// result: (SUBLconst x [c])
}
func rewriteValueAMD64_OpAMD64SUBLload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SUBLload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (SUBLload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64SUBQ_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBQ x (MOVQconst [c]))
// cond: is32Bit(c)
// result: (SUBQconst x [c])
}
func rewriteValueAMD64_OpAMD64SUBQload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SUBQload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (SUBQload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64SUBSDload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SUBSDload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (SUBSDload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64SUBSSload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SUBSSload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (SUBSSload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64TESTB_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TESTB (MOVLconst [c]) x)
// cond:
// result: (TESTBconst [c] x)
}
func rewriteValueAMD64_OpAMD64TESTL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TESTL (MOVLconst [c]) x)
// cond:
// result: (TESTLconst [c] x)
}
func rewriteValueAMD64_OpAMD64TESTQ_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TESTQ (MOVQconst [c]) x)
// cond: is32Bit(c)
// result: (TESTQconst [c] x)
}
func rewriteValueAMD64_OpAMD64TESTW_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TESTW (MOVLconst [c]) x)
// cond:
// result: (TESTWconst [c] x)
}
func rewriteValueAMD64_OpAMD64XORL_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (XORL (SHLL (MOVLconst [1]) y) x)
// cond: !config.nacl
// result: (BTCL x y)
}
func rewriteValueAMD64_OpAMD64XORLconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (XORLconst [c] x)
// cond: isUint32PowerOfTwo(c) && uint64(c) >= 128 && !config.nacl
// result: (BTCLconst [log2uint32(c)] x)
}
func rewriteValueAMD64_OpAMD64XORLload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (XORLload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (XORLload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAMD64XORQ_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (XORQ (SHLQ (MOVQconst [1]) y) x)
// cond: !config.nacl
// result: (BTCQ x y)
}
func rewriteValueAMD64_OpAMD64XORQconst_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (XORQconst [c] x)
// cond: isUint64PowerOfTwo(c) && uint64(c) >= 128 && !config.nacl
// result: (BTCQconst [log2(c)] x)
}
func rewriteValueAMD64_OpAMD64XORQload_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (XORQload [off1] {sym} val (ADDQconst [off2] base) mem)
// cond: is32Bit(off1+off2)
// result: (XORQload [off1+off2] {sym} val base mem)
}
func rewriteValueAMD64_OpAddPtr_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (AddPtr x y)
// cond: config.PtrSize == 8
// result: (ADDQ x y)
}
func rewriteValueAMD64_OpAddr_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (Addr {sym} base)
// cond: config.PtrSize == 8
// result: (LEAQ {sym} base)
}
func rewriteValueAMD64_OpAtomicAdd32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (AtomicAdd32 ptr val mem)
// cond:
// result: (AddTupleFirst32 val (XADDLlock val ptr mem))
}
func rewriteValueAMD64_OpAtomicAdd64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (AtomicAdd64 ptr val mem)
// cond:
// result: (AddTupleFirst64 val (XADDQlock val ptr mem))
}
func rewriteValueAMD64_OpAtomicLoadPtr_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (AtomicLoadPtr ptr mem)
// cond: config.PtrSize == 8
// result: (MOVQatomicload ptr mem)
}
func rewriteValueAMD64_OpAtomicStore32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (AtomicStore32 ptr val mem)
// cond:
// result: (Select1 (XCHGL <types.NewTuple(typ.UInt32,types.TypeMem)> val ptr mem))
}
func rewriteValueAMD64_OpAtomicStore64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (AtomicStore64 ptr val mem)
// cond:
// result: (Select1 (XCHGQ <types.NewTuple(typ.UInt64,types.TypeMem)> val ptr mem))
}
func rewriteValueAMD64_OpAtomicStorePtrNoWB_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (AtomicStorePtrNoWB ptr val mem)
// cond: config.PtrSize == 8
// result: (Select1 (XCHGQ <types.NewTuple(typ.BytePtr,types.TypeMem)> val ptr mem))
}
func rewriteValueAMD64_OpBitLen16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitLen16 x)
// cond:
// result: (BSRL (LEAL1 <typ.UInt32> [1] (MOVWQZX <typ.UInt32> x) (MOVWQZX <typ.UInt32> x)))
}
func rewriteValueAMD64_OpBitLen32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitLen32 x)
// cond:
// result: (Select0 (BSRQ (LEAQ1 <typ.UInt64> [1] (MOVLQZX <typ.UInt64> x) (MOVLQZX <typ.UInt64> x))))
}
func rewriteValueAMD64_OpBitLen64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitLen64 <t> x)
// cond:
// result: (ADDQconst [1] (CMOVQEQ <t> (Select0 <t> (BSRQ x)) (MOVQconst <t> [-1]) (Select1 <types.TypeFlags> (BSRQ x))))
}
func rewriteValueAMD64_OpBitLen8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitLen8 x)
// cond:
// result: (BSRL (LEAL1 <typ.UInt32> [1] (MOVBQZX <typ.UInt32> x) (MOVBQZX <typ.UInt32> x)))
}
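// Illustrative sketch, not part of the generated file: the BitLen rules above
// rely on BitLen(x) == BSR(2*x + 1). The LEA computes x+x+1, which is never
// zero, so BSR's undefined zero-input case goes away and the 8/16/32-bit forms
// need no conditional move; only BitLen64 keeps the CMOVQEQ guard. Reference
// semantics, for comparison:
func bitLen32(x uint32) (n int) {
	for ; x != 0; x >>= 1 {
		n++
	}
	return n
}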
func rewriteValueAMD64_OpCondSelect_40(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (CondSelect <t> x y (SETGF cond))
// cond: is16BitInt(t)
// result: (CMOVWGTF y x cond)
}
func rewriteValueAMD64_OpConstNil_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (ConstNil)
// cond: config.PtrSize == 8
// result: (MOVQconst [0])
}
func rewriteValueAMD64_OpCtz16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz16 x)
// cond:
// result: (BSFL (BTSLconst <typ.UInt32> [16] x))
}
func rewriteValueAMD64_OpCtz32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz32 x)
// cond:
// result: (Select0 (BSFQ (BTSQconst <typ.UInt64> [32] x)))
}
func rewriteValueAMD64_OpCtz64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz64 <t> x)
// cond:
// result: (CMOVQEQ (Select0 <t> (BSFQ x)) (MOVQconst <t> [64]) (Select1 <types.TypeFlags> (BSFQ x)))
}
func rewriteValueAMD64_OpCtz64NonZero_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz64NonZero x)
// cond:
// result: (Select0 (BSFQ x))
}
func rewriteValueAMD64_OpCtz8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz8 x)
// cond:
// result: (BSFL (BTSLconst <typ.UInt32> [ 8] x))
}
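// Illustrative sketch, not part of the generated file: the Ctz rules above make
// BSF safe on a zero input by first setting a sentinel bit just past the
// operand width (BTSLconst [8] and [16], BTSQconst [32]). A zero input then
// reports exactly 8, 16 or 32, which is what math/bits.TrailingZeros requires,
// so only Ctz64 needs the CMOVQEQ fallback. Reference semantics:
func ctz16(x uint16) (n int) {
	if x == 0 {
		return 16
	}
	for x&1 == 0 {
		x >>= 1
		n++
	}
	return n
}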
func rewriteValueAMD64_OpDiv16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16 [a] x y)
// cond:
// result: (Select0 (DIVW [a] x y))
}
func rewriteValueAMD64_OpDiv16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16u x y)
// cond:
// result: (Select0 (DIVWU x y))
}
func rewriteValueAMD64_OpDiv32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32 [a] x y)
// cond:
// result: (Select0 (DIVL [a] x y))
}
func rewriteValueAMD64_OpDiv32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32u x y)
// cond:
// result: (Select0 (DIVLU x y))
}
func rewriteValueAMD64_OpDiv64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div64 [a] x y)
// cond:
// result: (Select0 (DIVQ [a] x y))
}
func rewriteValueAMD64_OpDiv64u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div64u x y)
// cond:
// result: (Select0 (DIVQU x y))
}
func rewriteValueAMD64_OpDiv8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8 x y)
// cond:
// result: (Select0 (DIVW (SignExt8to16 x) (SignExt8to16 y)))
}
func rewriteValueAMD64_OpDiv8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8u x y)
// cond:
// result: (Select0 (DIVWU (ZeroExt8to16 x) (ZeroExt8to16 y)))
}
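// Illustrative sketch, not part of the generated file: quotient and remainder
// come out of the same machine instruction, so Div* takes Select0 of a
// DIVW/DIVL/DIVQ tuple here while the Mod* rules further down take Select1 of
// the same shape; the 8-bit forms are sign- or zero-extended to 16 bits first.
// A pair like this is typically lowered to a single divide:
func divmod(x, y int32) (q, r int32) {
	return x / y, x % y // one DIVL; Select0 is the quotient, Select1 the remainder
}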
func rewriteValueAMD64_OpEq16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq16 x y)
// cond:
// result: (SETEQ (CMPW x y))
}
func rewriteValueAMD64_OpEq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32 x y)
// cond:
// result: (SETEQ (CMPL x y))
}
func rewriteValueAMD64_OpEq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32F x y)
// cond:
// result: (SETEQF (UCOMISS x y))
}
func rewriteValueAMD64_OpEq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq64 x y)
// cond:
// result: (SETEQ (CMPQ x y))
}
func rewriteValueAMD64_OpEq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq64F x y)
// cond:
// result: (SETEQF (UCOMISD x y))
}
func rewriteValueAMD64_OpEq8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq8 x y)
// cond:
// result: (SETEQ (CMPB x y))
}
func rewriteValueAMD64_OpEqB_0(v *Value) bool {
b := v.Block
- _ = b
// match: (EqB x y)
// cond:
// result: (SETEQ (CMPB x y))
}
func rewriteValueAMD64_OpEqPtr_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (EqPtr x y)
// cond: config.PtrSize == 8
// result: (SETEQ (CMPQ x y))
}
func rewriteValueAMD64_OpGeq16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq16 x y)
// cond:
// result: (SETGE (CMPW x y))
}
func rewriteValueAMD64_OpGeq16U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq16U x y)
// cond:
// result: (SETAE (CMPW x y))
}
func rewriteValueAMD64_OpGeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32 x y)
// cond:
// result: (SETGE (CMPL x y))
}
func rewriteValueAMD64_OpGeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32F x y)
// cond:
// result: (SETGEF (UCOMISS x y))
}
func rewriteValueAMD64_OpGeq32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32U x y)
// cond:
// result: (SETAE (CMPL x y))
}
func rewriteValueAMD64_OpGeq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64 x y)
// cond:
// result: (SETGE (CMPQ x y))
}
func rewriteValueAMD64_OpGeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64F x y)
// cond:
// result: (SETGEF (UCOMISD x y))
}
func rewriteValueAMD64_OpGeq64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64U x y)
// cond:
// result: (SETAE (CMPQ x y))
}
func rewriteValueAMD64_OpGeq8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq8 x y)
// cond:
// result: (SETGE (CMPB x y))
}
func rewriteValueAMD64_OpGeq8U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq8U x y)
// cond:
// result: (SETAE (CMPB x y))
}
func rewriteValueAMD64_OpGreater16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater16 x y)
// cond:
// result: (SETG (CMPW x y))
}
func rewriteValueAMD64_OpGreater16U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater16U x y)
// cond:
// result: (SETA (CMPW x y))
}
func rewriteValueAMD64_OpGreater32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32 x y)
// cond:
// result: (SETG (CMPL x y))
}
func rewriteValueAMD64_OpGreater32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32F x y)
// cond:
// result: (SETGF (UCOMISS x y))
}
func rewriteValueAMD64_OpGreater32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32U x y)
// cond:
// result: (SETA (CMPL x y))
}
func rewriteValueAMD64_OpGreater64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64 x y)
// cond:
// result: (SETG (CMPQ x y))
}
func rewriteValueAMD64_OpGreater64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64F x y)
// cond:
// result: (SETGF (UCOMISD x y))
}
func rewriteValueAMD64_OpGreater64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64U x y)
// cond:
// result: (SETA (CMPQ x y))
}
func rewriteValueAMD64_OpGreater8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater8 x y)
// cond:
// result: (SETG (CMPB x y))
}
func rewriteValueAMD64_OpGreater8U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater8U x y)
// cond:
// result: (SETA (CMPB x y))
}
func rewriteValueAMD64_OpIsInBounds_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (IsInBounds idx len)
// cond: config.PtrSize == 8
// result: (SETB (CMPQ idx len))
}
func rewriteValueAMD64_OpIsNonNil_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (IsNonNil p)
// cond: config.PtrSize == 8
// result: (SETNE (TESTQ p p))
}
func rewriteValueAMD64_OpIsSliceInBounds_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (IsSliceInBounds idx len)
// cond: config.PtrSize == 8
// result: (SETBE (CMPQ idx len))
}
func rewriteValueAMD64_OpLeq16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq16 x y)
// cond:
// result: (SETLE (CMPW x y))
}
func rewriteValueAMD64_OpLeq16U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq16U x y)
// cond:
// result: (SETBE (CMPW x y))
}
func rewriteValueAMD64_OpLeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32 x y)
// cond:
// result: (SETLE (CMPL x y))
}
func rewriteValueAMD64_OpLeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32F x y)
// cond:
// result: (SETGEF (UCOMISS y x))
}
func rewriteValueAMD64_OpLeq32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32U x y)
// cond:
// result: (SETBE (CMPL x y))
}
func rewriteValueAMD64_OpLeq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64 x y)
// cond:
// result: (SETLE (CMPQ x y))
}
func rewriteValueAMD64_OpLeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64F x y)
// cond:
// result: (SETGEF (UCOMISD y x))
}
func rewriteValueAMD64_OpLeq64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64U x y)
// cond:
// result: (SETBE (CMPQ x y))
}
func rewriteValueAMD64_OpLeq8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq8 x y)
// cond:
// result: (SETLE (CMPB x y))
}
func rewriteValueAMD64_OpLeq8U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq8U x y)
// cond:
// result: (SETBE (CMPB x y))
}
func rewriteValueAMD64_OpLess16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less16 x y)
// cond:
// result: (SETL (CMPW x y))
}
func rewriteValueAMD64_OpLess16U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less16U x y)
// cond:
// result: (SETB (CMPW x y))
}
func rewriteValueAMD64_OpLess32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32 x y)
// cond:
// result: (SETL (CMPL x y))
}
func rewriteValueAMD64_OpLess32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32F x y)
// cond:
// result: (SETGF (UCOMISS y x))
}
func rewriteValueAMD64_OpLess32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32U x y)
// cond:
// result: (SETB (CMPL x y))
}
func rewriteValueAMD64_OpLess64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64 x y)
// cond:
// result: (SETL (CMPQ x y))
}
func rewriteValueAMD64_OpLess64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64F x y)
// cond:
// result: (SETGF (UCOMISD y x))
}
func rewriteValueAMD64_OpLess64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64U x y)
// cond:
// result: (SETB (CMPQ x y))
}
func rewriteValueAMD64_OpLess8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less8 x y)
// cond:
// result: (SETL (CMPB x y))
}
func rewriteValueAMD64_OpLess8U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less8U x y)
// cond:
// result: (SETB (CMPB x y))
}
func rewriteValueAMD64_OpLoad_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (Load <t> ptr mem)
// cond: (is64BitInt(t) || isPtr(t) && config.PtrSize == 8)
// result: (MOVQload ptr mem)
}
func rewriteValueAMD64_OpLocalAddr_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (LocalAddr {sym} base _)
// cond: config.PtrSize == 8
// result: (LEAQ {sym} base)
}
func rewriteValueAMD64_OpLsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPWconst y [32])))
}
func rewriteValueAMD64_OpLsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPLconst y [32])))
}
func rewriteValueAMD64_OpLsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPQconst y [32])))
}
func rewriteValueAMD64_OpLsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPBconst y [32])))
}
func rewriteValueAMD64_OpLsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPWconst y [32])))
}
func rewriteValueAMD64_OpLsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPLconst y [32])))
}
func rewriteValueAMD64_OpLsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPQconst y [32])))
}
func rewriteValueAMD64_OpLsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPBconst y [32])))
}
func rewriteValueAMD64_OpLsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh64x16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDQ (SHLQ <t> x y) (SBBQcarrymask <t> (CMPWconst y [64])))
}
func rewriteValueAMD64_OpLsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh64x32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDQ (SHLQ <t> x y) (SBBQcarrymask <t> (CMPLconst y [64])))
}
func rewriteValueAMD64_OpLsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh64x64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDQ (SHLQ <t> x y) (SBBQcarrymask <t> (CMPQconst y [64])))
}
func rewriteValueAMD64_OpLsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh64x8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDQ (SHLQ <t> x y) (SBBQcarrymask <t> (CMPBconst y [64])))
}
func rewriteValueAMD64_OpLsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPWconst y [32])))
}
func rewriteValueAMD64_OpLsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPLconst y [32])))
}
func rewriteValueAMD64_OpLsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPQconst y [32])))
}
func rewriteValueAMD64_OpLsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHLL <t> x y) (SBBLcarrymask <t> (CMPBconst y [32])))
}
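// Illustrative sketch, not part of the generated file: x86 shift instructions
// reduce the count mod 32 or 64, but the Go spec says a shift by at least the
// operand width yields 0. When shiftIsBounded can't prove the count is small,
// the Lsh rules above AND the raw SHL result with an SBB carry-mask that is all
// ones for an in-range count and all zeros otherwise. The branchy equivalent:
func shl64(x uint64, y uint) uint64 {
	if y >= 64 {
		return 0 // what the carry-mask AND encodes without a branch
	}
	return x << y
}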
func rewriteValueAMD64_OpMod16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16 [a] x y)
// cond:
// result: (Select1 (DIVW [a] x y))
}
func rewriteValueAMD64_OpMod16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16u x y)
// cond:
// result: (Select1 (DIVWU x y))
}
func rewriteValueAMD64_OpMod32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32 [a] x y)
// cond:
// result: (Select1 (DIVL [a] x y))
}
func rewriteValueAMD64_OpMod32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32u x y)
// cond:
// result: (Select1 (DIVLU x y))
}
func rewriteValueAMD64_OpMod64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod64 [a] x y)
// cond:
// result: (Select1 (DIVQ [a] x y))
}
func rewriteValueAMD64_OpMod64u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod64u x y)
// cond:
// result: (Select1 (DIVQU x y))
}
func rewriteValueAMD64_OpMod8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8 x y)
// cond:
// result: (Select1 (DIVW (SignExt8to16 x) (SignExt8to16 y)))
}
func rewriteValueAMD64_OpMod8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8u x y)
// cond:
// result: (Select1 (DIVWU (ZeroExt8to16 x) (ZeroExt8to16 y)))
}
func rewriteValueAMD64_OpMove_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [0] _ _ mem)
// cond:
// result: mem
}
func rewriteValueAMD64_OpMove_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [3] dst src mem)
// cond:
// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVWstore dst (MOVWload src mem) mem))
}
func rewriteValueAMD64_OpMove_20(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [s] dst src mem)
// cond: s > 16 && s%16 != 0 && s%16 > 8 && !config.useSSE
// result: (Move [s-s%16] (OffPtr <dst.Type> dst [s%16]) (OffPtr <src.Type> src [s%16]) (MOVQstore [8] dst (MOVQload [8] src mem) (MOVQstore dst (MOVQload src mem) mem)))
}
func rewriteValueAMD64_OpNeg32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neg32F x)
// cond:
// result: (PXOR x (MOVSSconst <typ.Float32> [auxFrom32F(float32(math.Copysign(0, -1)))]))
}
func rewriteValueAMD64_OpNeg64F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neg64F x)
// cond:
// result: (PXOR x (MOVSDconst <typ.Float64> [auxFrom64F(math.Copysign(0, -1))]))
}
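// Illustrative sketch, not part of the generated file: float negation is
// lowered to an XOR with negative zero (math.Copysign(0, -1)), i.e. a constant
// whose only set bit is the sign bit, so zeros and NaNs negate correctly
// without an arithmetic subtraction:
func negate(x float64) float64 {
	return -x // per the Neg64F rule above: PXOR with the -0.0 bit pattern
}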
func rewriteValueAMD64_OpNeq16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq16 x y)
// cond:
// result: (SETNE (CMPW x y))
}
func rewriteValueAMD64_OpNeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32 x y)
// cond:
// result: (SETNE (CMPL x y))
}
func rewriteValueAMD64_OpNeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32F x y)
// cond:
// result: (SETNEF (UCOMISS x y))
}
func rewriteValueAMD64_OpNeq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq64 x y)
// cond:
// result: (SETNE (CMPQ x y))
}
func rewriteValueAMD64_OpNeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq64F x y)
// cond:
// result: (SETNEF (UCOMISD x y))
}
func rewriteValueAMD64_OpNeq8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq8 x y)
// cond:
// result: (SETNE (CMPB x y))
}
func rewriteValueAMD64_OpNeqB_0(v *Value) bool {
b := v.Block
- _ = b
// match: (NeqB x y)
// cond:
// result: (SETNE (CMPB x y))
}
func rewriteValueAMD64_OpNeqPtr_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (NeqPtr x y)
// cond: config.PtrSize == 8
// result: (SETNE (CMPQ x y))
}
func rewriteValueAMD64_OpOffPtr_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (OffPtr [off] ptr)
// cond: config.PtrSize == 8 && is32Bit(off)
// result: (ADDQconst [off] ptr)
}
func rewriteValueAMD64_OpPopCount16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount16 x)
// cond:
// result: (POPCNTL (MOVWQZX <typ.UInt32> x))
}
func rewriteValueAMD64_OpPopCount8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount8 x)
// cond:
// result: (POPCNTL (MOVBQZX <typ.UInt32> x))
}
func rewriteValueAMD64_OpRsh16Ux16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16Ux16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRW <t> x y) (SBBLcarrymask <t> (CMPWconst y [16])))
}
func rewriteValueAMD64_OpRsh16Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16Ux32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRW <t> x y) (SBBLcarrymask <t> (CMPLconst y [16])))
}
func rewriteValueAMD64_OpRsh16Ux64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16Ux64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRW <t> x y) (SBBLcarrymask <t> (CMPQconst y [16])))
}
func rewriteValueAMD64_OpRsh16Ux8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16Ux8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRW <t> x y) (SBBLcarrymask <t> (CMPBconst y [16])))
}
func rewriteValueAMD64_OpRsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16x16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARW <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPWconst y [16])))))
}
func rewriteValueAMD64_OpRsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16x32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARW <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPLconst y [16])))))
}
func rewriteValueAMD64_OpRsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16x64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARW <t> x (ORQ <y.Type> y (NOTQ <y.Type> (SBBQcarrymask <y.Type> (CMPQconst y [16])))))
}
func rewriteValueAMD64_OpRsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16x8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARW <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPBconst y [16])))))
}
func rewriteValueAMD64_OpRsh32Ux16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32Ux16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRL <t> x y) (SBBLcarrymask <t> (CMPWconst y [32])))
}
func rewriteValueAMD64_OpRsh32Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32Ux32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRL <t> x y) (SBBLcarrymask <t> (CMPLconst y [32])))
}
func rewriteValueAMD64_OpRsh32Ux64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32Ux64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRL <t> x y) (SBBLcarrymask <t> (CMPQconst y [32])))
}
func rewriteValueAMD64_OpRsh32Ux8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32Ux8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRL <t> x y) (SBBLcarrymask <t> (CMPBconst y [32])))
}
func rewriteValueAMD64_OpRsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARL <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPWconst y [32])))))
}
func rewriteValueAMD64_OpRsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARL <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPLconst y [32])))))
}
func rewriteValueAMD64_OpRsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARL <t> x (ORQ <y.Type> y (NOTQ <y.Type> (SBBQcarrymask <y.Type> (CMPQconst y [32])))))
}
func rewriteValueAMD64_OpRsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARL <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPBconst y [32])))))
}
func rewriteValueAMD64_OpRsh64Ux16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64Ux16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDQ (SHRQ <t> x y) (SBBQcarrymask <t> (CMPWconst y [64])))
}
func rewriteValueAMD64_OpRsh64Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64Ux32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDQ (SHRQ <t> x y) (SBBQcarrymask <t> (CMPLconst y [64])))
}
func rewriteValueAMD64_OpRsh64Ux64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64Ux64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDQ (SHRQ <t> x y) (SBBQcarrymask <t> (CMPQconst y [64])))
}
func rewriteValueAMD64_OpRsh64Ux8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64Ux8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDQ (SHRQ <t> x y) (SBBQcarrymask <t> (CMPBconst y [64])))
}
func rewriteValueAMD64_OpRsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64x16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARQ <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPWconst y [64])))))
}
func rewriteValueAMD64_OpRsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64x32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARQ <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPLconst y [64])))))
}
func rewriteValueAMD64_OpRsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64x64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARQ <t> x (ORQ <y.Type> y (NOTQ <y.Type> (SBBQcarrymask <y.Type> (CMPQconst y [64])))))
}
func rewriteValueAMD64_OpRsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64x8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARQ <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPBconst y [64])))))
}
func rewriteValueAMD64_OpRsh8Ux16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8Ux16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRB <t> x y) (SBBLcarrymask <t> (CMPWconst y [8])))
}
func rewriteValueAMD64_OpRsh8Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8Ux32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRB <t> x y) (SBBLcarrymask <t> (CMPLconst y [8])))
}
func rewriteValueAMD64_OpRsh8Ux64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8Ux64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRB <t> x y) (SBBLcarrymask <t> (CMPQconst y [8])))
}
func rewriteValueAMD64_OpRsh8Ux8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8Ux8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (ANDL (SHRB <t> x y) (SBBLcarrymask <t> (CMPBconst y [8])))
}
func rewriteValueAMD64_OpRsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8x16 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARB <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPWconst y [8])))))
}
func rewriteValueAMD64_OpRsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8x32 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARB <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPLconst y [8])))))
}
func rewriteValueAMD64_OpRsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8x64 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARB <t> x (ORQ <y.Type> y (NOTQ <y.Type> (SBBQcarrymask <y.Type> (CMPQconst y [8])))))
}
func rewriteValueAMD64_OpRsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8x8 <t> x y)
// cond: !shiftIsBounded(v)
// result: (SARB <t> x (ORL <y.Type> y (NOTL <y.Type> (SBBLcarrymask <y.Type> (CMPBconst y [8])))))
}
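// Illustrative sketch, not part of the generated file: signed right shifts
// can't be masked to zero like the unsigned forms above; a count of at least
// the operand width must fill with the sign bit. These rules therefore clamp
// the count: NOT(SBBcarrymask) is all ones exactly when the count is out of
// range, and OR-ing that in turns an oversized count into a maximal shift,
// giving 0 or -1 by sign. The branchy equivalent:
func sra32(x int32, y uint) int32 {
	if y >= 32 {
		y = 31 // sign fill: 0 for non-negative x, -1 for negative x
	}
	return x >> y
}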
func rewriteValueAMD64_OpSelect0_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Select0 (Mul64uover x y))
// cond:
// result: (Select0 <typ.UInt64> (MULQU x y))
}
func rewriteValueAMD64_OpSelect1_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Select1 (Mul64uover x y))
// cond:
// result: (SETO (Select1 <types.TypeFlags> (MULQU x y)))
}
func rewriteValueAMD64_OpSlicemask_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Slicemask <t> x)
// cond:
// result: (SARQconst (NEGQ <t> x) [63])
}
func rewriteValueAMD64_OpSubPtr_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (SubPtr x y)
// cond: config.PtrSize == 8
// result: (SUBQ x y)
}
func rewriteValueAMD64_OpZero_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (Zero [0] _ mem)
// cond:
// result: mem
}
func rewriteValueAMD64_OpZero_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (Zero [16] destptr mem)
// cond: !config.useSSE
// result: (MOVQstoreconst [makeValAndOff(0,8)] destptr (MOVQstoreconst [0] destptr mem))
}
func rewriteValueAMD64_OpZero_20(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [s] destptr mem)
// cond: s > 64 && s <= 1024 && s%16 == 0 && !config.noDuffDevice
// result: (DUFFZERO [s] destptr (MOVOconst [0]) mem)
}
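// Illustrative sketch, not part of the generated file: Zero and Move pick a
// strategy by size and configuration. Small or ragged sizes become a short
// fixed sequence of stores (the Move [s] rule above peels the odd remainder
// first), while 16-byte multiples larger than 64 and at most 1024 bytes use
// DUFFZERO when Duff's device is allowed. A hypothetical source shape that
// hits the DUFFZERO rule:
func clear256(p *[256]byte) {
	*p = [256]byte{} // Zero [256]: above 64, at most 1024, multiple of 16
}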
func rewriteValueARM_OpARMADCshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADCshiftLL (MOVWconst [c]) x [d] flags)
// cond:
// result: (ADCconst [c] (SLLconst <x.Type> x [d]) flags)
}
func rewriteValueARM_OpARMADCshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADCshiftLLreg (MOVWconst [c]) x y flags)
// cond:
// result: (ADCconst [c] (SLL <x.Type> x y) flags)
}
func rewriteValueARM_OpARMADCshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADCshiftRA (MOVWconst [c]) x [d] flags)
// cond:
// result: (ADCconst [c] (SRAconst <x.Type> x [d]) flags)
}
func rewriteValueARM_OpARMADCshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADCshiftRAreg (MOVWconst [c]) x y flags)
// cond:
// result: (ADCconst [c] (SRA <x.Type> x y) flags)
}
func rewriteValueARM_OpARMADCshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADCshiftRL (MOVWconst [c]) x [d] flags)
// cond:
// result: (ADCconst [c] (SRLconst <x.Type> x [d]) flags)
}
func rewriteValueARM_OpARMADCshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADCshiftRLreg (MOVWconst [c]) x y flags)
// cond:
// result: (ADCconst [c] (SRL <x.Type> x y) flags)
}
func rewriteValueARM_OpARMADD_10(v *Value) bool {
b := v.Block
- _ = b
// match: (ADD x (SRL y z))
// cond:
// result: (ADDshiftRLreg x y z)
}
func rewriteValueARM_OpARMADDSshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDSshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (ADDSconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMADDSshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDSshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (ADDSconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMADDSshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDSshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (ADDSconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMADDSshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDSshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (ADDSconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMADDSshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDSshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (ADDSconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMADDSshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDSshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (ADDSconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMADDshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ADDshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (ADDconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMADDshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (ADDconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMADDshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (ADDconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMADDshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (ADDconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMADDshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (ADDconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMADDshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (ADDconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMANDshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ANDshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (ANDconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMANDshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ANDshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (ANDconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMANDshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ANDshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (ANDconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMANDshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ANDshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (ANDconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMANDshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ANDshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (ANDconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMANDshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ANDshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (ANDconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMCMNshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMNshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (CMNconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMCMNshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMNshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (CMNconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMCMNshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMNshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (CMNconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMCMNshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMNshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (CMNconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMCMNshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMNshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (CMNconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMCMNshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMNshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (CMNconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMCMP_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMP x (MOVWconst [c]))
// cond:
// result: (CMPconst [c] x)
}
func rewriteValueARM_OpARMCMP_10(v *Value) bool {
b := v.Block
- _ = b
// match: (CMP x (SRL y z))
// cond:
// result: (CMPshiftRLreg x y z)
}
func rewriteValueARM_OpARMCMPshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
}
func rewriteValueARM_OpARMCMPshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (InvertFlags (CMPconst [c] (SLL <x.Type> x y)))
}
func rewriteValueARM_OpARMCMPshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
}
func rewriteValueARM_OpARMCMPshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (InvertFlags (CMPconst [c] (SRA <x.Type> x y)))
}
func rewriteValueARM_OpARMCMPshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
}
func rewriteValueARM_OpARMCMPshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (InvertFlags (CMPconst [c] (SRL <x.Type> x y)))
}
func rewriteValueARM_OpARMMOVBUload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem)
// cond:
// result: (MOVBUload [off1+off2] {sym} ptr mem)
}
func rewriteValueARM_OpARMMOVBload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem)
// cond:
// result: (MOVBload [off1+off2] {sym} ptr mem)
}
func rewriteValueARM_OpARMMOVBstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
// cond:
// result: (MOVBstore [off1+off2] {sym} ptr val mem)
}
func rewriteValueARM_OpARMMOVHUload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
// cond:
// result: (MOVHUload [off1+off2] {sym} ptr mem)
}
func rewriteValueARM_OpARMMOVHload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
// cond:
// result: (MOVHload [off1+off2] {sym} ptr mem)
}
func rewriteValueARM_OpARMMOVHstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem)
// cond:
// result: (MOVHstore [off1+off2] {sym} ptr val mem)
}
func rewriteValueARM_OpARMMOVWload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem)
// cond:
// result: (MOVWload [off1+off2] {sym} ptr mem)
}
func rewriteValueARM_OpARMMOVWstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem)
// cond:
// result: (MOVWstore [off1+off2] {sym} ptr val mem)
}
func rewriteValueARM_OpARMMUL_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MUL x (MOVWconst [c]))
// cond: isPowerOfTwo(c+1) && int32(c) >= 7
// result: (RSBshiftLL x x [log2(c+1)])
}
func rewriteValueARM_OpARMMULA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MULA x (MOVWconst [c]) a)
// cond: int32(c) == -1
// result: (SUB a x)
}
func rewriteValueARM_OpARMMULA_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MULA (MOVWconst [c]) x a)
// cond: int32(c) == -1
// result: (SUB a x)
}
func rewriteValueARM_OpARMMULS_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MULS x (MOVWconst [c]) a)
// cond: int32(c) == -1
// result: (ADD a x)
}
func rewriteValueARM_OpARMMULS_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MULS (MOVWconst [c]) x a)
// cond: int32(c) == -1
// result: (ADD a x)
}
func rewriteValueARM_OpARMORshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (ORconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMORshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ORshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (ORconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMORshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ORshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (ORconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMORshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ORshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (ORconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMORshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ORshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (ORconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMORshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ORshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (ORconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMRSBSshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBSshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (SUBSconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMRSBSshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBSshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (SUBSconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMRSBSshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBSshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (SUBSconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMRSBSshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBSshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (SUBSconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMRSBSshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBSshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (SUBSconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMRSBSshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBSshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (SUBSconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMRSBshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (SUBconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMRSBshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (SUBconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMRSBshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (SUBconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMRSBshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (SUBconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMRSBshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (SUBconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMRSBshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSBshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (SUBconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMRSCshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSCshiftLL (MOVWconst [c]) x [d] flags)
// cond:
// result: (SBCconst [c] (SLLconst <x.Type> x [d]) flags)
}
func rewriteValueARM_OpARMRSCshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSCshiftLLreg (MOVWconst [c]) x y flags)
// cond:
// result: (SBCconst [c] (SLL <x.Type> x y) flags)
}
func rewriteValueARM_OpARMRSCshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSCshiftRA (MOVWconst [c]) x [d] flags)
// cond:
// result: (SBCconst [c] (SRAconst <x.Type> x [d]) flags)
}
func rewriteValueARM_OpARMRSCshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSCshiftRAreg (MOVWconst [c]) x y flags)
// cond:
// result: (SBCconst [c] (SRA <x.Type> x y) flags)
}
func rewriteValueARM_OpARMRSCshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSCshiftRL (MOVWconst [c]) x [d] flags)
// cond:
// result: (SBCconst [c] (SRLconst <x.Type> x [d]) flags)
}
func rewriteValueARM_OpARMRSCshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RSCshiftRLreg (MOVWconst [c]) x y flags)
// cond:
// result: (SBCconst [c] (SRL <x.Type> x y) flags)
}
func rewriteValueARM_OpARMSBCshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SBCshiftLL (MOVWconst [c]) x [d] flags)
// cond:
// result: (RSCconst [c] (SLLconst <x.Type> x [d]) flags)
}
func rewriteValueARM_OpARMSBCshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SBCshiftLLreg (MOVWconst [c]) x y flags)
// cond:
// result: (RSCconst [c] (SLL <x.Type> x y) flags)
}
func rewriteValueARM_OpARMSBCshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SBCshiftRA (MOVWconst [c]) x [d] flags)
// cond:
// result: (RSCconst [c] (SRAconst <x.Type> x [d]) flags)
}
func rewriteValueARM_OpARMSBCshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SBCshiftRAreg (MOVWconst [c]) x y flags)
// cond:
// result: (RSCconst [c] (SRA <x.Type> x y) flags)
}
func rewriteValueARM_OpARMSBCshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SBCshiftRL (MOVWconst [c]) x [d] flags)
// cond:
// result: (RSCconst [c] (SRLconst <x.Type> x [d]) flags)
}
func rewriteValueARM_OpARMSBCshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SBCshiftRLreg (MOVWconst [c]) x y flags)
// cond:
// result: (RSCconst [c] (SRL <x.Type> x y) flags)
}
func rewriteValueARM_OpARMSUBSshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBSshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (RSBSconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMSUBSshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBSshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (RSBSconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMSUBSshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBSshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (RSBSconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMSUBSshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBSshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (RSBSconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMSUBSshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBSshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (RSBSconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMSUBSshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBSshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (RSBSconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMSUBshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (RSBconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMSUBshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (RSBconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMSUBshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (RSBconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMSUBshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (RSBconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMSUBshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (RSBconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMSUBshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (RSBconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMTEQshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TEQshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (TEQconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMTEQshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TEQshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (TEQconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMTEQshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TEQshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (TEQconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMTEQshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TEQshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (TEQconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMTEQshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TEQshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (TEQconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMTEQshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TEQshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (TEQconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMTSTshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TSTshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (TSTconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMTSTshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TSTshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (TSTconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMTSTshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TSTshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (TSTconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMTSTshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TSTshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (TSTconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMTSTshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TSTshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (TSTconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMTSTshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TSTshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (TSTconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMXORshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (XORshiftLL (MOVWconst [c]) x [d])
// cond:
// result: (XORconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMXORshiftLLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (XORshiftLLreg (MOVWconst [c]) x y)
// cond:
// result: (XORconst [c] (SLL <x.Type> x y))
}
func rewriteValueARM_OpARMXORshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (XORshiftRA (MOVWconst [c]) x [d])
// cond:
// result: (XORconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMXORshiftRAreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (XORshiftRAreg (MOVWconst [c]) x y)
// cond:
// result: (XORconst [c] (SRA <x.Type> x y))
}
func rewriteValueARM_OpARMXORshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (XORshiftRL (MOVWconst [c]) x [d])
// cond:
// result: (XORconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM_OpARMXORshiftRLreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (XORshiftRLreg (MOVWconst [c]) x y)
// cond:
// result: (XORconst [c] (SRL <x.Type> x y))
}
func rewriteValueARM_OpARMXORshiftRR_0(v *Value) bool {
b := v.Block
- _ = b
// match: (XORshiftRR (MOVWconst [c]) x [d])
// cond:
// result: (XORconst [c] (SRRconst <x.Type> x [d]))
}
func rewriteValueARM_OpAvg32u_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Avg32u <t> x y)
// cond:
// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
}
func rewriteValueARM_OpBitLen32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (BitLen32 <t> x)
// cond:
// result: (RSBconst [32] (CLZ <t> x))
}
func rewriteValueARM_OpBswap32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Bswap32 <t> x)
// cond: objabi.GOARM==5
// result: (XOR <t> (SRLconst <t> (BICconst <t> (XOR <t> x (SRRconst <t> [16] x)) [0xff0000]) [8]) (SRRconst <t> x [8]))
}
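GOARM==5 means no REV instruction, so this rule open-codes the classic three-step byte swap: rotate, mask with BIC, combine. A small Go check of the identity, with a hypothetical ror helper standing in for SRRconst:

// Verifies the GOARM==5 Bswap32 lowering: with t = x ^ ror(x,16) and byte 2
// of t cleared (BICconst [0xff0000]), (t >> 8) ^ ror(x,8) reverses the bytes.
func bswap32ARMv5(x uint32) uint32 {
	ror := func(v uint32, k uint) uint32 { return v>>k | v<<(32-k) } // SRRconst
	t := x ^ ror(x, 16)
	t &^= 0x00ff0000
	return (t >> 8) ^ ror(x, 8)
}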
func rewriteValueARM_OpCtz32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Ctz32 <t> x)
// cond: objabi.GOARM<=6
// result: (RSBconst [32] (CLZ <t> (SUBconst <t> (AND <t> x (RSBconst <t> [0] x)) [1])))
}
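GOARM<=6 has CLZ but no RBIT, so Ctz32 is built from the lowest-set-bit trick. A sketch of the identity, using math/bits in place of the CLZ instruction (hypothetical helper, assumes import "math/bits"):

// x & -x isolates the lowest set bit; subtracting 1 leaves exactly ctz(x)
// one-bits, so 32 - clz((x & -x) - 1) == ctz(x). For x == 0 the mask is all
// ones and the result is 32, matching Ctz32's definition.
func ctz32NoRBIT(x uint32) int {
	return 32 - bits.LeadingZeros32((x & -x) - 1)
}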
func rewriteValueARM_OpDiv16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16 x y)
// cond:
// result: (Div32 (SignExt16to32 x) (SignExt16to32 y))
}
func rewriteValueARM_OpDiv16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16u x y)
// cond:
// result: (Div32u (ZeroExt16to32 x) (ZeroExt16to32 y))
}
func rewriteValueARM_OpDiv32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32 x y)
// cond:
// result: (SUB (XOR <typ.UInt32> (Select0 <typ.UInt32> (CALLudiv (SUB <typ.UInt32> (XOR x <typ.UInt32> (Signmask x)) (Signmask x)) (SUB <typ.UInt32> (XOR y <typ.UInt32> (Signmask y)) (Signmask y)))) (Signmask (XOR <typ.UInt32> x y))) (Signmask (XOR <typ.UInt32> x y)))
}
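There is no signed hardware divide in this lowering, so Div32 routes through the runtime's unsigned CALLudiv and fixes up signs with Signmask (arithmetic shift by 31). A sketch of the arithmetic the rule encodes; udiv is a hypothetical stand-in for Select0 of CALLudiv. The Mod32 rule further down uses Select1 of the same call and applies the dividend's sign instead.

// (v ^ s) - s with s = v>>31 is |v| in two's complement, and the same form
// with s = (x^y)>>31 puts the correct sign back on the quotient.
func div32ViaUdiv(x, y int32) int32 {
	udiv := func(a, b uint32) uint32 { return a / b } // CALLudiv, Select0
	sx, sy := x>>31, y>>31                            // Signmask x, Signmask y
	q := udiv(uint32((x^sx)-sx), uint32((y^sy)-sy))   // |x| / |y|
	s := (x ^ y) >> 31                                // Signmask (XOR x y)
	return (int32(q) ^ s) - s
}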
func rewriteValueARM_OpDiv32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32u x y)
// cond:
// result: (Select0 <typ.UInt32> (CALLudiv x y))
}
func rewriteValueARM_OpDiv8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8 x y)
// cond:
// result: (Div32 (SignExt8to32 x) (SignExt8to32 y))
}
func rewriteValueARM_OpDiv8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8u x y)
// cond:
// result: (Div32u (ZeroExt8to32 x) (ZeroExt8to32 y))
}
func rewriteValueARM_OpEq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq16 x y)
// cond:
// result: (Equal (CMP (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM_OpEq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32 x y)
// cond:
// result: (Equal (CMP x y))
}
func rewriteValueARM_OpEq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32F x y)
// cond:
// result: (Equal (CMPF x y))
}
func rewriteValueARM_OpEq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq64F x y)
// cond:
// result: (Equal (CMPD x y))
}
func rewriteValueARM_OpEq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq8 x y)
// cond:
// result: (Equal (CMP (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM_OpEqB_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqB x y)
// cond:
// result: (XORconst [1] (XOR <typ.Bool> x y))
}
func rewriteValueARM_OpEqPtr_0(v *Value) bool {
b := v.Block
- _ = b
// match: (EqPtr x y)
// cond:
// result: (Equal (CMP x y))
}
func rewriteValueARM_OpGeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16 x y)
// cond:
// result: (GreaterEqual (CMP (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValueARM_OpGeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16U x y)
// cond:
// result: (GreaterEqualU (CMP (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM_OpGeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32 x y)
// cond:
// result: (GreaterEqual (CMP x y))
}
func rewriteValueARM_OpGeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32F x y)
// cond:
// result: (GreaterEqual (CMPF x y))
}
func rewriteValueARM_OpGeq32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32U x y)
// cond:
// result: (GreaterEqualU (CMP x y))
}
func rewriteValueARM_OpGeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64F x y)
// cond:
// result: (GreaterEqual (CMPD x y))
}
func rewriteValueARM_OpGeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8 x y)
// cond:
// result: (GreaterEqual (CMP (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValueARM_OpGeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8U x y)
// cond:
// result: (GreaterEqualU (CMP (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM_OpGreater16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16 x y)
// cond:
// result: (GreaterThan (CMP (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValueARM_OpGreater16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16U x y)
// cond:
// result: (GreaterThanU (CMP (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM_OpGreater32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32 x y)
// cond:
// result: (GreaterThan (CMP x y))
}
func rewriteValueARM_OpGreater32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32F x y)
// cond:
// result: (GreaterThan (CMPF x y))
}
func rewriteValueARM_OpGreater32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32U x y)
// cond:
// result: (GreaterThanU (CMP x y))
}
func rewriteValueARM_OpGreater64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64F x y)
// cond:
// result: (GreaterThan (CMPD x y))
}
func rewriteValueARM_OpGreater8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8 x y)
// cond:
// result: (GreaterThan (CMP (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValueARM_OpGreater8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8U x y)
// cond:
// result: (GreaterThanU (CMP (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM_OpIsInBounds_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsInBounds idx len)
// cond:
// result: (LessThanU (CMP idx len))
}
func rewriteValueARM_OpIsNonNil_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsNonNil ptr)
// cond:
// result: (NotEqual (CMPconst [0] ptr))
}
func rewriteValueARM_OpIsSliceInBounds_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsSliceInBounds idx len)
// cond:
// result: (LessEqualU (CMP idx len))
}
func rewriteValueARM_OpLeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16 x y)
// cond:
// result: (LessEqual (CMP (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValueARM_OpLeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16U x y)
// cond:
// result: (LessEqualU (CMP (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM_OpLeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32 x y)
// cond:
// result: (LessEqual (CMP x y))
}
func rewriteValueARM_OpLeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32F x y)
// cond:
// result: (GreaterEqual (CMPF y x))
}
func rewriteValueARM_OpLeq32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32U x y)
// cond:
// result: (LessEqualU (CMP x y))
}
func rewriteValueARM_OpLeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64F x y)
// cond:
// result: (GreaterEqual (CMPD y x))
}
func rewriteValueARM_OpLeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8 x y)
// cond:
// result: (LessEqual (CMP (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValueARM_OpLeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8U x y)
// cond:
// result: (LessEqualU (CMP (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM_OpLess16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16 x y)
// cond:
// result: (LessThan (CMP (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValueARM_OpLess16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16U x y)
// cond:
// result: (LessThanU (CMP (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM_OpLess32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32 x y)
// cond:
// result: (LessThan (CMP x y))
}
func rewriteValueARM_OpLess32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32F x y)
// cond:
// result: (GreaterThan (CMPF y x))
}
func rewriteValueARM_OpLess32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32U x y)
// cond:
// result: (LessThanU (CMP x y))
}
func rewriteValueARM_OpLess64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64F x y)
// cond:
// result: (GreaterThan (CMPD y x))
}
func rewriteValueARM_OpLess8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8 x y)
// cond:
// result: (LessThan (CMP (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValueARM_OpLess8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8U x y)
// cond:
// result: (LessThanU (CMP (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM_OpLsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x16 x y)
// cond:
// result: (CMOVWHSconst (SLL <x.Type> x (ZeroExt16to32 y)) (CMPconst [256] (ZeroExt16to32 y)) [0])
}
func rewriteValueARM_OpLsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x32 x y)
// cond:
// result: (CMOVWHSconst (SLL <x.Type> x y) (CMPconst [256] y) [0])
}
func rewriteValueARM_OpLsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x8 x y)
// cond:
// result: (SLL x (ZeroExt8to32 y))
}
func rewriteValueARM_OpLsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x16 x y)
// cond:
// result: (CMOVWHSconst (SLL <x.Type> x (ZeroExt16to32 y)) (CMPconst [256] (ZeroExt16to32 y)) [0])
}
func rewriteValueARM_OpLsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x32 x y)
// cond:
// result: (CMOVWHSconst (SLL <x.Type> x y) (CMPconst [256] y) [0])
}
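Go requires x << y to be 0 once y >= 32, while an ARM register shift uses only the low byte of the count and already yields 0 for counts 32..255, so the CMPconst [256] / CMOVWHSconst pair only has to cover y >= 256. A sketch of the combined semantics (hypothetical helper):

// What the lowered sequence computes: the conditional move selects 0 when the
// unsigned compare against 256 is HS; the barrel shifter handles 32..255.
func lsh32x32Semantics(x, y uint32) uint32 {
	if y >= 256 { // CMOVWHSconst ... (CMPconst [256] y) [0]
		return 0
	}
	if y >= 32 { // ARM SLL by register: counts 32..255 produce 0
		return 0
	}
	return x << y
}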
func rewriteValueARM_OpLsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x8 x y)
// cond:
// result: (SLL x (ZeroExt8to32 y))
}
func rewriteValueARM_OpLsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x16 x y)
// cond:
// result: (CMOVWHSconst (SLL <x.Type> x (ZeroExt16to32 y)) (CMPconst [256] (ZeroExt16to32 y)) [0])
}
func rewriteValueARM_OpLsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x32 x y)
// cond:
// result: (CMOVWHSconst (SLL <x.Type> x y) (CMPconst [256] y) [0])
}
func rewriteValueARM_OpLsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x8 x y)
// cond:
// result: (SLL x (ZeroExt8to32 y))
}
func rewriteValueARM_OpMod16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16 x y)
// cond:
// result: (Mod32 (SignExt16to32 x) (SignExt16to32 y))
}
func rewriteValueARM_OpMod16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16u x y)
// cond:
// result: (Mod32u (ZeroExt16to32 x) (ZeroExt16to32 y))
}
func rewriteValueARM_OpMod32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32 x y)
// cond:
// result: (SUB (XOR <typ.UInt32> (Select1 <typ.UInt32> (CALLudiv (SUB <typ.UInt32> (XOR <typ.UInt32> x (Signmask x)) (Signmask x)) (SUB <typ.UInt32> (XOR <typ.UInt32> y (Signmask y)) (Signmask y)))) (Signmask x)) (Signmask x))
}
func rewriteValueARM_OpMod32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32u x y)
// cond:
// result: (Select1 <typ.UInt32> (CALLudiv x y))
}
func rewriteValueARM_OpMod8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8 x y)
// cond:
// result: (Mod32 (SignExt8to32 x) (SignExt8to32 y))
}
func rewriteValueARM_OpMod8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8u x y)
// cond:
// result: (Mod32u (ZeroExt8to32 x) (ZeroExt8to32 y))
}
func rewriteValueARM_OpMove_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [0] _ _ mem)
// cond:
// result: mem
}
func rewriteValueARM_OpNeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq16 x y)
// cond:
// result: (NotEqual (CMP (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM_OpNeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32 x y)
// cond:
// result: (NotEqual (CMP x y))
}
func rewriteValueARM_OpNeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32F x y)
// cond:
// result: (NotEqual (CMPF x y))
}
func rewriteValueARM_OpNeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq64F x y)
// cond:
// result: (NotEqual (CMPD x y))
}
func rewriteValueARM_OpNeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq8 x y)
// cond:
// result: (NotEqual (CMP (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM_OpNeqPtr_0(v *Value) bool {
b := v.Block
- _ = b
// match: (NeqPtr x y)
// cond:
// result: (NotEqual (CMP x y))
}
func rewriteValueARM_OpRsh16Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux16 x y)
// cond:
// result: (CMOVWHSconst (SRL <x.Type> (ZeroExt16to32 x) (ZeroExt16to32 y)) (CMPconst [256] (ZeroExt16to32 y)) [0])
}
func rewriteValueARM_OpRsh16Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux32 x y)
// cond:
// result: (CMOVWHSconst (SRL <x.Type> (ZeroExt16to32 x) y) (CMPconst [256] y) [0])
}
func rewriteValueARM_OpRsh16Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux64 x (Const64 [c]))
// cond: uint64(c) < 16
// result: (SRLconst (SLLconst <typ.UInt32> x [16]) [c+16])
}
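A 16-bit value sits in a 32-bit register with unspecified upper bits, so for a constant count the rule avoids a separate zero-extension: shift left 16 to discard the junk, then shift right by c+16. The identity, as a hypothetical helper:

// For 0 <= c < 16: uint16(x) >> c == (x << 16) >> (c + 16), where x holds the
// 16-bit value in its low half and arbitrary bits above.
func rsh16UxConst(x uint32, c uint) uint32 {
	return (x << 16) >> (c + 16) // SLLconst [16], then SRLconst [c+16]
}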
func rewriteValueARM_OpRsh16Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux8 x y)
// cond:
// result: (SRL (ZeroExt16to32 x) (ZeroExt8to32 y))
}
func rewriteValueARM_OpRsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x16 x y)
// cond:
// result: (SRAcond (SignExt16to32 x) (ZeroExt16to32 y) (CMPconst [256] (ZeroExt16to32 y)))
}
func rewriteValueARM_OpRsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x32 x y)
// cond:
// result: (SRAcond (SignExt16to32 x) y (CMPconst [256] y))
}
func rewriteValueARM_OpRsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x64 x (Const64 [c]))
// cond: uint64(c) < 16
// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [c+16])
}
func rewriteValueARM_OpRsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x8 x y)
// cond:
// result: (SRA (SignExt16to32 x) (ZeroExt8to32 y))
}
func rewriteValueARM_OpRsh32Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux16 x y)
// cond:
// result: (CMOVWHSconst (SRL <x.Type> x (ZeroExt16to32 y)) (CMPconst [256] (ZeroExt16to32 y)) [0])
}
func rewriteValueARM_OpRsh32Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32Ux32 x y)
// cond:
// result: (CMOVWHSconst (SRL <x.Type> x y) (CMPconst [256] y) [0])
}
func rewriteValueARM_OpRsh32Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux8 x y)
// cond:
// result: (SRL x (ZeroExt8to32 y))
}
func rewriteValueARM_OpRsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x16 x y)
// cond:
// result: (SRAcond x (ZeroExt16to32 y) (CMPconst [256] (ZeroExt16to32 y)))
}
func rewriteValueARM_OpRsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x32 x y)
// cond:
// result: (SRAcond x y (CMPconst [256] y))
}
func rewriteValueARM_OpRsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x8 x y)
// cond:
// result: (SRA x (ZeroExt8to32 y))
}
func rewriteValueARM_OpRsh8Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux16 x y)
// cond:
// result: (CMOVWHSconst (SRL <x.Type> (ZeroExt8to32 x) (ZeroExt16to32 y)) (CMPconst [256] (ZeroExt16to32 y)) [0])
}
func rewriteValueARM_OpRsh8Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux32 x y)
// cond:
// result: (CMOVWHSconst (SRL <x.Type> (ZeroExt8to32 x) y) (CMPconst [256] y) [0])
}
func rewriteValueARM_OpRsh8Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux64 x (Const64 [c]))
// cond: uint64(c) < 8
// result: (SRLconst (SLLconst <typ.UInt32> x [24]) [c+24])
}
func rewriteValueARM_OpRsh8Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux8 x y)
// cond:
// result: (SRL (ZeroExt8to32 x) (ZeroExt8to32 y))
}
func rewriteValueARM_OpRsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x16 x y)
// cond:
// result: (SRAcond (SignExt8to32 x) (ZeroExt16to32 y) (CMPconst [256] (ZeroExt16to32 y)))
}
func rewriteValueARM_OpRsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x32 x y)
// cond:
// result: (SRAcond (SignExt8to32 x) y (CMPconst [256] y))
}
func rewriteValueARM_OpRsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x64 x (Const64 [c]))
// cond: uint64(c) < 8
// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [c+24])
}
func rewriteValueARM_OpRsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x8 x y)
// cond:
// result: (SRA (SignExt8to32 x) (ZeroExt8to32 y))
}
func rewriteValueARM_OpSlicemask_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Slicemask <t> x)
// cond:
// result: (SRAconst (RSBconst <t> [0] x) [31])
}
func rewriteValueARM_OpZero_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [0] _ mem)
// cond:
// result: mem
}
func rewriteValueARM_OpZeromask_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Zeromask x)
// cond:
// result: (SRAconst (RSBshiftRL <typ.Int32> x x [1]) [31])
}
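Zeromask needs 0 for x == 0 and all ones otherwise, without a compare. RSBshiftRL x x [1] is (x >> 1) - x, which is negative for every nonzero x (a logical right shift strictly shrinks a nonzero value), so smearing its sign bit with SRAconst [31] gives the mask. Unlike Slicemask just above, this does not rely on x being non-negative. Sketch (hypothetical helper):

// 0 if x == 0, -1 otherwise: the subtraction borrows exactly when x != 0.
func zeromask(x uint32) int32 {
	return int32((x>>1)-x) >> 31 // RSBshiftRL x x [1], then SRAconst [31]
}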
func rewriteValueARM64_OpARM64ADD_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ADD x (NEG y))
// cond:
// result: (SUB x y)
}
func rewriteValueARM64_OpARM64ADD_20(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ADD (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
// cond: cc.(Op) == OpARM64LessThanU
// result: (ROR x y)
}
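This is the variable-count rotate recognition: the generic lowering of a 64-bit rotate written with masked shifts produces this SRL / SLL / CSEL0 / CMPconst [64] shape (the CSEL0 guards the case where the left-shift amount would be 64), and the rule folds it back into a single ROR. A source-level spelling that is expected to lower into this shape, hedged as illustrative:

// The two halves occupy disjoint bits, so + and | are interchangeable here;
// this ADD rule matches the + spelling. x<<64 is defined as 0 in Go, which is
// what the CSEL0/CMPconst [64] pair implements.
func rotr64(x uint64, y uint) uint64 {
	return x>>(y&63) + x<<(64-y&63)
}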
func rewriteValueARM64_OpARM64ADDshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ADDshiftLL (MOVDconst [c]) x [d])
// cond:
// result: (ADDconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64ADDshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDshiftRA (MOVDconst [c]) x [d])
// cond:
// result: (ADDconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64ADDshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDshiftRL (MOVDconst [c]) x [d])
// cond:
// result: (ADDconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64ANDshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ANDshiftLL (MOVDconst [c]) x [d])
// cond:
// result: (ANDconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64ANDshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ANDshiftRA (MOVDconst [c]) x [d])
// cond:
// result: (ANDconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64ANDshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ANDshiftRL (MOVDconst [c]) x [d])
// cond:
// result: (ANDconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64CMNshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMNshiftLL (MOVDconst [c]) x [d])
// cond:
// result: (CMNconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64CMNshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMNshiftRA (MOVDconst [c]) x [d])
// cond:
// result: (CMNconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64CMNshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMNshiftRL (MOVDconst [c]) x [d])
// cond:
// result: (CMNconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64CMP_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMP x (MOVDconst [c]))
// cond:
// result: (CMPconst [c] x)
}
func rewriteValueARM64_OpARM64CMPW_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPW x (MOVDconst [c]))
// cond:
// result: (CMPWconst [int64(int32(c))] x)
}
func rewriteValueARM64_OpARM64CMPshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPshiftLL (MOVDconst [c]) x [d])
// cond:
// result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
}
func rewriteValueARM64_OpARM64CMPshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPshiftRA (MOVDconst [c]) x [d])
// cond:
// result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
}
func rewriteValueARM64_OpARM64CMPshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPshiftRL (MOVDconst [c]) x [d])
// cond:
// result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
}
func rewriteValueARM64_OpARM64FCMPD_0(v *Value) bool {
b := v.Block
- _ = b
// match: (FCMPD x (FMOVDconst [0]))
// cond:
// result: (FCMPD0 x)
}
func rewriteValueARM64_OpARM64FCMPS_0(v *Value) bool {
b := v.Block
- _ = b
// match: (FCMPS x (FMOVSconst [0]))
// cond:
// result: (FCMPS0 x)
}
func rewriteValueARM64_OpARM64FMOVDfpgp_0(v *Value) bool {
b := v.Block
- _ = b
// match: (FMOVDfpgp <t> (Arg [off] {sym}))
// cond:
// result: @b.Func.Entry (Arg <t> [off] {sym})
}
func rewriteValueARM64_OpARM64FMOVDgpfp_0(v *Value) bool {
b := v.Block
- _ = b
// match: (FMOVDgpfp <t> (Arg [off] {sym}))
// cond:
// result: @b.Func.Entry (Arg <t> [off] {sym})
}
func rewriteValueARM64_OpARM64FMOVDload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (FMOVDload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
// cond:
// result: (FMOVDgpfp val)
}
func rewriteValueARM64_OpARM64FMOVDstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (FMOVDstore [off] {sym} ptr (FMOVDgpfp val) mem)
// cond:
// result: (MOVDstore [off] {sym} ptr val mem)
}
func rewriteValueARM64_OpARM64FMOVSload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (FMOVSload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
// cond:
// result: (FMOVSgpfp val)
}
func rewriteValueARM64_OpARM64FMOVSstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (FMOVSstore [off] {sym} ptr (FMOVSgpfp val) mem)
// cond:
// result: (MOVWstore [off] {sym} ptr val mem)
}
func rewriteValueARM64_OpARM64MADD_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MADD a x (MOVDconst [-1]))
// cond:
// result: (SUB a x)
}
func rewriteValueARM64_OpARM64MADD_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MADD a (MOVDconst [-1]) x)
// cond:
// result: (SUB a x)
}
func rewriteValueARM64_OpARM64MADD_20(v *Value) bool {
b := v.Block
- _ = b
// match: (MADD (MOVDconst [c]) x y)
// cond:
// result: (ADDconst [c] (MUL <x.Type> x y))
}
func rewriteValueARM64_OpARM64MADDW_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MADDW a x (MOVDconst [c]))
// cond: int32(c)==-1
// result: (SUB a x)
}
func rewriteValueARM64_OpARM64MADDW_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MADDW a (MOVDconst [c]) x)
// cond: int32(c)==-1
// result: (SUB a x)
}
func rewriteValueARM64_OpARM64MADDW_20(v *Value) bool {
b := v.Block
- _ = b
// match: (MADDW (MOVDconst [c]) x y)
// cond:
// result: (ADDconst [c] (MULW <x.Type> x y))
}
func rewriteValueARM64_OpARM64MNEG_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MNEG x (MOVDconst [-1]))
// cond:
// result: x
}
func rewriteValueARM64_OpARM64MNEG_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MNEG x (MOVDconst [c]))
// cond: isPowerOfTwo(c+1) && c >= 7
// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
}
func rewriteValueARM64_OpARM64MNEGW_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MNEGW x (MOVDconst [c]))
// cond: int32(c)==-1
// result: x
}
func rewriteValueARM64_OpARM64MNEGW_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MNEGW x (MOVDconst [c]))
// cond: isPowerOfTwo(c+1) && int32(c) >= 7
// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
}
func rewriteValueARM64_OpARM64MOVBUload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVBUload [off1+off2] {sym} ptr mem)
}
func rewriteValueARM64_OpARM64MOVBload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVBload [off1+off2] {sym} ptr mem)
}
func rewriteValueARM64_OpARM64MOVBstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVBstore [off1+off2] {sym} ptr val mem)
}
func rewriteValueARM64_OpARM64MOVBstore_20(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBstore [i] {s} ptr0 (UBFX [bfc] w) x:(MOVBstore [i-1] {s} ptr1 w0:(UBFX [bfc2] w) mem))
// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && getARM64BFwidth(bfc) == 32 - getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32 - getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc) - 8 && clobber(x)
// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
}
func rewriteValueARM64_OpARM64MOVBstore_30(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) mem))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)
// result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
}
func rewriteValueARM64_OpARM64MOVBstore_40(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem))
// cond: x.Uses == 1 && clobber(x)
// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
}
func rewriteValueARM64_OpARM64MOVBstoreidx_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBstoreidx ptr (ADDconst [3] idx) w x0:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr idx (UBFX [armBFAuxInt(24, 8)] w) mem))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)
// result: (MOVWstoreidx ptr idx (REVW <w.Type> w) mem)
}
func rewriteValueARM64_OpARM64MOVBstorezero_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
}
func rewriteValueARM64_OpARM64MOVDload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVDload [off] {sym} ptr (FMOVDstore [off] {sym} ptr val _))
// cond:
// result: (FMOVDfpgp val)
}
func rewriteValueARM64_OpARM64MOVDstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVDstore [off] {sym} ptr (FMOVDfpgp val) mem)
// cond:
// result: (FMOVDstore [off] {sym} ptr val mem)
}
func rewriteValueARM64_OpARM64MOVDstorezero_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVDstorezero [off1+off2] {sym} ptr mem)
}
func rewriteValueARM64_OpARM64MOVHUload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVHUload [off1+off2] {sym} ptr mem)
}
func rewriteValueARM64_OpARM64MOVHload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVHload [off1+off2] {sym} ptr mem)
}
func rewriteValueARM64_OpARM64MOVHstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVHstore [off1+off2] {sym} ptr val mem)
}
func rewriteValueARM64_OpARM64MOVHstore_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [16] w) x:(MOVHstoreidx ptr1 idx1 w mem))
// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
// result: (MOVWstoreidx ptr1 idx1 w mem)
}
func rewriteValueARM64_OpARM64MOVHstore_20(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [j] w) x:(MOVHstoreidx2 ptr1 idx1 w0:(SRLconst [j-16] w) mem))
// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
// result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w0 mem)
}
func rewriteValueARM64_OpARM64MOVHstorezero_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVHstorezero [off1+off2] {sym} ptr mem)
}
func rewriteValueARM64_OpARM64MOVQstorezero_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVQstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVQstorezero [off1+off2] {sym} ptr mem)
}
func rewriteValueARM64_OpARM64MOVWUload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVWUload [off] {sym} ptr (FMOVSstore [off] {sym} ptr val _))
// cond:
// result: (FMOVSfpgp val)
}
func rewriteValueARM64_OpARM64MOVWload_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVWload [off1+off2] {sym} ptr mem)
}
func rewriteValueARM64_OpARM64MOVWstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVWstore [off] {sym} ptr (FMOVSfpgp val) mem)
// cond:
// result: (FMOVSstore [off] {sym} ptr val mem)
}
func rewriteValueARM64_OpARM64MOVWstore_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWstore [4] {s} (ADDshiftLL [2] ptr0 idx0) (SRLconst [32] w) x:(MOVWstoreidx4 ptr1 idx1 w mem))
// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
// result: (MOVDstoreidx ptr1 (SLLconst <idx1.Type> [2] idx1) w mem)
}
func rewriteValueARM64_OpARM64MOVWstorezero_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
}
func rewriteValueARM64_OpARM64MSUB_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MSUB a x (MOVDconst [-1]))
// cond:
// result: (ADD a x)
}
func rewriteValueARM64_OpARM64MSUB_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MSUB a (MOVDconst [-1]) x)
// cond:
// result: (ADD a x)
}
func rewriteValueARM64_OpARM64MSUB_20(v *Value) bool {
b := v.Block
- _ = b
// match: (MSUB (MOVDconst [c]) x y)
// cond:
// result: (ADDconst [c] (MNEG <x.Type> x y))
}
func rewriteValueARM64_OpARM64MSUBW_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MSUBW a x (MOVDconst [c]))
// cond: int32(c)==-1
// result: (ADD a x)
}
func rewriteValueARM64_OpARM64MSUBW_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MSUBW a (MOVDconst [c]) x)
// cond: int32(c)==-1
// result: (ADD a x)
}
func rewriteValueARM64_OpARM64MSUBW_20(v *Value) bool {
b := v.Block
- _ = b
// match: (MSUBW (MOVDconst [c]) x y)
// cond:
// result: (ADDconst [c] (MNEGW <x.Type> x y))
}
func rewriteValueARM64_OpARM64MUL_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MUL x (MOVDconst [c]))
// cond: isPowerOfTwo(c-1) && c >= 3
// result: (ADDshiftLL x x [log2(c-1)])
}
func rewriteValueARM64_OpARM64MUL_20(v *Value) bool {
b := v.Block
- _ = b
// match: (MUL x (MOVDconst [c]))
// cond: c%9 == 0 && isPowerOfTwo(c/9)
// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
}
func rewriteValueARM64_OpARM64MULW_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MULW x (MOVDconst [c]))
// cond: isPowerOfTwo(c-1) && int32(c) >= 3
// result: (ADDshiftLL x x [log2(c-1)])
}
func rewriteValueARM64_OpARM64MULW_20(v *Value) bool {
b := v.Block
- _ = b
// match: (MULW x (MOVDconst [c]))
// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
}
func rewriteValueARM64_OpARM64OR_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR x1:(SRAconst [c] y) x0)
// cond: clobberIfDead(x1)
// result: (ORshiftRA x0 y [c])
}
func rewriteValueARM64_OpARM64OR_20(v *Value) bool {
b := v.Block
- _ = b
// match: (OR (ANDconst [ac] y) (UBFIZ [bfc] x))
// cond: ac == ^((1<<uint(getARM64BFwidth(bfc))-1) << uint(getARM64BFlsb(bfc)))
// result: (BFI [bfc] y x)
}
func rewriteValueARM64_OpARM64OR_30(v *Value) bool {
b := v.Block
- _ = b
// match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem))))
// cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)
// result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)
}
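The long OR matches in this file are the load-combining rules: a chain of dependent byte loads assembled with shifts and ORs collapses into one wide load, with REVW/REV16W inserted by the nearby variants when the bytes are assembled in the opposite order. A Go-level idiom this rule is designed to catch, hedged as illustrative (p is assumed to be a []byte with at least 8 readable bytes):

// Little-endian byte assembly that rules of this shape turn into a single
// MOVDload; the REVW/REV16W variants nearby handle the reversed byte order.
func load64le(p []byte) uint64 {
	return uint64(p[0]) | uint64(p[1])<<8 | uint64(p[2])<<16 | uint64(p[3])<<24 |
		uint64(p[4])<<32 | uint64(p[5])<<40 | uint64(p[6])<<48 | uint64(p[7])<<56
}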
func rewriteValueARM64_OpARM64OR_40(v *Value) bool {
b := v.Block
- _ = b
// match: (OR <t> y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))))
// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)
// result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr idx mem))
}
func rewriteValueARM64_OpARM64ORshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORshiftLL (MOVDconst [c]) x [d])
// cond:
// result: (ORconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64ORshiftLL_10(v *Value) bool {
b := v.Block
- _ = b
// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem)) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)))
// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
// result: @mergePoint(b,x0,x1) (MOVHUloadidx <t> ptr0 idx0 mem)
}
func rewriteValueARM64_OpARM64ORshiftLL_20(v *Value) bool {
b := v.Block
- _ = b
// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i1] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i0] {s} p mem)))
// cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
// result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i0] {s} p mem))
}
func rewriteValueARM64_OpARM64ORshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ORshiftRA (MOVDconst [c]) x [d])
// cond:
// result: (ORconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64ORshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ORshiftRL (MOVDconst [c]) x [d])
// cond:
// result: (ORconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64STP_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (STP [off1] {sym} (ADDconst [off2] ptr) val1 val2 mem)
// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
// result: (STP [off1+off2] {sym} ptr val1 val2 mem)
}
func rewriteValueARM64_OpARM64SUB_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUB x (MOVDconst [c]))
// cond:
// result: (SUBconst [c] x)
}
func rewriteValueARM64_OpARM64TSTshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TSTshiftLL (MOVDconst [c]) x [d])
// cond:
// result: (TSTconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64TSTshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TSTshiftRA (MOVDconst [c]) x [d])
// cond:
// result: (TSTconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64TSTshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (TSTshiftRL (MOVDconst [c]) x [d])
// cond:
// result: (TSTconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64UMOD_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (UMOD <typ.UInt64> x y)
// cond:
// result: (MSUB <typ.UInt64> x y (UDIV <typ.UInt64> x y))
}
func rewriteValueARM64_OpARM64UMODW_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (UMODW <typ.UInt32> x y)
// cond:
// result: (MSUBW <typ.UInt32> x y (UDIVW <typ.UInt32> x y))
}
func rewriteValueARM64_OpARM64XOR_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (XOR x1:(SRAconst [c] y) x0)
// cond: clobberIfDead(x1)
// result: (XORshiftRA x0 y [c])
}
func rewriteValueARM64_OpARM64XORshiftLL_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (XORshiftLL (MOVDconst [c]) x [d])
// cond:
// result: (XORconst [c] (SLLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64XORshiftRA_0(v *Value) bool {
b := v.Block
- _ = b
// match: (XORshiftRA (MOVDconst [c]) x [d])
// cond:
// result: (XORconst [c] (SRAconst <x.Type> x [d]))
}
func rewriteValueARM64_OpARM64XORshiftRL_0(v *Value) bool {
b := v.Block
- _ = b
// match: (XORshiftRL (MOVDconst [c]) x [d])
// cond:
// result: (XORconst [c] (SRLconst <x.Type> x [d]))
}
func rewriteValueARM64_OpAtomicAnd8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (AtomicAnd8 ptr val mem)
// cond:
// result: (Select1 (LoweredAtomicAnd8 ptr val mem))
}
func rewriteValueARM64_OpAtomicOr8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (AtomicOr8 ptr val mem)
// cond:
// result: (Select1 (LoweredAtomicOr8 ptr val mem))
}
func rewriteValueARM64_OpAvg64u_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Avg64u <t> x y)
// cond:
// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
}
func rewriteValueARM64_OpBitLen32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitLen32 x)
// cond:
// result: (SUB (MOVDconst [32]) (CLZW <typ.Int> x))
}
func rewriteValueARM64_OpBitLen64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitLen64 x)
// cond:
// result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x))
}
func rewriteValueARM64_OpBitRev16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitRev16 x)
// cond:
// result: (SRLconst [48] (RBIT <typ.UInt64> x))
}
func rewriteValueARM64_OpBitRev8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitRev8 x)
// cond:
// result: (SRLconst [56] (RBIT <typ.UInt64> x))
}
func rewriteValueARM64_OpCondSelect_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CondSelect x y bool)
// cond: flagArg(bool) != nil
// result: (CSEL {bool.Op} x y flagArg(bool))
}
func rewriteValueARM64_OpCtz16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz16 <t> x)
// cond:
// result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x10000] x)))
}
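With RBIT available, trailing zeros are just leading zeros of the bit-reversed value; for the 16-bit case the rule ORs in bit 16 first so that a zero input gives 16 rather than 32. The identity, using math/bits in place of RBITW and CLZW (hypothetical helper, assumes import "math/bits"):

// OR-ing in 0x10000 plants a sentinel bit just above the 16-bit value, capping
// the count at 16, which is the defined result of Ctz16(0).
func ctz16ARM64(x uint16) int {
	return bits.LeadingZeros32(bits.Reverse32(uint32(x) | 0x10000))
}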
func rewriteValueARM64_OpCtz32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Ctz32 <t> x)
// cond:
// result: (CLZW (RBITW <t> x))
}
func rewriteValueARM64_OpCtz64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Ctz64 <t> x)
// cond:
// result: (CLZ (RBIT <t> x))
}
func rewriteValueARM64_OpCtz8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz8 <t> x)
// cond:
// result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x100] x)))
}
func rewriteValueARM64_OpDiv16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16 x y)
// cond:
// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
}
func rewriteValueARM64_OpDiv16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16u x y)
// cond:
// result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y))
}
func rewriteValueARM64_OpDiv8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8 x y)
// cond:
// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
}
func rewriteValueARM64_OpDiv8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8u x y)
// cond:
// result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y))
}
func rewriteValueARM64_OpEq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq16 x y)
// cond:
// result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM64_OpEq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32 x y)
// cond:
// result: (Equal (CMPW x y))
}
func rewriteValueARM64_OpEq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32F x y)
// cond:
// result: (Equal (FCMPS x y))
}
func rewriteValueARM64_OpEq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq64 x y)
// cond:
// result: (Equal (CMP x y))
}
func rewriteValueARM64_OpEq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq64F x y)
// cond:
// result: (Equal (FCMPD x y))
}
func rewriteValueARM64_OpEq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq8 x y)
// cond:
// result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM64_OpEqB_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqB x y)
// cond:
// result: (XOR (MOVDconst [1]) (XOR <typ.Bool> x y))
}
func rewriteValueARM64_OpEqPtr_0(v *Value) bool {
b := v.Block
- _ = b
// match: (EqPtr x y)
// cond:
// result: (Equal (CMP x y))
}
func rewriteValueARM64_OpGeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16 x y)
// cond:
// result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValueARM64_OpGeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16U x y)
// cond:
// result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM64_OpGeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32 x y)
// cond:
// result: (GreaterEqual (CMPW x y))
}
func rewriteValueARM64_OpGeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32F x y)
// cond:
// result: (GreaterEqualF (FCMPS x y))
}
func rewriteValueARM64_OpGeq32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32U x y)
// cond:
// result: (GreaterEqualU (CMPW x y))
}
func rewriteValueARM64_OpGeq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64 x y)
// cond:
// result: (GreaterEqual (CMP x y))
}
func rewriteValueARM64_OpGeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64F x y)
// cond:
// result: (GreaterEqualF (FCMPD x y))
}
func rewriteValueARM64_OpGeq64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64U x y)
// cond:
// result: (GreaterEqualU (CMP x y))
}
func rewriteValueARM64_OpGeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8 x y)
// cond:
// result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValueARM64_OpGeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8U x y)
// cond:
// result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM64_OpGreater16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16 x y)
// cond:
// result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValueARM64_OpGreater16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16U x y)
// cond:
// result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM64_OpGreater32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32 x y)
// cond:
// result: (GreaterThan (CMPW x y))
}
func rewriteValueARM64_OpGreater32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32F x y)
// cond:
// result: (GreaterThanF (FCMPS x y))
}
func rewriteValueARM64_OpGreater32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32U x y)
// cond:
// result: (GreaterThanU (CMPW x y))
}
func rewriteValueARM64_OpGreater64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64 x y)
// cond:
// result: (GreaterThan (CMP x y))
}
func rewriteValueARM64_OpGreater64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64F x y)
// cond:
// result: (GreaterThanF (FCMPD x y))
}
func rewriteValueARM64_OpGreater64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64U x y)
// cond:
// result: (GreaterThanU (CMP x y))
}
func rewriteValueARM64_OpGreater8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8 x y)
// cond:
// result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValueARM64_OpGreater8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8U x y)
// cond:
// result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM64_OpHmul32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Hmul32 x y)
// cond:
// result: (SRAconst (MULL <typ.Int64> x y) [32])
}
func rewriteValueARM64_OpHmul32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Hmul32u x y)
// cond:
// result: (SRAconst (UMULL <typ.UInt64> x y) [32])
}
func rewriteValueARM64_OpIsInBounds_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsInBounds idx len)
// cond:
// result: (LessThanU (CMP idx len))
}
func rewriteValueARM64_OpIsNonNil_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsNonNil ptr)
// cond:
// result: (NotEqual (CMPconst [0] ptr))
}
func rewriteValueARM64_OpIsSliceInBounds_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsSliceInBounds idx len)
// cond:
// result: (LessEqualU (CMP idx len))
}
func rewriteValueARM64_OpLeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16 x y)
// cond:
// result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValueARM64_OpLeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16U x y)
// cond:
// result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM64_OpLeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32 x y)
// cond:
// result: (LessEqual (CMPW x y))
}
func rewriteValueARM64_OpLeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32F x y)
// cond:
// result: (LessEqualF (FCMPS x y))
}
func rewriteValueARM64_OpLeq32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32U x y)
// cond:
// result: (LessEqualU (CMPW x y))
}
func rewriteValueARM64_OpLeq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64 x y)
// cond:
// result: (LessEqual (CMP x y))
}
func rewriteValueARM64_OpLeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64F x y)
// cond:
// result: (LessEqualF (FCMPD x y))
}
func rewriteValueARM64_OpLeq64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64U x y)
// cond:
// result: (LessEqualU (CMP x y))
}
func rewriteValueARM64_OpLeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8 x y)
// cond:
// result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValueARM64_OpLeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8U x y)
// cond:
// result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM64_OpLess16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16 x y)
// cond:
// result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValueARM64_OpLess16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16U x y)
// cond:
// result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM64_OpLess32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32 x y)
// cond:
// result: (LessThan (CMPW x y))
}
func rewriteValueARM64_OpLess32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32F x y)
// cond:
// result: (LessThanF (FCMPS x y))
}
func rewriteValueARM64_OpLess32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32U x y)
// cond:
// result: (LessThanU (CMPW x y))
}
func rewriteValueARM64_OpLess64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64 x y)
// cond:
// result: (LessThan (CMP x y))
}
func rewriteValueARM64_OpLess64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64F x y)
// cond:
// result: (LessThanF (FCMPD x y))
}
func rewriteValueARM64_OpLess64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64U x y)
// cond:
// result: (LessThanU (CMP x y))
}
func rewriteValueARM64_OpLess8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8 x y)
// cond:
// result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValueARM64_OpLess8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8U x y)
// cond:
// result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM64_OpLsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x16 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
}
func rewriteValueARM64_OpLsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x32 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
}
func rewriteValueARM64_OpLsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x64 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
}
func rewriteValueARM64_OpLsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x8 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
}
func rewriteValueARM64_OpLsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x16 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
}
func rewriteValueARM64_OpLsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x32 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
}
func rewriteValueARM64_OpLsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x64 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
}
func rewriteValueARM64_OpLsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x8 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
}
func rewriteValueARM64_OpLsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x16 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
}
func rewriteValueARM64_OpLsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x32 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
}
func rewriteValueARM64_OpLsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh64x64 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
}
func rewriteValueARM64_OpLsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x8 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
}
func rewriteValueARM64_OpLsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x16 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
}
func rewriteValueARM64_OpLsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x32 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
}
func rewriteValueARM64_OpLsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x64 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
}
func rewriteValueARM64_OpLsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x8 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
}
func rewriteValueARM64_OpMod16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16 x y)
// cond:
// result: (MODW (SignExt16to32 x) (SignExt16to32 y))
}
func rewriteValueARM64_OpMod16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16u x y)
// cond:
// result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y))
}
func rewriteValueARM64_OpMod8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8 x y)
// cond:
// result: (MODW (SignExt8to32 x) (SignExt8to32 y))
}
func rewriteValueARM64_OpMod8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8u x y)
// cond:
// result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y))
}
func rewriteValueARM64_OpMove_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [0] _ _ mem)
// cond:
// result: mem
}
func rewriteValueARM64_OpMove_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [16] dst src mem)
// cond:
// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
}
func rewriteValueARM64_OpNeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq16 x y)
// cond:
// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueARM64_OpNeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32 x y)
// cond:
// result: (NotEqual (CMPW x y))
}
func rewriteValueARM64_OpNeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32F x y)
// cond:
// result: (NotEqual (FCMPS x y))
}
func rewriteValueARM64_OpNeq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq64 x y)
// cond:
// result: (NotEqual (CMP x y))
}
func rewriteValueARM64_OpNeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq64F x y)
// cond:
// result: (NotEqual (FCMPD x y))
}
func rewriteValueARM64_OpNeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq8 x y)
// cond:
// result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueARM64_OpNeqPtr_0(v *Value) bool {
b := v.Block
- _ = b
// match: (NeqPtr x y)
// cond:
// result: (NotEqual (CMP x y))
}
func rewriteValueARM64_OpNot_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Not x)
// cond:
// result: (XOR (MOVDconst [1]) x)
}
func rewriteValueARM64_OpPopCount16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount16 <t> x)
// cond:
// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt16to64 x)))))
}
func rewriteValueARM64_OpPopCount32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount32 <t> x)
// cond:
// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt32to64 x)))))
}
func rewriteValueARM64_OpPopCount64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount64 <t> x)
// cond:
// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> x))))
}
func rewriteValueARM64_OpRotateLeft32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RotateLeft32 x y)
// cond:
// result: (RORW x (NEG <y.Type> y))
}
func rewriteValueARM64_OpRotateLeft64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (RotateLeft64 x y)
// cond:
// result: (ROR x (NEG <y.Type> y))
}
func rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux16 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
}
func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux32 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
}
func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux64 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
}
func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux8 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
}
func rewriteValueARM64_OpRsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x16 x y)
// cond:
// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
}
func rewriteValueARM64_OpRsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x32 x y)
// cond:
// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
}
func rewriteValueARM64_OpRsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x64 x y)
// cond:
// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
}
func rewriteValueARM64_OpRsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x8 x y)
// cond:
// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
}
func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux16 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
}
func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux32 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
}
func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux64 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
}
func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux8 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
}
func rewriteValueARM64_OpRsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x16 x y)
// cond:
// result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
}
func rewriteValueARM64_OpRsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x32 x y)
// cond:
// result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
}
func rewriteValueARM64_OpRsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x64 x y)
// cond:
// result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
}
func rewriteValueARM64_OpRsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x8 x y)
// cond:
// result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
}
func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux16 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
}
func rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux32 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
}
func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64Ux64 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y))
}
func rewriteValueARM64_OpRsh64Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux8 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
}
func rewriteValueARM64_OpRsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x16 x y)
// cond:
// result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
}
func rewriteValueARM64_OpRsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x32 x y)
// cond:
// result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
}
func rewriteValueARM64_OpRsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64x64 x y)
// cond:
// result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
}
func rewriteValueARM64_OpRsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x8 x y)
// cond:
// result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
}
func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux16 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
}
func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux32 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
}
func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux64 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
}
func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux8 <t> x y)
// cond:
// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
}
func rewriteValueARM64_OpRsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x16 x y)
// cond:
// result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
}
func rewriteValueARM64_OpRsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x32 x y)
// cond:
// result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
}
func rewriteValueARM64_OpRsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x64 x y)
// cond:
// result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
}
func rewriteValueARM64_OpRsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x8 x y)
// cond:
// result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
}
func rewriteValueARM64_OpSlicemask_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Slicemask <t> x)
// cond:
// result: (SRAconst (NEG <t> x) [63])
}
func rewriteValueARM64_OpZero_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [0] _ mem)
// cond:
// result: mem
}
func rewriteValueARM64_OpZero_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [10] ptr mem)
// cond:
// result: (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
}
func rewriteValueARM64_OpZero_20(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (Zero [s] ptr mem)
// cond: s%16 != 0 && s%16 <= 8 && s > 16
// result: (Zero [8] (OffPtr <ptr.Type> ptr [s-8]) (Zero [s-s%16] ptr mem))
}
func rewriteValueMIPS_OpAdd32withcarry_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Add32withcarry <t> x y c)
// cond:
// result: (ADD c (ADD <t> x y))
}
func rewriteValueMIPS_OpAtomicAnd8_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (AtomicAnd8 ptr val mem)
// cond: !config.BigEndian
// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))))) mem)
}
func rewriteValueMIPS_OpAtomicOr8_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (AtomicOr8 ptr val mem)
// cond: !config.BigEndian
// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) mem)
}
func rewriteValueMIPS_OpAvg32u_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Avg32u <t> x y)
// cond:
// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
}
func rewriteValueMIPS_OpBitLen32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitLen32 <t> x)
// cond:
// result: (SUB (MOVWconst [32]) (CLZ <t> x))
}
func rewriteValueMIPS_OpCtz32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz32 <t> x)
// cond:
// result: (SUB (MOVWconst [32]) (CLZ <t> (SUBconst <t> [1] (AND <t> x (NEG <t> x)))))
}
func rewriteValueMIPS_OpDiv16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16 x y)
// cond:
// result: (Select1 (DIV (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValueMIPS_OpDiv16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16u x y)
// cond:
// result: (Select1 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueMIPS_OpDiv32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32 x y)
// cond:
// result: (Select1 (DIV x y))
}
func rewriteValueMIPS_OpDiv32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32u x y)
// cond:
// result: (Select1 (DIVU x y))
}
func rewriteValueMIPS_OpDiv8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8 x y)
// cond:
// result: (Select1 (DIV (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValueMIPS_OpDiv8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8u x y)
// cond:
// result: (Select1 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueMIPS_OpEq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq16 x y)
// cond:
// result: (SGTUconst [1] (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueMIPS_OpEq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq32 x y)
// cond:
// result: (SGTUconst [1] (XOR x y))
}
func rewriteValueMIPS_OpEq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32F x y)
// cond:
// result: (FPFlagTrue (CMPEQF x y))
}
func rewriteValueMIPS_OpEq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq64F x y)
// cond:
// result: (FPFlagTrue (CMPEQD x y))
}
func rewriteValueMIPS_OpEq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq8 x y)
// cond:
// result: (SGTUconst [1] (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueMIPS_OpEqB_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqB x y)
// cond:
// result: (XORconst [1] (XOR <typ.Bool> x y))
}
func rewriteValueMIPS_OpEqPtr_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqPtr x y)
// cond:
// result: (SGTUconst [1] (XOR x y))
}
func rewriteValueMIPS_OpGeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16 x y)
// cond:
// result: (XORconst [1] (SGT (SignExt16to32 y) (SignExt16to32 x)))
}
func rewriteValueMIPS_OpGeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16U x y)
// cond:
// result: (XORconst [1] (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x)))
}
func rewriteValueMIPS_OpGeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq32 x y)
// cond:
// result: (XORconst [1] (SGT y x))
}
func rewriteValueMIPS_OpGeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32F x y)
// cond:
// result: (FPFlagTrue (CMPGEF x y))
}
func rewriteValueMIPS_OpGeq32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq32U x y)
// cond:
// result: (XORconst [1] (SGTU y x))
}
func rewriteValueMIPS_OpGeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64F x y)
// cond:
// result: (FPFlagTrue (CMPGED x y))
}
func rewriteValueMIPS_OpGeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8 x y)
// cond:
// result: (XORconst [1] (SGT (SignExt8to32 y) (SignExt8to32 x)))
}
func rewriteValueMIPS_OpGeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8U x y)
// cond:
// result: (XORconst [1] (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x)))
}
func rewriteValueMIPS_OpGreater16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16 x y)
// cond:
// result: (SGT (SignExt16to32 x) (SignExt16to32 y))
}
func rewriteValueMIPS_OpGreater16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16U x y)
// cond:
// result: (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y))
}
func rewriteValueMIPS_OpGreater32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32F x y)
// cond:
// result: (FPFlagTrue (CMPGTF x y))
}
func rewriteValueMIPS_OpGreater64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64F x y)
// cond:
// result: (FPFlagTrue (CMPGTD x y))
}
func rewriteValueMIPS_OpGreater8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8 x y)
// cond:
// result: (SGT (SignExt8to32 x) (SignExt8to32 y))
}
func rewriteValueMIPS_OpGreater8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8U x y)
// cond:
// result: (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y))
}
func rewriteValueMIPS_OpHmul32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Hmul32 x y)
// cond:
// result: (Select0 (MULT x y))
}
func rewriteValueMIPS_OpHmul32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Hmul32u x y)
// cond:
// result: (Select0 (MULTU x y))
}
func rewriteValueMIPS_OpIsNonNil_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (IsNonNil ptr)
// cond:
// result: (SGTU ptr (MOVWconst [0]))
}
func rewriteValueMIPS_OpIsSliceInBounds_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (IsSliceInBounds idx len)
// cond:
// result: (XORconst [1] (SGTU idx len))
}
func rewriteValueMIPS_OpLeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16 x y)
// cond:
// result: (XORconst [1] (SGT (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValueMIPS_OpLeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16U x y)
// cond:
// result: (XORconst [1] (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueMIPS_OpLeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq32 x y)
// cond:
// result: (XORconst [1] (SGT x y))
}
func rewriteValueMIPS_OpLeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32F x y)
// cond:
// result: (FPFlagTrue (CMPGEF y x))
}
func rewriteValueMIPS_OpLeq32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq32U x y)
// cond:
// result: (XORconst [1] (SGTU x y))
}
func rewriteValueMIPS_OpLeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64F x y)
// cond:
// result: (FPFlagTrue (CMPGED y x))
}
func rewriteValueMIPS_OpLeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8 x y)
// cond:
// result: (XORconst [1] (SGT (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValueMIPS_OpLeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8U x y)
// cond:
// result: (XORconst [1] (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueMIPS_OpLess16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16 x y)
// cond:
// result: (SGT (SignExt16to32 y) (SignExt16to32 x))
}
func rewriteValueMIPS_OpLess16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16U x y)
// cond:
// result: (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x))
}
func rewriteValueMIPS_OpLess32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32F x y)
// cond:
// result: (FPFlagTrue (CMPGTF y x))
}
func rewriteValueMIPS_OpLess64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64F x y)
// cond:
// result: (FPFlagTrue (CMPGTD y x))
}
func rewriteValueMIPS_OpLess8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8 x y)
// cond:
// result: (SGT (SignExt8to32 y) (SignExt8to32 x))
}
func rewriteValueMIPS_OpLess8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8U x y)
// cond:
// result: (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x))
}
func rewriteValueMIPS_OpLsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x16 <t> x y)
// cond:
// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
}
func rewriteValueMIPS_OpLsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x32 <t> x y)
// cond:
// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
}
func rewriteValueMIPS_OpLsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x8 <t> x y)
// cond:
// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
}
func rewriteValueMIPS_OpLsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x16 <t> x y)
// cond:
// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
}
func rewriteValueMIPS_OpLsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x32 <t> x y)
// cond:
// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
}
func rewriteValueMIPS_OpLsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x8 <t> x y)
// cond:
// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
}
func rewriteValueMIPS_OpLsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x16 <t> x y)
// cond:
// result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
}
func rewriteValueMIPS_OpLsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x32 <t> x y)
// cond:
// result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
}
func rewriteValueMIPS_OpLsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x8 <t> x y)
// cond:
// result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
}
func rewriteValueMIPS_OpMIPSAND_0(v *Value) bool {
b := v.Block
- _ = b
// match: (AND x (MOVWconst [c]))
// cond:
// result: (ANDconst [c] x)
return false
}
func rewriteValueMIPS_OpMIPSCMOVZ_0(v *Value) bool {
- b := v.Block
- _ = b
- // match: (CMOVZ _ b (MOVWconst [0]))
+ // match: (CMOVZ _ f (MOVWconst [0]))
// cond:
- // result: b
+ // result: f
for {
_ = v.Args[2]
- b := v.Args[1]
+ f := v.Args[1]
v_2 := v.Args[2]
if v_2.Op != OpMIPSMOVWconst {
break
}
if v_2.AuxInt != 0 {
break
}
v.reset(OpCopy)
- v.Type = b.Type
- v.AddArg(b)
+ v.Type = f.Type
+ v.AddArg(f)
return true
}
// match: (CMOVZ a _ (MOVWconst [c]))
}
func rewriteValueMIPS_OpMIPSMOVBUreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBUreg x:(MOVBUload _ _))
// cond:
// result: (MOVWreg x)
}
func rewriteValueMIPS_OpMIPSMOVBreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVBreg x:(MOVBload _ _))
// cond:
// result: (MOVWreg x)
}
func rewriteValueMIPS_OpMIPSMOVHUreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVHUreg x:(MOVBUload _ _))
// cond:
// result: (MOVWreg x)
}
func rewriteValueMIPS_OpMIPSMOVHreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVHreg x:(MOVBload _ _))
// cond:
// result: (MOVWreg x)
}
func rewriteValueMIPS_OpMIPSOR_0(v *Value) bool {
b := v.Block
- _ = b
// match: (OR x (MOVWconst [c]))
// cond:
// result: (ORconst [c] x)
}
func rewriteValueMIPS_OpMod16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16 x y)
// cond:
// result: (Select0 (DIV (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValueMIPS_OpMod16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16u x y)
// cond:
// result: (Select0 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValueMIPS_OpMod32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32 x y)
// cond:
// result: (Select0 (DIV x y))
}
func rewriteValueMIPS_OpMod32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32u x y)
// cond:
// result: (Select0 (DIVU x y))
}
func rewriteValueMIPS_OpMod8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8 x y)
// cond:
// result: (Select0 (DIV (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValueMIPS_OpMod8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8u x y)
// cond:
// result: (Select0 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValueMIPS_OpMove_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [0] _ _ mem)
// cond:
// result: mem
}
func rewriteValueMIPS_OpMove_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [6] {t} dst src mem)
// cond: t.(*types.Type).Alignment()%2 == 0
// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
}
func rewriteValueMIPS_OpNeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq16 x y)
// cond:
// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)) (MOVWconst [0]))
}
func rewriteValueMIPS_OpNeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq32 x y)
// cond:
// result: (SGTU (XOR x y) (MOVWconst [0]))
}
func rewriteValueMIPS_OpNeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32F x y)
// cond:
// result: (FPFlagFalse (CMPEQF x y))
}
func rewriteValueMIPS_OpNeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq64F x y)
// cond:
// result: (FPFlagFalse (CMPEQD x y))
}
func rewriteValueMIPS_OpNeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq8 x y)
// cond:
// result: (SGTU (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)) (MOVWconst [0]))
}
func rewriteValueMIPS_OpNeqPtr_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (NeqPtr x y)
// cond:
// result: (SGTU (XOR x y) (MOVWconst [0]))
}
func rewriteValueMIPS_OpRsh16Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux16 <t> x y)
// cond:
// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
}
func rewriteValueMIPS_OpRsh16Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux32 <t> x y)
// cond:
// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
}
func rewriteValueMIPS_OpRsh16Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux64 x (Const64 [c]))
// cond: uint32(c) < 16
// result: (SRLconst (SLLconst <typ.UInt32> x [16]) [c+16])
}
func rewriteValueMIPS_OpRsh16Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux8 <t> x y)
// cond:
// result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
}
func rewriteValueMIPS_OpRsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x16 x y)
// cond:
// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
}
func rewriteValueMIPS_OpRsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x32 x y)
// cond:
// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
}
func rewriteValueMIPS_OpRsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x64 x (Const64 [c]))
// cond: uint32(c) < 16
// result: (SRAconst (SLLconst <typ.UInt32> x [16]) [c+16])
}
func rewriteValueMIPS_OpRsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x8 x y)
// cond:
// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
}
func rewriteValueMIPS_OpRsh32Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux16 <t> x y)
// cond:
// result: (CMOVZ (SRL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
}
func rewriteValueMIPS_OpRsh32Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux32 <t> x y)
// cond:
// result: (CMOVZ (SRL <t> x y) (MOVWconst [0]) (SGTUconst [32] y))
}
func rewriteValueMIPS_OpRsh32Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux8 <t> x y)
// cond:
// result: (CMOVZ (SRL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
}
func rewriteValueMIPS_OpRsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x16 x y)
// cond:
// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
}
func rewriteValueMIPS_OpRsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x32 x y)
// cond:
// result: (SRA x ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
}
func rewriteValueMIPS_OpRsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x8 x y)
// cond:
// result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
}
func rewriteValueMIPS_OpRsh8Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux16 <t> x y)
// cond:
// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y)))
}
func rewriteValueMIPS_OpRsh8Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux32 <t> x y)
// cond:
// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) y) (MOVWconst [0]) (SGTUconst [32] y))
}
func rewriteValueMIPS_OpRsh8Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux64 x (Const64 [c]))
// cond: uint32(c) < 8
// result: (SRLconst (SLLconst <typ.UInt32> x [24]) [c+24])
}
func rewriteValueMIPS_OpRsh8Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux8 <t> x y)
// cond:
// result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y)))
}
func rewriteValueMIPS_OpRsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x16 x y)
// cond:
// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y))))
}
func rewriteValueMIPS_OpRsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x32 x y)
// cond:
// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y)))
}
func rewriteValueMIPS_OpRsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x64 x (Const64 [c]))
// cond: uint32(c) < 8
// result: (SRAconst (SLLconst <typ.UInt32> x [24]) [c+24])
}
func rewriteValueMIPS_OpRsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x8 x y)
// cond:
// result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y))))
}
func rewriteValueMIPS_OpSelect0_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Select0 (Add32carry <t> x y))
// cond:
// result: (ADD <t.FieldType(0)> x y)
}
func rewriteValueMIPS_OpSelect1_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Select1 (Add32carry <t> x y))
// cond:
// result: (SGTU <typ.Bool> x (ADD <t.FieldType(0)> x y))
}
func rewriteValueMIPS_OpSlicemask_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Slicemask <t> x)
// cond:
// result: (SRAconst (NEG <t> x) [31])
}
func rewriteValueMIPS_OpSub32withcarry_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Sub32withcarry <t> x y c)
// cond:
// result: (SUB (SUB <t> x y) c)
}
func rewriteValueMIPS_OpZero_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [0] _ mem)
// cond:
// result: mem
}
func rewriteValueMIPS_OpZero_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [12] {t} ptr mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))
}
func rewriteValueMIPS_OpZeromask_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Zeromask x)
// cond:
// result: (NEG (SGTU x (MOVWconst [0])))
}
func rewriteValueMIPS64_OpAvg64u_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Avg64u <t> x y)
// cond:
// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
}
func rewriteValueMIPS64_OpCom16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Com16 x)
// cond:
// result: (NOR (MOVVconst [0]) x)
}
func rewriteValueMIPS64_OpCom32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Com32 x)
// cond:
// result: (NOR (MOVVconst [0]) x)
}
func rewriteValueMIPS64_OpCom64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Com64 x)
// cond:
// result: (NOR (MOVVconst [0]) x)
}
func rewriteValueMIPS64_OpCom8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Com8 x)
// cond:
// result: (NOR (MOVVconst [0]) x)
}
func rewriteValueMIPS64_OpDiv16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16 x y)
// cond:
// result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
}
func rewriteValueMIPS64_OpDiv16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16u x y)
// cond:
// result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpDiv32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32 x y)
// cond:
// result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
}
func rewriteValueMIPS64_OpDiv32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32u x y)
// cond:
// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpDiv64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div64 x y)
// cond:
// result: (Select1 (DIVV x y))
}
func rewriteValueMIPS64_OpDiv64u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div64u x y)
// cond:
// result: (Select1 (DIVVU x y))
}
func rewriteValueMIPS64_OpDiv8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8 x y)
// cond:
// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
}
func rewriteValueMIPS64_OpDiv8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8u x y)
// cond:
// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpEq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq16 x y)
// cond:
// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpEq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq32 x y)
// cond:
// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpEq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32F x y)
// cond:
// result: (FPFlagTrue (CMPEQF x y))
}
func rewriteValueMIPS64_OpEq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq64 x y)
// cond:
// result: (SGTU (MOVVconst [1]) (XOR x y))
}
func rewriteValueMIPS64_OpEq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq64F x y)
// cond:
// result: (FPFlagTrue (CMPEQD x y))
}
func rewriteValueMIPS64_OpEq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq8 x y)
// cond:
// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpEqB_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqB x y)
// cond:
// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
}
func rewriteValueMIPS64_OpEqPtr_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqPtr x y)
// cond:
// result: (SGTU (MOVVconst [1]) (XOR x y))
}
func rewriteValueMIPS64_OpGeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16 x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x)))
}
func rewriteValueMIPS64_OpGeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16U x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)))
}
func rewriteValueMIPS64_OpGeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq32 x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x)))
}
func rewriteValueMIPS64_OpGeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32F x y)
// cond:
// result: (FPFlagTrue (CMPGEF x y))
}
func rewriteValueMIPS64_OpGeq32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq32U x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)))
}
func rewriteValueMIPS64_OpGeq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq64 x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGT y x))
}
func rewriteValueMIPS64_OpGeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64F x y)
// cond:
// result: (FPFlagTrue (CMPGED x y))
}
func rewriteValueMIPS64_OpGeq64U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq64U x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGTU y x))
}
func rewriteValueMIPS64_OpGeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8 x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 y) (SignExt8to64 x)))
}
func rewriteValueMIPS64_OpGeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8U x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)))
}
func rewriteValueMIPS64_OpGreater16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16 x y)
// cond:
// result: (SGT (SignExt16to64 x) (SignExt16to64 y))
}
func rewriteValueMIPS64_OpGreater16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16U x y)
// cond:
// result: (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))
}
func rewriteValueMIPS64_OpGreater32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater32 x y)
// cond:
// result: (SGT (SignExt32to64 x) (SignExt32to64 y))
}
func rewriteValueMIPS64_OpGreater32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32F x y)
// cond:
// result: (FPFlagTrue (CMPGTF x y))
}
func rewriteValueMIPS64_OpGreater32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater32U x y)
// cond:
// result: (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))
}
func rewriteValueMIPS64_OpGreater64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64F x y)
// cond:
// result: (FPFlagTrue (CMPGTD x y))
}
func rewriteValueMIPS64_OpGreater8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8 x y)
// cond:
// result: (SGT (SignExt8to64 x) (SignExt8to64 y))
}
func rewriteValueMIPS64_OpGreater8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8U x y)
// cond:
// result: (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))
}
func rewriteValueMIPS64_OpHmul32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Hmul32 x y)
// cond:
// result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
}
func rewriteValueMIPS64_OpHmul32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Hmul32u x y)
// cond:
// result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
}
func rewriteValueMIPS64_OpHmul64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Hmul64 x y)
// cond:
// result: (Select0 (MULV x y))
}
func rewriteValueMIPS64_OpHmul64u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Hmul64u x y)
// cond:
// result: (Select0 (MULVU x y))
}
func rewriteValueMIPS64_OpIsNonNil_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (IsNonNil ptr)
// cond:
// result: (SGTU ptr (MOVVconst [0]))
}
func rewriteValueMIPS64_OpIsSliceInBounds_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (IsSliceInBounds idx len)
// cond:
// result: (XOR (MOVVconst [1]) (SGTU idx len))
}
func rewriteValueMIPS64_OpLeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16 x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
}
func rewriteValueMIPS64_OpLeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16U x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpLeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq32 x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
}
func rewriteValueMIPS64_OpLeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32F x y)
// cond:
// result: (FPFlagTrue (CMPGEF y x))
}
func rewriteValueMIPS64_OpLeq32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq32U x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpLeq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq64 x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGT x y))
}
func rewriteValueMIPS64_OpLeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64F x y)
// cond:
// result: (FPFlagTrue (CMPGED y x))
}
func rewriteValueMIPS64_OpLeq64U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq64U x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGTU x y))
}
func rewriteValueMIPS64_OpLeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8 x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
}
func rewriteValueMIPS64_OpLeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8U x y)
// cond:
// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpLess16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16 x y)
// cond:
// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
}
func rewriteValueMIPS64_OpLess16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16U x y)
// cond:
// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
}
func rewriteValueMIPS64_OpLess32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less32 x y)
// cond:
// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
}
func rewriteValueMIPS64_OpLess32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32F x y)
// cond:
// result: (FPFlagTrue (CMPGTF y x))
}
func rewriteValueMIPS64_OpLess32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less32U x y)
// cond:
// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
}
func rewriteValueMIPS64_OpLess64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64F x y)
// cond:
// result: (FPFlagTrue (CMPGTD y x))
}
func rewriteValueMIPS64_OpLess8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8 x y)
// cond:
// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
}
func rewriteValueMIPS64_OpLess8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8U x y)
// cond:
// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
}
func rewriteValueMIPS64_OpLsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpLsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpLsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
}
func rewriteValueMIPS64_OpLsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpLsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpLsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpLsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
}
func rewriteValueMIPS64_OpLsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpLsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpLsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpLsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
}
func rewriteValueMIPS64_OpLsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpLsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpLsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpLsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y))
}
func rewriteValueMIPS64_OpLsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpMod16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16 x y)
// cond:
// result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y)))
}
func rewriteValueMIPS64_OpMod16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16u x y)
// cond:
// result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpMod32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32 x y)
// cond:
// result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))
}
func rewriteValueMIPS64_OpMod32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32u x y)
// cond:
// result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpMod64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod64 x y)
// cond:
// result: (Select0 (DIVV x y))
}
func rewriteValueMIPS64_OpMod64u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod64u x y)
// cond:
// result: (Select0 (DIVVU x y))
}
func rewriteValueMIPS64_OpMod8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8 x y)
// cond:
// result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
}
func rewriteValueMIPS64_OpMod8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8u x y)
// cond:
// result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpMove_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [0] _ _ mem)
// cond:
// result: mem
}
func rewriteValueMIPS64_OpMove_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [3] dst src mem)
// cond:
// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
}
func rewriteValueMIPS64_OpMul16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mul16 x y)
// cond:
// result: (Select1 (MULVU x y))
}
func rewriteValueMIPS64_OpMul32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mul32 x y)
// cond:
// result: (Select1 (MULVU x y))
}
func rewriteValueMIPS64_OpMul64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mul64 x y)
// cond:
// result: (Select1 (MULVU x y))
}
func rewriteValueMIPS64_OpMul8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mul8 x y)
// cond:
// result: (Select1 (MULVU x y))
}
func rewriteValueMIPS64_OpNeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq16 x y)
// cond:
// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))
}
func rewriteValueMIPS64_OpNeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq32 x y)
// cond:
// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))
}
func rewriteValueMIPS64_OpNeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32F x y)
// cond:
// result: (FPFlagFalse (CMPEQF x y))
}
func rewriteValueMIPS64_OpNeq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq64 x y)
// cond:
// result: (SGTU (XOR x y) (MOVVconst [0]))
}
func rewriteValueMIPS64_OpNeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq64F x y)
// cond:
// result: (FPFlagFalse (CMPEQD x y))
}
func rewriteValueMIPS64_OpNeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq8 x y)
// cond:
// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))
}
func rewriteValueMIPS64_OpNeqPtr_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (NeqPtr x y)
// cond:
// result: (SGTU (XOR x y) (MOVVconst [0]))
}
func rewriteValueMIPS64_OpRsh16Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpRsh16Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpRsh16Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))
}
func rewriteValueMIPS64_OpRsh16Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpRsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x16 <t> x y)
// cond:
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpRsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x32 <t> x y)
// cond:
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpRsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x64 <t> x y)
// cond:
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
}
func rewriteValueMIPS64_OpRsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x8 <t> x y)
// cond:
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpRsh32Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpRsh32Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpRsh32Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))
}
func rewriteValueMIPS64_OpRsh32Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpRsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x16 <t> x y)
// cond:
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpRsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x32 <t> x y)
// cond:
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpRsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x64 <t> x y)
// cond:
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
}
func rewriteValueMIPS64_OpRsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x8 <t> x y)
// cond:
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpRsh64Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpRsh64Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpRsh64Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y))
}
func rewriteValueMIPS64_OpRsh64Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpRsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x16 <t> x y)
// cond:
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpRsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x32 <t> x y)
// cond:
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpRsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x64 <t> x y)
// cond:
// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
}
func rewriteValueMIPS64_OpRsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x8 <t> x y)
// cond:
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpRsh8Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux16 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpRsh8Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux32 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpRsh8Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux64 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))
}
func rewriteValueMIPS64_OpRsh8Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux8 <t> x y)
// cond:
// result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpRsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x16 <t> x y)
// cond:
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y)))
}
func rewriteValueMIPS64_OpRsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x32 <t> x y)
// cond:
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y)))
}
func rewriteValueMIPS64_OpRsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x64 <t> x y)
// cond:
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y))
}
func rewriteValueMIPS64_OpRsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x8 <t> x y)
// cond:
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y)))
}
func rewriteValueMIPS64_OpSlicemask_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Slicemask <t> x)
// cond:
// result: (SRAVconst (NEGV <t> x) [63])
}
func rewriteValueMIPS64_OpZero_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [0] _ mem)
// cond:
// result: mem
}
func rewriteValueMIPS64_OpZero_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [3] ptr mem)
// cond:
// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))
}
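For reference, a rough sketch of how one of the chunks above expands once the blank assignments are gone: the MIPS64 IsNonNil chunk, reconstructed from the usual shape of the generator's output rather than copied from this diff. Both declarations survive only because the body really does refer to b and typ, so no `_ = b` / `_ = typ` is needed.
func rewriteValueMIPS64_OpIsNonNil_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (IsNonNil ptr)
	// cond:
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v.AddArg(ptr)
		// b is live: it allocates the new constant operand.
		// typ is live: it supplies that operand's type.
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		return true
	}
}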
func rewriteValuePPC64_OpAvg64u_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Avg64u <t> x y)
// cond:
// result: (ADD (SRDconst <t> (SUB <t> x y) [1]) y)
}
func rewriteValuePPC64_OpBitLen32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitLen32 x)
// cond:
// result: (SUB (MOVDconst [32]) (CNTLZW <typ.Int> x))
}
func rewriteValuePPC64_OpBitLen64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitLen64 x)
// cond:
// result: (SUB (MOVDconst [64]) (CNTLZD <typ.Int> x))
}
func rewriteValuePPC64_OpCtz16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz16 x)
// cond:
// result: (POPCNTW (MOVHZreg (ANDN <typ.Int16> (ADDconst <typ.Int16> [-1] x) x)))
}
func rewriteValuePPC64_OpCtz32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz32 x)
// cond:
// result: (POPCNTW (MOVWZreg (ANDN <typ.Int> (ADDconst <typ.Int> [-1] x) x)))
}
func rewriteValuePPC64_OpCtz64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz64 x)
// cond:
// result: (POPCNTD (ANDN <typ.Int64> (ADDconst <typ.Int64> [-1] x) x))
}
func rewriteValuePPC64_OpCtz8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz8 x)
// cond:
// result: (POPCNTB (MOVBZreg (ANDN <typ.UInt8> (ADDconst <typ.UInt8> [-1] x) x)))
}
func rewriteValuePPC64_OpCvt32Fto32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt32Fto32 x)
// cond:
// result: (MFVSRD (FCTIWZ x))
}
func rewriteValuePPC64_OpCvt32Fto64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt32Fto64 x)
// cond:
// result: (MFVSRD (FCTIDZ x))
}
func rewriteValuePPC64_OpCvt32to32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt32to32F x)
// cond:
// result: (FCFIDS (MTVSRD (SignExt32to64 x)))
}
func rewriteValuePPC64_OpCvt32to64F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt32to64F x)
// cond:
// result: (FCFID (MTVSRD (SignExt32to64 x)))
}
func rewriteValuePPC64_OpCvt64Fto32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt64Fto32 x)
// cond:
// result: (MFVSRD (FCTIWZ x))
}
func rewriteValuePPC64_OpCvt64Fto64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt64Fto64 x)
// cond:
// result: (MFVSRD (FCTIDZ x))
}
func rewriteValuePPC64_OpCvt64to32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt64to32F x)
// cond:
// result: (FCFIDS (MTVSRD x))
}
func rewriteValuePPC64_OpCvt64to64F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt64to64F x)
// cond:
// result: (FCFID (MTVSRD x))
}
func rewriteValuePPC64_OpDiv16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16 x y)
// cond:
// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
}
func rewriteValuePPC64_OpDiv16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16u x y)
// cond:
// result: (DIVWU (ZeroExt16to32 x) (ZeroExt16to32 y))
}
func rewriteValuePPC64_OpDiv8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8 x y)
// cond:
// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
}
func rewriteValuePPC64_OpDiv8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8u x y)
// cond:
// result: (DIVWU (ZeroExt8to32 x) (ZeroExt8to32 y))
}
func rewriteValuePPC64_OpEq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq16 x y)
// cond: isSigned(x.Type) && isSigned(y.Type)
// result: (Equal (CMPW (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValuePPC64_OpEq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32 x y)
// cond:
// result: (Equal (CMPW x y))
}
func rewriteValuePPC64_OpEq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32F x y)
// cond:
// result: (Equal (FCMPU x y))
}
func rewriteValuePPC64_OpEq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq64 x y)
// cond:
// result: (Equal (CMP x y))
}
func rewriteValuePPC64_OpEq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq64F x y)
// cond:
// result: (Equal (FCMPU x y))
}
func rewriteValuePPC64_OpEq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq8 x y)
// cond: isSigned(x.Type) && isSigned(y.Type)
// result: (Equal (CMPW (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValuePPC64_OpEqB_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqB x y)
// cond:
// result: (ANDconst [1] (EQV x y))
}
func rewriteValuePPC64_OpEqPtr_0(v *Value) bool {
b := v.Block
- _ = b
// match: (EqPtr x y)
// cond:
// result: (Equal (CMP x y))
}
func rewriteValuePPC64_OpGeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16 x y)
// cond:
// result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValuePPC64_OpGeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16U x y)
// cond:
// result: (GreaterEqual (CMPWU (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValuePPC64_OpGeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32 x y)
// cond:
// result: (GreaterEqual (CMPW x y))
}
func rewriteValuePPC64_OpGeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32F x y)
// cond:
// result: (FGreaterEqual (FCMPU x y))
}
func rewriteValuePPC64_OpGeq32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq32U x y)
// cond:
// result: (GreaterEqual (CMPWU x y))
}
func rewriteValuePPC64_OpGeq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64 x y)
// cond:
// result: (GreaterEqual (CMP x y))
}
func rewriteValuePPC64_OpGeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64F x y)
// cond:
// result: (FGreaterEqual (FCMPU x y))
}
func rewriteValuePPC64_OpGeq64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Geq64U x y)
// cond:
// result: (GreaterEqual (CMPU x y))
}
func rewriteValuePPC64_OpGeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8 x y)
// cond:
// result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValuePPC64_OpGeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8U x y)
// cond:
// result: (GreaterEqual (CMPWU (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValuePPC64_OpGreater16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16 x y)
// cond:
// result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValuePPC64_OpGreater16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16U x y)
// cond:
// result: (GreaterThan (CMPWU (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValuePPC64_OpGreater32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32 x y)
// cond:
// result: (GreaterThan (CMPW x y))
}
func rewriteValuePPC64_OpGreater32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32F x y)
// cond:
// result: (FGreaterThan (FCMPU x y))
}
func rewriteValuePPC64_OpGreater32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater32U x y)
// cond:
// result: (GreaterThan (CMPWU x y))
}
func rewriteValuePPC64_OpGreater64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64 x y)
// cond:
// result: (GreaterThan (CMP x y))
}
func rewriteValuePPC64_OpGreater64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64F x y)
// cond:
// result: (FGreaterThan (FCMPU x y))
}
func rewriteValuePPC64_OpGreater64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Greater64U x y)
// cond:
// result: (GreaterThan (CMPU x y))
}
func rewriteValuePPC64_OpGreater8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8 x y)
// cond:
// result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValuePPC64_OpGreater8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8U x y)
// cond:
// result: (GreaterThan (CMPWU (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValuePPC64_OpIsInBounds_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsInBounds idx len)
// cond:
// result: (LessThan (CMPU idx len))
}
func rewriteValuePPC64_OpIsNonNil_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsNonNil ptr)
// cond:
// result: (NotEqual (CMPconst [0] ptr))
}
func rewriteValuePPC64_OpIsSliceInBounds_0(v *Value) bool {
b := v.Block
- _ = b
// match: (IsSliceInBounds idx len)
// cond:
// result: (LessEqual (CMPU idx len))
}
func rewriteValuePPC64_OpLeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16 x y)
// cond:
// result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValuePPC64_OpLeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16U x y)
// cond:
// result: (LessEqual (CMPWU (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValuePPC64_OpLeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32 x y)
// cond:
// result: (LessEqual (CMPW x y))
}
func rewriteValuePPC64_OpLeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32F x y)
// cond:
// result: (FLessEqual (FCMPU x y))
}
func rewriteValuePPC64_OpLeq32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq32U x y)
// cond:
// result: (LessEqual (CMPWU x y))
}
func rewriteValuePPC64_OpLeq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64 x y)
// cond:
// result: (LessEqual (CMP x y))
}
func rewriteValuePPC64_OpLeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64F x y)
// cond:
// result: (FLessEqual (FCMPU x y))
}
func rewriteValuePPC64_OpLeq64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Leq64U x y)
// cond:
// result: (LessEqual (CMPU x y))
}
func rewriteValuePPC64_OpLeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8 x y)
// cond:
// result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValuePPC64_OpLeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8U x y)
// cond:
// result: (LessEqual (CMPWU (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValuePPC64_OpLess16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16 x y)
// cond:
// result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValuePPC64_OpLess16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16U x y)
// cond:
// result: (LessThan (CMPWU (ZeroExt16to32 x) (ZeroExt16to32 y)))
}
func rewriteValuePPC64_OpLess32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32 x y)
// cond:
// result: (LessThan (CMPW x y))
}
func rewriteValuePPC64_OpLess32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32F x y)
// cond:
// result: (FLessThan (FCMPU x y))
}
func rewriteValuePPC64_OpLess32U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less32U x y)
// cond:
// result: (LessThan (CMPWU x y))
}
func rewriteValuePPC64_OpLess64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64 x y)
// cond:
// result: (LessThan (CMP x y))
}
func rewriteValuePPC64_OpLess64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64F x y)
// cond:
// result: (FLessThan (FCMPU x y))
}
func rewriteValuePPC64_OpLess64U_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Less64U x y)
// cond:
// result: (LessThan (CMPU x y))
}
func rewriteValuePPC64_OpLess8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8 x y)
// cond:
// result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValuePPC64_OpLess8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8U x y)
// cond:
// result: (LessThan (CMPWU (ZeroExt8to32 x) (ZeroExt8to32 y)))
}
func rewriteValuePPC64_OpLoad_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Load <t> ptr mem)
// cond: (is64BitInt(t) || isPtr(t))
// result: (MOVDload ptr mem)
}
func rewriteValuePPC64_OpLsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x16 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValuePPC64_OpLsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x32 x (Const64 [c]))
// cond: uint32(c) < 16
// result: (SLWconst x [c])
}
func rewriteValuePPC64_OpLsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x64 x (Const64 [c]))
// cond: uint64(c) < 16
// result: (SLWconst x [c])
}
func rewriteValuePPC64_OpLsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x8 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValuePPC64_OpLsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x16 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValuePPC64_OpLsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x32 x (Const64 [c]))
// cond: uint32(c) < 32
// result: (SLWconst x [c])
}
func rewriteValuePPC64_OpLsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x64 x (Const64 [c]))
// cond: uint64(c) < 32
// result: (SLWconst x [c])
}
func rewriteValuePPC64_OpLsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x8 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValuePPC64_OpLsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x16 x y)
// cond: shiftIsBounded(v)
// result: (SLD x y)
}
func rewriteValuePPC64_OpLsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x32 x (Const64 [c]))
// cond: uint32(c) < 64
// result: (SLDconst x [c])
}
func rewriteValuePPC64_OpLsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x64 x (Const64 [c]))
// cond: uint64(c) < 64
// result: (SLDconst x [c])
}
func rewriteValuePPC64_OpLsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x8 x y)
// cond: shiftIsBounded(v)
// result: (SLD x y)
}
func rewriteValuePPC64_OpLsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x16 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValuePPC64_OpLsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x32 x (Const64 [c]))
// cond: uint32(c) < 8
// result: (SLWconst x [c])
}
func rewriteValuePPC64_OpLsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x64 x (Const64 [c]))
// cond: uint64(c) < 8
// result: (SLWconst x [c])
}
func rewriteValuePPC64_OpLsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x8 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValuePPC64_OpMod16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16 x y)
// cond:
// result: (Mod32 (SignExt16to32 x) (SignExt16to32 y))
}
func rewriteValuePPC64_OpMod16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16u x y)
// cond:
// result: (Mod32u (ZeroExt16to32 x) (ZeroExt16to32 y))
}
func rewriteValuePPC64_OpMod32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32 x y)
// cond:
// result: (SUB x (MULLW y (DIVW x y)))
}
func rewriteValuePPC64_OpMod32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32u x y)
// cond:
// result: (SUB x (MULLW y (DIVWU x y)))
}
func rewriteValuePPC64_OpMod64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod64 x y)
// cond:
// result: (SUB x (MULLD y (DIVD x y)))
}
func rewriteValuePPC64_OpMod64u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod64u x y)
// cond:
// result: (SUB x (MULLD y (DIVDU x y)))
}
func rewriteValuePPC64_OpMod8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8 x y)
// cond:
// result: (Mod32 (SignExt8to32 x) (SignExt8to32 y))
}
func rewriteValuePPC64_OpMod8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8u x y)
// cond:
// result: (Mod32u (ZeroExt8to32 x) (ZeroExt8to32 y))
}
func rewriteValuePPC64_OpMove_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [0] _ _ mem)
// cond:
// result: mem
}
func rewriteValuePPC64_OpNeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq16 x y)
// cond: isSigned(x.Type) && isSigned(y.Type)
// result: (NotEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
}
func rewriteValuePPC64_OpNeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32 x y)
// cond:
// result: (NotEqual (CMPW x y))
}
func rewriteValuePPC64_OpNeq32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32F x y)
// cond:
// result: (NotEqual (FCMPU x y))
}
func rewriteValuePPC64_OpNeq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq64 x y)
// cond:
// result: (NotEqual (CMP x y))
}
func rewriteValuePPC64_OpNeq64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq64F x y)
// cond:
// result: (NotEqual (FCMPU x y))
}
func rewriteValuePPC64_OpNeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq8 x y)
// cond: isSigned(x.Type) && isSigned(y.Type)
// result: (NotEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
}
func rewriteValuePPC64_OpNeqPtr_0(v *Value) bool {
b := v.Block
- _ = b
// match: (NeqPtr x y)
// cond:
// result: (NotEqual (CMP x y))
}
func rewriteValuePPC64_OpOffPtr_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OffPtr [off] ptr)
// cond:
// result: (ADD (MOVDconst <typ.Int64> [off]) ptr)
}
func rewriteValuePPC64_OpPPC64ADD_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ADD (SLDconst x [c]) (SRDconst x [d]))
// cond: d == 64-c
// result: (ROTLconst [c] x)
}
func rewriteValuePPC64_OpPPC64CMP_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMP x (MOVDconst [c]))
// cond: is16Bit(c)
// result: (CMPconst x [c])
}
func rewriteValuePPC64_OpPPC64CMPU_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPU x (MOVDconst [c]))
// cond: isU16Bit(c)
// result: (CMPUconst x [c])
}
func rewriteValuePPC64_OpPPC64CMPW_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPW x (MOVWreg y))
// cond:
// result: (CMPW x y)
}
func rewriteValuePPC64_OpPPC64CMPWU_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPWU x (MOVWZreg y))
// cond:
// result: (CMPWU x y)
}
func rewriteValuePPC64_OpPPC64MFVSRD_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MFVSRD (FMOVDconst [c]))
// cond:
// result: (MOVDconst [c])
}
func rewriteValuePPC64_OpPPC64MOVBZreg_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVBZreg y:(ANDconst [c] _))
// cond: uint64(c) <= 0xFF
// result: y
}
func rewriteValuePPC64_OpPPC64MOVBreg_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVBreg y:(ANDconst [c] _))
// cond: uint64(c) <= 0x7F
// result: y
}
func rewriteValuePPC64_OpPPC64MOVBstore_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVBstore [off] {sym} ptr (SRWconst (MOVHreg x) [c]) mem)
// cond: c <= 8
// result: (MOVBstore [off] {sym} ptr (SRWconst <typ.UInt32> x [c]) mem)
}
func rewriteValuePPC64_OpPPC64MOVBstore_20(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVBstore [i7] {s} p (SRDconst w [56]) x0:(MOVBstore [i6] {s} p (SRDconst w [48]) x1:(MOVBstore [i5] {s} p (SRDconst w [40]) x2:(MOVBstore [i4] {s} p (SRDconst w [32]) x3:(MOVWstore [i0] {s} p w mem)))))
// cond: !config.BigEndian && i0%4 == 0 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3)
// result: (MOVDstore [i0] {s} p w mem)
}
func rewriteValuePPC64_OpPPC64MOVBstoreidx_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVBstoreidx ptr (MOVDconst [c]) val mem)
// cond: is16Bit(c)
// result: (MOVBstore [c] ptr val mem)
}
func rewriteValuePPC64_OpPPC64MOVBstoreidx_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVBstoreidx [off] {sym} ptr idx (SRWconst (MOVWreg x) [c]) mem)
// cond: c <= 24
// result: (MOVBstoreidx [off] {sym} ptr idx (SRWconst <typ.UInt32> x [c]) mem)
}
func rewriteValuePPC64_OpPPC64MOVHZreg_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVHZreg y:(ANDconst [c] _))
// cond: uint64(c) <= 0xFFFF
// result: y
}
func rewriteValuePPC64_OpPPC64MOVHreg_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVHreg y:(ANDconst [c] _))
// cond: uint64(c) <= 0x7FFF
// result: y
}
func rewriteValuePPC64_OpPPC64MOVHstore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (MOVHstore [off1] {sym} (ADDconst [off2] x) val mem)
// cond: is16Bit(off1+off2)
// result: (MOVHstore [off1+off2] {sym} x val mem)
}
func rewriteValuePPC64_OpPPC64MOVWZreg_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVWZreg y:(ANDconst [c] _))
// cond: uint64(c) <= 0xFFFFFFFF
// result: y
}
func rewriteValuePPC64_OpPPC64MOVWreg_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVWreg y:(ANDconst [c] _))
// cond: uint64(c) <= 0xFFFF
// result: y
}
func rewriteValuePPC64_OpPPC64MTVSRD_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MTVSRD (MOVDconst [c]))
// cond:
// result: (FMOVDconst [c])
}
func rewriteValuePPC64_OpPPC64OR_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR (SLDconst x [c]) (SRDconst x [d]))
// cond: d == 64-c
// result: (ROTLconst [c] x)
}
func rewriteValuePPC64_OpPPC64OR_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (OR x (MOVDconst [c]))
// cond: isU32Bit(c)
// result: (ORconst [c] x)
}
func rewriteValuePPC64_OpPPC64OR_20(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (OR <t> s0:(SLWconst x0:(MOVBZload [i1] {s} p mem) [n1]) s1:(SLWconst x1:(MOVBZload [i0] {s} p mem) [n2]))
// cond: !config.BigEndian && i1 == i0+1 && n1%8 == 0 && n2 == n1+8 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1)
// result: @mergePoint(b,x0,x1) (SLDconst <t> (MOVHBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem) [n1])
}
func rewriteValuePPC64_OpPPC64OR_30(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (OR <t> o0:(OR <t> s0:(SLDconst x1:(MOVBZload [i2] {s} p mem) [16]) x0:(MOVHZload [i0] {s} p mem)) s1:(SLDconst x2:(MOVBZload [i3] {s} p mem) [24]))
// cond: !config.BigEndian && i2 == i0+2 && i3 == i0+3 && x0.Uses ==1 && x1.Uses == 1 && x2.Uses == 1 && o0.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(s0) && clobber(s1) && clobber(o0)
// result: @mergePoint(b,x0,x1,x2) (MOVWZload <t> {s} [i0] p mem)
}
func rewriteValuePPC64_OpPPC64OR_40(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (OR <t> x0:(MOVBZload [i3] {s} p mem) o0:(OR <t> s0:(SLWconst x1:(MOVBZload [i2] {s} p mem) [8]) s1:(SLWconst x2:(MOVHBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem) [16])))
// cond: !config.BigEndian && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && o0.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(s0) && clobber(s1) && clobber(o0)
// result: @mergePoint(b,x0,x1,x2) (MOVWBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem)
}
func rewriteValuePPC64_OpPPC64OR_50(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (OR <t> o0:(OR <t> s1:(SLDconst x1:(MOVBZload [i2] {s} p mem) [40]) s0:(SLDconst x0:(MOVHBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem) [48])) s2:(SLDconst x2:(MOVBZload [i3] {s} p mem) [32]))
// cond: !config.BigEndian && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && o0.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && s2.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(s0) && clobber(s1) && clobber(s2) && clobber(o0)
// result: @mergePoint(b,x0,x1,x2) (SLDconst <t> (MOVWBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem) [32])
}
func rewriteValuePPC64_OpPPC64OR_60(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (OR <t> s6:(SLDconst x7:(MOVBZload [i7] {s} p mem) [56]) o5:(OR <t> o4:(OR <t> s4:(SLDconst x5:(MOVBZload [i5] {s} p mem) [40]) o3:(OR <t> s3:(SLDconst x4:(MOVBZload [i4] {s} p mem) [32]) x0:(MOVWZload {s} [i0] p mem))) s5:(SLDconst x6:(MOVBZload [i6] {s} p mem) [48])))
// cond: !config.BigEndian && i0%4 == 0 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses ==1 && x7.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s3.Uses == 1 && s4.Uses == 1 && s5.Uses == 1 && s6.Uses == 1 && mergePoint(b, x0, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(s3) && clobber(s4) && clobber(s5) && clobber (s6) && clobber(o3) && clobber(o4) && clobber(o5)
// result: @mergePoint(b,x0,x4,x5,x6,x7) (MOVDload <t> {s} [i0] p mem)
}
func rewriteValuePPC64_OpPPC64OR_70(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (OR <t> o5:(OR <t> o4:(OR <t> o3:(OR <t> s3:(SLDconst x4:(MOVBZload [i4] {s} p mem) [32]) x0:(MOVWZload {s} [i0] p mem)) s4:(SLDconst x5:(MOVBZload [i5] {s} p mem) [40])) s5:(SLDconst x6:(MOVBZload [i6] {s} p mem) [48])) s6:(SLDconst x7:(MOVBZload [i7] {s} p mem) [56]))
// cond: !config.BigEndian && i0%4 == 0 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses ==1 && x7.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s3.Uses == 1 && s4.Uses == 1 && s5.Uses == 1 && s6.Uses == 1 && mergePoint(b, x0, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(s3) && clobber(s4) && clobber(s5) && clobber (s6) && clobber(o3) && clobber(o4) && clobber(o5)
// result: @mergePoint(b,x0,x4,x5,x6,x7) (MOVDload <t> {s} [i0] p mem)
}
func rewriteValuePPC64_OpPPC64OR_80(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (OR <t> o0:(OR <t> s1:(SLDconst x1:(MOVBZload [i1] {s} p mem) [48]) o1:(OR <t> s2:(SLDconst x2:(MOVBZload [i2] {s} p mem) [40]) o2:(OR <t> s3:(SLDconst x3:(MOVBZload [i3] {s} p mem) [32]) x4:(MOVWBRload <t> (MOVDaddr <typ.Uintptr> [i4] p) mem)))) s0:(SLDconst x0:(MOVBZload [i0] {s} p mem) [56]))
// cond: !config.BigEndian && i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(s0) && clobber(s1) && clobber(s2) && clobber(s3)
// result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem)
}
func rewriteValuePPC64_OpPPC64OR_90(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (OR <t> x7:(MOVBZload [i7] {s} p mem) o5:(OR <t> s6:(SLDconst x6:(MOVBZload [i6] {s} p mem) [8]) o4:(OR <t> o3:(OR <t> s4:(SLDconst x4:(MOVBZload [i4] {s} p mem) [24]) s0:(SLWconst x3:(MOVWBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem) [32])) s5:(SLDconst x5:(MOVBZload [i5] {s} p mem) [16]))))
// cond: !config.BigEndian && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && s4.Uses == 1 && s5.Uses == 1 && s6.Uses == 1 && mergePoint(b, x3, x4, x5, x6, x7) != nil && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) && clobber(s4) && clobber(s5) && clobber(s6)
// result: @mergePoint(b,x3,x4,x5,x6,x7) (MOVDBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem)
}
func rewriteValuePPC64_OpPPC64OR_100(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (OR <t> o5:(OR <t> o4:(OR <t> s5:(SLDconst x5:(MOVBZload [i5] {s} p mem) [16]) o3:(OR <t> s4:(SLDconst x4:(MOVBZload [i4] {s} p mem) [24]) s0:(SLWconst x3:(MOVWBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem) [32]))) s6:(SLDconst x6:(MOVBZload [i6] {s} p mem) [8])) x7:(MOVBZload [i7] {s} p mem))
// cond: !config.BigEndian && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && s4.Uses == 1 && s5.Uses == 1 && s6.Uses == 1 && mergePoint(b, x3, x4, x5, x6, x7) != nil && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) && clobber(s4) && clobber(s5) && clobber(s6)
// result: @mergePoint(b,x3,x4,x5,x6,x7) (MOVDBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem)
}
func rewriteValuePPC64_OpPPC64OR_110(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (OR <t> x7:(MOVBZload [i7] {s} p mem) o5:(OR <t> o4:(OR <t> o3:(OR <t> s4:(SLDconst x4:(MOVBZload [i4] {s} p mem) [24]) s0:(SLDconst x3:(MOVWBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem) [32])) s5:(SLDconst x5:(MOVBZload [i5] {s} p mem) [16])) s6:(SLDconst x6:(MOVBZload [i6] {s} p mem) [8])))
// cond: !config.BigEndian && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && s4.Uses == 1 && s5.Uses == 1 && s6.Uses == 1 && mergePoint(b, x3, x4, x5, x6, x7) != nil && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) && clobber(s4) && clobber(s5) && clobber(s6)
// result: @mergePoint(b,x3,x4,x5,x6,x7) (MOVDBRload <t> (MOVDaddr <typ.Uintptr> [i0] {s} p) mem)
}
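The big OR chunks just above are the case where all three of b, config, and typ survive: the conditions test config.BigEndian and call mergePoint(b, ...), and the rebuilt byte-reversed load needs typ.Uintptr for its address operand. A condensed, reconstructed sketch of how one such rule expands inside its match loop (value numbering, positions, and most of the condition are simplified; this shows the shape, not a verbatim copy):
		// Condition: endianness, offset adjacency, and a usable merge block.
		if !(!config.BigEndian && i2 == i0+2 && i3 == i0+3 && mergePoint(b, x0, x1, x2) != nil /* ...Uses and clobber checks elided... */) {
			break
		}
		// The result is built in the merge block, so b is reassigned first.
		b = mergePoint(b, x0, x1, x2)
		v0 := b.NewValue0(v.Pos, OpPPC64MOVWBRload, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpPPC64MOVDaddr, typ.Uintptr)
		v1.AuxInt = i0
		v1.Aux = s
		v1.AddArg(p)
		v0.AddArg(v1)
		v0.AddArg(mem)
		return true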
func rewriteValuePPC64_OpPPC64XOR_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (XOR (SLDconst x [c]) (SRDconst x [d]))
// cond: d == 64-c
// result: (ROTLconst [c] x)
}
func rewriteValuePPC64_OpPopCount16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount16 x)
// cond:
// result: (POPCNTW (MOVHZreg x))
}
func rewriteValuePPC64_OpPopCount32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount32 x)
// cond:
// result: (POPCNTW (MOVWZreg x))
}
func rewriteValuePPC64_OpPopCount8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount8 x)
// cond:
// result: (POPCNTB (MOVBZreg x))
}
func rewriteValuePPC64_OpRsh16Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVHZreg x) y)
}
func rewriteValuePPC64_OpRsh16Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux32 x (Const64 [c]))
// cond: uint32(c) < 16
// result: (SRWconst (ZeroExt16to32 x) [c])
}
func rewriteValuePPC64_OpRsh16Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux64 x (Const64 [c]))
// cond: uint64(c) < 16
// result: (SRWconst (ZeroExt16to32 x) [c])
}
func rewriteValuePPC64_OpRsh16Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVHZreg x) y)
}
func rewriteValuePPC64_OpRsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVHreg x) y)
}
func rewriteValuePPC64_OpRsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x32 x (Const64 [c]))
// cond: uint32(c) < 16
// result: (SRAWconst (SignExt16to32 x) [c])
}
func rewriteValuePPC64_OpRsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x64 x (Const64 [c]))
// cond: uint64(c) < 16
// result: (SRAWconst (SignExt16to32 x) [c])
}
func rewriteValuePPC64_OpRsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVHreg x) y)
}
func rewriteValuePPC64_OpRsh32Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRW x y)
}
func rewriteValuePPC64_OpRsh32Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux32 x (Const64 [c]))
// cond: uint32(c) < 32
// result: (SRWconst x [c])
}
func rewriteValuePPC64_OpRsh32Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux64 x (Const64 [c]))
// cond: uint64(c) < 32
// result: (SRWconst x [c])
}
func rewriteValuePPC64_OpRsh32Ux64_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux64 x y)
// cond:
// result: (SRW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
}
func rewriteValuePPC64_OpRsh32Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRW x y)
}
func rewriteValuePPC64_OpRsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAW x y)
}
func rewriteValuePPC64_OpRsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x32 x (Const64 [c]))
// cond: uint32(c) < 32
// result: (SRAWconst x [c])
}
func rewriteValuePPC64_OpRsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x64 x (Const64 [c]))
// cond: uint64(c) < 32
// result: (SRAWconst x [c])
}
func rewriteValuePPC64_OpRsh32x64_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x64 x y)
// cond:
// result: (SRAW x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-32] y))))
}
func rewriteValuePPC64_OpRsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAW x y)
}
func rewriteValuePPC64_OpRsh64Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRD x y)
}
func rewriteValuePPC64_OpRsh64Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux32 x (Const64 [c]))
// cond: uint32(c) < 64
// result: (SRDconst x [c])
}
func rewriteValuePPC64_OpRsh64Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux64 x (Const64 [c]))
// cond: uint64(c) < 64
// result: (SRDconst x [c])
}
func rewriteValuePPC64_OpRsh64Ux64_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux64 x y)
// cond:
// result: (SRD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
}
func rewriteValuePPC64_OpRsh64Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRD x y)
}
func rewriteValuePPC64_OpRsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAD x y)
}
func rewriteValuePPC64_OpRsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x32 x (Const64 [c]))
// cond: uint32(c) < 64
// result: (SRADconst x [c])
}
func rewriteValuePPC64_OpRsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x64 x (Const64 [c]))
// cond: uint64(c) < 64
// result: (SRADconst x [c])
}
func rewriteValuePPC64_OpRsh64x64_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x64 x y)
// cond:
// result: (SRAD x (ORN y <typ.Int64> (MaskIfNotCarry (ADDconstForCarry [-64] y))))
}
func rewriteValuePPC64_OpRsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAD x y)
}
func rewriteValuePPC64_OpRsh8Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVBZreg x) y)
}
func rewriteValuePPC64_OpRsh8Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux32 x (Const64 [c]))
// cond: uint32(c) < 8
// result: (SRWconst (ZeroExt8to32 x) [c])
}
func rewriteValuePPC64_OpRsh8Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux64 x (Const64 [c]))
// cond: uint64(c) < 8
// result: (SRWconst (ZeroExt8to32 x) [c])
}
func rewriteValuePPC64_OpRsh8Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVBZreg x) y)
}
func rewriteValuePPC64_OpRsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVBreg x) y)
}
func rewriteValuePPC64_OpRsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x32 x (Const64 [c]))
// cond: uint32(c) < 8
// result: (SRAWconst (SignExt8to32 x) [c])
}
func rewriteValuePPC64_OpRsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x64 x (Const64 [c]))
// cond: uint64(c) < 8
// result: (SRAWconst (SignExt8to32 x) [c])
}
func rewriteValuePPC64_OpRsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVBreg x) y)
}
func rewriteValuePPC64_OpSlicemask_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Slicemask <t> x)
// cond:
// result: (SRADconst (NEG <t> x) [63])
}
func rewriteValuePPC64_OpZero_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Zero [0] _ mem)
// cond:
// result: mem
}
func rewriteValuePPC64_OpZero_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Zero [12] {t} destptr mem)
// cond: t.(*types.Type).Alignment()%4 == 0
// result: (MOVWstorezero [8] destptr (MOVDstorezero [0] destptr mem))
}
func rewriteValueS390X_OpAtomicAdd32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (AtomicAdd32 ptr val mem)
// cond:
// result: (AddTupleFirst32 val (LAA ptr val mem))
}
func rewriteValueS390X_OpAtomicAdd64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (AtomicAdd64 ptr val mem)
// cond:
// result: (AddTupleFirst64 val (LAAG ptr val mem))
}
func rewriteValueS390X_OpAvg64u_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Avg64u <t> x y)
// cond:
// result: (ADD (SRDconst <t> (SUB <t> x y) [1]) y)
}
func rewriteValueS390X_OpBitLen64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitLen64 x)
// cond:
// result: (SUB (MOVDconst [64]) (FLOGR x))
}
func rewriteValueS390X_OpCtz32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz32 <t> x)
// cond:
// result: (SUB (MOVDconst [64]) (FLOGR (MOVWZreg (ANDW <t> (SUBWconst <t> [1] x) (NOTW <t> x)))))
}
func rewriteValueS390X_OpCtz64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz64 <t> x)
// cond:
// result: (SUB (MOVDconst [64]) (FLOGR (AND <t> (SUBconst <t> [1] x) (NOT <t> x))))
}
func rewriteValueS390X_OpDiv16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16 x y)
// cond:
// result: (DIVW (MOVHreg x) (MOVHreg y))
}
func rewriteValueS390X_OpDiv16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16u x y)
// cond:
// result: (DIVWU (MOVHZreg x) (MOVHZreg y))
}
func rewriteValueS390X_OpDiv32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32 x y)
// cond:
// result: (DIVW (MOVWreg x) y)
}
func rewriteValueS390X_OpDiv32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32u x y)
// cond:
// result: (DIVWU (MOVWZreg x) y)
}
func rewriteValueS390X_OpDiv8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8 x y)
// cond:
// result: (DIVW (MOVBreg x) (MOVBreg y))
}
func rewriteValueS390X_OpDiv8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8u x y)
// cond:
// result: (DIVWU (MOVBZreg x) (MOVBZreg y))
}
func rewriteValueS390X_OpEq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq16 x y)
// cond:
// result: (MOVDEQ (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVHreg x) (MOVHreg y)))
}
func rewriteValueS390X_OpEq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq32 x y)
// cond:
// result: (MOVDEQ (MOVDconst [0]) (MOVDconst [1]) (CMPW x y))
}
func rewriteValueS390X_OpEq32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq32F x y)
// cond:
// result: (MOVDEQ (MOVDconst [0]) (MOVDconst [1]) (FCMPS x y))
}
func rewriteValueS390X_OpEq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq64 x y)
// cond:
// result: (MOVDEQ (MOVDconst [0]) (MOVDconst [1]) (CMP x y))
}
func rewriteValueS390X_OpEq64F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq64F x y)
// cond:
// result: (MOVDEQ (MOVDconst [0]) (MOVDconst [1]) (FCMP x y))
}
func rewriteValueS390X_OpEq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq8 x y)
// cond:
// result: (MOVDEQ (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVBreg x) (MOVBreg y)))
}
func rewriteValueS390X_OpEqB_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqB x y)
// cond:
// result: (MOVDEQ (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVBreg x) (MOVBreg y)))
}
func rewriteValueS390X_OpEqPtr_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqPtr x y)
// cond:
// result: (MOVDEQ (MOVDconst [0]) (MOVDconst [1]) (CMP x y))
}
func rewriteValueS390X_OpGeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16 x y)
// cond:
// result: (MOVDGE (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVHreg x) (MOVHreg y)))
}
func rewriteValueS390X_OpGeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16U x y)
// cond:
// result: (MOVDGE (MOVDconst [0]) (MOVDconst [1]) (CMPWU (MOVHZreg x) (MOVHZreg y)))
}
func rewriteValueS390X_OpGeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq32 x y)
// cond:
// result: (MOVDGE (MOVDconst [0]) (MOVDconst [1]) (CMPW x y))
}
func rewriteValueS390X_OpGeq32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq32F x y)
// cond:
// result: (MOVDGEnoinv (MOVDconst [0]) (MOVDconst [1]) (FCMPS x y))
}
func rewriteValueS390X_OpGeq32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq32U x y)
// cond:
// result: (MOVDGE (MOVDconst [0]) (MOVDconst [1]) (CMPWU x y))
}
func rewriteValueS390X_OpGeq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq64 x y)
// cond:
// result: (MOVDGE (MOVDconst [0]) (MOVDconst [1]) (CMP x y))
}
func rewriteValueS390X_OpGeq64F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq64F x y)
// cond:
// result: (MOVDGEnoinv (MOVDconst [0]) (MOVDconst [1]) (FCMP x y))
}
func rewriteValueS390X_OpGeq64U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq64U x y)
// cond:
// result: (MOVDGE (MOVDconst [0]) (MOVDconst [1]) (CMPU x y))
}
func rewriteValueS390X_OpGeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8 x y)
// cond:
// result: (MOVDGE (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVBreg x) (MOVBreg y)))
}
func rewriteValueS390X_OpGeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8U x y)
// cond:
// result: (MOVDGE (MOVDconst [0]) (MOVDconst [1]) (CMPWU (MOVBZreg x) (MOVBZreg y)))
}
func rewriteValueS390X_OpGreater16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16 x y)
// cond:
// result: (MOVDGT (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVHreg x) (MOVHreg y)))
}
func rewriteValueS390X_OpGreater16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16U x y)
// cond:
// result: (MOVDGT (MOVDconst [0]) (MOVDconst [1]) (CMPWU (MOVHZreg x) (MOVHZreg y)))
}
func rewriteValueS390X_OpGreater32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater32 x y)
// cond:
// result: (MOVDGT (MOVDconst [0]) (MOVDconst [1]) (CMPW x y))
}
func rewriteValueS390X_OpGreater32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater32F x y)
// cond:
// result: (MOVDGTnoinv (MOVDconst [0]) (MOVDconst [1]) (FCMPS x y))
}
func rewriteValueS390X_OpGreater32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater32U x y)
// cond:
// result: (MOVDGT (MOVDconst [0]) (MOVDconst [1]) (CMPWU x y))
}
func rewriteValueS390X_OpGreater64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater64 x y)
// cond:
// result: (MOVDGT (MOVDconst [0]) (MOVDconst [1]) (CMP x y))
}
func rewriteValueS390X_OpGreater64F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater64F x y)
// cond:
// result: (MOVDGTnoinv (MOVDconst [0]) (MOVDconst [1]) (FCMP x y))
}
func rewriteValueS390X_OpGreater64U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater64U x y)
// cond:
// result: (MOVDGT (MOVDconst [0]) (MOVDconst [1]) (CMPU x y))
}
func rewriteValueS390X_OpGreater8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8 x y)
// cond:
// result: (MOVDGT (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVBreg x) (MOVBreg y)))
}
func rewriteValueS390X_OpGreater8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8U x y)
// cond:
// result: (MOVDGT (MOVDconst [0]) (MOVDconst [1]) (CMPWU (MOVBZreg x) (MOVBZreg y)))
}
func rewriteValueS390X_OpHmul32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Hmul32 x y)
// cond:
// result: (SRDconst [32] (MULLD (MOVWreg x) (MOVWreg y)))
}
func rewriteValueS390X_OpHmul32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Hmul32u x y)
// cond:
// result: (SRDconst [32] (MULLD (MOVWZreg x) (MOVWZreg y)))
}
func rewriteValueS390X_OpIsInBounds_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (IsInBounds idx len)
// cond:
// result: (MOVDLT (MOVDconst [0]) (MOVDconst [1]) (CMPU idx len))
}
func rewriteValueS390X_OpIsNonNil_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (IsNonNil p)
// cond:
// result: (MOVDNE (MOVDconst [0]) (MOVDconst [1]) (CMPconst p [0]))
}
func rewriteValueS390X_OpIsSliceInBounds_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (IsSliceInBounds idx len)
// cond:
// result: (MOVDLE (MOVDconst [0]) (MOVDconst [1]) (CMPU idx len))
}
func rewriteValueS390X_OpLeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16 x y)
// cond:
// result: (MOVDLE (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVHreg x) (MOVHreg y)))
}
func rewriteValueS390X_OpLeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16U x y)
// cond:
// result: (MOVDLE (MOVDconst [0]) (MOVDconst [1]) (CMPWU (MOVHZreg x) (MOVHZreg y)))
}
func rewriteValueS390X_OpLeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq32 x y)
// cond:
// result: (MOVDLE (MOVDconst [0]) (MOVDconst [1]) (CMPW x y))
}
func rewriteValueS390X_OpLeq32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq32F x y)
// cond:
// result: (MOVDGEnoinv (MOVDconst [0]) (MOVDconst [1]) (FCMPS y x))
}
func rewriteValueS390X_OpLeq32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq32U x y)
// cond:
// result: (MOVDLE (MOVDconst [0]) (MOVDconst [1]) (CMPWU x y))
}
func rewriteValueS390X_OpLeq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq64 x y)
// cond:
// result: (MOVDLE (MOVDconst [0]) (MOVDconst [1]) (CMP x y))
}
func rewriteValueS390X_OpLeq64F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq64F x y)
// cond:
// result: (MOVDGEnoinv (MOVDconst [0]) (MOVDconst [1]) (FCMP y x))
}
func rewriteValueS390X_OpLeq64U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq64U x y)
// cond:
// result: (MOVDLE (MOVDconst [0]) (MOVDconst [1]) (CMPU x y))
}
func rewriteValueS390X_OpLeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8 x y)
// cond:
// result: (MOVDLE (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVBreg x) (MOVBreg y)))
}
func rewriteValueS390X_OpLeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8U x y)
// cond:
// result: (MOVDLE (MOVDconst [0]) (MOVDconst [1]) (CMPWU (MOVBZreg x) (MOVBZreg y)))
}
func rewriteValueS390X_OpLess16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16 x y)
// cond:
// result: (MOVDLT (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVHreg x) (MOVHreg y)))
}
func rewriteValueS390X_OpLess16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16U x y)
// cond:
// result: (MOVDLT (MOVDconst [0]) (MOVDconst [1]) (CMPWU (MOVHZreg x) (MOVHZreg y)))
}
func rewriteValueS390X_OpLess32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less32 x y)
// cond:
// result: (MOVDLT (MOVDconst [0]) (MOVDconst [1]) (CMPW x y))
}
func rewriteValueS390X_OpLess32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less32F x y)
// cond:
// result: (MOVDGTnoinv (MOVDconst [0]) (MOVDconst [1]) (FCMPS y x))
}
func rewriteValueS390X_OpLess32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less32U x y)
// cond:
// result: (MOVDLT (MOVDconst [0]) (MOVDconst [1]) (CMPWU x y))
}
func rewriteValueS390X_OpLess64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less64 x y)
// cond:
// result: (MOVDLT (MOVDconst [0]) (MOVDconst [1]) (CMP x y))
}
func rewriteValueS390X_OpLess64F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less64F x y)
// cond:
// result: (MOVDGTnoinv (MOVDconst [0]) (MOVDconst [1]) (FCMP y x))
}
func rewriteValueS390X_OpLess64U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less64U x y)
// cond:
// result: (MOVDLT (MOVDconst [0]) (MOVDconst [1]) (CMPU x y))
}
func rewriteValueS390X_OpLess8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8 x y)
// cond:
// result: (MOVDLT (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVBreg x) (MOVBreg y)))
}
func rewriteValueS390X_OpLess8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8U x y)
// cond:
// result: (MOVDLT (MOVDconst [0]) (MOVDconst [1]) (CMPWU (MOVBZreg x) (MOVBZreg y)))
}
func rewriteValueS390X_OpLsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x16 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpLsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x32 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpLsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x64 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpLsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x8 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpLsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x16 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpLsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x32 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpLsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x64 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpLsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x8 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpLsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x16 x y)
// cond: shiftIsBounded(v)
// result: (SLD x y)
}
func rewriteValueS390X_OpLsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x32 x y)
// cond: shiftIsBounded(v)
// result: (SLD x y)
}
func rewriteValueS390X_OpLsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x64 x y)
// cond: shiftIsBounded(v)
// result: (SLD x y)
}
func rewriteValueS390X_OpLsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x8 x y)
// cond: shiftIsBounded(v)
// result: (SLD x y)
}
func rewriteValueS390X_OpLsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x16 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpLsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x32 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpLsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x64 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpLsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x8 x y)
// cond: shiftIsBounded(v)
// result: (SLW x y)
}
func rewriteValueS390X_OpMod16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16 x y)
// cond:
// result: (MODW (MOVHreg x) (MOVHreg y))
}
func rewriteValueS390X_OpMod16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16u x y)
// cond:
// result: (MODWU (MOVHZreg x) (MOVHZreg y))
}
func rewriteValueS390X_OpMod32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32 x y)
// cond:
// result: (MODW (MOVWreg x) y)
}
func rewriteValueS390X_OpMod32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32u x y)
// cond:
// result: (MODWU (MOVWZreg x) y)
}
func rewriteValueS390X_OpMod8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8 x y)
// cond:
// result: (MODW (MOVBreg x) (MOVBreg y))
}
func rewriteValueS390X_OpMod8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8u x y)
// cond:
// result: (MODWU (MOVBZreg x) (MOVBZreg y))
}
func rewriteValueS390X_OpMove_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [0] _ _ mem)
// cond:
// result: mem
}
func rewriteValueS390X_OpMove_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [7] dst src mem)
// cond:
// result: (MOVBstore [6] dst (MOVBZload [6] src mem) (MOVHstore [4] dst (MOVHZload [4] src mem) (MOVWstore dst (MOVWZload src mem) mem)))
}
func rewriteValueS390X_OpNeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq16 x y)
// cond:
// result: (MOVDNE (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVHreg x) (MOVHreg y)))
}
func rewriteValueS390X_OpNeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq32 x y)
// cond:
// result: (MOVDNE (MOVDconst [0]) (MOVDconst [1]) (CMPW x y))
}
func rewriteValueS390X_OpNeq32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq32F x y)
// cond:
// result: (MOVDNE (MOVDconst [0]) (MOVDconst [1]) (FCMPS x y))
}
func rewriteValueS390X_OpNeq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq64 x y)
// cond:
// result: (MOVDNE (MOVDconst [0]) (MOVDconst [1]) (CMP x y))
}
func rewriteValueS390X_OpNeq64F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq64F x y)
// cond:
// result: (MOVDNE (MOVDconst [0]) (MOVDconst [1]) (FCMP x y))
}
func rewriteValueS390X_OpNeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq8 x y)
// cond:
// result: (MOVDNE (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVBreg x) (MOVBreg y)))
}
func rewriteValueS390X_OpNeqB_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (NeqB x y)
// cond:
// result: (MOVDNE (MOVDconst [0]) (MOVDconst [1]) (CMPW (MOVBreg x) (MOVBreg y)))
}
func rewriteValueS390X_OpNeqPtr_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (NeqPtr x y)
// cond:
// result: (MOVDNE (MOVDconst [0]) (MOVDconst [1]) (CMP x y))
}
func rewriteValueS390X_OpOffPtr_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OffPtr [off] ptr:(SP))
// cond:
// result: (MOVDaddr [off] ptr)
}
func rewriteValueS390X_OpPopCount16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount16 x)
// cond:
// result: (MOVBZreg (SumBytes2 (POPCNT <typ.UInt16> x)))
}
func rewriteValueS390X_OpPopCount32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount32 x)
// cond:
// result: (MOVBZreg (SumBytes4 (POPCNT <typ.UInt32> x)))
}
func rewriteValueS390X_OpPopCount64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount64 x)
// cond:
// result: (MOVBZreg (SumBytes8 (POPCNT <typ.UInt64> x)))
}
func rewriteValueS390X_OpPopCount8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (PopCount8 x)
// cond:
// result: (POPCNT (MOVBZreg x))
}
func rewriteValueS390X_OpRsh16Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVHZreg x) y)
}
func rewriteValueS390X_OpRsh16Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVHZreg x) y)
}
func rewriteValueS390X_OpRsh16Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux64 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVHZreg x) y)
}
func rewriteValueS390X_OpRsh16Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVHZreg x) y)
}
func rewriteValueS390X_OpRsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVHreg x) y)
}
func rewriteValueS390X_OpRsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVHreg x) y)
}
func rewriteValueS390X_OpRsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x64 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVHreg x) y)
}
func rewriteValueS390X_OpRsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVHreg x) y)
}
func rewriteValueS390X_OpRsh32Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRW x y)
}
func rewriteValueS390X_OpRsh32Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRW x y)
}
func rewriteValueS390X_OpRsh32Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux64 x y)
// cond: shiftIsBounded(v)
// result: (SRW x y)
}
func rewriteValueS390X_OpRsh32Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRW x y)
}
func rewriteValueS390X_OpRsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAW x y)
}
func rewriteValueS390X_OpRsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAW x y)
}
func rewriteValueS390X_OpRsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x64 x y)
// cond: shiftIsBounded(v)
// result: (SRAW x y)
}
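A hedged sketch of the shiftIsBounded case above (hypothetical function name, simplified from the generated match loop). The bounded case itself needs neither b nor typ; the b declaration that survives in this function serves the later, unbounded fallback rules in its elided body.

// sketchRsh32x64Bounded is a hypothetical name; it shows only the bounded-shift
// case, without the for/break structure of the real matcher.
func sketchRsh32x64Bounded(v *Value) bool {
	x := v.Args[0]
	y := v.Args[1]
	if !(shiftIsBounded(v)) {
		return false // fall through to the unbounded rules
	}
	v.reset(OpS390XSRAW)
	v.AddArg(x)
	v.AddArg(y)
	return true
}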
func rewriteValueS390X_OpRsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAW x y)
}
func rewriteValueS390X_OpRsh64Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRD x y)
}
func rewriteValueS390X_OpRsh64Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRD x y)
}
func rewriteValueS390X_OpRsh64Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux64 x y)
// cond: shiftIsBounded(v)
// result: (SRD x y)
}
func rewriteValueS390X_OpRsh64Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRD x y)
}
func rewriteValueS390X_OpRsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAD x y)
}
func rewriteValueS390X_OpRsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAD x y)
}
func rewriteValueS390X_OpRsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64x64 x y)
// cond: shiftIsBounded(v)
// result: (SRAD x y)
}
func rewriteValueS390X_OpRsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAD x y)
}
func rewriteValueS390X_OpRsh8Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux16 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVBZreg x) y)
}
func rewriteValueS390X_OpRsh8Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux32 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVBZreg x) y)
}
func rewriteValueS390X_OpRsh8Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux64 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVBZreg x) y)
}
func rewriteValueS390X_OpRsh8Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux8 x y)
// cond: shiftIsBounded(v)
// result: (SRW (MOVBZreg x) y)
}
func rewriteValueS390X_OpRsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x16 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVBreg x) y)
}
func rewriteValueS390X_OpRsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x32 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVBreg x) y)
}
func rewriteValueS390X_OpRsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x64 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVBreg x) y)
}
func rewriteValueS390X_OpRsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x8 x y)
// cond: shiftIsBounded(v)
// result: (SRAW (MOVBreg x) y)
}
func rewriteValueS390X_OpS390XADDload_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ADDload <t> [off] {sym} x ptr1 (FMOVDstore [off] {sym} ptr2 y _))
// cond: isSamePtr(ptr1, ptr2)
// result: (ADD x (LGDR <t> y))
}
func rewriteValueS390X_OpS390XAND_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (AND x (MOVDconst [c]))
// cond: is32Bit(c) && c < 0
// result: (ANDconst [c] x)
}
func rewriteValueS390X_OpS390XANDload_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ANDload <t> [off] {sym} x ptr1 (FMOVDstore [off] {sym} ptr2 y _))
// cond: isSamePtr(ptr1, ptr2)
// result: (AND x (LGDR <t> y))
}
func rewriteValueS390X_OpS390XCMP_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMP x (MOVDconst [c]))
// cond: is32Bit(c)
// result: (CMPconst x [c])
}
func rewriteValueS390X_OpS390XCMPU_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPU x (MOVDconst [c]))
// cond: isU32Bit(c)
// result: (CMPUconst x [int64(int32(c))])
}
func rewriteValueS390X_OpS390XCMPW_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPW x (MOVDconst [c]))
// cond:
// result: (CMPWconst x [int64(int32(c))])
}
func rewriteValueS390X_OpS390XCMPWU_0(v *Value) bool {
b := v.Block
- _ = b
// match: (CMPWU x (MOVDconst [c]))
// cond:
// result: (CMPWUconst x [int64(int32(c))])
}
func rewriteValueS390X_OpS390XLDGR_0(v *Value) bool {
b := v.Block
- _ = b
// match: (LDGR <t> (SRDconst [1] (SLDconst [1] x)))
// cond:
// result: (LPDFR (LDGR <t> x))
}
func rewriteValueS390X_OpS390XMOVBZreg_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVBZreg x:(MOVBZreg _))
// cond:
// result: (MOVDreg x)
}
func rewriteValueS390X_OpS390XMOVBreg_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVBreg x:(MOVBload _ _))
// cond:
// result: (MOVDreg x)
}
func rewriteValueS390X_OpS390XMOVDnop_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVDnop <t> x)
// cond: t.Compare(x.Type) == types.CMPeq
// result: x
}
func rewriteValueS390X_OpS390XMOVDnop_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVDnop <t> x:(MOVBloadidx [off] {sym} ptr idx mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVBloadidx <t> [off] {sym} ptr idx mem)
}
func rewriteValueS390X_OpS390XMOVDreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVDreg <t> x)
// cond: t.Compare(x.Type) == types.CMPeq
// result: x
}
func rewriteValueS390X_OpS390XMOVDreg_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVDreg <t> x:(MOVBZloadidx [off] {sym} ptr idx mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVBZloadidx <t> [off] {sym} ptr idx mem)
}
func rewriteValueS390X_OpS390XMOVHZreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVHZreg x:(MOVBZload _ _))
// cond:
// result: (MOVDreg x)
}
func rewriteValueS390X_OpS390XMOVHZreg_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVHZreg x:(MOVHloadidx [off] {sym} ptr idx mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVHZloadidx <v.Type> [off] {sym} ptr idx mem)
}
func rewriteValueS390X_OpS390XMOVHreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVHreg x:(MOVBload _ _))
// cond:
// result: (MOVDreg x)
}
func rewriteValueS390X_OpS390XMOVHreg_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVHreg x:(MOVHload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVHload <v.Type> [off] {sym} ptr mem)
}
func rewriteValueS390X_OpS390XMOVHstoreconst_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVHstoreconst [sc] {s} (ADDconst [off] ptr) mem)
// cond: isU12Bit(ValAndOff(sc).Off()+off)
// result: (MOVHstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
}
func rewriteValueS390X_OpS390XMOVWZreg_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWZreg x:(MOVBZload _ _))
// cond:
// result: (MOVDreg x)
}
func rewriteValueS390X_OpS390XMOVWZreg_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWZreg x:(MOVWload [off] {sym} ptr mem))
// cond: x.Uses == 1 && clobber(x)
// result: @x.Block (MOVWZload <v.Type> [off] {sym} ptr mem)
}
func rewriteValueS390X_OpS390XMOVWreg_10(v *Value) bool {
b := v.Block
- _ = b
// match: (MOVWreg x:(MOVWreg _))
// cond:
// result: (MOVDreg x)
}
func rewriteValueS390X_OpS390XMOVWstoreconst_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (MOVWstoreconst [sc] {s} (ADDconst [off] ptr) mem)
// cond: isU12Bit(ValAndOff(sc).Off()+off)
// result: (MOVWstoreconst [ValAndOff(sc).add(off)] {s} ptr mem)
}
func rewriteValueS390X_OpS390XMULLDconst_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MULLDconst [-1] x)
// cond:
// result: (NEG x)
}
func rewriteValueS390X_OpS390XMULLDload_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MULLDload <t> [off] {sym} x ptr1 (FMOVDstore [off] {sym} ptr2 y _))
// cond: isSamePtr(ptr1, ptr2)
// result: (MULLD x (LGDR <t> y))
}
func rewriteValueS390X_OpS390XMULLWconst_0(v *Value) bool {
b := v.Block
- _ = b
// match: (MULLWconst [-1] x)
// cond:
// result: (NEGW x)
}
func rewriteValueS390X_OpS390XNOT_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (NOT x)
// cond: true
// result: (XOR (MOVDconst [-1]) x)
}
func rewriteValueS390X_OpS390XOR_0(v *Value) bool {
b := v.Block
- _ = b
// match: (OR x (MOVDconst [c]))
// cond: isU32Bit(c)
// result: (ORconst [c] x)
}
func rewriteValueS390X_OpS390XOR_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR (MOVDconst [c]) (MOVDconst [d]))
// cond:
// result: (MOVDconst [c|d])
}
func rewriteValueS390X_OpS390XOR_20(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR sh:(SLDconst [16] x0:(MOVHZload [i0] {s} p mem)) x1:(MOVHZload [i1] {s} p mem))
// cond: i1 == i0+2 && p.Op != OpSB && x0.Uses == 1 && x1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVWZload [i0] {s} p mem)
}
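A hedged sketch of how an @mergePoint(b,x0,x1) result such as the one above is emitted once the match and cond succeed. The helper name and signature are invented and the result type is shown as typ.UInt32 purely for illustration; the point is that the wider load is built in the block returned by mergePoint and v is reset to a copy of it, which is why these merge functions genuinely reference both b and typ.

// sketchEmitMergedLoad is a hypothetical helper showing the tail of such a rule:
// x0 and x1 are the two partial loads, p/mem/i0/s the matched pieces.
func sketchEmitMergedLoad(v, x0, x1, p, mem *Value, i0 int64, s interface{}) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	b = mergePoint(b, x0, x1)                              // block where both loads are available
	v0 := b.NewValue0(v.Pos, OpS390XMOVWZload, typ.UInt32) // the wider replacement load
	v.reset(OpCopy)                                        // v now just forwards v0
	v.AddArg(v0)
	v0.AuxInt = i0
	v0.Aux = s
	v0.AddArg(p)
	v0.AddArg(mem)
	return true
}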
func rewriteValueS390X_OpS390XOR_30(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR or:(OR y s1:(SLDconst [j1] x1:(MOVHZload [i1] {s} p mem))) s0:(SLDconst [j0] x0:(MOVHZload [i0] {s} p mem)))
// cond: i1 == i0+2 && j1 == j0-16 && j1 % 32 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (OR <v.Type> (SLDconst <v.Type> [j1] (MOVWZload [i0] {s} p mem)) y)
}
func rewriteValueS390X_OpS390XOR_40(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR x1:(MOVHZloadidx [i1] {s} idx p mem) sh:(SLDconst [16] x0:(MOVHZloadidx [i0] {s} p idx mem)))
// cond: i1 == i0+2 && p.Op != OpSB && x0.Uses == 1 && x1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVWZloadidx [i0] {s} p idx mem)
}
func rewriteValueS390X_OpS390XOR_50(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR x1:(MOVWZloadidx [i1] {s} idx p mem) sh:(SLDconst [32] x0:(MOVWZloadidx [i0] {s} idx p mem)))
// cond: i1 == i0+4 && p.Op != OpSB && x0.Uses == 1 && x1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVDloadidx [i0] {s} p idx mem)
}
func rewriteValueS390X_OpS390XOR_60(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR s0:(SLDconst [j0] x0:(MOVBZloadidx [i0] {s} idx p mem)) or:(OR y s1:(SLDconst [j1] x1:(MOVBZloadidx [i1] {s} p idx mem))))
// cond: i1 == i0+1 && j1 == j0-8 && j1 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (OR <v.Type> (SLDconst <v.Type> [j1] (MOVHZloadidx [i0] {s} p idx mem)) y)
}
func rewriteValueS390X_OpS390XOR_70(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR or:(OR y s1:(SLDconst [j1] x1:(MOVBZloadidx [i1] {s} idx p mem))) s0:(SLDconst [j0] x0:(MOVBZloadidx [i0] {s} idx p mem)))
// cond: i1 == i0+1 && j1 == j0-8 && j1 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (OR <v.Type> (SLDconst <v.Type> [j1] (MOVHZloadidx [i0] {s} p idx mem)) y)
}
func rewriteValueS390X_OpS390XOR_80(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR or:(OR s1:(SLDconst [j1] x1:(MOVHZloadidx [i1] {s} idx p mem)) y) s0:(SLDconst [j0] x0:(MOVHZloadidx [i0] {s} p idx mem)))
// cond: i1 == i0+2 && j1 == j0-16 && j1 % 32 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (OR <v.Type> (SLDconst <v.Type> [j1] (MOVWZloadidx [i0] {s} p idx mem)) y)
}
func rewriteValueS390X_OpS390XOR_90(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR sh:(SLDconst [16] r1:(MOVHZreg x1:(MOVHBRload [i1] {s} p mem))) r0:(MOVHZreg x0:(MOVHBRload [i0] {s} p mem)))
// cond: i1 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVWZreg (MOVWBRload [i0] {s} p mem))
}
func rewriteValueS390X_OpS390XOR_100(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR or:(OR y s0:(SLDconst [j0] r0:(MOVHZreg x0:(MOVHBRload [i0] {s} p mem)))) s1:(SLDconst [j1] r1:(MOVHZreg x1:(MOVHBRload [i1] {s} p mem))))
// cond: i1 == i0+2 && j1 == j0+16 && j0 % 32 == 0 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (OR <v.Type> (SLDconst <v.Type> [j0] (MOVWZreg (MOVWBRload [i0] {s} p mem))) y)
}
func rewriteValueS390X_OpS390XOR_110(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR r0:(MOVHZreg x0:(MOVHBRloadidx [i0] {s} idx p mem)) sh:(SLDconst [16] r1:(MOVHZreg x1:(MOVHBRloadidx [i1] {s} p idx mem))))
// cond: i1 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVWZreg (MOVWBRloadidx [i0] {s} p idx mem))
}
func rewriteValueS390X_OpS390XOR_120(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR r0:(MOVWZreg x0:(MOVWBRloadidx [i0] {s} idx p mem)) sh:(SLDconst [32] r1:(MOVWZreg x1:(MOVWBRloadidx [i1] {s} idx p mem))))
// cond: i1 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVDBRloadidx [i0] {s} p idx mem)
}
func rewriteValueS390X_OpS390XOR_130(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR s1:(SLDconst [j1] x1:(MOVBZloadidx [i1] {s} idx p mem)) or:(OR y s0:(SLDconst [j0] x0:(MOVBZloadidx [i0] {s} p idx mem))))
// cond: p.Op != OpSB && i1 == i0+1 && j1 == j0+8 && j0 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (OR <v.Type> (SLDconst <v.Type> [j0] (MOVHZreg (MOVHBRloadidx [i0] {s} p idx mem))) y)
}
func rewriteValueS390X_OpS390XOR_140(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR or:(OR y s0:(SLDconst [j0] x0:(MOVBZloadidx [i0] {s} idx p mem))) s1:(SLDconst [j1] x1:(MOVBZloadidx [i1] {s} idx p mem)))
// cond: p.Op != OpSB && i1 == i0+1 && j1 == j0+8 && j0 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (OR <v.Type> (SLDconst <v.Type> [j0] (MOVHZreg (MOVHBRloadidx [i0] {s} p idx mem))) y)
}
func rewriteValueS390X_OpS390XOR_150(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (OR or:(OR s0:(SLDconst [j0] r0:(MOVHZreg x0:(MOVHBRloadidx [i0] {s} idx p mem))) y) s1:(SLDconst [j1] r1:(MOVHZreg x1:(MOVHBRloadidx [i1] {s} p idx mem))))
// cond: i1 == i0+2 && j1 == j0+16 && j0 % 32 == 0 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (OR <v.Type> (SLDconst <v.Type> [j0] (MOVWZreg (MOVWBRloadidx [i0] {s} p idx mem))) y)
}
func rewriteValueS390X_OpS390XORW_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORW <t> g:(MOVWZload [off] {sym} ptr mem) x)
// cond: ptr.Op != OpSB && is20Bit(off) && canMergeLoadClobber(v, g, x) && clobber(g)
// result: (ORWload <t> [off] {sym} x ptr mem)
}
func rewriteValueS390X_OpS390XORW_20(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORW or:(ORW y s1:(SLWconst [j1] x1:(MOVBZload [i1] {s} p mem))) s0:(SLWconst [j0] x0:(MOVBZload [i0] {s} p mem)))
// cond: i1 == i0+1 && j1 == j0-8 && j1 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORW <v.Type> (SLWconst <v.Type> [j1] (MOVHZload [i0] {s} p mem)) y)
}
func rewriteValueS390X_OpS390XORW_30(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORW x1:(MOVHZloadidx [i1] {s} idx p mem) sh:(SLWconst [16] x0:(MOVHZloadidx [i0] {s} p idx mem)))
// cond: i1 == i0+2 && p.Op != OpSB && x0.Uses == 1 && x1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVWZloadidx [i0] {s} p idx mem)
}
func rewriteValueS390X_OpS390XORW_40(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORW s0:(SLWconst [j0] x0:(MOVBZloadidx [i0] {s} idx p mem)) or:(ORW s1:(SLWconst [j1] x1:(MOVBZloadidx [i1] {s} idx p mem)) y))
// cond: i1 == i0+1 && j1 == j0-8 && j1 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORW <v.Type> (SLWconst <v.Type> [j1] (MOVHZloadidx [i0] {s} p idx mem)) y)
}
func rewriteValueS390X_OpS390XORW_50(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORW or:(ORW s1:(SLWconst [j1] x1:(MOVBZloadidx [i1] {s} idx p mem)) y) s0:(SLWconst [j0] x0:(MOVBZloadidx [i0] {s} idx p mem)))
// cond: i1 == i0+1 && j1 == j0-8 && j1 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORW <v.Type> (SLWconst <v.Type> [j1] (MOVHZloadidx [i0] {s} p idx mem)) y)
}
func rewriteValueS390X_OpS390XORW_60(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORW or:(ORW y s0:(SLWconst [j0] x0:(MOVBZload [i0] {s} p mem))) s1:(SLWconst [j1] x1:(MOVBZload [i1] {s} p mem)))
// cond: p.Op != OpSB && i1 == i0+1 && j1 == j0+8 && j0 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORW <v.Type> (SLWconst <v.Type> [j0] (MOVHZreg (MOVHBRload [i0] {s} p mem))) y)
}
func rewriteValueS390X_OpS390XORW_70(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORW r0:(MOVHZreg x0:(MOVHBRloadidx [i0] {s} idx p mem)) sh:(SLWconst [16] r1:(MOVHZreg x1:(MOVHBRloadidx [i1] {s} p idx mem))))
// cond: i1 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && r0.Uses == 1 && r1.Uses == 1 && sh.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(r0) && clobber(r1) && clobber(sh)
// result: @mergePoint(b,x0,x1) (MOVWBRloadidx [i0] {s} p idx mem)
}
func rewriteValueS390X_OpS390XORW_80(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORW s1:(SLWconst [j1] x1:(MOVBZloadidx [i1] {s} idx p mem)) or:(ORW s0:(SLWconst [j0] x0:(MOVBZloadidx [i0] {s} idx p mem)) y))
// cond: p.Op != OpSB && i1 == i0+1 && j1 == j0+8 && j0 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORW <v.Type> (SLWconst <v.Type> [j0] (MOVHZreg (MOVHBRloadidx [i0] {s} p idx mem))) y)
}
func rewriteValueS390X_OpS390XORW_90(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ORW or:(ORW s0:(SLWconst [j0] x0:(MOVBZloadidx [i0] {s} idx p mem)) y) s1:(SLWconst [j1] x1:(MOVBZloadidx [i1] {s} idx p mem)))
// cond: p.Op != OpSB && i1 == i0+1 && j1 == j0+8 && j0 % 16 == 0 && x0.Uses == 1 && x1.Uses == 1 && s0.Uses == 1 && s1.Uses == 1 && or.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(s0) && clobber(s1) && clobber(or)
// result: @mergePoint(b,x0,x1) (ORW <v.Type> (SLWconst <v.Type> [j0] (MOVHZreg (MOVHBRloadidx [i0] {s} p idx mem))) y)
}
func rewriteValueS390X_OpS390XORload_0(v *Value) bool {
b := v.Block
- _ = b
// match: (ORload <t> [off] {sym} x ptr1 (FMOVDstore [off] {sym} ptr2 y _))
// cond: isSamePtr(ptr1, ptr2)
// result: (OR x (LGDR <t> y))
}
func rewriteValueS390X_OpS390XSLD_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SLD x (MOVDconst [c]))
// cond:
// result: (SLDconst x [c&63])
}
func rewriteValueS390X_OpS390XSLW_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SLW x (MOVDconst [c]))
// cond:
// result: (SLWconst x [c&63])
}
func rewriteValueS390X_OpS390XSRAD_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SRAD x (MOVDconst [c]))
// cond:
// result: (SRADconst x [c&63])
}
func rewriteValueS390X_OpS390XSRAW_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SRAW x (MOVDconst [c]))
// cond:
// result: (SRAWconst x [c&63])
}
func rewriteValueS390X_OpS390XSRD_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SRD x (MOVDconst [c]))
// cond:
// result: (SRDconst x [c&63])
}
func rewriteValueS390X_OpS390XSRDconst_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SRDconst [1] (SLDconst [1] (LGDR <t> x)))
// cond:
// result: (LGDR <t> (LPDFR <x.Type> x))
}
func rewriteValueS390X_OpS390XSRW_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SRW x (MOVDconst [c]))
// cond:
// result: (SRWconst x [c&63])
}
func rewriteValueS390X_OpS390XSUB_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUB x (MOVDconst [c]))
// cond: is32Bit(c)
// result: (SUBconst x [c])
}
func rewriteValueS390X_OpS390XSUBW_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBW x (MOVDconst [c]))
// cond:
// result: (SUBWconst x [int64(int32(c))])
}
func rewriteValueS390X_OpS390XSUBload_0(v *Value) bool {
b := v.Block
- _ = b
// match: (SUBload <t> [off] {sym} x ptr1 (FMOVDstore [off] {sym} ptr2 y _))
// cond: isSamePtr(ptr1, ptr2)
// result: (SUB x (LGDR <t> y))
}
func rewriteValueS390X_OpS390XSumBytes2_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SumBytes2 x)
// cond:
// result: (ADDW (SRWconst <typ.UInt8> x [8]) x)
}
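For reference, a hedged sketch of how an unconditional rule like the SumBytes2 one above expands (hypothetical function name; otherwise only APIs the generated file itself uses). The body references b and typ directly, so those declarations stay useful once the blank assignments are dropped.

// sketchSumBytes2 is a hypothetical name for an illustrative expansion of
// (SumBytes2 x) -> (ADDW (SRWconst <typ.UInt8> x [8]) x).
func sketchSumBytes2(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	x := v.Args[0]
	v.reset(OpS390XADDW) // v becomes the ADDW
	v0 := b.NewValue0(v.Pos, OpS390XSRWconst, typ.UInt8)
	v0.AuxInt = 8 // shift the high byte down
	v0.AddArg(x)
	v.AddArg(v0)
	v.AddArg(x)
	return true
}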
func rewriteValueS390X_OpS390XSumBytes4_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SumBytes4 x)
// cond:
// result: (SumBytes2 (ADDW <typ.UInt16> (SRWconst <typ.UInt16> x [16]) x))
}
func rewriteValueS390X_OpS390XSumBytes8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SumBytes8 x)
// cond:
// result: (SumBytes4 (ADDW <typ.UInt32> (SRDconst <typ.UInt32> x [32]) x))
}
func rewriteValueS390X_OpS390XXORload_0(v *Value) bool {
b := v.Block
- _ = b
// match: (XORload <t> [off] {sym} x ptr1 (FMOVDstore [off] {sym} ptr2 y _))
// cond: isSamePtr(ptr1, ptr2)
// result: (XOR x (LGDR <t> y))
}
func rewriteValueS390X_OpSelect0_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Select0 <t> (AddTupleFirst32 val tuple))
// cond:
// result: (ADDW val (Select0 <t> tuple))
}
func rewriteValueS390X_OpSlicemask_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Slicemask <t> x)
// cond:
// result: (SRADconst (NEG <t> x) [63])
}
func rewriteValueS390X_OpZero_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Zero [0] _ mem)
// cond:
// result: mem
}
func rewriteValueS390X_OpZero_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Zero [s] destptr mem)
// cond: s > 1024
// result: (LoweredZero [s%256] destptr (ADDconst <destptr.Type> destptr [(s/256)*256]) mem)
}
func rewriteValueWasm_OpCom16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Com16 x)
// cond:
// result: (I64Xor x (I64Const [-1]))
}
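A hedged sketch of the Com16 lowering above (hypothetical function name; the I64Const type is an assumption, since the rule does not spell it out): bitwise complement is expressed as xor with an all-ones constant.

// sketchWasmCom16 is a hypothetical name for an illustrative expansion of
// (Com16 x) -> (I64Xor x (I64Const [-1])).
func sketchWasmCom16(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	x := v.Args[0]
	v.reset(OpWasmI64Xor)
	v.AddArg(x)
	v0 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64) // type assumed
	v0.AuxInt = -1
	v.AddArg(v0)
	return true
}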
func rewriteValueWasm_OpCom32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Com32 x)
// cond:
// result: (I64Xor x (I64Const [-1]))
}
func rewriteValueWasm_OpCom64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Com64 x)
// cond:
// result: (I64Xor x (I64Const [-1]))
}
func rewriteValueWasm_OpCom8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Com8 x)
// cond:
// result: (I64Xor x (I64Const [-1]))
}
func rewriteValueWasm_OpCvt32Uto32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt32Uto32F x)
// cond:
// result: (LoweredRound32F (F64ConvertI64U (ZeroExt32to64 x)))
}
func rewriteValueWasm_OpCvt32Uto64F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt32Uto64F x)
// cond:
// result: (F64ConvertI64U (ZeroExt32to64 x))
}
func rewriteValueWasm_OpCvt32to32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt32to32F x)
// cond:
// result: (LoweredRound32F (F64ConvertI64S (SignExt32to64 x)))
}
func rewriteValueWasm_OpCvt32to64F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt32to64F x)
// cond:
// result: (F64ConvertI64S (SignExt32to64 x))
}
func rewriteValueWasm_OpCvt64Uto32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt64Uto32F x)
// cond:
// result: (LoweredRound32F (F64ConvertI64U x))
}
func rewriteValueWasm_OpCvt64to32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Cvt64to32F x)
// cond:
// result: (LoweredRound32F (F64ConvertI64S x))
}
func rewriteValueWasm_OpDiv16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16 x y)
// cond:
// result: (I64DivS (SignExt16to64 x) (SignExt16to64 y))
}
func rewriteValueWasm_OpDiv16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16u x y)
// cond:
// result: (I64DivU (ZeroExt16to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpDiv32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32 x y)
// cond:
// result: (I64DivS (SignExt32to64 x) (SignExt32to64 y))
}
func rewriteValueWasm_OpDiv32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32u x y)
// cond:
// result: (I64DivU (ZeroExt32to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpDiv8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8 x y)
// cond:
// result: (I64DivS (SignExt8to64 x) (SignExt8to64 y))
}
func rewriteValueWasm_OpDiv8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8u x y)
// cond:
// result: (I64DivU (ZeroExt8to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpEq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq16 x y)
// cond:
// result: (I64Eq (ZeroExt16to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpEq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq32 x y)
// cond:
// result: (I64Eq (ZeroExt32to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpEq32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq32F x y)
// cond:
// result: (F64Eq (LoweredRound32F x) (LoweredRound32F y))
}
func rewriteValueWasm_OpEq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq8 x y)
// cond:
// result: (I64Eq (ZeroExt8to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpGeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16 x y)
// cond:
// result: (I64GeS (SignExt16to64 x) (SignExt16to64 y))
}
func rewriteValueWasm_OpGeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq16U x y)
// cond:
// result: (I64GeU (ZeroExt16to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpGeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq32 x y)
// cond:
// result: (I64GeS (SignExt32to64 x) (SignExt32to64 y))
}
func rewriteValueWasm_OpGeq32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq32F x y)
// cond:
// result: (F64Ge (LoweredRound32F x) (LoweredRound32F y))
}
func rewriteValueWasm_OpGeq32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq32U x y)
// cond:
// result: (I64GeU (ZeroExt32to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpGeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8 x y)
// cond:
// result: (I64GeS (SignExt8to64 x) (SignExt8to64 y))
}
func rewriteValueWasm_OpGeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq8U x y)
// cond:
// result: (I64GeU (ZeroExt8to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpGreater16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16 x y)
// cond:
// result: (I64GtS (SignExt16to64 x) (SignExt16to64 y))
}
func rewriteValueWasm_OpGreater16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater16U x y)
// cond:
// result: (I64GtU (ZeroExt16to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpGreater32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater32 x y)
// cond:
// result: (I64GtS (SignExt32to64 x) (SignExt32to64 y))
}
func rewriteValueWasm_OpGreater32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater32F x y)
// cond:
// result: (F64Gt (LoweredRound32F x) (LoweredRound32F y))
}
func rewriteValueWasm_OpGreater32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater32U x y)
// cond:
// result: (I64GtU (ZeroExt32to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpGreater8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8 x y)
// cond:
// result: (I64GtS (SignExt8to64 x) (SignExt8to64 y))
}
func rewriteValueWasm_OpGreater8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater8U x y)
// cond:
// result: (I64GtU (ZeroExt8to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpIsNonNil_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (IsNonNil p)
// cond:
// result: (I64Eqz (I64Eqz p))
}
func rewriteValueWasm_OpLeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16 x y)
// cond:
// result: (I64LeS (SignExt16to64 x) (SignExt16to64 y))
}
func rewriteValueWasm_OpLeq16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq16U x y)
// cond:
// result: (I64LeU (ZeroExt16to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpLeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq32 x y)
// cond:
// result: (I64LeS (SignExt32to64 x) (SignExt32to64 y))
}
func rewriteValueWasm_OpLeq32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq32F x y)
// cond:
// result: (F64Le (LoweredRound32F x) (LoweredRound32F y))
}
func rewriteValueWasm_OpLeq32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq32U x y)
// cond:
// result: (I64LeU (ZeroExt32to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpLeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8 x y)
// cond:
// result: (I64LeS (SignExt8to64 x) (SignExt8to64 y))
}
func rewriteValueWasm_OpLeq8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq8U x y)
// cond:
// result: (I64LeU (ZeroExt8to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpLess16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16 x y)
// cond:
// result: (I64LtS (SignExt16to64 x) (SignExt16to64 y))
}
func rewriteValueWasm_OpLess16U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less16U x y)
// cond:
// result: (I64LtU (ZeroExt16to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpLess32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less32 x y)
// cond:
// result: (I64LtS (SignExt32to64 x) (SignExt32to64 y))
}
func rewriteValueWasm_OpLess32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less32F x y)
// cond:
// result: (F64Lt (LoweredRound32F x) (LoweredRound32F y))
}
func rewriteValueWasm_OpLess32U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less32U x y)
// cond:
// result: (I64LtU (ZeroExt32to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpLess8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8 x y)
// cond:
// result: (I64LtS (SignExt8to64 x) (SignExt8to64 y))
}
func rewriteValueWasm_OpLess8U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less8U x y)
// cond:
// result: (I64LtU (ZeroExt8to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpLsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x16 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt16to64 y))
}
func rewriteValueWasm_OpLsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x32 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt32to64 y))
}
func rewriteValueWasm_OpLsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x8 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt8to64 y))
}
func rewriteValueWasm_OpLsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x16 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt16to64 y))
}
func rewriteValueWasm_OpLsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x32 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt32to64 y))
}
func rewriteValueWasm_OpLsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x8 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt8to64 y))
}
func rewriteValueWasm_OpLsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x16 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt16to64 y))
}
func rewriteValueWasm_OpLsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x32 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt32to64 y))
}
func rewriteValueWasm_OpLsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x64 x y)
// cond:
// result: (Select (I64Shl x y) (I64Const [0]) (I64LtU y (I64Const [64])))
}
func rewriteValueWasm_OpLsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x8 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt8to64 y))
}
func rewriteValueWasm_OpLsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x16 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt16to64 y))
}
func rewriteValueWasm_OpLsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x32 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt32to64 y))
}
func rewriteValueWasm_OpLsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x8 x y)
// cond:
// result: (Lsh64x64 x (ZeroExt8to64 y))
}
func rewriteValueWasm_OpMod16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16 x y)
// cond:
// result: (I64RemS (SignExt16to64 x) (SignExt16to64 y))
}
func rewriteValueWasm_OpMod16u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod16u x y)
// cond:
// result: (I64RemU (ZeroExt16to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpMod32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32 x y)
// cond:
// result: (I64RemS (SignExt32to64 x) (SignExt32to64 y))
}
func rewriteValueWasm_OpMod32u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod32u x y)
// cond:
// result: (I64RemU (ZeroExt32to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpMod8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8 x y)
// cond:
// result: (I64RemS (SignExt8to64 x) (SignExt8to64 y))
}
func rewriteValueWasm_OpMod8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mod8u x y)
// cond:
// result: (I64RemU (ZeroExt8to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpMove_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [0] _ _ mem)
// cond:
// result: mem
}
func rewriteValueWasm_OpMove_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Move [s] dst src mem)
// cond: s > 8 && s < 16
// result: (I64Store [s-8] dst (I64Load [s-8] src mem) (I64Store dst (I64Load src mem) mem))
}
func rewriteValueWasm_OpNeg16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neg16 x)
// cond:
// result: (I64Sub (I64Const [0]) x)
}
func rewriteValueWasm_OpNeg32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neg32 x)
// cond:
// result: (I64Sub (I64Const [0]) x)
}
func rewriteValueWasm_OpNeg64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neg64 x)
// cond:
// result: (I64Sub (I64Const [0]) x)
}
func rewriteValueWasm_OpNeg8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neg8 x)
// cond:
// result: (I64Sub (I64Const [0]) x)
}
func rewriteValueWasm_OpNeq16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq16 x y)
// cond:
// result: (I64Ne (ZeroExt16to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpNeq32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq32 x y)
// cond:
// result: (I64Ne (ZeroExt32to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpNeq32F_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq32F x y)
// cond:
// result: (F64Ne (LoweredRound32F x) (LoweredRound32F y))
}
func rewriteValueWasm_OpNeq8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq8 x y)
// cond:
// result: (I64Ne (ZeroExt8to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpRsh16Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux16 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpRsh16Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux32 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpRsh16Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux64 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt16to64 x) y)
}
func rewriteValueWasm_OpRsh16Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux8 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt16to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpRsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x16 x y)
// cond:
// result: (Rsh64x64 (SignExt16to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpRsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x32 x y)
// cond:
// result: (Rsh64x64 (SignExt16to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpRsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x64 x y)
// cond:
// result: (Rsh64x64 (SignExt16to64 x) y)
}
func rewriteValueWasm_OpRsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x8 x y)
// cond:
// result: (Rsh64x64 (SignExt16to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpRsh32Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux16 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpRsh32Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux32 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpRsh32Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux64 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt32to64 x) y)
}
func rewriteValueWasm_OpRsh32Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux8 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt32to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpRsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x16 x y)
// cond:
// result: (Rsh64x64 (SignExt32to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpRsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x32 x y)
// cond:
// result: (Rsh64x64 (SignExt32to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpRsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x64 x y)
// cond:
// result: (Rsh64x64 (SignExt32to64 x) y)
}
func rewriteValueWasm_OpRsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x8 x y)
// cond:
// result: (Rsh64x64 (SignExt32to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpRsh64Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux16 x y)
// cond:
// result: (Rsh64Ux64 x (ZeroExt16to64 y))
}
func rewriteValueWasm_OpRsh64Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux32 x y)
// cond:
// result: (Rsh64Ux64 x (ZeroExt32to64 y))
}
func rewriteValueWasm_OpRsh64Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux64 x y)
// cond:
// result: (Select (I64ShrU x y) (I64Const [0]) (I64LtU y (I64Const [64])))
}
func rewriteValueWasm_OpRsh64Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux8 x y)
// cond:
// result: (Rsh64Ux64 x (ZeroExt8to64 y))
}
func rewriteValueWasm_OpRsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x16 x y)
// cond:
// result: (Rsh64x64 x (ZeroExt16to64 y))
}
func rewriteValueWasm_OpRsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x32 x y)
// cond:
// result: (Rsh64x64 x (ZeroExt32to64 y))
}
func rewriteValueWasm_OpRsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x64 x y)
// cond:
// result: (I64ShrS x (Select <typ.Int64> y (I64Const [63]) (I64LtU y (I64Const [64]))))
}
func rewriteValueWasm_OpRsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x8 x y)
// cond:
// result: (Rsh64x64 x (ZeroExt8to64 y))
}
func rewriteValueWasm_OpRsh8Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux16 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpRsh8Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux32 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpRsh8Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux64 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt8to64 x) y)
}
func rewriteValueWasm_OpRsh8Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux8 x y)
// cond:
// result: (Rsh64Ux64 (ZeroExt8to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpRsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x16 x y)
// cond:
// result: (Rsh64x64 (SignExt8to64 x) (ZeroExt16to64 y))
}
func rewriteValueWasm_OpRsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x32 x y)
// cond:
// result: (Rsh64x64 (SignExt8to64 x) (ZeroExt32to64 y))
}
func rewriteValueWasm_OpRsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x64 x y)
// cond:
// result: (Rsh64x64 (SignExt8to64 x) y)
}
func rewriteValueWasm_OpRsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x8 x y)
// cond:
// result: (Rsh64x64 (SignExt8to64 x) (ZeroExt8to64 y))
}
func rewriteValueWasm_OpSignExt16to32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SignExt16to32 x:(I64Load16S _ _))
// cond:
// result: x
}
func rewriteValueWasm_OpSignExt16to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SignExt16to64 x:(I64Load16S _ _))
// cond:
// result: x
}
func rewriteValueWasm_OpSignExt32to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SignExt32to64 x:(I64Load32S _ _))
// cond:
// result: x
}
func rewriteValueWasm_OpSignExt8to16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SignExt8to16 x:(I64Load8S _ _))
// cond:
// result: x
}
func rewriteValueWasm_OpSignExt8to32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SignExt8to32 x:(I64Load8S _ _))
// cond:
// result: x
}
func rewriteValueWasm_OpSignExt8to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SignExt8to64 x:(I64Load8S _ _))
// cond:
// result: x
}
func rewriteValueWasm_OpSlicemask_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Slicemask x)
// cond:
// result: (I64ShrS (I64Sub (I64Const [0]) x) (I64Const [63]))
}
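// Illustration (not part of the regenerated file): hunks like the one above keep the
// "b := v.Block" and "typ := &b.Func.Config.Types" declarations because the rule body
// references them directly, so only the blank assignments are dropped. Below is a hedged
// sketch of roughly what the full generated body looks like -- an approximation for
// illustration only, not the verbatim generator output:
//
//	func rewriteValueWasm_OpSlicemask_0(v *Value) bool {
//		b := v.Block
//		typ := &b.Func.Config.Types
//		// match: (Slicemask x)
//		// result: (I64ShrS (I64Sub (I64Const [0]) x) (I64Const [63]))
//		for {
//			x := v.Args[0]
//			v.reset(OpWasmI64ShrS)
//			v0 := b.NewValue0(v.Pos, OpWasmI64Sub, typ.Int64)
//			v1 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
//			v1.AuxInt = 0
//			v0.AddArg(v1)
//			v0.AddArg(x)
//			v.AddArg(v0)
//			v2 := b.NewValue0(v.Pos, OpWasmI64Const, typ.Int64)
//			v2.AuxInt = 63
//			v.AddArg(v2)
//			return true
//		}
//	}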
func rewriteValueWasm_OpWasmF64Add_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (F64Add (F64Const [x]) (F64Const [y]))
// cond:
// result: (F64Const [auxFrom64F(auxTo64F(x) + auxTo64F(y))])
}
func rewriteValueWasm_OpWasmF64Mul_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (F64Mul (F64Const [x]) (F64Const [y]))
// cond:
// result: (F64Const [auxFrom64F(auxTo64F(x) * auxTo64F(y))])
}
func rewriteValueWasm_OpWasmI64Add_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (I64Add (I64Const [x]) (I64Const [y]))
// cond:
// result: (I64Const [x + y])
}
func rewriteValueWasm_OpWasmI64And_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (I64And (I64Const [x]) (I64Const [y]))
// cond:
// result: (I64Const [x & y])
}
func rewriteValueWasm_OpWasmI64Eq_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (I64Eq (I64Const [x]) (I64Const [y]))
// cond: x == y
// result: (I64Const [1])
}
func rewriteValueWasm_OpWasmI64Mul_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (I64Mul (I64Const [x]) (I64Const [y]))
// cond:
// result: (I64Const [x * y])
}
func rewriteValueWasm_OpWasmI64Ne_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (I64Ne (I64Const [x]) (I64Const [y]))
// cond: x == y
// result: (I64Const [0])
}
func rewriteValueWasm_OpWasmI64Or_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (I64Or (I64Const [x]) (I64Const [y]))
// cond:
// result: (I64Const [x | y])
}
func rewriteValueWasm_OpWasmI64Xor_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (I64Xor (I64Const [x]) (I64Const [y]))
// cond:
// result: (I64Const [x ^ y])
}
func rewriteValueWasm_OpZero_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [0] _ mem)
// cond:
// result: mem
}
func rewriteValueWasm_OpZero_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Zero [16] destptr mem)
// cond:
// result: (I64Store [8] destptr (I64Const [0]) (I64Store destptr (I64Const [0]) mem))
}
func rewriteValueWasm_OpZeroExt16to32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ZeroExt16to32 x:(I64Load16U _ _))
// cond:
// result: x
}
func rewriteValueWasm_OpZeroExt16to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ZeroExt16to64 x:(I64Load16U _ _))
// cond:
// result: x
}
func rewriteValueWasm_OpZeroExt32to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ZeroExt32to64 x:(I64Load32U _ _))
// cond:
// result: x
}
func rewriteValueWasm_OpZeroExt8to16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ZeroExt8to16 x:(I64Load8U _ _))
// cond:
// result: x
}
func rewriteValueWasm_OpZeroExt8to32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ZeroExt8to32 x:(I64Load8U _ _))
// cond:
// result: x
}
func rewriteValueWasm_OpZeroExt8to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ZeroExt8to64 x:(I64Load8U _ _))
// cond:
// result: x
return false
}
func rewriteValuedec_OpITab_0(v *Value) bool {
- b := v.Block
- _ = b
// match: (ITab (IMake itab _))
// cond:
// result: itab
}
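// Illustration (not part of the regenerated file): in the hunk above nothing in the body
// refers to the block variable, so the generator now omits "b := v.Block" entirely along
// with its blank assignment. A hedged sketch of roughly what the full generated body looks
// like -- an approximation for illustration only, not the verbatim generator output:
//
//	func rewriteValuedec_OpITab_0(v *Value) bool {
//		// match: (ITab (IMake itab _))
//		// result: itab
//		for {
//			v_0 := v.Args[0]
//			if v_0.Op != OpIMake {
//				break
//			}
//			itab := v_0.Args[0]
//			v.reset(OpCopy)
//			v.Type = itab.Type
//			v.AddArg(itab)
//			return true
//		}
//		return false
//	}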
func rewriteValuedec_OpLoad_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Load <t> ptr mem)
// cond: t.IsComplex() && t.Size() == 8
// result: (ComplexMake (Load <typ.Float32> ptr mem) (Load <typ.Float32> (OffPtr <typ.Float32Ptr> [4] ptr) mem) )
}
func rewriteValuedec_OpStore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Store {t} dst (ComplexMake real imag) mem)
// cond: t.(*types.Type).Size() == 8
// result: (Store {typ.Float32} (OffPtr <typ.Float32Ptr> [4] dst) imag (Store {typ.Float32} dst real mem))
}
func rewriteValuedec64_OpAdd64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Add64 x y)
// cond:
// result: (Int64Make (Add32withcarry <typ.Int32> (Int64Hi x) (Int64Hi y) (Select1 <types.TypeFlags> (Add32carry (Int64Lo x) (Int64Lo y)))) (Select0 <typ.UInt32> (Add32carry (Int64Lo x) (Int64Lo y))))
}
func rewriteValuedec64_OpAnd64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (And64 x y)
// cond:
// result: (Int64Make (And32 <typ.UInt32> (Int64Hi x) (Int64Hi y)) (And32 <typ.UInt32> (Int64Lo x) (Int64Lo y)))
}
func rewriteValuedec64_OpArg_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Arg {n} [off])
// cond: is64BitInt(v.Type) && !config.BigEndian && v.Type.IsSigned()
// result: (Int64Make (Arg <typ.Int32> {n} [off+4]) (Arg <typ.UInt32> {n} [off]))
}
func rewriteValuedec64_OpBitLen64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (BitLen64 x)
// cond:
// result: (Add32 <typ.Int> (BitLen32 <typ.Int> (Int64Hi x)) (BitLen32 <typ.Int> (Or32 <typ.UInt32> (Int64Lo x) (Zeromask (Int64Hi x)))))
}
func rewriteValuedec64_OpBswap64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Bswap64 x)
// cond:
// result: (Int64Make (Bswap32 <typ.UInt32> (Int64Lo x)) (Bswap32 <typ.UInt32> (Int64Hi x)))
}
func rewriteValuedec64_OpCom64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Com64 x)
// cond:
// result: (Int64Make (Com32 <typ.UInt32> (Int64Hi x)) (Com32 <typ.UInt32> (Int64Lo x)))
}
func rewriteValuedec64_OpConst64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Const64 <t> [c])
// cond: t.IsSigned()
// result: (Int64Make (Const32 <typ.Int32> [c>>32]) (Const32 <typ.UInt32> [int64(int32(c))]))
}
func rewriteValuedec64_OpCtz64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Ctz64 x)
// cond:
// result: (Add32 <typ.UInt32> (Ctz32 <typ.UInt32> (Int64Lo x)) (And32 <typ.UInt32> (Com32 <typ.UInt32> (Zeromask (Int64Lo x))) (Ctz32 <typ.UInt32> (Int64Hi x))))
}
func rewriteValuedec64_OpEq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Eq64 x y)
// cond:
// result: (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Eq32 (Int64Lo x) (Int64Lo y)))
}
func rewriteValuedec64_OpGeq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq64 x y)
// cond:
// result: (OrB (Greater32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Geq32U (Int64Lo x) (Int64Lo y))))
}
func rewriteValuedec64_OpGeq64U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Geq64U x y)
// cond:
// result: (OrB (Greater32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Geq32U (Int64Lo x) (Int64Lo y))))
}
func rewriteValuedec64_OpGreater64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater64 x y)
// cond:
// result: (OrB (Greater32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Greater32U (Int64Lo x) (Int64Lo y))))
}
func rewriteValuedec64_OpGreater64U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Greater64U x y)
// cond:
// result: (OrB (Greater32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Greater32U (Int64Lo x) (Int64Lo y))))
}
func rewriteValuedec64_OpLeq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq64 x y)
// cond:
// result: (OrB (Less32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Leq32U (Int64Lo x) (Int64Lo y))))
}
func rewriteValuedec64_OpLeq64U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Leq64U x y)
// cond:
// result: (OrB (Less32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Leq32U (Int64Lo x) (Int64Lo y))))
}
func rewriteValuedec64_OpLess64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less64 x y)
// cond:
// result: (OrB (Less32 (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Less32U (Int64Lo x) (Int64Lo y))))
}
func rewriteValuedec64_OpLess64U_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Less64U x y)
// cond:
// result: (OrB (Less32U (Int64Hi x) (Int64Hi y)) (AndB (Eq32 (Int64Hi x) (Int64Hi y)) (Less32U (Int64Lo x) (Int64Lo y))))
}
func rewriteValuedec64_OpLoad_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Load <t> ptr mem)
// cond: is64BitInt(t) && !config.BigEndian && t.IsSigned()
// result: (Int64Make (Load <typ.Int32> (OffPtr <typ.Int32Ptr> [4] ptr) mem) (Load <typ.UInt32> ptr mem))
}
func rewriteValuedec64_OpLsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Const32 [0])
}
func rewriteValuedec64_OpLsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Const32 [0])
}
func rewriteValuedec64_OpLsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x16 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Lsh32x16 <typ.UInt32> hi s) (Rsh32Ux16 <typ.UInt32> lo (Sub16 <typ.UInt16> (Const16 <typ.UInt16> [32]) s))) (Lsh32x16 <typ.UInt32> lo (Sub16 <typ.UInt16> s (Const16 <typ.UInt16> [32])))) (Lsh32x16 <typ.UInt32> lo s))
}
func rewriteValuedec64_OpLsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x32 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Lsh32x32 <typ.UInt32> hi s) (Rsh32Ux32 <typ.UInt32> lo (Sub32 <typ.UInt32> (Const32 <typ.UInt32> [32]) s))) (Lsh32x32 <typ.UInt32> lo (Sub32 <typ.UInt32> s (Const32 <typ.UInt32> [32])))) (Lsh32x32 <typ.UInt32> lo s))
}
func rewriteValuedec64_OpLsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Const64 [0])
}
func rewriteValuedec64_OpLsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x8 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Lsh32x8 <typ.UInt32> hi s) (Rsh32Ux8 <typ.UInt32> lo (Sub8 <typ.UInt8> (Const8 <typ.UInt8> [32]) s))) (Lsh32x8 <typ.UInt32> lo (Sub8 <typ.UInt8> s (Const8 <typ.UInt8> [32])))) (Lsh32x8 <typ.UInt32> lo s))
}
func rewriteValuedec64_OpLsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Const32 [0])
}
func rewriteValuedec64_OpMul64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mul64 x y)
// cond:
// result: (Int64Make (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Int64Lo x) (Int64Hi y)) (Add32 <typ.UInt32> (Mul32 <typ.UInt32> (Int64Hi x) (Int64Lo y)) (Select0 <typ.UInt32> (Mul32uhilo (Int64Lo x) (Int64Lo y))))) (Select1 <typ.UInt32> (Mul32uhilo (Int64Lo x) (Int64Lo y))))
}
func rewriteValuedec64_OpNeg64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neg64 <t> x)
// cond:
// result: (Sub64 (Const64 <t> [0]) x)
}
func rewriteValuedec64_OpNeq64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Neq64 x y)
// cond:
// result: (OrB (Neq32 (Int64Hi x) (Int64Hi y)) (Neq32 (Int64Lo x) (Int64Lo y)))
}
func rewriteValuedec64_OpOr64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Or64 x y)
// cond:
// result: (Int64Make (Or32 <typ.UInt32> (Int64Hi x) (Int64Hi y)) (Or32 <typ.UInt32> (Int64Lo x) (Int64Lo y)))
}
func rewriteValuedec64_OpRsh16Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Const32 [0])
}
func rewriteValuedec64_OpRsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x64 x (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Signmask (SignExt16to32 x))
}
func rewriteValuedec64_OpRsh32Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Const32 [0])
}
func rewriteValuedec64_OpRsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x64 x (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Signmask x)
}
func rewriteValuedec64_OpRsh64Ux16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux16 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32Ux16 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux16 <typ.UInt32> lo s) (Lsh32x16 <typ.UInt32> hi (Sub16 <typ.UInt16> (Const16 <typ.UInt16> [32]) s))) (Rsh32Ux16 <typ.UInt32> hi (Sub16 <typ.UInt16> s (Const16 <typ.UInt16> [32])))))
}
func rewriteValuedec64_OpRsh64Ux32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux32 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32Ux32 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux32 <typ.UInt32> lo s) (Lsh32x32 <typ.UInt32> hi (Sub32 <typ.UInt32> (Const32 <typ.UInt32> [32]) s))) (Rsh32Ux32 <typ.UInt32> hi (Sub32 <typ.UInt32> s (Const32 <typ.UInt32> [32])))))
}
func rewriteValuedec64_OpRsh64Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Const64 [0])
}
func rewriteValuedec64_OpRsh64Ux8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux8 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32Ux8 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux8 <typ.UInt32> lo s) (Lsh32x8 <typ.UInt32> hi (Sub8 <typ.UInt8> (Const8 <typ.UInt8> [32]) s))) (Rsh32Ux8 <typ.UInt32> hi (Sub8 <typ.UInt8> s (Const8 <typ.UInt8> [32])))))
}
func rewriteValuedec64_OpRsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x16 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32x16 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux16 <typ.UInt32> lo s) (Lsh32x16 <typ.UInt32> hi (Sub16 <typ.UInt16> (Const16 <typ.UInt16> [32]) s))) (And32 <typ.UInt32> (Rsh32x16 <typ.UInt32> hi (Sub16 <typ.UInt16> s (Const16 <typ.UInt16> [32]))) (Zeromask (ZeroExt16to32 (Rsh16Ux32 <typ.UInt16> s (Const32 <typ.UInt32> [5])))))))
}
func rewriteValuedec64_OpRsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x32 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32x32 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux32 <typ.UInt32> lo s) (Lsh32x32 <typ.UInt32> hi (Sub32 <typ.UInt32> (Const32 <typ.UInt32> [32]) s))) (And32 <typ.UInt32> (Rsh32x32 <typ.UInt32> hi (Sub32 <typ.UInt32> s (Const32 <typ.UInt32> [32]))) (Zeromask (Rsh32Ux32 <typ.UInt32> s (Const32 <typ.UInt32> [5]))))))
}
func rewriteValuedec64_OpRsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x64 x (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Int64Make (Signmask (Int64Hi x)) (Signmask (Int64Hi x)))
}
func rewriteValuedec64_OpRsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x8 (Int64Make hi lo) s)
// cond:
// result: (Int64Make (Rsh32x8 <typ.UInt32> hi s) (Or32 <typ.UInt32> (Or32 <typ.UInt32> (Rsh32Ux8 <typ.UInt32> lo s) (Lsh32x8 <typ.UInt32> hi (Sub8 <typ.UInt8> (Const8 <typ.UInt8> [32]) s))) (And32 <typ.UInt32> (Rsh32x8 <typ.UInt32> hi (Sub8 <typ.UInt8> s (Const8 <typ.UInt8> [32]))) (Zeromask (ZeroExt8to32 (Rsh8Ux32 <typ.UInt8> s (Const32 <typ.UInt32> [5])))))))
}
func rewriteValuedec64_OpRsh8Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Const32 [0])
}
func rewriteValuedec64_OpRsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8x64 x (Int64Make (Const32 [c]) _))
// cond: c != 0
// result: (Signmask (SignExt8to32 x))
}
func rewriteValuedec64_OpSignExt16to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SignExt16to64 x)
// cond:
// result: (SignExt32to64 (SignExt16to32 x))
}
func rewriteValuedec64_OpSignExt32to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SignExt32to64 x)
// cond:
// result: (Int64Make (Signmask x) x)
}
func rewriteValuedec64_OpSignExt8to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (SignExt8to64 x)
// cond:
// result: (SignExt32to64 (SignExt8to32 x))
}
func rewriteValuedec64_OpStore_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (Store {t} dst (Int64Make hi lo) mem)
// cond: t.(*types.Type).Size() == 8 && !config.BigEndian
// result: (Store {hi.Type} (OffPtr <hi.Type.PtrTo()> [4] dst) hi (Store {lo.Type} dst lo mem))
}
func rewriteValuedec64_OpSub64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Sub64 x y)
// cond:
// result: (Int64Make (Sub32withcarry <typ.Int32> (Int64Hi x) (Int64Hi y) (Select1 <types.TypeFlags> (Sub32carry (Int64Lo x) (Int64Lo y)))) (Select0 <typ.UInt32> (Sub32carry (Int64Lo x) (Int64Lo y))))
}
func rewriteValuedec64_OpXor64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Xor64 x y)
// cond:
// result: (Int64Make (Xor32 <typ.UInt32> (Int64Hi x) (Int64Hi y)) (Xor32 <typ.UInt32> (Int64Lo x) (Int64Lo y)))
}
func rewriteValuedec64_OpZeroExt16to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ZeroExt16to64 x)
// cond:
// result: (ZeroExt32to64 (ZeroExt16to32 x))
}
func rewriteValuedec64_OpZeroExt32to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ZeroExt32to64 x)
// cond:
// result: (Int64Make (Const32 <typ.UInt32> [0]) x)
}
func rewriteValuedec64_OpZeroExt8to64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ZeroExt8to64 x)
// cond:
// result: (ZeroExt32to64 (ZeroExt8to32 x))
}
func rewriteValuedecArgs_OpArg_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
fe := b.Func.fe
- _ = fe
typ := &b.Func.Config.Types
- _ = typ
// match: (Arg {n} [off])
// cond: v.Type.IsString()
// result: (StringMake (Arg <typ.BytePtr> {n} [off]) (Arg <typ.Int> {n} [off+config.PtrSize]))
}
func rewriteValuedecArgs_OpArg_10(v *Value) bool {
b := v.Block
- _ = b
fe := b.Func.fe
- _ = fe
// match: (Arg <t>)
// cond: t.IsArray() && t.NumElem() == 0
// result: (ArrayMake0)
}
func rewriteValuegeneric_OpAdd16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Add16 (Const16 [c]) (Const16 [d]))
// cond:
// result: (Const16 [int64(int16(c+d))])
}
func rewriteValuegeneric_OpAdd16_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Add16 (Const16 [0]) x)
// cond:
// result: x
}
func rewriteValuegeneric_OpAdd16_20(v *Value) bool {
b := v.Block
- _ = b
// match: (Add16 x (Sub16 i:(Const16 <t>) z))
// cond: (z.Op != OpConst16 && x.Op != OpConst16)
// result: (Add16 i (Sub16 <t> x z))
}
func rewriteValuegeneric_OpAdd16_30(v *Value) bool {
b := v.Block
- _ = b
// match: (Add16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
// cond:
// result: (Sub16 (Const16 <t> [int64(int16(c+d))]) x)
}
func rewriteValuegeneric_OpAdd32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Add32 (Const32 [c]) (Const32 [d]))
// cond:
// result: (Const32 [int64(int32(c+d))])
}
func rewriteValuegeneric_OpAdd32_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Add32 (Const32 [0]) x)
// cond:
// result: x
}
func rewriteValuegeneric_OpAdd32_20(v *Value) bool {
b := v.Block
- _ = b
// match: (Add32 x (Sub32 i:(Const32 <t>) z))
// cond: (z.Op != OpConst32 && x.Op != OpConst32)
// result: (Add32 i (Sub32 <t> x z))
}
func rewriteValuegeneric_OpAdd32_30(v *Value) bool {
b := v.Block
- _ = b
// match: (Add32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
// cond:
// result: (Sub32 (Const32 <t> [int64(int32(c+d))]) x)
}
func rewriteValuegeneric_OpAdd64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Add64 (Const64 [c]) (Const64 [d]))
// cond:
// result: (Const64 [c+d])
}
func rewriteValuegeneric_OpAdd64_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Add64 (Const64 [0]) x)
// cond:
// result: x
}
func rewriteValuegeneric_OpAdd64_20(v *Value) bool {
b := v.Block
- _ = b
// match: (Add64 x (Sub64 i:(Const64 <t>) z))
// cond: (z.Op != OpConst64 && x.Op != OpConst64)
// result: (Add64 i (Sub64 <t> x z))
}
func rewriteValuegeneric_OpAdd64_30(v *Value) bool {
b := v.Block
- _ = b
// match: (Add64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
// cond:
// result: (Sub64 (Const64 <t> [c+d]) x)
}
func rewriteValuegeneric_OpAdd8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Add8 (Const8 [c]) (Const8 [d]))
// cond:
// result: (Const8 [int64(int8(c+d))])
}
func rewriteValuegeneric_OpAdd8_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Add8 (Const8 [0]) x)
// cond:
// result: x
}
func rewriteValuegeneric_OpAdd8_20(v *Value) bool {
b := v.Block
- _ = b
// match: (Add8 x (Sub8 i:(Const8 <t>) z))
// cond: (z.Op != OpConst8 && x.Op != OpConst8)
// result: (Add8 i (Sub8 <t> x z))
}
func rewriteValuegeneric_OpAdd8_30(v *Value) bool {
b := v.Block
- _ = b
// match: (Add8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x))
// cond:
// result: (Sub8 (Const8 <t> [int64(int8(c+d))]) x)
}
func rewriteValuegeneric_OpAnd16_10(v *Value) bool {
b := v.Block
- _ = b
// match: (And16 _ (Const16 [0]))
// cond:
// result: (Const16 [0])
}
func rewriteValuegeneric_OpAnd16_20(v *Value) bool {
b := v.Block
- _ = b
// match: (And16 (Const16 <t> [c]) (And16 x (Const16 <t> [d])))
// cond:
// result: (And16 (Const16 <t> [int64(int16(c&d))]) x)
}
func rewriteValuegeneric_OpAnd32_10(v *Value) bool {
b := v.Block
- _ = b
// match: (And32 _ (Const32 [0]))
// cond:
// result: (Const32 [0])
}
func rewriteValuegeneric_OpAnd32_20(v *Value) bool {
b := v.Block
- _ = b
// match: (And32 (Const32 <t> [c]) (And32 x (Const32 <t> [d])))
// cond:
// result: (And32 (Const32 <t> [int64(int32(c&d))]) x)
}
func rewriteValuegeneric_OpAnd64_10(v *Value) bool {
b := v.Block
- _ = b
// match: (And64 _ (Const64 [0]))
// cond:
// result: (Const64 [0])
}
func rewriteValuegeneric_OpAnd64_20(v *Value) bool {
b := v.Block
- _ = b
// match: (And64 (And64 z i:(Const64 <t>)) x)
// cond: (z.Op != OpConst64 && x.Op != OpConst64)
// result: (And64 i (And64 <t> z x))
}
func rewriteValuegeneric_OpAnd8_10(v *Value) bool {
b := v.Block
- _ = b
// match: (And8 _ (Const8 [0]))
// cond:
// result: (Const8 [0])
}
func rewriteValuegeneric_OpAnd8_20(v *Value) bool {
b := v.Block
- _ = b
// match: (And8 (Const8 <t> [c]) (And8 x (Const8 <t> [d])))
// cond:
// result: (And8 (Const8 <t> [int64(int8(c&d))]) x)
}
func rewriteValuegeneric_OpConstInterface_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (ConstInterface)
// cond:
// result: (IMake (ConstNil <typ.Uintptr>) (ConstNil <typ.BytePtr>))
}
func rewriteValuegeneric_OpConstSlice_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (ConstSlice)
// cond: config.PtrSize == 4
// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const32 <typ.Int> [0]) (Const32 <typ.Int> [0]))
}
func rewriteValuegeneric_OpConstString_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
fe := b.Func.fe
- _ = fe
typ := &b.Func.Config.Types
- _ = typ
// match: (ConstString {s})
// cond: config.PtrSize == 4 && s.(string) == ""
// result: (StringMake (ConstNil) (Const32 <typ.Int> [0]))
}
func rewriteValuegeneric_OpDiv16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16 (Const16 [c]) (Const16 [d]))
// cond: d != 0
// result: (Const16 [int64(int16(c)/int16(d))])
}
func rewriteValuegeneric_OpDiv16u_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Div16u (Const16 [c]) (Const16 [d]))
// cond: d != 0
// result: (Const16 [int64(int16(uint16(c)/uint16(d)))])
}
func rewriteValuegeneric_OpDiv32_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32 (Const32 [c]) (Const32 [d]))
// cond: d != 0
// result: (Const32 [int64(int32(c)/int32(d))])
}
func rewriteValuegeneric_OpDiv32F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Div32F (Const32F [c]) (Const32F [d]))
// cond:
// result: (Const32F [auxFrom32F(auxTo32F(c) / auxTo32F(d))])
}
func rewriteValuegeneric_OpDiv32u_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Div32u (Const32 [c]) (Const32 [d]))
// cond: d != 0
// result: (Const32 [int64(int32(uint32(c)/uint32(d)))])
}
func rewriteValuegeneric_OpDiv64_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Div64 (Const64 [c]) (Const64 [d]))
// cond: d != 0
// result: (Const64 [c/d])
}
func rewriteValuegeneric_OpDiv64F_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Div64F (Const64F [c]) (Const64F [d]))
// cond:
// result: (Const64F [auxFrom64F(auxTo64F(c) / auxTo64F(d))])
}
func rewriteValuegeneric_OpDiv64u_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (Div64u (Const64 [c]) (Const64 [d]))
// cond: d != 0
// result: (Const64 [int64(uint64(c)/uint64(d))])
}
func rewriteValuegeneric_OpDiv8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8 (Const8 [c]) (Const8 [d]))
// cond: d != 0
// result: (Const8 [int64(int8(c)/int8(d))])
}
func rewriteValuegeneric_OpDiv8u_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Div8u (Const8 [c]) (Const8 [d]))
// cond: d != 0
// result: (Const8 [int64(int8(uint8(c)/uint8(d)))])
}
func rewriteValuegeneric_OpEq16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq16 x x)
// cond:
// result: (ConstBool [1])
}
func rewriteValuegeneric_OpEq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq32 x x)
// cond:
// result: (ConstBool [1])
}
func rewriteValuegeneric_OpEq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq64 x x)
// cond:
// result: (ConstBool [1])
}
func rewriteValuegeneric_OpEq8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Eq8 x x)
// cond:
// result: (ConstBool [1])
}
func rewriteValuegeneric_OpEqInter_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqInter x y)
// cond:
// result: (EqPtr (ITab x) (ITab y))
}
func rewriteValuegeneric_OpEqPtr_10(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqPtr (Const32 [d]) (Const32 [c]))
// cond:
// result: (ConstBool [b2i(c == d)])
}
func rewriteValuegeneric_OpEqPtr_20(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqPtr p (Const32 [0]))
// cond:
// result: (Not (IsNonNil p))
}
func rewriteValuegeneric_OpEqSlice_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (EqSlice x y)
// cond:
// result: (EqPtr (SlicePtr x) (SlicePtr y))
}
func rewriteValuegeneric_OpLoad_0(v *Value) bool {
b := v.Block
- _ = b
fe := b.Func.fe
- _ = fe
// match: (Load <t1> p1 (Store {t2} p2 x _))
// cond: isSamePtr(p1, p2) && t1.Compare(x.Type) == types.CMPeq && t1.Size() == sizeof(t2)
// result: x
}
func rewriteValuegeneric_OpLoad_10(v *Value) bool {
b := v.Block
- _ = b
fe := b.Func.fe
- _ = fe
// match: (Load <t1> op:(OffPtr [o1] p1) (Store {t2} p2 _ (Store {t3} p3 _ (Store {t4} p4 _ mem:(Zero [n] p5 _)))))
// cond: o1 >= 0 && o1+t1.Size() <= n && isSamePtr(p1, p5) && fe.CanSSA(t1) && disjoint(op, t1.Size(), p2, sizeof(t2)) && disjoint(op, t1.Size(), p3, sizeof(t3)) && disjoint(op, t1.Size(), p4, sizeof(t4))
// result: @mem.Block (Load <t1> (OffPtr <op.Type> [o1] p5) mem)
}
func rewriteValuegeneric_OpLoad_20(v *Value) bool {
b := v.Block
- _ = b
fe := b.Func.fe
- _ = fe
// match: (Load <t> ptr mem)
// cond: t.IsStruct() && t.NumFields() == 1 && fe.CanSSA(t)
// result: (StructMake1 (Load <t.FieldType(0)> (OffPtr <t.FieldType(0).PtrTo()> [0] ptr) mem))
}
func rewriteValuegeneric_OpLsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x16 <t> x (Const16 [c]))
// cond:
// result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpLsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x32 <t> x (Const32 [c]))
// cond:
// result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpLsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh16x64 (Const16 [c]) (Const64 [d]))
// cond:
// result: (Const16 [int64(int16(c) << uint64(d))])
}
func rewriteValuegeneric_OpLsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh16x8 <t> x (Const8 [c]))
// cond:
// result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))]))
}
func rewriteValuegeneric_OpLsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x16 <t> x (Const16 [c]))
// cond:
// result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpLsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x32 <t> x (Const32 [c]))
// cond:
// result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpLsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh32x64 (Const32 [c]) (Const64 [d]))
// cond:
// result: (Const32 [int64(int32(c) << uint64(d))])
}
func rewriteValuegeneric_OpLsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh32x8 <t> x (Const8 [c]))
// cond:
// result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))]))
}
func rewriteValuegeneric_OpLsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh64x16 <t> x (Const16 [c]))
// cond:
// result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpLsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh64x32 <t> x (Const32 [c]))
// cond:
// result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpLsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh64x64 (Const64 [c]) (Const64 [d]))
// cond:
// result: (Const64 [c << uint64(d)])
}
func rewriteValuegeneric_OpLsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh64x8 <t> x (Const8 [c]))
// cond:
// result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))]))
}
func rewriteValuegeneric_OpLsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x16 <t> x (Const16 [c]))
// cond:
// result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpLsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x32 <t> x (Const32 [c]))
// cond:
// result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpLsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Lsh8x64 (Const8 [c]) (Const64 [d]))
// cond:
// result: (Const8 [int64(int8(c) << uint64(d))])
}
func rewriteValuegeneric_OpLsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Lsh8x8 <t> x (Const8 [c]))
// cond:
// result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))]))
}
func rewriteValuegeneric_OpMod16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Mod16 (Const16 [c]) (Const16 [d]))
// cond: d != 0
// result: (Const16 [int64(int16(c % d))])
}
func rewriteValuegeneric_OpMod16u_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Mod16u (Const16 [c]) (Const16 [d]))
// cond: d != 0
// result: (Const16 [int64(uint16(c) % uint16(d))])
}
func rewriteValuegeneric_OpMod32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Mod32 (Const32 [c]) (Const32 [d]))
// cond: d != 0
// result: (Const32 [int64(int32(c % d))])
}
func rewriteValuegeneric_OpMod32u_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Mod32u (Const32 [c]) (Const32 [d]))
// cond: d != 0
// result: (Const32 [int64(uint32(c) % uint32(d))])
}
func rewriteValuegeneric_OpMod64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Mod64 (Const64 [c]) (Const64 [d]))
// cond: d != 0
// result: (Const64 [c % d])
}
func rewriteValuegeneric_OpMod64u_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Mod64u (Const64 [c]) (Const64 [d]))
// cond: d != 0
// result: (Const64 [int64(uint64(c) % uint64(d))])
}
func rewriteValuegeneric_OpMod8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Mod8 (Const8 [c]) (Const8 [d]))
// cond: d != 0
// result: (Const8 [int64(int8(c % d))])
}
func rewriteValuegeneric_OpMod8u_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Mod8u (Const8 [c]) (Const8 [d]))
// cond: d != 0
// result: (Const8 [int64(uint8(c) % uint8(d))])
}
func rewriteValuegeneric_OpMove_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Move {t} [n] dst1 src mem:(Zero {t} [n] dst2 _))
// cond: isSamePtr(src, dst2)
// result: (Zero {t} [n] dst1 mem)
}
func rewriteValuegeneric_OpMove_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} op2:(OffPtr <tt2> [o2] p2) d1 (Store {t3} op3:(OffPtr <tt3> [0] p3) d2 _))))
// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && o2 == sizeof(t3) && n == sizeof(t2) + sizeof(t3)
// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [0] dst) d2 mem))
}
func rewriteValuegeneric_OpMove_20(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (Move {t1} [n] dst p1 mem:(VarDef (Store {t2} (OffPtr <tt2> [o2] p2) d1 (Store {t3} (OffPtr <tt3> [o3] p3) d2 (Store {t4} (OffPtr <tt4> [o4] p4) d3 (Store {t5} (OffPtr <tt5> [o5] p5) d4 (Zero {t6} [n] p6 _)))))))
// cond: isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && isSamePtr(p4, p5) && isSamePtr(p5, p6) && alignof(t2) <= alignof(t1) && alignof(t3) <= alignof(t1) && alignof(t4) <= alignof(t1) && alignof(t5) <= alignof(t1) && alignof(t6) <= alignof(t1) && registerizable(b, t2) && registerizable(b, t3) && registerizable(b, t4) && registerizable(b, t5) && n >= o2 + sizeof(t2) && n >= o3 + sizeof(t3) && n >= o4 + sizeof(t4) && n >= o5 + sizeof(t5)
// result: (Store {t2} (OffPtr <tt2> [o2] dst) d1 (Store {t3} (OffPtr <tt3> [o3] dst) d2 (Store {t4} (OffPtr <tt4> [o4] dst) d3 (Store {t5} (OffPtr <tt5> [o5] dst) d4 (Zero {t1} [n] dst mem)))))
}
func rewriteValuegeneric_OpMul16_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mul16 (Const16 [c]) (Const16 [d]))
// cond:
// result: (Const16 [int64(int16(c*d))])
}
func rewriteValuegeneric_OpMul16_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Mul16 (Const16 [0]) _)
// cond:
// result: (Const16 [0])
}
func rewriteValuegeneric_OpMul32_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mul32 (Const32 [c]) (Const32 [d]))
// cond:
// result: (Const32 [int64(int32(c*d))])
}
func rewriteValuegeneric_OpMul32_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x))
// cond:
// result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x))
}
func rewriteValuegeneric_OpMul64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mul64 (Const64 [c]) (Const64 [d]))
// cond:
// result: (Const64 [c*d])
}
func rewriteValuegeneric_OpMul64_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x))
// cond:
// result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x))
}
func rewriteValuegeneric_OpMul8_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Mul8 (Const8 [c]) (Const8 [d]))
// cond:
// result: (Const8 [int64(int8(c*d))])
}
func rewriteValuegeneric_OpMul8_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Mul8 (Const8 [0]) _)
// cond:
// result: (Const8 [0])
}
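The Mul constant folds follow the same pattern as Mod, but here the interesting detail is that the product is truncated to the operand width before being re-widened, so wraparound happens at compile time exactly as it would at run time. An illustrative sketch (helper name invented):

// foldMul16 mirrors (Mul16 (Const16 [c]) (Const16 [d])) -> (Const16 [int64(int16(c*d))]).
// For example c = 300, d = 300: the int64 product is 90000, which truncates
// to int16 as 24464 (90000 - 65536), matching what a 16-bit multiply would
// produce at run time.
func foldMul16(c, d int64) int64 {
	return int64(int16(c * d))
}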
func rewriteValuegeneric_OpNeq16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq16 x x)
// cond:
// result: (ConstBool [0])
}
func rewriteValuegeneric_OpNeq32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq32 x x)
// cond:
// result: (ConstBool [0])
}
func rewriteValuegeneric_OpNeq64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq64 x x)
// cond:
// result: (ConstBool [0])
}
func rewriteValuegeneric_OpNeq8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Neq8 x x)
// cond:
// result: (ConstBool [0])
}
func rewriteValuegeneric_OpNeqInter_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (NeqInter x y)
// cond:
// result: (NeqPtr (ITab x) (ITab y))
}
func rewriteValuegeneric_OpNeqSlice_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (NeqSlice x y)
// cond:
// result: (NeqPtr (SlicePtr x) (SlicePtr y))
}
func rewriteValuegeneric_OpNilCheck_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
fe := b.Func.fe
- _ = fe
// match: (NilCheck (GetG mem) mem)
// cond:
// result: mem
}
func rewriteValuegeneric_OpOr16_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Or16 (Or16 y x) x)
// cond:
// result: (Or16 x y)
}
func rewriteValuegeneric_OpOr16_20(v *Value) bool {
b := v.Block
- _ = b
// match: (Or16 (Const16 <t> [c]) (Or16 x (Const16 <t> [d])))
// cond:
// result: (Or16 (Const16 <t> [int64(int16(c|d))]) x)
}
func rewriteValuegeneric_OpOr32_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Or32 (Or32 y x) x)
// cond:
// result: (Or32 x y)
}
func rewriteValuegeneric_OpOr32_20(v *Value) bool {
b := v.Block
- _ = b
// match: (Or32 (Const32 <t> [c]) (Or32 x (Const32 <t> [d])))
// cond:
// result: (Or32 (Const32 <t> [int64(int32(c|d))]) x)
}
func rewriteValuegeneric_OpOr64_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Or64 (Or64 y x) x)
// cond:
// result: (Or64 x y)
}
func rewriteValuegeneric_OpOr64_20(v *Value) bool {
b := v.Block
- _ = b
// match: (Or64 (Const64 <t> [c]) (Or64 x (Const64 <t> [d])))
// cond:
// result: (Or64 (Const64 <t> [c|d]) x)
}
func rewriteValuegeneric_OpOr8_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Or8 (Or8 y x) x)
// cond:
// result: (Or8 x y)
}
func rewriteValuegeneric_OpOr8_20(v *Value) bool {
b := v.Block
- _ = b
// match: (Or8 (Const8 <t> [c]) (Or8 x (Const8 <t> [d])))
// cond:
// result: (Or8 (Const8 <t> [int64(int8(c|d))]) x)
}
func rewriteValuegeneric_OpPtrIndex_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
typ := &b.Func.Config.Types
- _ = typ
// match: (PtrIndex <t> ptr idx)
// cond: config.PtrSize == 4
// result: (AddPtr ptr (Mul32 <typ.Int> idx (Const32 <typ.Int> [t.Elem().Size()])))
}
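PtrIndex is one of the few rules in this stretch that needs both config (its cond checks config.PtrSize to pick the 32-bit lowering shown here) and typ (for the integer type of the scaled index). The lowering itself is plain address arithmetic: scale the element index by the element size and add the resulting byte offset to the base pointer. A sketch of that offset computation for the 32-bit case, with hypothetical names standing in for the index and t.Elem().Size():

// ptrIndexByteOffset32 mirrors the Mul32 the rule emits: the element index
// times the element size gives the byte offset that AddPtr then adds to ptr.
func ptrIndexByteOffset32(idx, elemSize int32) int32 {
	return idx * elemSize
}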
func rewriteValuegeneric_OpRsh16Ux16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16Ux16 <t> x (Const16 [c]))
// cond:
// result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpRsh16Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16Ux32 <t> x (Const32 [c]))
// cond:
// result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpRsh16Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16Ux64 (Const16 [c]) (Const64 [d]))
// cond:
// result: (Const16 [int64(int16(uint16(c) >> uint64(d)))])
}
func rewriteValuegeneric_OpRsh16Ux8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16Ux8 <t> x (Const8 [c]))
// cond:
// result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))]))
}
func rewriteValuegeneric_OpRsh16x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16x16 <t> x (Const16 [c]))
// cond:
// result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpRsh16x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16x32 <t> x (Const32 [c]))
// cond:
// result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpRsh16x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh16x64 (Const16 [c]) (Const64 [d]))
// cond:
// result: (Const16 [int64(int16(c) >> uint64(d))])
}
func rewriteValuegeneric_OpRsh16x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh16x8 <t> x (Const8 [c]))
// cond:
// result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))]))
}
func rewriteValuegeneric_OpRsh32Ux16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32Ux16 <t> x (Const16 [c]))
// cond:
// result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpRsh32Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32Ux32 <t> x (Const32 [c]))
// cond:
// result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpRsh32Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32Ux64 (Const32 [c]) (Const64 [d]))
// cond:
// result: (Const32 [int64(int32(uint32(c) >> uint64(d)))])
}
func rewriteValuegeneric_OpRsh32Ux8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32Ux8 <t> x (Const8 [c]))
// cond:
// result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))
}
func rewriteValuegeneric_OpRsh32x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x16 <t> x (Const16 [c]))
// cond:
// result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpRsh32x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x32 <t> x (Const32 [c]))
// cond:
// result: (Rsh32x64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpRsh32x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh32x64 (Const32 [c]) (Const64 [d]))
// cond:
// result: (Const32 [int64(int32(c) >> uint64(d))])
}
func rewriteValuegeneric_OpRsh32x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh32x8 <t> x (Const8 [c]))
// cond:
// result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))]))
}
func rewriteValuegeneric_OpRsh64Ux16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64Ux16 <t> x (Const16 [c]))
// cond:
// result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpRsh64Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64Ux32 <t> x (Const32 [c]))
// cond:
// result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpRsh64Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64Ux64 (Const64 [c]) (Const64 [d]))
// cond:
// result: (Const64 [int64(uint64(c) >> uint64(d))])
}
func rewriteValuegeneric_OpRsh64Ux8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64Ux8 <t> x (Const8 [c]))
// cond:
// result: (Rsh64Ux64 x (Const64 <t> [int64(uint8(c))]))
}
func rewriteValuegeneric_OpRsh64x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64x16 <t> x (Const16 [c]))
// cond:
// result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpRsh64x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64x32 <t> x (Const32 [c]))
// cond:
// result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpRsh64x64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh64x64 (Const64 [c]) (Const64 [d]))
// cond:
// result: (Const64 [c >> uint64(d)])
}
func rewriteValuegeneric_OpRsh64x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh64x8 <t> x (Const8 [c]))
// cond:
// result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))]))
}
func rewriteValuegeneric_OpRsh8Ux16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8Ux16 <t> x (Const16 [c]))
// cond:
// result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpRsh8Ux32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8Ux32 <t> x (Const32 [c]))
// cond:
// result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpRsh8Ux64_0(v *Value) bool {
b := v.Block
- _ = b
typ := &b.Func.Config.Types
- _ = typ
// match: (Rsh8Ux64 (Const8 [c]) (Const64 [d]))
// cond:
// result: (Const8 [int64(int8(uint8(c) >> uint64(d)))])
}
func rewriteValuegeneric_OpRsh8Ux8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8Ux8 <t> x (Const8 [c]))
// cond:
// result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))]))
}
func rewriteValuegeneric_OpRsh8x16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8x16 <t> x (Const16 [c]))
// cond:
// result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))]))
}
func rewriteValuegeneric_OpRsh8x32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8x32 <t> x (Const32 [c]))
// cond:
// result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))]))
}
func rewriteValuegeneric_OpRsh8x64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8x64 (Const8 [c]) (Const64 [d]))
// cond:
// result: (Const8 [int64(int8(c) >> uint64(d))])
}
func rewriteValuegeneric_OpRsh8x8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Rsh8x8 <t> x (Const8 [c]))
// cond:
// result: (Rsh8x64 x (Const64 <t> [int64(uint8(c))]))
}
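The Rsh folds above come in two flavors per width: the signed (x) forms sign-extend the value before shifting, so the sign bit is replicated, while the unsigned (Ux) forms reinterpret the value as unsigned and shift zeros in. A small sketch contrasting the two 8-bit folds (helper names invented; the generated code inlines the expressions):

// foldRsh8 mirrors  (Rsh8x64  ...) -> (Const8 [int64(int8(c) >> uint64(d))])
// foldRsh8U mirrors (Rsh8Ux64 ...) -> (Const8 [int64(int8(uint8(c) >> uint64(d)))])
// For c = -128 (0x80) and d = 7, the signed fold yields -1 and the unsigned
// fold yields 1.
func foldRsh8(c, d int64) int64  { return int64(int8(c) >> uint64(d)) }
func foldRsh8U(c, d int64) int64 { return int64(int8(uint8(c) >> uint64(d))) }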
func rewriteValuegeneric_OpStaticCall_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (StaticCall {sym} s1:(Store _ (Const64 [sz]) s2:(Store _ src s3:(Store {t} _ dst mem))))
// cond: isSameSym(sym,"runtime.memmove") && t.(*types.Type).IsPtr() && s1.Uses == 1 && s2.Uses == 1 && s3.Uses == 1 && isInlinableMemmove(dst,src,sz,config) && clobber(s1) && clobber(s2) && clobber(s3)
// result: (Move {t.(*types.Type).Elem()} [sz] dst src mem)
}
func rewriteValuegeneric_OpStore_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Store {t1} p1 (Load <t2> p2 mem) mem)
// cond: isSamePtr(p1, p2) && t2.Size() == sizeof(t1)
// result: mem
}
func rewriteValuegeneric_OpStore_10(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
fe := b.Func.fe
- _ = fe
// match: (Store dst (StructMake2 <t> f0 f1) mem)
// cond:
// result: (Store {t.FieldType(1)} (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store {t.FieldType(0)} (OffPtr <t.FieldType(0).PtrTo()> [0] dst) f0 mem))
}
func rewriteValuegeneric_OpStore_20(v *Value) bool {
b := v.Block
- _ = b
// match: (Store {t1} op1:(OffPtr [o1] p1) d1 m2:(Store {t2} op2:(OffPtr [o2] p2) d2 m3:(Store {t3} op3:(OffPtr [0] p3) d3 m4:(Move [n] p4 _ mem))))
// cond: m2.Uses == 1 && m3.Uses == 1 && m4.Uses == 1 && o2 == sizeof(t3) && o1-o2 == sizeof(t2) && n == sizeof(t3) + sizeof(t2) + sizeof(t1) && isSamePtr(p1, p2) && isSamePtr(p2, p3) && isSamePtr(p3, p4) && clobber(m2) && clobber(m3) && clobber(m4)
// result: (Store {t1} op1 d1 (Store {t2} op2 d2 (Store {t3} op3 d3 mem)))
}
func rewriteValuegeneric_OpStructSelect_10(v *Value) bool {
b := v.Block
- _ = b
fe := b.Func.fe
- _ = fe
// match: (StructSelect [i] x:(Load <t> ptr mem))
// cond: !fe.CanSSA(t)
// result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
}
func rewriteValuegeneric_OpSub16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Sub16 (Const16 [c]) (Const16 [d]))
// cond:
// result: (Const16 [int64(int16(c-d))])
}
func rewriteValuegeneric_OpSub16_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Sub16 (Add16 y x) y)
// cond:
// result: x
}
func rewriteValuegeneric_OpSub32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Sub32 (Const32 [c]) (Const32 [d]))
// cond:
// result: (Const32 [int64(int32(c-d))])
}
func rewriteValuegeneric_OpSub32_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Sub32 (Add32 y x) y)
// cond:
// result: x
}
func rewriteValuegeneric_OpSub64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Sub64 (Const64 [c]) (Const64 [d]))
// cond:
// result: (Const64 [c-d])
}
func rewriteValuegeneric_OpSub64_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Sub64 (Add64 y x) y)
// cond:
// result: x
}
func rewriteValuegeneric_OpSub8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Sub8 (Const8 [c]) (Const8 [d]))
// cond:
// result: (Const8 [int64(int8(c-d))])
}
func rewriteValuegeneric_OpSub8_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Sub8 (Add8 y x) y)
// cond:
// result: x
}
func rewriteValuegeneric_OpXor16_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Xor16 (Const16 [c]) (Const16 [d]))
// cond:
// result: (Const16 [int64(int16(c^d))])
}
func rewriteValuegeneric_OpXor16_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Xor16 (Xor16 z i:(Const16 <t>)) x)
// cond: (z.Op != OpConst16 && x.Op != OpConst16)
// result: (Xor16 i (Xor16 <t> z x))
}
func rewriteValuegeneric_OpXor32_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Xor32 (Const32 [c]) (Const32 [d]))
// cond:
// result: (Const32 [int64(int32(c^d))])
}
func rewriteValuegeneric_OpXor32_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Xor32 (Xor32 z i:(Const32 <t>)) x)
// cond: (z.Op != OpConst32 && x.Op != OpConst32)
// result: (Xor32 i (Xor32 <t> z x))
}
func rewriteValuegeneric_OpXor64_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Xor64 (Const64 [c]) (Const64 [d]))
// cond:
// result: (Const64 [c^d])
}
func rewriteValuegeneric_OpXor64_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Xor64 (Xor64 z i:(Const64 <t>)) x)
// cond: (z.Op != OpConst64 && x.Op != OpConst64)
// result: (Xor64 i (Xor64 <t> z x))
}
func rewriteValuegeneric_OpXor8_0(v *Value) bool {
b := v.Block
- _ = b
// match: (Xor8 (Const8 [c]) (Const8 [d]))
// cond:
// result: (Const8 [int64(int8(c^d))])
}
func rewriteValuegeneric_OpXor8_10(v *Value) bool {
b := v.Block
- _ = b
// match: (Xor8 (Xor8 z i:(Const8 <t>)) x)
// cond: (z.Op != OpConst8 && x.Op != OpConst8)
// result: (Xor8 i (Xor8 <t> z x))
}
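The Xor (and Or) reassociation rules above rely only on ^ being associative and commutative: floating a constant outward, as in (Xor16 (Xor16 z i) x) -> (Xor16 i (Xor16 z x)), cannot change the result, and once two constants meet at the same level the constant folds at the head of each function combine them. A tiny, illustrative check of that identity:

// xorReassocHolds verifies (z ^ i) ^ x == i ^ (z ^ x) for 16-bit values,
// which is the algebraic fact the rewrite depends on.
func xorReassocHolds(z, i, x int16) bool {
	return (z^i)^x == i^(z^x)
}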
func rewriteValuegeneric_OpZero_0(v *Value) bool {
b := v.Block
- _ = b
config := b.Func.Config
- _ = config
// match: (Zero (Load (OffPtr [c] (SP)) mem) mem)
// cond: mem.Op == OpStaticCall && isSameSym(mem.Aux, "runtime.newobject") && c == config.ctxt.FixedFrameSize() + config.RegSize
// result: mem