Suggested by mdempsky in CL 38232.
This allows us to use the Frontend field
to associate frontend state and information
with a function.
See the following CL in the series for examples.
This is a giant CL, but it is almost entirely routine refactoring.
The ssa test API is starting to feel a bit unwieldy.
I will clean it up separately, once the dust has settled.
Passes toolstash -cmp.
Updates #15756
Change-Id: I71c573bd96ff7251935fce1391b06b1f133c3caf
Reviewed-on: https://go-review.googlesource.com/38327
Run-TryBot: Josh Bleecher Snyder <josharian@gmail.com>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
var ssaCache *ssa.Cache
func initssaconfig() {
- ssaConfig = ssa.NewConfig(thearch.LinkArch.Name, &ssaExp, Ctxt, Debug['N'] == 0)
+ ssaConfig = ssa.NewConfig(thearch.LinkArch.Name, Ctxt, Debug['N'] == 0)
if thearch.LinkArch.Name == "386" {
ssaConfig.Set387(thearch.Use387)
}
ssaExp.log = printssa
- s.f = ssa.NewFunc()
+ s.f = ssa.NewFunc(&ssaExp)
s.config = ssaConfig
s.f.Config = ssaConfig
s.f.Cache = ssaCache
s.panics = map[funcLine]*ssa.Block{}
if name == os.Getenv("GOSSAFUNC") {
- s.f.HTMLWriter = ssa.NewHTMLWriter("ssa.html", ssaConfig, name)
+ s.f.HTMLWriter = ssa.NewHTMLWriter("ssa.html", s.f.Frontend(), name)
// TODO: generate and print a mapping from nodes to values and blocks
}
return lab
}
-func (s *state) Logf(msg string, args ...interface{}) { s.config.Logf(msg, args...) }
-func (s *state) Log() bool { return s.config.Log() }
-func (s *state) Fatalf(msg string, args ...interface{}) { s.config.Fatalf(s.peekPos(), msg, args...) }
-func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) {
- s.config.Warnl(pos, msg, args...)
+func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) }
+func (s *state) Log() bool { return s.f.Log() }
+func (s *state) Fatalf(msg string, args ...interface{}) {
+ s.f.Frontend().Fatalf(s.peekPos(), msg, args...)
}
-func (s *state) Debug_checknil() bool { return s.config.Debug_checknil() }
+func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) }
+func (s *state) Debug_checknil() bool { return s.f.Frontend().Debug_checknil() }
var (
// dummy node for the memory variable
func (s *state) exprPtr(n *Node, bounded bool, lineno src.XPos) *ssa.Value {
p := s.expr(n)
if bounded || n.NonNil() {
- if s.f.Config.Debug_checknil() && lineno.Line() > 1 {
- s.f.Config.Warnl(lineno, "removed nil check")
+ if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 {
+ s.f.Warnl(lineno, "removed nil check")
}
return p
}
func genssa(f *ssa.Func, ptxt *obj.Prog, gcargs, gclocals *Sym) {
var s SSAGenState
- e := f.Config.Frontend().(*ssaExport)
+ e := f.Frontend().(*ssaExport)
// Remember where each block starts.
s.bstart = make([]*obj.Prog, f.NumBlocks())
for _, b := range f.Blocks {
for _, v := range b.Values {
if v.Op == OpIsInBounds || v.Op == OpIsSliceInBounds {
- f.Config.Warnl(v.Pos, "Found %v", v.Op)
+ f.Warnl(v.Pos, "Found %v", v.Op)
}
}
}
fi, err := os.Create(fname)
if err != nil {
- f.Config.Warnl(src.NoXPos, "Unable to create after-phase dump file %s", fname)
+ f.Warnl(src.NoXPos, "Unable to create after-phase dump file %s", fname)
return
}
FPReg int8 // register number of frame pointer, -1 if not used
LinkReg int8 // register number of link register if it is a general purpose register, -1 if not used
hasGReg bool // has hardware g register
- fe Frontend // callbacks into compiler frontend
ctxt *obj.Link // Generic arch information
optimize bool // Do optimization
noDuffDevice bool // Don't use Duff's device
}
// NewConfig returns a new configuration object for the given architecture.
-func NewConfig(arch string, fe Frontend, ctxt *obj.Link, optimize bool) *Config {
- c := &Config{arch: arch, fe: fe}
+func NewConfig(arch string, ctxt *obj.Link, optimize bool) *Config {
+ c := &Config{arch: arch}
switch arch {
case "amd64":
c.IntSize = 8
c.hasGReg = true
c.noDuffDevice = true
default:
- fe.Fatalf(src.NoXPos, "arch %s not implemented", arch)
+ ctxt.Diag("arch %s not implemented", arch)
}
c.ctxt = ctxt
c.optimize = optimize
if ev != "" {
v, err := strconv.ParseInt(ev, 10, 64)
if err != nil {
- fe.Fatalf(src.NoXPos, "Environment variable GO_SSA_PHI_LOC_CUTOFF (value '%s') did not parse as a number", ev)
+ ctxt.Diag("Environment variable GO_SSA_PHI_LOC_CUTOFF (value '%s') did not parse as a number", ev)
}
c.sparsePhiCutoff = uint64(v) // convert -1 to maxint, for never use sparse
}
c.use387 = b
}
-func (c *Config) Frontend() Frontend { return c.fe }
func (c *Config) SparsePhiCutoff() uint64 { return c.sparsePhiCutoff }
func (c *Config) Ctxt() *obj.Link { return c.ctxt }
-
-func (c *Config) Logf(msg string, args ...interface{}) { c.fe.Logf(msg, args...) }
-func (c *Config) Log() bool { return c.fe.Log() }
-func (c *Config) Fatalf(pos src.XPos, msg string, args ...interface{}) { c.fe.Fatalf(pos, msg, args...) }
-func (c *Config) Error(pos src.XPos, msg string, args ...interface{}) { c.fe.Error(pos, msg, args...) }
-func (c *Config) Warnl(pos src.XPos, msg string, args ...interface{}) { c.fe.Warnl(pos, msg, args...) }
-func (c *Config) Debug_checknil() bool { return c.fe.Debug_checknil() }
-func (c *Config) Debug_wb() bool { return c.fe.Debug_wb() }
}
for i := 0; i < b.N; i++ {
- fun := Fun(c, "entry", Bloc("entry", values...))
+ fun := Fun(c, DummyFrontend{b}, "entry", Bloc("entry", values...))
Copyelim(fun.f)
}
}
d.Pos = p.Pos
blocks[argID] = d
if f.pass.debug > 0 {
- f.Config.Warnl(p.Pos, "split critical edge")
+ f.Warnl(p.Pos, "split critical edge")
}
} else {
reusedBlock = true
d = f.NewBlock(BlockPlain)
d.Pos = p.Pos
if f.pass.debug > 0 {
- f.Config.Warnl(p.Pos, "split critical edge")
+ f.Warnl(p.Pos, "split critical edge")
}
}
// construct lots of values with args that have aux values and place
// them in an order that triggers the bug
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sp", OpSP, TypeBytePtr, 0, nil),
func TestZCSE(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sp", OpSP, TypeBytePtr, 0, nil),
func TestDeadLoop(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Goto("exit")),
func TestDeadValue(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("deadval", OpConst64, TypeInt64, 37, nil),
func TestNeverTaken(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("cond", OpConstBool, TypeBool, 0, nil),
Valu("mem", OpInitMem, TypeMem, 0, nil),
func TestNestedDeadBlocks(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("cond", OpConstBool, TypeBool, 0, nil),
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
- fun := Fun(c, "entry", blocks...)
+ fun := Fun(c, DummyFrontend{b}, "entry", blocks...)
Deadcode(fun.f)
}
})
c := testConfig(t)
elemType := &TypeImpl{Size_: 1, Name: "testtype"}
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr", Elem_: elemType} // dummy for testing
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// make sure we don't get into an infinite loop with phi values.
c := testConfig(t)
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
c := testConfig(t)
t1 := &TypeImpl{Size_: 8, Ptr: true, Name: "t1"}
t2 := &TypeImpl{Size_: 4, Ptr: true, Name: "t2"}
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// can get to a point where the size is changed but type unchanged.
c := testConfig(t)
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
case t.IsInteger() && t.Size() == 8 && f.Config.IntSize == 4:
var elemType Type
if t.IsSigned() {
- elemType = f.Config.fe.TypeInt32()
+ elemType = f.fe.TypeInt32()
} else {
- elemType = f.Config.fe.TypeUInt32()
+ elemType = f.fe.TypeUInt32()
}
- hiName, loName := f.Config.fe.SplitInt64(name)
+ hiName, loName := f.fe.SplitInt64(name)
newNames = append(newNames, hiName, loName)
for _, v := range f.NamedValues[name] {
hi := v.Block.NewValue1(v.Pos, OpInt64Hi, elemType, v)
- lo := v.Block.NewValue1(v.Pos, OpInt64Lo, f.Config.fe.TypeUInt32(), v)
+ lo := v.Block.NewValue1(v.Pos, OpInt64Lo, f.fe.TypeUInt32(), v)
f.NamedValues[hiName] = append(f.NamedValues[hiName], hi)
f.NamedValues[loName] = append(f.NamedValues[loName], lo)
}
case t.IsComplex():
var elemType Type
if t.Size() == 16 {
- elemType = f.Config.fe.TypeFloat64()
+ elemType = f.fe.TypeFloat64()
} else {
- elemType = f.Config.fe.TypeFloat32()
+ elemType = f.fe.TypeFloat32()
}
- rName, iName := f.Config.fe.SplitComplex(name)
+ rName, iName := f.fe.SplitComplex(name)
newNames = append(newNames, rName, iName)
for _, v := range f.NamedValues[name] {
r := v.Block.NewValue1(v.Pos, OpComplexReal, elemType, v)
}
delete(f.NamedValues, name)
case t.IsString():
- ptrType := f.Config.fe.TypeBytePtr()
- lenType := f.Config.fe.TypeInt()
- ptrName, lenName := f.Config.fe.SplitString(name)
+ ptrType := f.fe.TypeBytePtr()
+ lenType := f.fe.TypeInt()
+ ptrName, lenName := f.fe.SplitString(name)
newNames = append(newNames, ptrName, lenName)
for _, v := range f.NamedValues[name] {
ptr := v.Block.NewValue1(v.Pos, OpStringPtr, ptrType, v)
}
delete(f.NamedValues, name)
case t.IsSlice():
- ptrType := f.Config.fe.TypeBytePtr()
- lenType := f.Config.fe.TypeInt()
- ptrName, lenName, capName := f.Config.fe.SplitSlice(name)
+ ptrType := f.fe.TypeBytePtr()
+ lenType := f.fe.TypeInt()
+ ptrName, lenName, capName := f.fe.SplitSlice(name)
newNames = append(newNames, ptrName, lenName, capName)
for _, v := range f.NamedValues[name] {
ptr := v.Block.NewValue1(v.Pos, OpSlicePtr, ptrType, v)
}
delete(f.NamedValues, name)
case t.IsInterface():
- ptrType := f.Config.fe.TypeBytePtr()
- typeName, dataName := f.Config.fe.SplitInterface(name)
+ ptrType := f.fe.TypeBytePtr()
+ typeName, dataName := f.fe.SplitInterface(name)
newNames = append(newNames, typeName, dataName)
for _, v := range f.NamedValues[name] {
typ := v.Block.NewValue1(v.Pos, OpITab, ptrType, v)
}
func decomposeStringPhi(v *Value) {
- fe := v.Block.Func.Config.fe
+ fe := v.Block.Func.fe
ptrType := fe.TypeBytePtr()
lenType := fe.TypeInt()
}
func decomposeSlicePhi(v *Value) {
- fe := v.Block.Func.Config.fe
+ fe := v.Block.Func.fe
ptrType := fe.TypeBytePtr()
lenType := fe.TypeInt()
}
func decomposeInt64Phi(v *Value) {
- fe := v.Block.Func.Config.fe
+ fe := v.Block.Func.fe
var partType Type
if v.Type.IsSigned() {
partType = fe.TypeInt32()
}
func decomposeComplexPhi(v *Value) {
- fe := v.Block.Func.Config.fe
+ fe := v.Block.Func.fe
var partType Type
switch z := v.Type.Size(); z {
case 8:
}
func decomposeInterfacePhi(v *Value) {
- ptrType := v.Block.Func.Config.fe.TypeBytePtr()
+ ptrType := v.Block.Func.fe.TypeBytePtr()
itab := v.Block.NewValue0(v.Pos, OpPhi, ptrType)
data := v.Block.NewValue0(v.Pos, OpPhi, ptrType)
n := t.NumFields()
fnames = fnames[:0]
for i := 0; i < n; i++ {
- fnames = append(fnames, f.Config.fe.SplitStruct(name, i))
+ fnames = append(fnames, f.fe.SplitStruct(name, i))
}
for _, v := range f.NamedValues[name] {
for i := 0; i < n; i++ {
if t.NumElem() != 1 {
f.Fatalf("array not of size 1")
}
- elemName := f.Config.fe.SplitArray(name)
+ elemName := f.fe.SplitArray(name)
for _, v := range f.NamedValues[name] {
e := v.Block.NewValue1I(v.Pos, OpArraySelect, t.ElemType(), 0, v)
f.NamedValues[elemName] = append(f.NamedValues[elemName], e)
var domBenchRes []*Block
func benchmarkDominators(b *testing.B, size int, bg blockGen) {
- c := NewConfig("amd64", DummyFrontend{b}, nil, true)
- fun := Fun(c, "entry", bg(size)...)
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{b}, "entry", bg(size)...)
CheckFunc(fun.f)
b.SetBytes(int64(size))
func TestDominatorsSingleBlock(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Exit("mem")))
func TestDominatorsSimple(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Goto("a")),
func TestDominatorsMultPredFwd(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 1, nil),
func TestDominatorsDeadCode(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 0, nil),
func TestDominatorsMultPredRev(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Goto("first")),
Bloc("first",
func TestDominatorsMultPred(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 1, nil),
func TestInfiniteLoop(t *testing.T) {
c := testConfig(t)
// note lack of an exit block
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 1, nil),
b := 1 & i >> 1
c := 1 & i >> 2
- fun := Fun(testConfig(t), "1",
+ fun := Fun(testConfig(t), DummyFrontend{t}, "1",
Bloc("1",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 1, nil),
func TestDominatorsPostTricky(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "b1",
+ fun := Fun(c, DummyFrontend{t}, "b1",
Bloc("b1",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("p", OpConstBool, TypeBool, 1, nil),
var TestCtxt = obj.Linknew(&x86.Linkamd64)
func testConfig(t testing.TB) *Config {
- return NewConfig("amd64", DummyFrontend{t}, TestCtxt, true)
+ return NewConfig("amd64", TestCtxt, true)
}
func testConfigS390X(t testing.TB) *Config {
- return NewConfig("s390x", DummyFrontend{t}, obj.Linknew(&s390x.Links390x), true)
+ return NewConfig("s390x", obj.Linknew(&s390x.Links390x), true)
}
// DummyFrontend is a test-only frontend.
type Func struct {
Config *Config // architecture information
Cache *Cache // re-usable cache
+ fe Frontend // frontend state associated with this Func, callbacks into compiler frontend
pass *pass // current pass information (name, options, etc.)
Name string // e.g. bytes·Compare
Type Type // type signature of the function.
// NewFunc returns a new, empty function object.
// Caller must set f.Config and f.Cache before using f.
-func NewFunc() *Func {
- return &Func{NamedValues: make(map[LocalSlot][]*Value)}
+func NewFunc(fe Frontend) *Func {
+ return &Func{fe: fe, NamedValues: make(map[LocalSlot][]*Value)}
}
// NumBlocks returns an integer larger than the id of any Block in the Func.
if f.pass != nil {
n = strings.Replace(f.pass.name, " ", "_", -1)
}
- f.Config.Warnl(f.Entry.Pos, "\t%s\t%s%s\t%s", n, key, value, f.Name)
+ f.Warnl(f.Entry.Pos, "\t%s\t%s%s\t%s", n, key, value, f.Name)
}
// freeValue frees a value. It must no longer be referenced.
}
-func (f *Func) Logf(msg string, args ...interface{}) { f.Config.Logf(msg, args...) }
-func (f *Func) Log() bool { return f.Config.Log() }
-func (f *Func) Fatalf(msg string, args ...interface{}) { f.Config.Fatalf(f.Entry.Pos, msg, args...) }
+func (f *Func) Frontend() Frontend { return f.fe }
+func (f *Func) Warnl(pos src.XPos, msg string, args ...interface{}) { f.fe.Warnl(pos, msg, args...) }
+func (f *Func) Logf(msg string, args ...interface{}) { f.fe.Logf(msg, args...) }
+func (f *Func) Log() bool { return f.fe.Log() }
+func (f *Func) Fatalf(msg string, args ...interface{}) { f.fe.Fatalf(f.Entry.Pos, msg, args...) }
// postorder returns the reachable blocks in f in a postorder traversal.
func (f *Func) postorder() []*Block {
// returns a fun containing the composed Func. entry must be a name
// supplied to one of the Bloc functions. Each of the bloc names and
// valu names should be unique across the Fun.
-func Fun(c *Config, entry string, blocs ...bloc) fun {
- f := NewFunc()
+func Fun(c *Config, fe Frontend, entry string, blocs ...bloc) fun {
+ f := NewFunc(fe)
f.Config = c
// TODO: Either mark some SSA tests as t.Parallel,
// or set up a shared Cache and Reset it between tests.
func TestArgs(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
equivalentCases := []struct{ f, g fun }{
// simple case
{
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Goto("exit")),
Bloc("exit",
Exit("mem"))),
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
},
// block order changed
{
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Goto("exit")),
Bloc("exit",
Exit("mem"))),
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("exit",
Exit("mem")),
Bloc("entry",
differentCases := []struct{ f, g fun }{
// different shape
{
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Goto("exit")),
Bloc("exit",
Exit("mem"))),
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Exit("mem"))),
},
// value order changed
{
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Valu("a", OpConst64, TypeInt64, 14, nil),
Exit("mem"))),
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 14, nil),
},
// value auxint different
{
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 14, nil),
Exit("mem"))),
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 26, nil),
},
// value aux different
{
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 0, 14),
Exit("mem"))),
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 0, 26),
},
// value args different
{
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 14, nil),
Valu("b", OpConst64, TypeInt64, 26, nil),
Valu("sum", OpAdd64, TypeInt64, 0, nil, "a", "b"),
Exit("mem"))),
- Fun(testConfig(t), "entry",
+ Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("a", OpConst64, TypeInt64, 0, nil),
// TestConstCache ensures that the cache will not return
// reused free'd values with a non-matching AuxInt
func TestConstCache(t *testing.T) {
- f := Fun(testConfig(t), "entry",
+ f := Fun(testConfig(t), DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Exit("mem")))
func TestFuseEliminatesOneBranch(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestFuseEliminatesBothBranches(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
func TestFuseHandlesPhis(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
}
func TestFuseEliminatesEmptyBlocks(t *testing.T) {
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
b.ResetTimer()
for i := 0; i < b.N; i++ {
- fun := Fun(c, "entry", blocks...)
+ fun := Fun(c, DummyFrontend{b}, "entry", blocks...)
fuse(fun.f)
}
})
// It's not precise--thus the blank assignments--but it's good enough
// to avoid generating needless code and doing pointless nil checks.
hasb := strings.Contains(body, "b.")
- hasconfig := strings.Contains(body, "config.")
+ hasconfig := strings.Contains(body, "config.") || strings.Contains(body, "config)")
hasfe := strings.Contains(body, "fe.")
fmt.Fprintf(w, "func rewriteValue%s_%s(v *Value) bool {\n", arch.name, op)
if hasb || hasconfig || hasfe {
fmt.Fprintln(w, "b := v.Block")
fmt.Fprintln(w, "_ = b")
}
- if hasconfig || hasfe {
+ if hasconfig {
fmt.Fprintln(w, "config := b.Func.Config")
fmt.Fprintln(w, "_ = config")
}
if hasfe {
- fmt.Fprintln(w, "fe := config.fe")
+ fmt.Fprintln(w, "fe := b.Func.fe")
fmt.Fprintln(w, "_ = fe")
}
fmt.Fprint(w, body)
fmt.Fprintf(w, "func rewriteBlock%s(b *Block) bool {\n", arch.name)
fmt.Fprintln(w, "config := b.Func.Config")
fmt.Fprintln(w, "_ = config")
- fmt.Fprintln(w, "fe := config.fe")
+ fmt.Fprintln(w, "fe := b.Func.fe")
fmt.Fprintln(w, "_ = fe")
fmt.Fprintf(w, "switch b.Kind {\n")
ops = nil
}
func testLCAgen(t *testing.T, bg blockGen, size int) {
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry", bg(size)...)
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry", bg(size)...)
CheckFunc(fun.f)
if size == 4 {
t.Logf(fun.f.String())
}
func describeBranchPrediction(f *Func, b *Block, likely, not int8, prediction BranchPrediction) {
- f.Config.Warnl(b.Pos, "Branch prediction rule %s < %s%s",
+ f.Warnl(b.Pos, "Branch prediction rule %s < %s%s",
bllikelies[likely-blMin], bllikelies[not-blMin], describePredictionAgrees(b, prediction))
}
noprediction = true
}
if f.pass.debug > 0 && !noprediction {
- f.Config.Warnl(b.Pos, "Branch prediction rule stay in loop%s",
+ f.Warnl(b.Pos, "Branch prediction rule stay in loop%s",
describePredictionAgrees(b, prediction))
}
}
}
if f.pass.debug > 2 {
- f.Config.Warnl(b.Pos, "BP: Block %s, local=%s, certain=%s", b, bllikelies[local[b.ID]-blMin], bllikelies[certain[b.ID]-blMin])
+ f.Warnl(b.Pos, "BP: Block %s, local=%s, certain=%s", b, bllikelies[local[b.ID]-blMin], bllikelies[certain[b.ID]-blMin])
}
}
// done:
//
c := testConfigS390X(t)
- fun := Fun(c, "entry",
+ fe := DummyFrontend{t}
+ fun := Fun(c, fe, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("SP", OpSP, TypeUInt64, 0, nil),
Valu("ret", OpAddr, TypeInt64Ptr, 0, nil, "SP"),
- Valu("N", OpArg, TypeInt64, 0, c.fe.Auto(TypeInt64)),
+ Valu("N", OpArg, TypeInt64, 0, fe.Auto(TypeInt64)),
Valu("starti", OpConst64, TypeInt64, 0, nil),
Valu("startsum", OpConst64, TypeInt64, 0, nil),
Goto("b1")),
if f.pass.debug > 1 {
if min.Op == OpConst64 {
- b.Func.Config.Warnl(b.Pos, "Induction variable with minimum %d and increment %d", min.AuxInt, inc.AuxInt)
+ b.Func.Warnl(b.Pos, "Induction variable with minimum %d and increment %d", min.AuxInt, inc.AuxInt)
} else {
- b.Func.Config.Warnl(b.Pos, "Induction variable with non-const minimum and increment %d", inc.AuxInt)
+ b.Func.Warnl(b.Pos, "Induction variable with non-const minimum and increment %d", inc.AuxInt)
}
}
if iv, has := m[ind]; has && sdom.isAncestorEq(iv.entry, b) && isNonNegative(iv.min) {
if v.Args[1] == iv.max {
if f.pass.debug > 0 {
- f.Config.Warnl(b.Pos, "Found redundant %s", v.Op)
+ f.Warnl(b.Pos, "Found redundant %s", v.Op)
}
goto simplify
}
if iv, has := m[ind]; has && sdom.isAncestorEq(iv.entry, b) && isNonNegative(iv.min) {
if v.Args[1].Op == OpSliceCap && iv.max.Op == OpSliceLen && v.Args[1].Args[0] == iv.max.Args[0] {
if f.pass.debug > 0 {
- f.Config.Warnl(b.Pos, "Found redundant %s (len promoted to cap)", v.Op)
+ f.Warnl(b.Pos, "Found redundant %s (len promoted to cap)", v.Op)
}
goto simplify
}
if max := iv.max.AuxInt + add; 0 <= max && max <= limit { // handle overflow
if f.pass.debug > 0 {
- f.Config.Warnl(b.Pos, "Found redundant (%s ind %d), ind < %d", v.Op, v.Args[1].AuxInt, iv.max.AuxInt+add)
+ f.Warnl(b.Pos, "Found redundant (%s ind %d), ind < %d", v.Op, v.Args[1].AuxInt, iv.max.AuxInt+add)
}
goto simplify
}
// if sp < g.limit { goto sched }
// goto header
- pt := f.Config.Frontend().TypeUintptr()
+ pt := f.fe.TypeUintptr()
g := test.NewValue1(bb.Pos, OpGetG, pt, mem0)
sp := test.NewValue0(bb.Pos, OpSP, pt)
cmpOp := OpLess64U
}
limaddr := test.NewValue1I(bb.Pos, OpOffPtr, pt, 2*pt.Size(), g)
lim := test.NewValue2(bb.Pos, OpLoad, pt, limaddr, mem0)
- cmp := test.NewValue2(bb.Pos, cmpOp, f.Config.fe.TypeBool(), sp, lim)
+ cmp := test.NewValue2(bb.Pos, cmpOp, f.fe.TypeBool(), sp, lim)
test.SetControl(cmp)
// if true, goto sched
// sched:
// mem1 := call resched (mem0)
// goto header
- resched := f.Config.fe.Syslook("goschedguarded")
+ resched := f.fe.Syslook("goschedguarded")
mem1 := sched.NewValue1A(bb.Pos, OpStaticCall, TypeMem, resched, mem0)
sched.AddEdgeTo(h)
headerMemPhi.AddArg(mem1)
// This is a redundant implicit nil check.
// Logging in the style of the former compiler -- and omit line 1,
// which is usually in generated code.
- if f.Config.Debug_checknil() && v.Pos.Line() > 1 {
- f.Config.Warnl(v.Pos, "removed nil check")
+ if f.fe.Debug_checknil() && v.Pos.Line() > 1 {
+ f.Warnl(v.Pos, "removed nil check")
}
v.reset(OpUnknown)
// TODO: f.freeValue(v)
for i := len(b.Values) - 1; i >= 0; i-- {
v := b.Values[i]
if opcodeTable[v.Op].nilCheck && unnecessary.contains(v.Args[0].ID) {
- if f.Config.Debug_checknil() && v.Pos.Line() > 1 {
- f.Config.Warnl(v.Pos, "removed nil check")
+ if f.fe.Debug_checknil() && v.Pos.Line() > 1 {
+ f.Warnl(v.Pos, "removed nil check")
}
v.reset(OpUnknown)
continue
Bloc("exit", Exit("mem")),
)
- c := NewConfig("amd64", DummyFrontend{b}, nil, true)
- fun := Fun(c, "entry", blocs...)
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{b}, "entry", blocs...)
CheckFunc(fun.f)
b.SetBytes(int64(depth)) // helps for eyeballing linearity
// TestNilcheckSimple verifies that a second repeated nilcheck is removed.
func TestNilcheckSimple(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// on the order of the dominees.
func TestNilcheckDomOrder(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// TestNilcheckAddr verifies that nilchecks of OpAddr constructed values are removed.
func TestNilcheckAddr(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// TestNilcheckAddPtr verifies that nilchecks of OpAddPtr constructed values are removed.
func TestNilcheckAddPtr(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// non-nil are removed.
func TestNilcheckPhi(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// are removed, but checks of different pointers are not.
func TestNilcheckKeepRemove(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// block are *not* removed.
func TestNilcheckInFalseBranch(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// will remove the generated nil check.
func TestNilcheckUser(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// TestNilcheckBug reproduces a bug in nilcheckelim found by compiling math/big
func TestNilcheckBug(t *testing.T) {
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- c := NewConfig("amd64", DummyFrontend{t}, nil, true)
- fun := Fun(c, "entry",
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// benchFnPass runs passFunc b.N times across a single function.
func benchFnPass(b *testing.B, fn passFunc, size int, bg blockGen) {
b.ReportAllocs()
- c := NewConfig("amd64", DummyFrontend{b}, nil, true)
- fun := Fun(c, "entry", bg(size)...)
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{b}, "entry", bg(size)...)
CheckFunc(fun.f)
b.ResetTimer()
for i := 0; i < b.N; i++ {
// benchFnPass runs passFunc across a function with b.N blocks.
func benchFnBlock(b *testing.B, fn passFunc, bg blockGen) {
b.ReportAllocs()
- c := NewConfig("amd64", DummyFrontend{b}, nil, true)
- fun := Fun(c, "entry", bg(b.N)...)
+ c := NewConfig("amd64", nil, true)
+ fun := Fun(c, DummyFrontend{b}, "entry", bg(b.N)...)
CheckFunc(fun.f)
b.ResetTimer()
for i := 0; i < passCount; i++ {
v.SetArgs1(w)
f := v.Block.Func
if f.pass.debug > 0 {
- f.Config.Warnl(v.Pos, "eliminated phi")
+ f.Warnl(v.Pos, "eliminated phi")
}
return true
}
v.reset(ops[v.Args[reverse].AuxInt])
v.AddArg(b0.Control)
if f.pass.debug > 0 {
- f.Config.Warnl(b.Pos, "converted OpPhi to %v", v.Op)
+ f.Warnl(b.Pos, "converted OpPhi to %v", v.Op)
}
continue
}
v.reset(OpOrB)
v.SetArgs2(b0.Control, tmp)
if f.pass.debug > 0 {
- f.Config.Warnl(b.Pos, "converted OpPhi to %v", v.Op)
+ f.Warnl(b.Pos, "converted OpPhi to %v", v.Op)
}
continue
}
v.reset(OpAndB)
v.SetArgs2(b0.Control, tmp)
if f.pass.debug > 0 {
- f.Config.Warnl(b.Pos, "converted OpPhi to %v", v.Op)
+ f.Warnl(b.Pos, "converted OpPhi to %v", v.Op)
}
continue
}
f := b0.Func
if f.pass.debug > 0 {
- f.Config.Warnl(v.Block.Pos, "converted OpPhi bool -> int%d", v.Type.Size()*8)
+ f.Warnl(v.Block.Pos, "converted OpPhi bool -> int%d", v.Type.Size()*8)
}
}
func (p stringFuncPrinter) value(v *Value, live bool) {
fmt.Fprint(p.w, " ")
- //fmt.Fprint(p.w, v.Block.Func.Config.fe.Pos(v.Pos))
+ //fmt.Fprint(p.w, v.Block.Func.fe.Pos(v.Pos))
//fmt.Fprint(p.w, ": ")
fmt.Fprint(p.w, v.LongString())
if !live {
ft.limitStack = append(ft.limitStack, limitFact{v.ID, old})
ft.limits[v.ID] = lim
if v.Block.Func.pass.debug > 2 {
- v.Block.Func.Config.Warnl(parent.Pos, "parent=%s, new limits %s %s %s", parent, v, w, lim.String())
+ v.Block.Func.Warnl(parent.Pos, "parent=%s, new limits %s %s %s", parent, v, w, lim.String())
}
}
}
v.reset(OpConst32)
}
if b.Func.pass.debug > 0 {
- b.Func.Config.Warnl(v.Pos, "Proved slicemask not needed")
+ b.Func.Warnl(v.Pos, "Proved slicemask not needed")
}
v.AuxInt = -1
}
if m == lt|gt {
if b.Func.pass.debug > 0 {
if b.Func.pass.debug > 1 {
- b.Func.Config.Warnl(b.Pos, "Proved boolean %s (%s)", b.Control.Op, b.Control)
+ b.Func.Warnl(b.Pos, "Proved boolean %s (%s)", b.Control.Op, b.Control)
} else {
- b.Func.Config.Warnl(b.Pos, "Proved boolean %s", b.Control.Op)
+ b.Func.Warnl(b.Pos, "Proved boolean %s", b.Control.Op)
}
}
return positive
if m == eq {
if b.Func.pass.debug > 0 {
if b.Func.pass.debug > 1 {
- b.Func.Config.Warnl(b.Pos, "Disproved boolean %s (%s)", b.Control.Op, b.Control)
+ b.Func.Warnl(b.Pos, "Disproved boolean %s (%s)", b.Control.Op, b.Control)
} else {
- b.Func.Config.Warnl(b.Pos, "Disproved boolean %s", b.Control.Op)
+ b.Func.Warnl(b.Pos, "Disproved boolean %s", b.Control.Op)
}
}
return negative
if m != 0 && tr.r&m == m {
if b.Func.pass.debug > 0 {
if b.Func.pass.debug > 1 {
- b.Func.Config.Warnl(b.Pos, "Proved %s (%s)", c.Op, c)
+ b.Func.Warnl(b.Pos, "Proved %s (%s)", c.Op, c)
} else {
- b.Func.Config.Warnl(b.Pos, "Proved %s", c.Op)
+ b.Func.Warnl(b.Pos, "Proved %s", c.Op)
}
}
return positive
if m != 0 && ((lt|eq|gt)^tr.r)&m == m {
if b.Func.pass.debug > 0 {
if b.Func.pass.debug > 1 {
- b.Func.Config.Warnl(b.Pos, "Disproved %s (%s)", c.Op, c)
+ b.Func.Warnl(b.Pos, "Disproved %s (%s)", c.Op, c)
} else {
- b.Func.Config.Warnl(b.Pos, "Disproved %s", c.Op)
+ b.Func.Warnl(b.Pos, "Disproved %s", c.Op)
}
}
return negative
if m != 0 && tr.r&m == m {
if b.Func.pass.debug > 0 {
if b.Func.pass.debug > 1 {
- b.Func.Config.Warnl(b.Pos, "Proved non-negative bounds %s (%s)", c.Op, c)
+ b.Func.Warnl(b.Pos, "Proved non-negative bounds %s (%s)", c.Op, c)
} else {
- b.Func.Config.Warnl(b.Pos, "Proved non-negative bounds %s", c.Op)
+ b.Func.Warnl(b.Pos, "Proved non-negative bounds %s", c.Op)
}
}
return positive
// Load v from its spill location.
spill := s.makeSpill(v, s.curBlock)
if s.f.pass.debug > logSpills {
- s.f.Config.Warnl(vi.spill.Pos, "load spill for %v from %v", v, spill)
+ s.f.Warnl(vi.spill.Pos, "load spill for %v from %v", v, spill)
}
c = s.curBlock.NewValue1(pos, OpLoadReg, v.Type, spill)
}
case "s390x":
// nothing to do, R10 & R11 already reserved
default:
- s.f.Config.fe.Fatalf(src.NoXPos, "arch %s not implemented", s.f.Config.arch)
+ s.f.fe.Fatalf(src.NoXPos, "arch %s not implemented", s.f.Config.arch)
}
}
if s.f.Config.nacl {
// Which registers are possibilities.
var m regMask
if typ.IsFloat() {
- m = e.s.compatRegs(e.s.f.Config.fe.TypeFloat64())
+ m = e.s.compatRegs(e.s.f.fe.TypeFloat64())
} else {
- m = e.s.compatRegs(e.s.f.Config.fe.TypeInt64())
+ m = e.s.compatRegs(e.s.f.fe.TypeInt64())
}
// Pick a register. In priority order:
// No register is available. Allocate a temp location to spill a register to.
// The type of the slot is immaterial - it will not be live across
// any safepoint. Just use a type big enough to hold any register.
- typ = e.s.f.Config.fe.TypeInt64()
- t := LocalSlot{e.s.f.Config.fe.Auto(typ), typ, 0}
+ typ = e.s.f.fe.TypeInt64()
+ t := LocalSlot{e.s.f.fe.Auto(typ), typ, 0}
// TODO: reuse these slots.
// Pick a register to spill.
func TestLiveControlOps(t *testing.T) {
c := testConfig(t)
- f := Fun(c, "entry",
+ f := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("x", OpAMD64MOVLconst, TypeInt8, 1, nil),
if !ok {
return nil
}
- lsym := f.Config.Frontend().DerefItab(ext.Sym, offset)
+ lsym := f.fe.DerefItab(ext.Sym, offset)
if f.pass.debug > 0 {
if lsym != nil {
- f.Config.Warnl(v.Pos, "de-virtualizing call")
+ f.Warnl(v.Pos, "de-virtualizing call")
} else {
- f.Config.Warnl(v.Pos, "couldn't de-virtualize call")
+ f.Warnl(v.Pos, "couldn't de-virtualize call")
}
}
return lsym
// cond is true and the rule is fired.
func warnRule(cond bool, v *Value, s string) bool {
if cond {
- v.Block.Func.Config.Warnl(v.Pos, s)
+ v.Block.Func.Warnl(v.Pos, s)
}
return true
}
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (MOVSDconst [c])
// cond: config.ctxt.Flag_shared
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (MOVSSconst [c])
// cond: config.ctxt.Flag_shared
func rewriteValue386_Op386ORL(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (ORL x (MOVLconst [c]))
// cond:
func rewriteValue386_OpDiv8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8 x y)
// cond:
func rewriteValue386_OpDiv8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8u x y)
// cond:
func rewriteValue386_OpMod8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8 x y)
// cond:
func rewriteValue386_OpMod8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8u x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Move [0] _ _ mem)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neg32F x)
// cond: !config.use387
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neg64F x)
// cond: !config.use387
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Zero [0] _ mem)
// cond:
func rewriteBlock386(b *Block) bool {
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
switch b.Kind {
case Block386EQ:
func rewriteValueAMD64_OpAMD64MOVLstoreconst(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (MOVLstoreconst [sc] {s} (ADDQconst [off] ptr) mem)
// cond: ValAndOff(sc).canAdd(off)
func rewriteValueAMD64_OpAMD64MOVLstoreconstidx1(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (MOVLstoreconstidx1 [c] {sym} ptr (SHLQconst [2] idx) mem)
// cond:
func rewriteValueAMD64_OpAMD64MOVLstoreconstidx4(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (MOVLstoreconstidx4 [x] {sym} (ADDQconst [c] ptr) idx mem)
// cond:
func rewriteValueAMD64_OpAMD64ORL(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (ORL x (MOVLconst [c]))
// cond:
func rewriteValueAMD64_OpAMD64ORQ(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (ORQ x (MOVQconst [c]))
// cond: is32Bit(c)
func rewriteValueAMD64_OpAtomicAdd32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (AtomicAdd32 ptr val mem)
// cond:
func rewriteValueAMD64_OpAtomicAdd64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (AtomicAdd64 ptr val mem)
// cond:
func rewriteValueAMD64_OpAtomicStore32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (AtomicStore32 ptr val mem)
// cond:
func rewriteValueAMD64_OpAtomicStore64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (AtomicStore64 ptr val mem)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (AtomicStorePtrNoWB ptr val mem)
// cond: config.PtrSize == 8
func rewriteValueAMD64_OpBitLen32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (BitLen32 x)
// cond:
func rewriteValueAMD64_OpBitLen64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (BitLen64 <t> x)
// cond:
func rewriteValueAMD64_OpCtz32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Ctz32 x)
// cond:
func rewriteValueAMD64_OpCtz64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Ctz64 <t> x)
// cond:
func rewriteValueAMD64_OpDiv16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16 x y)
// cond:
func rewriteValueAMD64_OpDiv16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16u x y)
// cond:
func rewriteValueAMD64_OpDiv32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32 x y)
// cond:
func rewriteValueAMD64_OpDiv32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32u x y)
// cond:
func rewriteValueAMD64_OpDiv64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div64 x y)
// cond:
func rewriteValueAMD64_OpDiv64u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div64u x y)
// cond:
func rewriteValueAMD64_OpDiv8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8 x y)
// cond:
func rewriteValueAMD64_OpDiv8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8u x y)
// cond:
func rewriteValueAMD64_OpMod16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16 x y)
// cond:
func rewriteValueAMD64_OpMod16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16u x y)
// cond:
func rewriteValueAMD64_OpMod32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32 x y)
// cond:
func rewriteValueAMD64_OpMod32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32u x y)
// cond:
func rewriteValueAMD64_OpMod64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod64 x y)
// cond:
func rewriteValueAMD64_OpMod64u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod64u x y)
// cond:
func rewriteValueAMD64_OpMod8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8 x y)
// cond:
func rewriteValueAMD64_OpMod8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8u x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Move [0] _ _ mem)
// cond:
func rewriteValueAMD64_OpNeg32F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neg32F x)
// cond:
func rewriteValueAMD64_OpNeg64F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neg64F x)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (OffPtr [off] ptr)
// cond: config.PtrSize == 8 && is32Bit(off)
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Zero [0] _ mem)
// cond:
func rewriteBlockAMD64(b *Block) bool {
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
switch b.Kind {
case BlockAMD64EQ:
func rewriteValueARM_OpDiv16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16 x y)
// cond:
func rewriteValueARM_OpDiv16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16u x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32 x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32u x y)
// cond:
func rewriteValueARM_OpDiv8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8 x y)
// cond:
func rewriteValueARM_OpDiv8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8u x y)
// cond:
func rewriteValueARM_OpEq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq16 x y)
// cond:
func rewriteValueARM_OpEq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq8 x y)
// cond:
func rewriteValueARM_OpEqB(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqB x y)
// cond:
func rewriteValueARM_OpGeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16 x y)
// cond:
func rewriteValueARM_OpGeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16U x y)
// cond:
func rewriteValueARM_OpGeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8 x y)
// cond:
func rewriteValueARM_OpGeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8U x y)
// cond:
func rewriteValueARM_OpGreater16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16 x y)
// cond:
func rewriteValueARM_OpGreater16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16U x y)
// cond:
func rewriteValueARM_OpGreater8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8 x y)
// cond:
func rewriteValueARM_OpGreater8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8U x y)
// cond:
func rewriteValueARM_OpLeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16 x y)
// cond:
func rewriteValueARM_OpLeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16U x y)
// cond:
func rewriteValueARM_OpLeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8 x y)
// cond:
func rewriteValueARM_OpLeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8U x y)
// cond:
func rewriteValueARM_OpLess16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16 x y)
// cond:
func rewriteValueARM_OpLess16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16U x y)
// cond:
func rewriteValueARM_OpLess8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8 x y)
// cond:
func rewriteValueARM_OpLess8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8U x y)
// cond:
func rewriteValueARM_OpLsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x16 x y)
// cond:
func rewriteValueARM_OpLsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x8 x y)
// cond:
func rewriteValueARM_OpLsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x16 x y)
// cond:
func rewriteValueARM_OpLsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x8 x y)
// cond:
func rewriteValueARM_OpLsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x16 x y)
// cond:
func rewriteValueARM_OpLsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x8 x y)
// cond:
func rewriteValueARM_OpMod16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16 x y)
// cond:
func rewriteValueARM_OpMod16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16u x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32 x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32u x y)
// cond:
func rewriteValueARM_OpMod8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8 x y)
// cond:
func rewriteValueARM_OpMod8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8u x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Move [0] _ _ mem)
// cond:
func rewriteValueARM_OpNeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq16 x y)
// cond:
func rewriteValueARM_OpNeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq8 x y)
// cond:
func rewriteValueARM_OpRsh16Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux16 x y)
// cond:
func rewriteValueARM_OpRsh16Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux32 x y)
// cond:
func rewriteValueARM_OpRsh16Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux64 x (Const64 [c]))
// cond: uint64(c) < 16
func rewriteValueARM_OpRsh16Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux8 x y)
// cond:
func rewriteValueARM_OpRsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x16 x y)
// cond:
func rewriteValueARM_OpRsh16x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x32 x y)
// cond:
func rewriteValueARM_OpRsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x64 x (Const64 [c]))
// cond: uint64(c) < 16
func rewriteValueARM_OpRsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x8 x y)
// cond:
func rewriteValueARM_OpRsh32Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux16 x y)
// cond:
func rewriteValueARM_OpRsh32Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux8 x y)
// cond:
func rewriteValueARM_OpRsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x16 x y)
// cond:
func rewriteValueARM_OpRsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x8 x y)
// cond:
func rewriteValueARM_OpRsh8Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux16 x y)
// cond:
func rewriteValueARM_OpRsh8Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux32 x y)
// cond:
func rewriteValueARM_OpRsh8Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux64 x (Const64 [c]))
// cond: uint64(c) < 8
func rewriteValueARM_OpRsh8Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux8 x y)
// cond:
func rewriteValueARM_OpRsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x16 x y)
// cond:
func rewriteValueARM_OpRsh8x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x32 x y)
// cond:
func rewriteValueARM_OpRsh8x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x64 x (Const64 [c]))
// cond: uint64(c) < 8
func rewriteValueARM_OpRsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x8 x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Zero [0] _ mem)
// cond:
func rewriteValueARM_OpZeromask(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Zeromask x)
// cond:
func rewriteBlockARM(b *Block) bool {
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
switch b.Kind {
case BlockARMEQ:
func rewriteValueARM64_OpBitLen64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (BitLen64 x)
// cond:
func rewriteValueARM64_OpBitRev16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (BitRev16 x)
// cond:
func rewriteValueARM64_OpBitRev8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (BitRev8 x)
// cond:
func rewriteValueARM64_OpDiv16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16 x y)
// cond:
func rewriteValueARM64_OpDiv16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16u x y)
// cond:
func rewriteValueARM64_OpDiv8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8 x y)
// cond:
func rewriteValueARM64_OpDiv8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8u x y)
// cond:
func rewriteValueARM64_OpEq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq16 x y)
// cond:
func rewriteValueARM64_OpEq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq8 x y)
// cond:
func rewriteValueARM64_OpEqB(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqB x y)
// cond:
func rewriteValueARM64_OpGeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16 x y)
// cond:
func rewriteValueARM64_OpGeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16U x y)
// cond:
func rewriteValueARM64_OpGeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8 x y)
// cond:
func rewriteValueARM64_OpGeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8U x y)
// cond:
func rewriteValueARM64_OpGreater16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16 x y)
// cond:
func rewriteValueARM64_OpGreater16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16U x y)
// cond:
func rewriteValueARM64_OpGreater8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8 x y)
// cond:
func rewriteValueARM64_OpGreater8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8U x y)
// cond:
func rewriteValueARM64_OpHmul32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Hmul32 x y)
// cond:
func rewriteValueARM64_OpHmul32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Hmul32u x y)
// cond:
func rewriteValueARM64_OpLeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16 x y)
// cond:
func rewriteValueARM64_OpLeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16U x y)
// cond:
func rewriteValueARM64_OpLeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8 x y)
// cond:
func rewriteValueARM64_OpLeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8U x y)
// cond:
func rewriteValueARM64_OpLess16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16 x y)
// cond:
func rewriteValueARM64_OpLess16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16U x y)
// cond:
func rewriteValueARM64_OpLess8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8 x y)
// cond:
func rewriteValueARM64_OpLess8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8U x y)
// cond:
func rewriteValueARM64_OpLsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x16 <t> x y)
// cond:
func rewriteValueARM64_OpLsh16x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x32 <t> x y)
// cond:
func rewriteValueARM64_OpLsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x8 <t> x y)
// cond:
func rewriteValueARM64_OpLsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x16 <t> x y)
// cond:
func rewriteValueARM64_OpLsh32x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x32 <t> x y)
// cond:
func rewriteValueARM64_OpLsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x8 <t> x y)
// cond:
func rewriteValueARM64_OpLsh64x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x16 <t> x y)
// cond:
func rewriteValueARM64_OpLsh64x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x32 <t> x y)
// cond:
func rewriteValueARM64_OpLsh64x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x8 <t> x y)
// cond:
func rewriteValueARM64_OpLsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x16 <t> x y)
// cond:
func rewriteValueARM64_OpLsh8x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x32 <t> x y)
// cond:
func rewriteValueARM64_OpLsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x8 <t> x y)
// cond:
func rewriteValueARM64_OpMod16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16 x y)
// cond:
func rewriteValueARM64_OpMod16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16u x y)
// cond:
func rewriteValueARM64_OpMod8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8 x y)
// cond:
func rewriteValueARM64_OpMod8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8u x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Move [0] _ _ mem)
// cond:
func rewriteValueARM64_OpNeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq16 x y)
// cond:
func rewriteValueARM64_OpNeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq8 x y)
// cond:
func rewriteValueARM64_OpNot(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Not x)
// cond:
func rewriteValueARM64_OpRsh16Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux16 <t> x y)
// cond:
func rewriteValueARM64_OpRsh16Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux32 <t> x y)
// cond:
func rewriteValueARM64_OpRsh16Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux64 x (MOVDconst [c]))
// cond: uint64(c) < 16
func rewriteValueARM64_OpRsh16Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux8 <t> x y)
// cond:
func rewriteValueARM64_OpRsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x16 x y)
// cond:
func rewriteValueARM64_OpRsh16x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x32 x y)
// cond:
func rewriteValueARM64_OpRsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x64 x (MOVDconst [c]))
// cond: uint64(c) < 16
func rewriteValueARM64_OpRsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x8 x y)
// cond:
func rewriteValueARM64_OpRsh32Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux16 <t> x y)
// cond:
func rewriteValueARM64_OpRsh32Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux32 <t> x y)
// cond:
func rewriteValueARM64_OpRsh32Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux64 x (MOVDconst [c]))
// cond: uint64(c) < 32
func rewriteValueARM64_OpRsh32Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux8 <t> x y)
// cond:
func rewriteValueARM64_OpRsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x16 x y)
// cond:
func rewriteValueARM64_OpRsh32x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x32 x y)
// cond:
func rewriteValueARM64_OpRsh32x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x64 x (MOVDconst [c]))
// cond: uint64(c) < 32
func rewriteValueARM64_OpRsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x8 x y)
// cond:
func rewriteValueARM64_OpRsh64Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux16 <t> x y)
// cond:
func rewriteValueARM64_OpRsh64Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux32 <t> x y)
// cond:
func rewriteValueARM64_OpRsh64Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux8 <t> x y)
// cond:
func rewriteValueARM64_OpRsh64x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x16 x y)
// cond:
func rewriteValueARM64_OpRsh64x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x32 x y)
// cond:
func rewriteValueARM64_OpRsh64x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x8 x y)
// cond:
func rewriteValueARM64_OpRsh8Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux16 <t> x y)
// cond:
func rewriteValueARM64_OpRsh8Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux32 <t> x y)
// cond:
func rewriteValueARM64_OpRsh8Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux64 x (MOVDconst [c]))
// cond: uint64(c) < 8
func rewriteValueARM64_OpRsh8Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux8 <t> x y)
// cond:
func rewriteValueARM64_OpRsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x16 x y)
// cond:
func rewriteValueARM64_OpRsh8x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x32 x y)
// cond:
func rewriteValueARM64_OpRsh8x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x64 x (MOVDconst [c]))
// cond: uint64(c) < 8
func rewriteValueARM64_OpRsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x8 x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Zero [0] _ mem)
// cond:
func rewriteBlockARM64(b *Block) bool {
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
switch b.Kind {
case BlockARM64EQ:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (AtomicAnd8 ptr val mem)
// cond: !config.BigEndian
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (AtomicOr8 ptr val mem)
// cond: !config.BigEndian
func rewriteValueMIPS_OpBitLen32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (BitLen32 <t> x)
// cond:
func rewriteValueMIPS_OpCtz32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Ctz32 <t> x)
// cond:
func rewriteValueMIPS_OpDiv16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16 x y)
// cond:
func rewriteValueMIPS_OpDiv16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16u x y)
// cond:
func rewriteValueMIPS_OpDiv32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32 x y)
// cond:
func rewriteValueMIPS_OpDiv32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32u x y)
// cond:
func rewriteValueMIPS_OpDiv8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8 x y)
// cond:
func rewriteValueMIPS_OpDiv8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8u x y)
// cond:
func rewriteValueMIPS_OpEq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq16 x y)
// cond:
func rewriteValueMIPS_OpEq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq32 x y)
// cond:
func rewriteValueMIPS_OpEq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq8 x y)
// cond:
func rewriteValueMIPS_OpEqB(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqB x y)
// cond:
func rewriteValueMIPS_OpEqPtr(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqPtr x y)
// cond:
func rewriteValueMIPS_OpGeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16 x y)
// cond:
func rewriteValueMIPS_OpGeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16U x y)
// cond:
func rewriteValueMIPS_OpGeq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq32 x y)
// cond:
func rewriteValueMIPS_OpGeq32U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq32U x y)
// cond:
func rewriteValueMIPS_OpGeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8 x y)
// cond:
func rewriteValueMIPS_OpGeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8U x y)
// cond:
func rewriteValueMIPS_OpGreater16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16 x y)
// cond:
func rewriteValueMIPS_OpGreater16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16U x y)
// cond:
func rewriteValueMIPS_OpGreater8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8 x y)
// cond:
func rewriteValueMIPS_OpGreater8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8U x y)
// cond:
func rewriteValueMIPS_OpHmul32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Hmul32 x y)
// cond:
func rewriteValueMIPS_OpHmul32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Hmul32u x y)
// cond:
func rewriteValueMIPS_OpIsNonNil(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (IsNonNil ptr)
// cond:
func rewriteValueMIPS_OpIsSliceInBounds(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (IsSliceInBounds idx len)
// cond:
func rewriteValueMIPS_OpLeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16 x y)
// cond:
func rewriteValueMIPS_OpLeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16U x y)
// cond:
func rewriteValueMIPS_OpLeq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq32 x y)
// cond:
func rewriteValueMIPS_OpLeq32U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq32U x y)
// cond:
func rewriteValueMIPS_OpLeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8 x y)
// cond:
func rewriteValueMIPS_OpLeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8U x y)
// cond:
func rewriteValueMIPS_OpLess16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16 x y)
// cond:
func rewriteValueMIPS_OpLess16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16U x y)
// cond:
func rewriteValueMIPS_OpLess8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8 x y)
// cond:
func rewriteValueMIPS_OpLess8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8U x y)
// cond:
func rewriteValueMIPS_OpLsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x16 <t> x y)
// cond:
func rewriteValueMIPS_OpLsh16x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x32 <t> x y)
// cond:
func rewriteValueMIPS_OpLsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x8 <t> x y)
// cond:
func rewriteValueMIPS_OpLsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x16 <t> x y)
// cond:
func rewriteValueMIPS_OpLsh32x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x32 <t> x y)
// cond:
func rewriteValueMIPS_OpLsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x8 <t> x y)
// cond:
func rewriteValueMIPS_OpLsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x16 <t> x y)
// cond:
func rewriteValueMIPS_OpLsh8x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x32 <t> x y)
// cond:
func rewriteValueMIPS_OpLsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x8 <t> x y)
// cond:
func rewriteValueMIPS_OpMod16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16 x y)
// cond:
func rewriteValueMIPS_OpMod16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16u x y)
// cond:
func rewriteValueMIPS_OpMod32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32 x y)
// cond:
func rewriteValueMIPS_OpMod32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32u x y)
// cond:
func rewriteValueMIPS_OpMod8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8 x y)
// cond:
func rewriteValueMIPS_OpMod8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8u x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Move [0] _ _ mem)
// cond:
func rewriteValueMIPS_OpNeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq16 x y)
// cond:
func rewriteValueMIPS_OpNeq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq32 x y)
// cond:
func rewriteValueMIPS_OpNeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq8 x y)
// cond:
func rewriteValueMIPS_OpNeqPtr(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (NeqPtr x y)
// cond:
func rewriteValueMIPS_OpRsh16Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux16 <t> x y)
// cond:
func rewriteValueMIPS_OpRsh16Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux32 <t> x y)
// cond:
func rewriteValueMIPS_OpRsh16Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux64 x (Const64 [c]))
// cond: uint32(c) < 16
func rewriteValueMIPS_OpRsh16Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux8 <t> x y)
// cond:
func rewriteValueMIPS_OpRsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x16 x y)
// cond:
func rewriteValueMIPS_OpRsh16x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x32 x y)
// cond:
func rewriteValueMIPS_OpRsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x64 x (Const64 [c]))
// cond: uint32(c) < 16
func rewriteValueMIPS_OpRsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x8 x y)
// cond:
func rewriteValueMIPS_OpRsh32Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux16 <t> x y)
// cond:
func rewriteValueMIPS_OpRsh32Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux32 <t> x y)
// cond:
func rewriteValueMIPS_OpRsh32Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux8 <t> x y)
// cond:
func rewriteValueMIPS_OpRsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x16 x y)
// cond:
func rewriteValueMIPS_OpRsh32x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x32 x y)
// cond:
func rewriteValueMIPS_OpRsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x8 x y)
// cond:
func rewriteValueMIPS_OpRsh8Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux16 <t> x y)
// cond:
func rewriteValueMIPS_OpRsh8Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux32 <t> x y)
// cond:
func rewriteValueMIPS_OpRsh8Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux64 x (Const64 [c]))
// cond: uint32(c) < 8
func rewriteValueMIPS_OpRsh8Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux8 <t> x y)
// cond:
func rewriteValueMIPS_OpRsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x16 x y)
// cond:
func rewriteValueMIPS_OpRsh8x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x32 x y)
// cond:
func rewriteValueMIPS_OpRsh8x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x64 x (Const64 [c]))
// cond: uint32(c) < 8
func rewriteValueMIPS_OpRsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x8 x y)
// cond:
func rewriteValueMIPS_OpSelect0(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Select0 (Add32carry <t> x y))
// cond:
func rewriteValueMIPS_OpSelect1(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Select1 (Add32carry <t> x y))
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Zero [0] _ mem)
// cond:
func rewriteValueMIPS_OpZeromask(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Zeromask x)
// cond:
func rewriteBlockMIPS(b *Block) bool {
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
switch b.Kind {
case BlockMIPSEQ:
func rewriteValueMIPS64_OpCom16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Com16 x)
// cond:
func rewriteValueMIPS64_OpCom32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Com32 x)
// cond:
func rewriteValueMIPS64_OpCom64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Com64 x)
// cond:
func rewriteValueMIPS64_OpCom8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Com8 x)
// cond:
func rewriteValueMIPS64_OpDiv16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16 x y)
// cond:
func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16u x y)
// cond:
func rewriteValueMIPS64_OpDiv32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32 x y)
// cond:
func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32u x y)
// cond:
func rewriteValueMIPS64_OpDiv64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div64 x y)
// cond:
func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div64u x y)
// cond:
func rewriteValueMIPS64_OpDiv8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8 x y)
// cond:
func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8u x y)
// cond:
func rewriteValueMIPS64_OpEq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq16 x y)
// cond:
func rewriteValueMIPS64_OpEq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq32 x y)
// cond:
func rewriteValueMIPS64_OpEq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq64 x y)
// cond:
func rewriteValueMIPS64_OpEq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq8 x y)
// cond:
func rewriteValueMIPS64_OpEqB(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqB x y)
// cond:
func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqPtr x y)
// cond:
func rewriteValueMIPS64_OpGeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16 x y)
// cond:
func rewriteValueMIPS64_OpGeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16U x y)
// cond:
func rewriteValueMIPS64_OpGeq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq32 x y)
// cond:
func rewriteValueMIPS64_OpGeq32U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq32U x y)
// cond:
func rewriteValueMIPS64_OpGeq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq64 x y)
// cond:
func rewriteValueMIPS64_OpGeq64U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq64U x y)
// cond:
func rewriteValueMIPS64_OpGeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8 x y)
// cond:
func rewriteValueMIPS64_OpGeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8U x y)
// cond:
func rewriteValueMIPS64_OpGreater16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16 x y)
// cond:
func rewriteValueMIPS64_OpGreater16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16U x y)
// cond:
func rewriteValueMIPS64_OpGreater32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater32 x y)
// cond:
func rewriteValueMIPS64_OpGreater32U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater32U x y)
// cond:
func rewriteValueMIPS64_OpGreater8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8 x y)
// cond:
func rewriteValueMIPS64_OpGreater8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8U x y)
// cond:
func rewriteValueMIPS64_OpHmul32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Hmul32 x y)
// cond:
func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Hmul32u x y)
// cond:
func rewriteValueMIPS64_OpHmul64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Hmul64 x y)
// cond:
func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Hmul64u x y)
// cond:
func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (IsNonNil ptr)
// cond:
func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (IsSliceInBounds idx len)
// cond:
func rewriteValueMIPS64_OpLeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16 x y)
// cond:
func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16U x y)
// cond:
func rewriteValueMIPS64_OpLeq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq32 x y)
// cond:
func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq32U x y)
// cond:
func rewriteValueMIPS64_OpLeq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq64 x y)
// cond:
func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq64U x y)
// cond:
func rewriteValueMIPS64_OpLeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8 x y)
// cond:
func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8U x y)
// cond:
func rewriteValueMIPS64_OpLess16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16 x y)
// cond:
func rewriteValueMIPS64_OpLess16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16U x y)
// cond:
func rewriteValueMIPS64_OpLess32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less32 x y)
// cond:
func rewriteValueMIPS64_OpLess32U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less32U x y)
// cond:
func rewriteValueMIPS64_OpLess8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8 x y)
// cond:
func rewriteValueMIPS64_OpLess8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8U x y)
// cond:
func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x16 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x32 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x64 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x8 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x16 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x32 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x64 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x8 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x16 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x32 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x64 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x8 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x16 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x32 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x64 <t> x y)
// cond:
func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x8 <t> x y)
// cond:
func rewriteValueMIPS64_OpMod16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16 x y)
// cond:
func rewriteValueMIPS64_OpMod16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16u x y)
// cond:
func rewriteValueMIPS64_OpMod32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32 x y)
// cond:
func rewriteValueMIPS64_OpMod32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32u x y)
// cond:
func rewriteValueMIPS64_OpMod64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod64 x y)
// cond:
func rewriteValueMIPS64_OpMod64u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod64u x y)
// cond:
func rewriteValueMIPS64_OpMod8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8 x y)
// cond:
func rewriteValueMIPS64_OpMod8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8u x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Move [0] _ _ mem)
// cond:
func rewriteValueMIPS64_OpMul16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mul16 x y)
// cond:
func rewriteValueMIPS64_OpMul32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mul32 x y)
// cond:
func rewriteValueMIPS64_OpMul64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mul64 x y)
// cond:
func rewriteValueMIPS64_OpMul8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mul8 x y)
// cond:
func rewriteValueMIPS64_OpNeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq16 x y)
// cond:
func rewriteValueMIPS64_OpNeq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq32 x y)
// cond:
func rewriteValueMIPS64_OpNeq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq64 x y)
// cond:
func rewriteValueMIPS64_OpNeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq8 x y)
// cond:
func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (NeqPtr x y)
// cond:
func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux16 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux32 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux64 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux8 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x16 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x32 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x64 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x8 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux16 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux32 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux64 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux8 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x16 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x32 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x64 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x8 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux16 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux32 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux64 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux8 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x16 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x32 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x64 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x8 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux16 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux32 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux64 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux8 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x16 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x32 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x64 <t> x y)
// cond:
func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x8 <t> x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Zero [0] _ mem)
// cond:
func rewriteBlockMIPS64(b *Block) bool {
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
switch b.Kind {
case BlockMIPS64EQ:
func rewriteValuePPC64_OpCvt32Fto32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Cvt32Fto32 x)
// cond:
func rewriteValuePPC64_OpCvt32Fto64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Cvt32Fto64 x)
// cond:
func rewriteValuePPC64_OpCvt32to32F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Cvt32to32F x)
// cond:
func rewriteValuePPC64_OpCvt32to64F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Cvt32to64F x)
// cond:
func rewriteValuePPC64_OpCvt64Fto32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Cvt64Fto32 x)
// cond:
func rewriteValuePPC64_OpCvt64Fto64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Cvt64Fto64 x)
// cond:
func rewriteValuePPC64_OpCvt64to32F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Cvt64to32F x)
// cond:
func rewriteValuePPC64_OpCvt64to64F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Cvt64to64F x)
// cond:
func rewriteValuePPC64_OpDiv16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16 x y)
// cond:
func rewriteValuePPC64_OpDiv16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16u x y)
// cond:
func rewriteValuePPC64_OpDiv8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8 x y)
// cond:
func rewriteValuePPC64_OpDiv8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8u x y)
// cond:
func rewriteValuePPC64_OpEq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq16 x y)
// cond: isSigned(x.Type) && isSigned(y.Type)
func rewriteValuePPC64_OpEq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq8 x y)
// cond: isSigned(x.Type) && isSigned(y.Type)
func rewriteValuePPC64_OpEqB(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqB x y)
// cond:
func rewriteValuePPC64_OpGeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16 x y)
// cond:
func rewriteValuePPC64_OpGeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16U x y)
// cond:
func rewriteValuePPC64_OpGeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8 x y)
// cond:
func rewriteValuePPC64_OpGeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8U x y)
// cond:
func rewriteValuePPC64_OpGreater16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16 x y)
// cond:
func rewriteValuePPC64_OpGreater16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16U x y)
// cond:
func rewriteValuePPC64_OpGreater8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8 x y)
// cond:
func rewriteValuePPC64_OpGreater8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8U x y)
// cond:
func rewriteValuePPC64_OpLeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16 x y)
// cond:
func rewriteValuePPC64_OpLeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16U x y)
// cond:
func rewriteValuePPC64_OpLeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8 x y)
// cond:
func rewriteValuePPC64_OpLeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8U x y)
// cond:
func rewriteValuePPC64_OpLess16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16 x y)
// cond:
func rewriteValuePPC64_OpLess16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16U x y)
// cond:
func rewriteValuePPC64_OpLess8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8 x y)
// cond:
func rewriteValuePPC64_OpLess8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8U x y)
// cond:
func rewriteValuePPC64_OpLoad(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Load <t> ptr mem)
// cond: (is64BitInt(t) || isPtr(t))
func rewriteValuePPC64_OpLsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x16 x y)
// cond:
func rewriteValuePPC64_OpLsh16x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x32 x (Const64 [c]))
// cond: uint32(c) < 16
func rewriteValuePPC64_OpLsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x64 x (Const64 [c]))
// cond: uint64(c) < 16
func rewriteValuePPC64_OpLsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x8 x y)
// cond:
func rewriteValuePPC64_OpLsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x16 x y)
// cond:
func rewriteValuePPC64_OpLsh32x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x32 x (Const64 [c]))
// cond: uint32(c) < 32
func rewriteValuePPC64_OpLsh32x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x64 x (Const64 [c]))
// cond: uint64(c) < 32
func rewriteValuePPC64_OpLsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x8 x y)
// cond:
func rewriteValuePPC64_OpLsh64x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x16 x y)
// cond:
func rewriteValuePPC64_OpLsh64x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x32 x (Const64 [c]))
// cond: uint32(c) < 64
func rewriteValuePPC64_OpLsh64x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x64 x (Const64 [c]))
// cond: uint64(c) < 64
func rewriteValuePPC64_OpLsh64x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x8 x y)
// cond:
func rewriteValuePPC64_OpLsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x16 x y)
// cond:
func rewriteValuePPC64_OpLsh8x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x32 x (Const64 [c]))
// cond: uint32(c) < 8
func rewriteValuePPC64_OpLsh8x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x64 x (Const64 [c]))
// cond: uint64(c) < 8
func rewriteValuePPC64_OpLsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x8 x y)
// cond:
func rewriteValuePPC64_OpMod16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16 x y)
// cond:
func rewriteValuePPC64_OpMod16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16u x y)
// cond:
func rewriteValuePPC64_OpMod32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32 x y)
// cond:
func rewriteValuePPC64_OpMod32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32u x y)
// cond:
func rewriteValuePPC64_OpMod64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod64 x y)
// cond:
func rewriteValuePPC64_OpMod64u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod64u x y)
// cond:
func rewriteValuePPC64_OpMod8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8 x y)
// cond:
func rewriteValuePPC64_OpMod8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8u x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Move [0] _ _ mem)
// cond:
func rewriteValuePPC64_OpNeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq16 x y)
// cond: isSigned(x.Type) && isSigned(y.Type)
func rewriteValuePPC64_OpNeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq8 x y)
// cond: isSigned(x.Type) && isSigned(y.Type)
func rewriteValuePPC64_OpOffPtr(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (OffPtr [off] ptr)
// cond:
func rewriteValuePPC64_OpRsh16Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux16 x y)
// cond:
func rewriteValuePPC64_OpRsh16Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux32 x (Const64 [c]))
// cond: uint32(c) < 16
func rewriteValuePPC64_OpRsh16Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux64 x (Const64 [c]))
// cond: uint64(c) < 16
func rewriteValuePPC64_OpRsh16Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux8 x y)
// cond:
func rewriteValuePPC64_OpRsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x16 x y)
// cond:
func rewriteValuePPC64_OpRsh16x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x32 x (Const64 [c]))
// cond: uint32(c) < 16
func rewriteValuePPC64_OpRsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x64 x (Const64 [c]))
// cond: uint64(c) < 16
func rewriteValuePPC64_OpRsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x8 x y)
// cond:
func rewriteValuePPC64_OpRsh32Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux16 x y)
// cond:
func rewriteValuePPC64_OpRsh32Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux32 x (Const64 [c]))
// cond: uint32(c) < 32
func rewriteValuePPC64_OpRsh32Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux64 x (Const64 [c]))
// cond: uint64(c) < 32
func rewriteValuePPC64_OpRsh32Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux8 x y)
// cond:
func rewriteValuePPC64_OpRsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x16 x y)
// cond:
func rewriteValuePPC64_OpRsh32x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x32 x (Const64 [c]))
// cond: uint32(c) < 32
func rewriteValuePPC64_OpRsh32x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x64 x (Const64 [c]))
// cond: uint64(c) < 32
func rewriteValuePPC64_OpRsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x8 x y)
// cond:
func rewriteValuePPC64_OpRsh64Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux16 x y)
// cond:
func rewriteValuePPC64_OpRsh64Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux32 x (Const64 [c]))
// cond: uint32(c) < 64
func rewriteValuePPC64_OpRsh64Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux64 x (Const64 [c]))
// cond: uint64(c) < 64
func rewriteValuePPC64_OpRsh64Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux8 x y)
// cond:
func rewriteValuePPC64_OpRsh64x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x16 x y)
// cond:
func rewriteValuePPC64_OpRsh64x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x32 x (Const64 [c]))
// cond: uint32(c) < 64
func rewriteValuePPC64_OpRsh64x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x64 x (Const64 [c]))
// cond: uint64(c) < 64
func rewriteValuePPC64_OpRsh64x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x8 x y)
// cond:
func rewriteValuePPC64_OpRsh8Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux16 x y)
// cond:
func rewriteValuePPC64_OpRsh8Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux32 x (Const64 [c]))
// cond: uint32(c) < 8
func rewriteValuePPC64_OpRsh8Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux64 x (Const64 [c]))
// cond: uint64(c) < 8
func rewriteValuePPC64_OpRsh8Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux8 x y)
// cond:
func rewriteValuePPC64_OpRsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x16 x y)
// cond:
func rewriteValuePPC64_OpRsh8x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x32 x (Const64 [c]))
// cond: uint32(c) < 8
func rewriteValuePPC64_OpRsh8x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x64 x (Const64 [c]))
// cond: uint64(c) < 8
func rewriteValuePPC64_OpRsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x8 x y)
// cond:
func rewriteBlockPPC64(b *Block) bool {
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
switch b.Kind {
case BlockPPC64EQ:
func rewriteValueS390X_OpAtomicAdd32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (AtomicAdd32 ptr val mem)
// cond:
func rewriteValueS390X_OpAtomicAdd64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (AtomicAdd64 ptr val mem)
// cond:
func rewriteValueS390X_OpBitLen64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (BitLen64 x)
// cond:
func rewriteValueS390X_OpCtz32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Ctz32 <t> x)
// cond:
func rewriteValueS390X_OpCtz64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Ctz64 <t> x)
// cond:
func rewriteValueS390X_OpDiv16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16 x y)
// cond:
func rewriteValueS390X_OpDiv16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16u x y)
// cond:
func rewriteValueS390X_OpDiv32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32 x y)
// cond:
func rewriteValueS390X_OpDiv32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32u x y)
// cond:
func rewriteValueS390X_OpDiv8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8 x y)
// cond:
func rewriteValueS390X_OpDiv8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8u x y)
// cond:
func rewriteValueS390X_OpEq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq16 x y)
// cond:
func rewriteValueS390X_OpEq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq32 x y)
// cond:
func rewriteValueS390X_OpEq32F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq32F x y)
// cond:
func rewriteValueS390X_OpEq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq64 x y)
// cond:
func rewriteValueS390X_OpEq64F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq64F x y)
// cond:
func rewriteValueS390X_OpEq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq8 x y)
// cond:
func rewriteValueS390X_OpEqB(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqB x y)
// cond:
func rewriteValueS390X_OpEqPtr(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqPtr x y)
// cond:
func rewriteValueS390X_OpGeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16 x y)
// cond:
func rewriteValueS390X_OpGeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq16U x y)
// cond:
func rewriteValueS390X_OpGeq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq32 x y)
// cond:
func rewriteValueS390X_OpGeq32F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq32F x y)
// cond:
func rewriteValueS390X_OpGeq32U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq32U x y)
// cond:
func rewriteValueS390X_OpGeq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq64 x y)
// cond:
func rewriteValueS390X_OpGeq64F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq64F x y)
// cond:
func rewriteValueS390X_OpGeq64U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq64U x y)
// cond:
func rewriteValueS390X_OpGeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8 x y)
// cond:
func rewriteValueS390X_OpGeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq8U x y)
// cond:
func rewriteValueS390X_OpGreater16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16 x y)
// cond:
func rewriteValueS390X_OpGreater16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater16U x y)
// cond:
func rewriteValueS390X_OpGreater32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater32 x y)
// cond:
func rewriteValueS390X_OpGreater32F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater32F x y)
// cond:
func rewriteValueS390X_OpGreater32U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater32U x y)
// cond:
func rewriteValueS390X_OpGreater64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater64 x y)
// cond:
func rewriteValueS390X_OpGreater64F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater64F x y)
// cond:
func rewriteValueS390X_OpGreater64U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater64U x y)
// cond:
func rewriteValueS390X_OpGreater8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8 x y)
// cond:
func rewriteValueS390X_OpGreater8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater8U x y)
// cond:
func rewriteValueS390X_OpHmul32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Hmul32 x y)
// cond:
func rewriteValueS390X_OpHmul32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Hmul32u x y)
// cond:
func rewriteValueS390X_OpIsInBounds(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (IsInBounds idx len)
// cond:
func rewriteValueS390X_OpIsNonNil(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (IsNonNil p)
// cond:
func rewriteValueS390X_OpIsSliceInBounds(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (IsSliceInBounds idx len)
// cond:
func rewriteValueS390X_OpLeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16 x y)
// cond:
func rewriteValueS390X_OpLeq16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq16U x y)
// cond:
func rewriteValueS390X_OpLeq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq32 x y)
// cond:
func rewriteValueS390X_OpLeq32F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq32F x y)
// cond:
func rewriteValueS390X_OpLeq32U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq32U x y)
// cond:
func rewriteValueS390X_OpLeq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq64 x y)
// cond:
func rewriteValueS390X_OpLeq64F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq64F x y)
// cond:
func rewriteValueS390X_OpLeq64U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq64U x y)
// cond:
func rewriteValueS390X_OpLeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8 x y)
// cond:
func rewriteValueS390X_OpLeq8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq8U x y)
// cond:
func rewriteValueS390X_OpLess16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16 x y)
// cond:
func rewriteValueS390X_OpLess16U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less16U x y)
// cond:
func rewriteValueS390X_OpLess32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less32 x y)
// cond:
func rewriteValueS390X_OpLess32F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less32F x y)
// cond:
func rewriteValueS390X_OpLess32U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less32U x y)
// cond:
func rewriteValueS390X_OpLess64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less64 x y)
// cond:
func rewriteValueS390X_OpLess64F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less64F x y)
// cond:
func rewriteValueS390X_OpLess64U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less64U x y)
// cond:
func rewriteValueS390X_OpLess8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8 x y)
// cond:
func rewriteValueS390X_OpLess8U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less8U x y)
// cond:
func rewriteValueS390X_OpLsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x16 <t> x y)
// cond:
func rewriteValueS390X_OpLsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x8 <t> x y)
// cond:
func rewriteValueS390X_OpLsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x16 <t> x y)
// cond:
func rewriteValueS390X_OpLsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x8 <t> x y)
// cond:
func rewriteValueS390X_OpLsh64x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x16 <t> x y)
// cond:
func rewriteValueS390X_OpLsh64x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x8 <t> x y)
// cond:
func rewriteValueS390X_OpLsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x16 <t> x y)
// cond:
func rewriteValueS390X_OpLsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x8 <t> x y)
// cond:
func rewriteValueS390X_OpMod16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16 x y)
// cond:
func rewriteValueS390X_OpMod16u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod16u x y)
// cond:
func rewriteValueS390X_OpMod32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32 x y)
// cond:
func rewriteValueS390X_OpMod32u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod32u x y)
// cond:
func rewriteValueS390X_OpMod8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8 x y)
// cond:
func rewriteValueS390X_OpMod8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mod8u x y)
// cond:
func rewriteValueS390X_OpMove(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Move [0] _ _ mem)
// cond:
func rewriteValueS390X_OpNeg16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neg16 x)
// cond:
func rewriteValueS390X_OpNeg8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neg8 x)
// cond:
func rewriteValueS390X_OpNeq16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq16 x y)
// cond:
func rewriteValueS390X_OpNeq32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq32 x y)
// cond:
func rewriteValueS390X_OpNeq32F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq32F x y)
// cond:
func rewriteValueS390X_OpNeq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq64 x y)
// cond:
func rewriteValueS390X_OpNeq64F(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq64F x y)
// cond:
func rewriteValueS390X_OpNeq8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq8 x y)
// cond:
func rewriteValueS390X_OpNeqB(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (NeqB x y)
// cond:
func rewriteValueS390X_OpNeqPtr(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (NeqPtr x y)
// cond:
func rewriteValueS390X_OpOffPtr(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (OffPtr [off] ptr:(SP))
// cond:
func rewriteValueS390X_OpRsh16Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux16 <t> x y)
// cond:
func rewriteValueS390X_OpRsh16Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux32 <t> x y)
// cond:
func rewriteValueS390X_OpRsh16Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux64 <t> x y)
// cond:
func rewriteValueS390X_OpRsh16Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux8 <t> x y)
// cond:
func rewriteValueS390X_OpRsh16x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x16 <t> x y)
// cond:
func rewriteValueS390X_OpRsh16x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x32 <t> x y)
// cond:
func rewriteValueS390X_OpRsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x64 <t> x y)
// cond:
func rewriteValueS390X_OpRsh16x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x8 <t> x y)
// cond:
func rewriteValueS390X_OpRsh32Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux16 <t> x y)
// cond:
func rewriteValueS390X_OpRsh32Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux8 <t> x y)
// cond:
func rewriteValueS390X_OpRsh32x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x16 <t> x y)
// cond:
func rewriteValueS390X_OpRsh32x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x8 <t> x y)
// cond:
func rewriteValueS390X_OpRsh64Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux16 <t> x y)
// cond:
func rewriteValueS390X_OpRsh64Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux8 <t> x y)
// cond:
func rewriteValueS390X_OpRsh64x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x16 <t> x y)
// cond:
func rewriteValueS390X_OpRsh64x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x8 <t> x y)
// cond:
func rewriteValueS390X_OpRsh8Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux16 <t> x y)
// cond:
func rewriteValueS390X_OpRsh8Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux32 <t> x y)
// cond:
func rewriteValueS390X_OpRsh8Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux64 <t> x y)
// cond:
func rewriteValueS390X_OpRsh8Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux8 <t> x y)
// cond:
func rewriteValueS390X_OpRsh8x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x16 <t> x y)
// cond:
func rewriteValueS390X_OpRsh8x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x32 <t> x y)
// cond:
func rewriteValueS390X_OpRsh8x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x64 <t> x y)
// cond:
func rewriteValueS390X_OpRsh8x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x8 <t> x y)
// cond:
func rewriteValueS390X_OpS390XMOVWstoreconst(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (MOVWstoreconst [sc] {s} (ADDconst [off] ptr) mem)
// cond: ValAndOff(sc).canAdd(off)
func rewriteValueS390X_OpS390XNOT(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (NOT x)
// cond: true
func rewriteValueS390X_OpS390XOR(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (OR x (MOVDconst [c]))
// cond: isU32Bit(c)
func rewriteValueS390X_OpS390XORW(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (ORW x (MOVDconst [c]))
// cond:
func rewriteBlockS390X(b *Block) bool {
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
switch b.Kind {
case BlockS390XEQ:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Load <t> ptr mem)
// cond: t.IsComplex() && t.Size() == 8
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Store {t} dst (ComplexMake real imag) mem)
// cond: t.(Type).Size() == 8
func rewriteBlockdec(b *Block) bool {
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
switch b.Kind {
}
func rewriteValuedec64_OpAdd64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Add64 x y)
// cond:
func rewriteValuedec64_OpAnd64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (And64 x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Arg {n} [off])
// cond: is64BitInt(v.Type) && !config.BigEndian && v.Type.IsSigned()
func rewriteValuedec64_OpBitLen64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (BitLen64 x)
// cond:
func rewriteValuedec64_OpBswap64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Bswap64 x)
// cond:
func rewriteValuedec64_OpCom64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Com64 x)
// cond:
func rewriteValuedec64_OpConst64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Const64 <t> [c])
// cond: t.IsSigned()
func rewriteValuedec64_OpCtz64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Ctz64 x)
// cond:
func rewriteValuedec64_OpEq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Eq64 x y)
// cond:
func rewriteValuedec64_OpGeq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq64 x y)
// cond:
func rewriteValuedec64_OpGeq64U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Geq64U x y)
// cond:
func rewriteValuedec64_OpGreater64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater64 x y)
// cond:
func rewriteValuedec64_OpGreater64U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Greater64U x y)
// cond:
func rewriteValuedec64_OpLeq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq64 x y)
// cond:
func rewriteValuedec64_OpLeq64U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Leq64U x y)
// cond:
func rewriteValuedec64_OpLess64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less64 x y)
// cond:
func rewriteValuedec64_OpLess64U(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Less64U x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Load <t> ptr mem)
// cond: is64BitInt(t) && !config.BigEndian && t.IsSigned()
func rewriteValuedec64_OpLsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpLsh32x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpLsh64x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x16 (Int64Make hi lo) s)
// cond:
func rewriteValuedec64_OpLsh64x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x32 (Int64Make hi lo) s)
// cond:
func rewriteValuedec64_OpLsh64x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpLsh64x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x8 (Int64Make hi lo) s)
// cond:
func rewriteValuedec64_OpLsh8x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpMul64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mul64 x y)
// cond:
func rewriteValuedec64_OpNeq64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Neq64 x y)
// cond:
func rewriteValuedec64_OpOr64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Or64 x y)
// cond:
func rewriteValuedec64_OpRsh16Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpRsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x64 x (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpRsh32Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpRsh32x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x64 x (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpRsh64Ux16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux16 (Int64Make hi lo) s)
// cond:
func rewriteValuedec64_OpRsh64Ux32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux32 (Int64Make hi lo) s)
// cond:
func rewriteValuedec64_OpRsh64Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpRsh64Ux8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux8 (Int64Make hi lo) s)
// cond:
func rewriteValuedec64_OpRsh64x16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x16 (Int64Make hi lo) s)
// cond:
func rewriteValuedec64_OpRsh64x32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x32 (Int64Make hi lo) s)
// cond:
func rewriteValuedec64_OpRsh64x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x64 x (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpRsh64x8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x8 (Int64Make hi lo) s)
// cond:
func rewriteValuedec64_OpRsh8Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux64 _ (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpRsh8x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8x64 x (Int64Make (Const32 [c]) _))
// cond: c != 0
func rewriteValuedec64_OpSignExt16to64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (SignExt16to64 x)
// cond:
func rewriteValuedec64_OpSignExt32to64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (SignExt32to64 x)
// cond:
func rewriteValuedec64_OpSignExt8to64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (SignExt8to64 x)
// cond:
func rewriteValuedec64_OpSub64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Sub64 x y)
// cond:
func rewriteValuedec64_OpXor64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Xor64 x y)
// cond:
func rewriteValuedec64_OpZeroExt16to64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (ZeroExt16to64 x)
// cond:
func rewriteValuedec64_OpZeroExt32to64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (ZeroExt32to64 x)
// cond:
func rewriteValuedec64_OpZeroExt8to64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (ZeroExt8to64 x)
// cond:
func rewriteBlockdec64(b *Block) bool {
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
switch b.Kind {
}
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Arg {n} [off])
// cond: v.Type.IsString()
func rewriteValuegeneric_OpConstInterface(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (ConstInterface)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (ConstSlice)
// cond: config.PtrSize == 4
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (ConstString {s})
// cond: config.PtrSize == 4 && s.(string) == ""
func rewriteValuegeneric_OpDiv16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16 (Const16 [c]) (Const16 [d]))
// cond: d != 0
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div16u (Const16 [c]) (Const16 [d]))
// cond: d != 0
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32 (Const32 [c]) (Const32 [d]))
// cond: d != 0
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div32u (Const32 [c]) (Const32 [d]))
// cond: d != 0
func rewriteValuegeneric_OpDiv64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div64 (Const64 [c]) (Const64 [d]))
// cond: d != 0
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div64u (Const64 [c]) (Const64 [d]))
// cond: d != 0
func rewriteValuegeneric_OpDiv8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8 (Const8 [c]) (Const8 [d]))
// cond: d != 0
func rewriteValuegeneric_OpDiv8u(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Div8u (Const8 [c]) (Const8 [d]))
// cond: d != 0
func rewriteValuegeneric_OpEqInter(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqInter x y)
// cond:
func rewriteValuegeneric_OpEqPtr(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqPtr p (ConstNil))
// cond:
func rewriteValuegeneric_OpEqSlice(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (EqSlice x y)
// cond:
func rewriteValuegeneric_OpLoad(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Load <t1> p1 (Store {t2} p2 x _))
// cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && t1.Size() == t2.(Type).Size()
func rewriteValuegeneric_OpLsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh16x64 (Const16 [c]) (Const64 [d]))
// cond:
func rewriteValuegeneric_OpLsh32x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh32x64 (Const32 [c]) (Const64 [d]))
// cond:
func rewriteValuegeneric_OpLsh64x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh64x64 (Const64 [c]) (Const64 [d]))
// cond:
func rewriteValuegeneric_OpLsh8x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Lsh8x64 (Const8 [c]) (Const64 [d]))
// cond:
func rewriteValuegeneric_OpMul16(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mul16 (Const16 [c]) (Const16 [d]))
// cond:
func rewriteValuegeneric_OpMul32(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mul32 (Const32 [c]) (Const32 [d]))
// cond:
func rewriteValuegeneric_OpMul64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mul64 (Const64 [c]) (Const64 [d]))
// cond:
func rewriteValuegeneric_OpMul8(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Mul8 (Const8 [c]) (Const8 [d]))
// cond:
func rewriteValuegeneric_OpNeqInter(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (NeqInter x y)
// cond:
func rewriteValuegeneric_OpNeqSlice(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (NeqSlice x y)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (NilCheck (GetG mem) mem)
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (PtrIndex <t> ptr idx)
// cond: config.PtrSize == 4
func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16Ux64 (Const16 [c]) (Const64 [d]))
// cond:
func rewriteValuegeneric_OpRsh16x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh16x64 (Const16 [c]) (Const64 [d]))
// cond:
func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32Ux64 (Const32 [c]) (Const64 [d]))
// cond:
func rewriteValuegeneric_OpRsh32x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh32x64 (Const32 [c]) (Const64 [d]))
// cond:
func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64Ux64 (Const64 [c]) (Const64 [d]))
// cond:
func rewriteValuegeneric_OpRsh64x64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh64x64 (Const64 [c]) (Const64 [d]))
// cond:
func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Rsh8Ux64 (Const8 [c]) (Const64 [d]))
// cond:
_ = b
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (Store _ (StructMake0) mem)
// cond:
func rewriteValuegeneric_OpStructSelect(v *Value) bool {
b := v.Block
_ = b
- config := b.Func.Config
- _ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
// match: (StructSelect (StructMake1 x))
// cond:
func rewriteBlockgeneric(b *Block) bool {
config := b.Func.Config
_ = config
- fe := config.fe
+ fe := b.Func.fe
_ = fe
switch b.Kind {
case BlockIf:
func TestSchedule(t *testing.T) {
c := testConfig(t)
cases := []fun{
- Fun(c, "entry",
+ Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem0", OpInitMem, TypeMem, 0, nil),
Valu("ptr", OpConst64, TypeInt64, 0xABCD, nil),
// In the function below, v2 depends on v3 and v4, v4 depends on v3, and v3 depends on store v5.
// storeOrder did not handle this case correctly.
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem0", OpInitMem, TypeMem, 0, nil),
Valu("a", OpAdd64, TypeInt64, 0, nil, "b", "c"), // v2
func TestShiftConstAMD64(t *testing.T) {
c := testConfig(t)
- fun := makeConstShiftFunc(c, 18, OpLsh64x64, TypeUInt64)
+ fe := DummyFrontend{t}
+ fun := makeConstShiftFunc(c, fe, 18, OpLsh64x64, TypeUInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHLQconst: 1, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
- fun = makeConstShiftFunc(c, 66, OpLsh64x64, TypeUInt64)
+ fun = makeConstShiftFunc(c, fe, 66, OpLsh64x64, TypeUInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHLQconst: 0, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
- fun = makeConstShiftFunc(c, 18, OpRsh64Ux64, TypeUInt64)
+ fun = makeConstShiftFunc(c, fe, 18, OpRsh64Ux64, TypeUInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHRQconst: 1, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
- fun = makeConstShiftFunc(c, 66, OpRsh64Ux64, TypeUInt64)
+ fun = makeConstShiftFunc(c, fe, 66, OpRsh64Ux64, TypeUInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHRQconst: 0, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
- fun = makeConstShiftFunc(c, 18, OpRsh64x64, TypeInt64)
+ fun = makeConstShiftFunc(c, fe, 18, OpRsh64x64, TypeInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SARQconst: 1, OpAMD64CMPQconst: 0})
- fun = makeConstShiftFunc(c, 66, OpRsh64x64, TypeInt64)
+ fun = makeConstShiftFunc(c, fe, 66, OpRsh64x64, TypeInt64)
checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SARQconst: 1, OpAMD64CMPQconst: 0})
}
-func makeConstShiftFunc(c *Config, amount int64, op Op, typ Type) fun {
+func makeConstShiftFunc(c *Config, fe Frontend, amount int64, op Op, typ Type) fun {
ptyp := &TypeImpl{Size_: 8, Ptr: true, Name: "ptr"}
- fun := Fun(c, "entry",
+ fun := Fun(c, fe, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("SP", OpSP, TypeUInt64, 0, nil),
{8, OpLsh16x64, OpRsh16x64, TypeInt16},
}
c := testConfig(t)
+ fe := DummyFrontend{t}
for _, tc := range tests {
- fun := makeShiftExtensionFunc(c, tc.amount, tc.left, tc.right, tc.typ)
+ fun := makeShiftExtensionFunc(c, fe, tc.amount, tc.left, tc.right, tc.typ)
checkOpcodeCounts(t, fun.f, ops)
}
}
// (rshift (lshift (Const64 [amount])) (Const64 [amount]))
//
// This may be equivalent to a sign or zero extension.
-func makeShiftExtensionFunc(c *Config, amount int64, lshift, rshift Op, typ Type) fun {
+func makeShiftExtensionFunc(c *Config, fe Frontend, amount int64, lshift, rshift Op, typ Type) fun {
ptyp := &TypeImpl{Size_: 8, Ptr: true, Name: "ptr"}
- fun := Fun(c, "entry",
+ fun := Fun(c, fe, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("SP", OpSP, TypeUInt64, 0, nil),
// x = phi(a, ...)
//
// We can replace the "a" in the phi with the constant true.
- ct := f.ConstBool(f.Entry.Pos, f.Config.fe.TypeBool(), true)
- cf := f.ConstBool(f.Entry.Pos, f.Config.fe.TypeBool(), false)
+ ct := f.ConstBool(f.Entry.Pos, f.fe.TypeBool(), true)
+ cf := f.ConstBool(f.Entry.Pos, f.fe.TypeBool(), false)
for _, b := range f.Blocks {
for _, v := range b.Values {
if v.Op != OpPhi {
func TestShortCircuit(t *testing.T) {
c := testConfig(t)
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("mem", OpInitMem, TypeMem, 0, nil),
Valu("arg1", OpArg, TypeInt64, 0, nil),
return new(stackAllocState)
}
if s.f != nil {
- f.Config.Fatalf(src.NoXPos, "newStackAllocState called without previous free")
+ f.fe.Fatalf(src.NoXPos, "newStackAllocState called without previous free")
}
return s
}
// If there is no unused stack slot, allocate a new one.
if i == len(locs) {
s.nAuto++
- locs = append(locs, LocalSlot{N: f.Config.fe.Auto(v.Type), Type: v.Type, Off: 0})
+ locs = append(locs, LocalSlot{N: f.fe.Auto(v.Type), Type: v.Type, Off: 0})
locations[v.Type] = locs
}
// Use the stack variable at that index for v.
// stackframe calls back into the frontend to assign frame offsets.
func stackframe(f *Func) {
- f.Config.fe.AllocFrame(f)
+ f.fe.AllocFrame(f)
}
func (v *Value) Logf(msg string, args ...interface{}) { v.Block.Logf(msg, args...) }
func (v *Value) Log() bool { return v.Block.Log() }
func (v *Value) Fatalf(msg string, args ...interface{}) {
- v.Block.Func.Config.Fatalf(v.Pos, msg, args...)
+ v.Block.Func.fe.Fatalf(v.Pos, msg, args...)
}
// isGenericIntConst returns whether v is a generic integer constant.
// A sequence of WB stores for many pointer fields of a single type will
// be emitted together, with a single branch.
func writebarrier(f *Func) {
- if !f.Config.fe.UseWriteBarrier() {
+ if !f.fe.UseWriteBarrier() {
return
}
}
}
if sb == nil {
- sb = f.Entry.NewValue0(initpos, OpSB, f.Config.fe.TypeUintptr())
+ sb = f.Entry.NewValue0(initpos, OpSB, f.fe.TypeUintptr())
}
if sp == nil {
- sp = f.Entry.NewValue0(initpos, OpSP, f.Config.fe.TypeUintptr())
+ sp = f.Entry.NewValue0(initpos, OpSP, f.fe.TypeUintptr())
}
- wbsym := &ExternSymbol{Typ: f.Config.fe.TypeBool(), Sym: f.Config.fe.Syslook("writeBarrier")}
- wbaddr = f.Entry.NewValue1A(initpos, OpAddr, f.Config.fe.TypeUInt32().PtrTo(), wbsym, sb)
- writebarrierptr = f.Config.fe.Syslook("writebarrierptr")
- typedmemmove = f.Config.fe.Syslook("typedmemmove")
- typedmemclr = f.Config.fe.Syslook("typedmemclr")
- const0 = f.ConstInt32(initpos, f.Config.fe.TypeUInt32(), 0)
+ wbsym := &ExternSymbol{Typ: f.fe.TypeBool(), Sym: f.fe.Syslook("writeBarrier")}
+ wbaddr = f.Entry.NewValue1A(initpos, OpAddr, f.fe.TypeUInt32().PtrTo(), wbsym, sb)
+ writebarrierptr = f.fe.Syslook("writebarrierptr")
+ typedmemmove = f.fe.Syslook("typedmemmove")
+ typedmemclr = f.fe.Syslook("typedmemclr")
+ const0 = f.ConstInt32(initpos, f.fe.TypeUInt32(), 0)
// allocate auxiliary data structures for computing store order
sset = f.newSparseSet(f.NumValues())
// set up control flow for write barrier test
// load word, test word, avoiding partial register write from load byte.
- flag := b.NewValue2(pos, OpLoad, f.Config.fe.TypeUInt32(), wbaddr, mem)
- flag = b.NewValue2(pos, OpNeq32, f.Config.fe.TypeBool(), flag, const0)
+ flag := b.NewValue2(pos, OpLoad, f.fe.TypeUInt32(), wbaddr, mem)
+ flag = b.NewValue2(pos, OpNeq32, f.fe.TypeBool(), flag, const0)
b.Kind = BlockIf
b.SetControl(flag)
b.Likely = BranchUnlikely
ptr := w.Args[0]
var typ interface{}
if w.Op != OpStoreWB {
- typ = &ExternSymbol{Typ: f.Config.fe.TypeUintptr(), Sym: w.Aux.(Type).Symbol()}
+ typ = &ExternSymbol{Typ: f.fe.TypeUintptr(), Sym: w.Aux.(Type).Symbol()}
}
pos = w.Pos
}
if f.NoWB {
- f.Config.fe.Error(pos, "write barrier prohibited")
+ f.fe.Error(pos, "write barrier prohibited")
}
if !f.WBPos.IsKnown() {
f.WBPos = pos
}
- if f.Config.fe.Debug_wb() {
- f.Config.Warnl(pos, "write barrier")
+ if f.fe.Debug_wb() {
+ f.Warnl(pos, "write barrier")
}
}
// a function call). Marshaling the args to typedmemmove might clobber the
// value we're trying to move.
t := val.Type.ElemType()
- tmp = config.fe.Auto(t)
+ tmp = b.Func.fe.Auto(t)
aux := &AutoSymbol{Typ: t, Node: tmp}
mem = b.NewValue1A(pos, OpVarDef, TypeMem, tmp, mem)
tmpaddr := b.NewValue1A(pos, OpAddr, t.PtrTo(), aux, sp)
off := config.ctxt.FixedFrameSize()
if typ != nil { // for typedmemmove
- taddr := b.NewValue1A(pos, OpAddr, config.fe.TypeUintptr(), typ, sb)
+ taddr := b.NewValue1A(pos, OpAddr, b.Func.fe.TypeUintptr(), typ, sb)
off = round(off, taddr.Type.Alignment())
arg := b.NewValue1I(pos, OpOffPtr, taddr.Type.PtrTo(), off, sp)
mem = b.NewValue3A(pos, OpStore, TypeMem, ptr.Type, arg, taddr, mem)
// Make sure writebarrier phase works even StoreWB ops are not in dependency order
c := testConfig(t)
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),
// See issue #19067.
c := testConfig(t)
ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
- fun := Fun(c, "entry",
+ fun := Fun(c, DummyFrontend{t}, "entry",
Bloc("entry",
Valu("start", OpInitMem, TypeMem, 0, nil),
Valu("sb", OpSB, TypeInvalid, 0, nil),