From 2cdb7f118ab86adb6fef5485d96831df3446b747 Mon Sep 17 00:00:00 2001
From: Josh Bleecher Snyder
Date: Thu, 16 Mar 2017 22:42:10 -0700
Subject: [PATCH] cmd/compile: move Frontend field from ssa.Config to ssa.Func

Suggested by mdempsky in CL 38232.

This allows us to use the Frontend field to associate frontend state
and information with a function.
See the following CL in the series for examples.

This is a giant CL, but it is almost entirely routine refactoring.

The ssa test API is starting to feel a bit unwieldy.
I will clean it up separately, once the dust has settled.

Passes toolstash -cmp.

Updates #15756

Change-Id: I71c573bd96ff7251935fce1391b06b1f133c3caf
Reviewed-on: https://go-review.googlesource.com/38327
Run-TryBot: Josh Bleecher Snyder
Reviewed-by: Matthew Dempsky
---
 src/cmd/compile/internal/gc/ssa.go            |  24 +-
 src/cmd/compile/internal/ssa/checkbce.go      |   2 +-
 src/cmd/compile/internal/ssa/compile.go       |   2 +-
 src/cmd/compile/internal/ssa/config.go        |  18 +-
 src/cmd/compile/internal/ssa/copyelim_test.go |   2 +-
 src/cmd/compile/internal/ssa/critical.go      |   4 +-
 src/cmd/compile/internal/ssa/cse_test.go      |   4 +-
 src/cmd/compile/internal/ssa/deadcode_test.go |  10 +-
 .../compile/internal/ssa/deadstore_test.go    |   8 +-
 src/cmd/compile/internal/ssa/decompose.go     |  44 +-
 src/cmd/compile/internal/ssa/dom_test.go      |  22 +-
 src/cmd/compile/internal/ssa/export_test.go   |   4 +-
 src/cmd/compile/internal/ssa/func.go          |  15 +-
 src/cmd/compile/internal/ssa/func_test.go     |  36 +-
 src/cmd/compile/internal/ssa/fuse_test.go     |  18 +-
 src/cmd/compile/internal/ssa/gen/rulegen.go   |   8 +-
 src/cmd/compile/internal/ssa/lca_test.go      |   4 +-
 src/cmd/compile/internal/ssa/likelyadjust.go  |   6 +-
 src/cmd/compile/internal/ssa/loop_test.go     |   5 +-
 src/cmd/compile/internal/ssa/loopbce.go       |  10 +-
 .../compile/internal/ssa/loopreschedchecks.go |   6 +-
 src/cmd/compile/internal/ssa/nilcheck.go      |   8 +-
 src/cmd/compile/internal/ssa/nilcheck_test.go |  40 +-
 src/cmd/compile/internal/ssa/passbm_test.go   |   8 +-
 src/cmd/compile/internal/ssa/phielim.go       |   2 +-
 src/cmd/compile/internal/ssa/phiopt.go        |   8 +-
 src/cmd/compile/internal/ssa/print.go         |   2 +-
 src/cmd/compile/internal/ssa/prove.go         |  24 +-
 src/cmd/compile/internal/ssa/regalloc.go      |  12 +-
 src/cmd/compile/internal/ssa/regalloc_test.go |   2 +-
 src/cmd/compile/internal/ssa/rewrite.go       |   8 +-
 src/cmd/compile/internal/ssa/rewrite386.go    |  34 +-
 src/cmd/compile/internal/ssa/rewriteAMD64.go  | 134 ++---
 src/cmd/compile/internal/ssa/rewriteARM.go    | 238 +++------
 src/cmd/compile/internal/ssa/rewriteARM64.go  | 314 +++---------
 src/cmd/compile/internal/ssa/rewriteMIPS.go   | 334 ++++--------
 src/cmd/compile/internal/ssa/rewriteMIPS64.go | 474 +++++-------------
 src/cmd/compile/internal/ssa/rewritePPC64.go  | 368 ++++----------
 src/cmd/compile/internal/ssa/rewriteS390X.go  | 474 +++++-------------
 src/cmd/compile/internal/ssa/rewritedec.go    |   6 +-
 src/cmd/compile/internal/ssa/rewritedec64.go  | 198 ++------
 .../compile/internal/ssa/rewritegeneric.go    | 130 ++---
 src/cmd/compile/internal/ssa/schedule_test.go |   4 +-
 src/cmd/compile/internal/ssa/shift_test.go    |  24 +-
 src/cmd/compile/internal/ssa/shortcircuit.go  |   4 +-
 .../compile/internal/ssa/shortcircuit_test.go |   2 +-
 src/cmd/compile/internal/ssa/stackalloc.go    |   4 +-
 src/cmd/compile/internal/ssa/stackframe.go    |   2 +-
 src/cmd/compile/internal/ssa/value.go         |   2 +-
 src/cmd/compile/internal/ssa/writebarrier.go  |  34 +-
 .../compile/internal/ssa/writebarrier_test.go |   4 +-
 51 files changed, 922 insertions(+), 2228 deletions(-)

diff --git a/src/cmd/compile/internal/gc/ssa.go
b/src/cmd/compile/internal/gc/ssa.go index bff3ccddb6..587bb7e2fb 100644 --- a/src/cmd/compile/internal/gc/ssa.go +++ b/src/cmd/compile/internal/gc/ssa.go @@ -23,7 +23,7 @@ var ssaExp ssaExport var ssaCache *ssa.Cache func initssaconfig() { - ssaConfig = ssa.NewConfig(thearch.LinkArch.Name, &ssaExp, Ctxt, Debug['N'] == 0) + ssaConfig = ssa.NewConfig(thearch.LinkArch.Name, Ctxt, Debug['N'] == 0) if thearch.LinkArch.Name == "386" { ssaConfig.Set387(thearch.Use387) } @@ -52,7 +52,7 @@ func buildssa(fn *Node) *ssa.Func { ssaExp.log = printssa - s.f = ssa.NewFunc() + s.f = ssa.NewFunc(&ssaExp) s.config = ssaConfig s.f.Config = ssaConfig s.f.Cache = ssaCache @@ -74,7 +74,7 @@ func buildssa(fn *Node) *ssa.Func { s.panics = map[funcLine]*ssa.Block{} if name == os.Getenv("GOSSAFUNC") { - s.f.HTMLWriter = ssa.NewHTMLWriter("ssa.html", ssaConfig, name) + s.f.HTMLWriter = ssa.NewHTMLWriter("ssa.html", s.f.Frontend(), name) // TODO: generate and print a mapping from nodes to values and blocks } @@ -239,13 +239,13 @@ func (s *state) label(sym *Sym) *ssaLabel { return lab } -func (s *state) Logf(msg string, args ...interface{}) { s.config.Logf(msg, args...) } -func (s *state) Log() bool { return s.config.Log() } -func (s *state) Fatalf(msg string, args ...interface{}) { s.config.Fatalf(s.peekPos(), msg, args...) } -func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { - s.config.Warnl(pos, msg, args...) +func (s *state) Logf(msg string, args ...interface{}) { s.f.Logf(msg, args...) } +func (s *state) Log() bool { return s.f.Log() } +func (s *state) Fatalf(msg string, args ...interface{}) { + s.f.Frontend().Fatalf(s.peekPos(), msg, args...) } -func (s *state) Debug_checknil() bool { return s.config.Debug_checknil() } +func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl(pos, msg, args...) } +func (s *state) Debug_checknil() bool { return s.f.Frontend().Debug_checknil() } var ( // dummy node for the memory variable @@ -3279,8 +3279,8 @@ func canSSAType(t *Type) bool { func (s *state) exprPtr(n *Node, bounded bool, lineno src.XPos) *ssa.Value { p := s.expr(n) if bounded || n.NonNil() { - if s.f.Config.Debug_checknil() && lineno.Line() > 1 { - s.f.Config.Warnl(lineno, "removed nil check") + if s.f.Frontend().Debug_checknil() && lineno.Line() > 1 { + s.f.Warnl(lineno, "removed nil check") } return p } @@ -4211,7 +4211,7 @@ func (s *SSAGenState) SetPos(pos src.XPos) { func genssa(f *ssa.Func, ptxt *obj.Prog, gcargs, gclocals *Sym) { var s SSAGenState - e := f.Config.Frontend().(*ssaExport) + e := f.Frontend().(*ssaExport) // Remember where each block starts. 
s.bstart = make([]*obj.Prog, f.NumBlocks()) diff --git a/src/cmd/compile/internal/ssa/checkbce.go b/src/cmd/compile/internal/ssa/checkbce.go index c26ace4c32..ab842b4296 100644 --- a/src/cmd/compile/internal/ssa/checkbce.go +++ b/src/cmd/compile/internal/ssa/checkbce.go @@ -16,7 +16,7 @@ func checkbce(f *Func) { for _, b := range f.Blocks { for _, v := range b.Values { if v.Op == OpIsInBounds || v.Op == OpIsSliceInBounds { - f.Config.Warnl(v.Pos, "Found %v", v.Op) + f.Warnl(v.Pos, "Found %v", v.Op) } } } diff --git a/src/cmd/compile/internal/ssa/compile.go b/src/cmd/compile/internal/ssa/compile.go index 4f62250dc5..551521bbe8 100644 --- a/src/cmd/compile/internal/ssa/compile.go +++ b/src/cmd/compile/internal/ssa/compile.go @@ -130,7 +130,7 @@ func (f *Func) dumpFile(phaseName string) { fi, err := os.Create(fname) if err != nil { - f.Config.Warnl(src.NoXPos, "Unable to create after-phase dump file %s", fname) + f.Warnl(src.NoXPos, "Unable to create after-phase dump file %s", fname) return } diff --git a/src/cmd/compile/internal/ssa/config.go b/src/cmd/compile/internal/ssa/config.go index 8842db01bd..c764aa3539 100644 --- a/src/cmd/compile/internal/ssa/config.go +++ b/src/cmd/compile/internal/ssa/config.go @@ -28,7 +28,6 @@ type Config struct { FPReg int8 // register number of frame pointer, -1 if not used LinkReg int8 // register number of link register if it is a general purpose register, -1 if not used hasGReg bool // has hardware g register - fe Frontend // callbacks into compiler frontend ctxt *obj.Link // Generic arch information optimize bool // Do optimization noDuffDevice bool // Don't use Duff's device @@ -136,8 +135,8 @@ type GCNode interface { } // NewConfig returns a new configuration object for the given architecture. -func NewConfig(arch string, fe Frontend, ctxt *obj.Link, optimize bool) *Config { - c := &Config{arch: arch, fe: fe} +func NewConfig(arch string, ctxt *obj.Link, optimize bool) *Config { + c := &Config{arch: arch} switch arch { case "amd64": c.IntSize = 8 @@ -266,7 +265,7 @@ func NewConfig(arch string, fe Frontend, ctxt *obj.Link, optimize bool) *Config c.hasGReg = true c.noDuffDevice = true default: - fe.Fatalf(src.NoXPos, "arch %s not implemented", arch) + ctxt.Diag("arch %s not implemented", arch) } c.ctxt = ctxt c.optimize = optimize @@ -296,7 +295,7 @@ func NewConfig(arch string, fe Frontend, ctxt *obj.Link, optimize bool) *Config if ev != "" { v, err := strconv.ParseInt(ev, 10, 64) if err != nil { - fe.Fatalf(src.NoXPos, "Environment variable GO_SSA_PHI_LOC_CUTOFF (value '%s') did not parse as a number", ev) + ctxt.Diag("Environment variable GO_SSA_PHI_LOC_CUTOFF (value '%s') did not parse as a number", ev) } c.sparsePhiCutoff = uint64(v) // convert -1 to maxint, for never use sparse } @@ -309,14 +308,5 @@ func (c *Config) Set387(b bool) { c.use387 = b } -func (c *Config) Frontend() Frontend { return c.fe } func (c *Config) SparsePhiCutoff() uint64 { return c.sparsePhiCutoff } func (c *Config) Ctxt() *obj.Link { return c.ctxt } - -func (c *Config) Logf(msg string, args ...interface{}) { c.fe.Logf(msg, args...) } -func (c *Config) Log() bool { return c.fe.Log() } -func (c *Config) Fatalf(pos src.XPos, msg string, args ...interface{}) { c.fe.Fatalf(pos, msg, args...) } -func (c *Config) Error(pos src.XPos, msg string, args ...interface{}) { c.fe.Error(pos, msg, args...) } -func (c *Config) Warnl(pos src.XPos, msg string, args ...interface{}) { c.fe.Warnl(pos, msg, args...) 
} -func (c *Config) Debug_checknil() bool { return c.fe.Debug_checknil() } -func (c *Config) Debug_wb() bool { return c.fe.Debug_wb() } diff --git a/src/cmd/compile/internal/ssa/copyelim_test.go b/src/cmd/compile/internal/ssa/copyelim_test.go index 34c548a48b..3250445fec 100644 --- a/src/cmd/compile/internal/ssa/copyelim_test.go +++ b/src/cmd/compile/internal/ssa/copyelim_test.go @@ -34,7 +34,7 @@ func benchmarkCopyElim(b *testing.B, n int) { } for i := 0; i < b.N; i++ { - fun := Fun(c, "entry", Bloc("entry", values...)) + fun := Fun(c, DummyFrontend{b}, "entry", Bloc("entry", values...)) Copyelim(fun.f) } } diff --git a/src/cmd/compile/internal/ssa/critical.go b/src/cmd/compile/internal/ssa/critical.go index 72aa976327..b85721eba4 100644 --- a/src/cmd/compile/internal/ssa/critical.go +++ b/src/cmd/compile/internal/ssa/critical.go @@ -63,7 +63,7 @@ func critical(f *Func) { d.Pos = p.Pos blocks[argID] = d if f.pass.debug > 0 { - f.Config.Warnl(p.Pos, "split critical edge") + f.Warnl(p.Pos, "split critical edge") } } else { reusedBlock = true @@ -74,7 +74,7 @@ func critical(f *Func) { d = f.NewBlock(BlockPlain) d.Pos = p.Pos if f.pass.debug > 0 { - f.Config.Warnl(p.Pos, "split critical edge") + f.Warnl(p.Pos, "split critical edge") } } diff --git a/src/cmd/compile/internal/ssa/cse_test.go b/src/cmd/compile/internal/ssa/cse_test.go index 2788ca6ffd..16d301506a 100644 --- a/src/cmd/compile/internal/ssa/cse_test.go +++ b/src/cmd/compile/internal/ssa/cse_test.go @@ -19,7 +19,7 @@ func TestCSEAuxPartitionBug(t *testing.T) { // construct lots of values with args that have aux values and place // them in an order that triggers the bug - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("start", OpInitMem, TypeMem, 0, nil), Valu("sp", OpSP, TypeBytePtr, 0, nil), @@ -87,7 +87,7 @@ func TestCSEAuxPartitionBug(t *testing.T) { func TestZCSE(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("start", OpInitMem, TypeMem, 0, nil), Valu("sp", OpSP, TypeBytePtr, 0, nil), diff --git a/src/cmd/compile/internal/ssa/deadcode_test.go b/src/cmd/compile/internal/ssa/deadcode_test.go index c8ee3662fd..156a1d746e 100644 --- a/src/cmd/compile/internal/ssa/deadcode_test.go +++ b/src/cmd/compile/internal/ssa/deadcode_test.go @@ -12,7 +12,7 @@ import ( func TestDeadLoop(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Goto("exit")), @@ -42,7 +42,7 @@ func TestDeadLoop(t *testing.T) { func TestDeadValue(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("deadval", OpConst64, TypeInt64, 37, nil), @@ -65,7 +65,7 @@ func TestDeadValue(t *testing.T) { func TestNeverTaken(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("cond", OpConstBool, TypeBool, 0, nil), Valu("mem", OpInitMem, TypeMem, 0, nil), @@ -100,7 +100,7 @@ func TestNeverTaken(t *testing.T) { func TestNestedDeadBlocks(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("cond", OpConstBool, TypeBool, 0, nil), @@ -152,7 +152,7 @@ func BenchmarkDeadCode(b *testing.B) { } b.ResetTimer() for i := 0; i < b.N; i++ { - fun := Fun(c, "entry", blocks...) 
+ fun := Fun(c, DummyFrontend{b}, "entry", blocks...) Deadcode(fun.f) } }) diff --git a/src/cmd/compile/internal/ssa/deadstore_test.go b/src/cmd/compile/internal/ssa/deadstore_test.go index 003b2284be..82cda3327e 100644 --- a/src/cmd/compile/internal/ssa/deadstore_test.go +++ b/src/cmd/compile/internal/ssa/deadstore_test.go @@ -10,7 +10,7 @@ func TestDeadStore(t *testing.T) { c := testConfig(t) elemType := &TypeImpl{Size_: 1, Name: "testtype"} ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr", Elem_: elemType} // dummy for testing - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("start", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -45,7 +45,7 @@ func TestDeadStorePhi(t *testing.T) { // make sure we don't get into an infinite loop with phi values. c := testConfig(t) ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("start", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -72,7 +72,7 @@ func TestDeadStoreTypes(t *testing.T) { c := testConfig(t) t1 := &TypeImpl{Size_: 8, Ptr: true, Name: "t1"} t2 := &TypeImpl{Size_: 4, Ptr: true, Name: "t2"} - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("start", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -102,7 +102,7 @@ func TestDeadStoreUnsafe(t *testing.T) { // can get to a point where the size is changed but type unchanged. c := testConfig(t) ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("start", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), diff --git a/src/cmd/compile/internal/ssa/decompose.go b/src/cmd/compile/internal/ssa/decompose.go index 6f9093bbb5..a0cd5b2c41 100644 --- a/src/cmd/compile/internal/ssa/decompose.go +++ b/src/cmd/compile/internal/ssa/decompose.go @@ -28,15 +28,15 @@ func decomposeBuiltIn(f *Func) { case t.IsInteger() && t.Size() == 8 && f.Config.IntSize == 4: var elemType Type if t.IsSigned() { - elemType = f.Config.fe.TypeInt32() + elemType = f.fe.TypeInt32() } else { - elemType = f.Config.fe.TypeUInt32() + elemType = f.fe.TypeUInt32() } - hiName, loName := f.Config.fe.SplitInt64(name) + hiName, loName := f.fe.SplitInt64(name) newNames = append(newNames, hiName, loName) for _, v := range f.NamedValues[name] { hi := v.Block.NewValue1(v.Pos, OpInt64Hi, elemType, v) - lo := v.Block.NewValue1(v.Pos, OpInt64Lo, f.Config.fe.TypeUInt32(), v) + lo := v.Block.NewValue1(v.Pos, OpInt64Lo, f.fe.TypeUInt32(), v) f.NamedValues[hiName] = append(f.NamedValues[hiName], hi) f.NamedValues[loName] = append(f.NamedValues[loName], lo) } @@ -44,11 +44,11 @@ func decomposeBuiltIn(f *Func) { case t.IsComplex(): var elemType Type if t.Size() == 16 { - elemType = f.Config.fe.TypeFloat64() + elemType = f.fe.TypeFloat64() } else { - elemType = f.Config.fe.TypeFloat32() + elemType = f.fe.TypeFloat32() } - rName, iName := f.Config.fe.SplitComplex(name) + rName, iName := f.fe.SplitComplex(name) newNames = append(newNames, rName, iName) for _, v := range f.NamedValues[name] { r := v.Block.NewValue1(v.Pos, OpComplexReal, elemType, v) @@ -58,9 +58,9 @@ func decomposeBuiltIn(f *Func) { } delete(f.NamedValues, name) case t.IsString(): - ptrType := f.Config.fe.TypeBytePtr() - lenType := f.Config.fe.TypeInt() - ptrName, lenName := 
f.Config.fe.SplitString(name) + ptrType := f.fe.TypeBytePtr() + lenType := f.fe.TypeInt() + ptrName, lenName := f.fe.SplitString(name) newNames = append(newNames, ptrName, lenName) for _, v := range f.NamedValues[name] { ptr := v.Block.NewValue1(v.Pos, OpStringPtr, ptrType, v) @@ -70,9 +70,9 @@ func decomposeBuiltIn(f *Func) { } delete(f.NamedValues, name) case t.IsSlice(): - ptrType := f.Config.fe.TypeBytePtr() - lenType := f.Config.fe.TypeInt() - ptrName, lenName, capName := f.Config.fe.SplitSlice(name) + ptrType := f.fe.TypeBytePtr() + lenType := f.fe.TypeInt() + ptrName, lenName, capName := f.fe.SplitSlice(name) newNames = append(newNames, ptrName, lenName, capName) for _, v := range f.NamedValues[name] { ptr := v.Block.NewValue1(v.Pos, OpSlicePtr, ptrType, v) @@ -84,8 +84,8 @@ func decomposeBuiltIn(f *Func) { } delete(f.NamedValues, name) case t.IsInterface(): - ptrType := f.Config.fe.TypeBytePtr() - typeName, dataName := f.Config.fe.SplitInterface(name) + ptrType := f.fe.TypeBytePtr() + typeName, dataName := f.fe.SplitInterface(name) newNames = append(newNames, typeName, dataName) for _, v := range f.NamedValues[name] { typ := v.Block.NewValue1(v.Pos, OpITab, ptrType, v) @@ -129,7 +129,7 @@ func decomposeBuiltInPhi(v *Value) { } func decomposeStringPhi(v *Value) { - fe := v.Block.Func.Config.fe + fe := v.Block.Func.fe ptrType := fe.TypeBytePtr() lenType := fe.TypeInt() @@ -145,7 +145,7 @@ func decomposeStringPhi(v *Value) { } func decomposeSlicePhi(v *Value) { - fe := v.Block.Func.Config.fe + fe := v.Block.Func.fe ptrType := fe.TypeBytePtr() lenType := fe.TypeInt() @@ -164,7 +164,7 @@ func decomposeSlicePhi(v *Value) { } func decomposeInt64Phi(v *Value) { - fe := v.Block.Func.Config.fe + fe := v.Block.Func.fe var partType Type if v.Type.IsSigned() { partType = fe.TypeInt32() @@ -184,7 +184,7 @@ func decomposeInt64Phi(v *Value) { } func decomposeComplexPhi(v *Value) { - fe := v.Block.Func.Config.fe + fe := v.Block.Func.fe var partType Type switch z := v.Type.Size(); z { case 8: @@ -207,7 +207,7 @@ func decomposeComplexPhi(v *Value) { } func decomposeInterfacePhi(v *Value) { - ptrType := v.Block.Func.Config.fe.TypeBytePtr() + ptrType := v.Block.Func.fe.TypeBytePtr() itab := v.Block.NewValue0(v.Pos, OpPhi, ptrType) data := v.Block.NewValue0(v.Pos, OpPhi, ptrType) @@ -243,7 +243,7 @@ func decomposeUser(f *Func) { n := t.NumFields() fnames = fnames[:0] for i := 0; i < n; i++ { - fnames = append(fnames, f.Config.fe.SplitStruct(name, i)) + fnames = append(fnames, f.fe.SplitStruct(name, i)) } for _, v := range f.NamedValues[name] { for i := 0; i < n; i++ { @@ -262,7 +262,7 @@ func decomposeUser(f *Func) { if t.NumElem() != 1 { f.Fatalf("array not of size 1") } - elemName := f.Config.fe.SplitArray(name) + elemName := f.fe.SplitArray(name) for _, v := range f.NamedValues[name] { e := v.Block.NewValue1I(v.Pos, OpArraySelect, t.ElemType(), 0, v) f.NamedValues[elemName] = append(f.NamedValues[elemName], e) diff --git a/src/cmd/compile/internal/ssa/dom_test.go b/src/cmd/compile/internal/ssa/dom_test.go index 6ecbe923d4..787c429e41 100644 --- a/src/cmd/compile/internal/ssa/dom_test.go +++ b/src/cmd/compile/internal/ssa/dom_test.go @@ -160,8 +160,8 @@ func genMaxPredValue(size int) []bloc { var domBenchRes []*Block func benchmarkDominators(b *testing.B, size int, bg blockGen) { - c := NewConfig("amd64", DummyFrontend{b}, nil, true) - fun := Fun(c, "entry", bg(size)...) + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{b}, "entry", bg(size)...) 
CheckFunc(fun.f) b.SetBytes(int64(size)) @@ -221,7 +221,7 @@ func verifyDominators(t *testing.T, fut fun, domFn domFunc, doms map[string]stri func TestDominatorsSingleBlock(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Exit("mem"))) @@ -236,7 +236,7 @@ func TestDominatorsSingleBlock(t *testing.T) { func TestDominatorsSimple(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Goto("a")), @@ -264,7 +264,7 @@ func TestDominatorsSimple(t *testing.T) { func TestDominatorsMultPredFwd(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("p", OpConstBool, TypeBool, 1, nil), @@ -292,7 +292,7 @@ func TestDominatorsMultPredFwd(t *testing.T) { func TestDominatorsDeadCode(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("p", OpConstBool, TypeBool, 0, nil), @@ -315,7 +315,7 @@ func TestDominatorsDeadCode(t *testing.T) { func TestDominatorsMultPredRev(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Goto("first")), Bloc("first", @@ -346,7 +346,7 @@ func TestDominatorsMultPredRev(t *testing.T) { func TestDominatorsMultPred(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("p", OpConstBool, TypeBool, 1, nil), @@ -375,7 +375,7 @@ func TestDominatorsMultPred(t *testing.T) { func TestInfiniteLoop(t *testing.T) { c := testConfig(t) // note lack of an exit block - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("p", OpConstBool, TypeBool, 1, nil), @@ -411,7 +411,7 @@ func TestDomTricky(t *testing.T) { b := 1 & i >> 1 c := 1 & i >> 2 - fun := Fun(testConfig(t), "1", + fun := Fun(testConfig(t), DummyFrontend{t}, "1", Bloc("1", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("p", OpConstBool, TypeBool, 1, nil), @@ -455,7 +455,7 @@ func generateDominatorMap(fut fun) map[string]string { func TestDominatorsPostTricky(t *testing.T) { c := testConfig(t) - fun := Fun(c, "b1", + fun := Fun(c, DummyFrontend{t}, "b1", Bloc("b1", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("p", OpConstBool, TypeBool, 1, nil), diff --git a/src/cmd/compile/internal/ssa/export_test.go b/src/cmd/compile/internal/ssa/export_test.go index 743bdfcd03..eeb1fceb7a 100644 --- a/src/cmd/compile/internal/ssa/export_test.go +++ b/src/cmd/compile/internal/ssa/export_test.go @@ -19,11 +19,11 @@ var Copyelim = copyelim var TestCtxt = obj.Linknew(&x86.Linkamd64) func testConfig(t testing.TB) *Config { - return NewConfig("amd64", DummyFrontend{t}, TestCtxt, true) + return NewConfig("amd64", TestCtxt, true) } func testConfigS390X(t testing.TB) *Config { - return NewConfig("s390x", DummyFrontend{t}, obj.Linknew(&s390x.Links390x), true) + return NewConfig("s390x", obj.Linknew(&s390x.Links390x), true) } // DummyFrontend is a test-only frontend. 
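Aside, before the func.go diff below: a minimal test-style sketch of how the refactored API fits together, using only helpers that appear in this CL's hunks (testConfig, DummyFrontend, Fun, Bloc, Valu, Exit, CheckFunc). The test name is hypothetical and is not part of the patch; the point is that NewConfig no longer takes a Frontend, Fun/NewFunc now do, and the logging helpers (Logf, Log, Fatalf, Warnl) are reached through the Func.

package ssa

import "testing"

// TestFrontendOnFunc is a hypothetical example, not part of this CL.
func TestFrontendOnFunc(t *testing.T) {
	c := testConfig(t)     // Config is built without a Frontend now
	fe := DummyFrontend{t} // test-only Frontend implementation
	// Fun (which calls NewFunc) receives the Frontend directly.
	fun := Fun(c, fe, "entry",
		Bloc("entry",
			Valu("mem", OpInitMem, TypeMem, 0, nil),
			Exit("mem")))
	// Frontend callbacks now hang off the Func itself.
	if fun.f.Frontend() == nil {
		t.Fatal("expected a Frontend attached to the Func")
	}
	fun.f.Logf("logging goes through f.fe") // Logf/Warnl/Fatalf forward to the Frontend
	CheckFunc(fun.f)
}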
diff --git a/src/cmd/compile/internal/ssa/func.go b/src/cmd/compile/internal/ssa/func.go index 733e2abe1b..74a74f2fd4 100644 --- a/src/cmd/compile/internal/ssa/func.go +++ b/src/cmd/compile/internal/ssa/func.go @@ -19,6 +19,7 @@ import ( type Func struct { Config *Config // architecture information Cache *Cache // re-usable cache + fe Frontend // frontend state associated with this Func, callbacks into compiler frontend pass *pass // current pass information (name, options, etc.) Name string // e.g. bytes·Compare Type Type // type signature of the function. @@ -63,8 +64,8 @@ type Func struct { // NewFunc returns a new, empty function object. // Caller must set f.Config and f.Cache before using f. -func NewFunc() *Func { - return &Func{NamedValues: make(map[LocalSlot][]*Value)} +func NewFunc(fe Frontend) *Func { + return &Func{fe: fe, NamedValues: make(map[LocalSlot][]*Value)} } // NumBlocks returns an integer larger than the id of any Block in the Func. @@ -165,7 +166,7 @@ func (f *Func) LogStat(key string, args ...interface{}) { if f.pass != nil { n = strings.Replace(f.pass.name, " ", "_", -1) } - f.Config.Warnl(f.Entry.Pos, "\t%s\t%s%s\t%s", n, key, value, f.Name) + f.Warnl(f.Entry.Pos, "\t%s\t%s%s\t%s", n, key, value, f.Name) } // freeValue frees a value. It must no longer be referenced. @@ -482,9 +483,11 @@ func (f *Func) ConstOffPtrSP(pos src.XPos, t Type, c int64, sp *Value) *Value { } -func (f *Func) Logf(msg string, args ...interface{}) { f.Config.Logf(msg, args...) } -func (f *Func) Log() bool { return f.Config.Log() } -func (f *Func) Fatalf(msg string, args ...interface{}) { f.Config.Fatalf(f.Entry.Pos, msg, args...) } +func (f *Func) Frontend() Frontend { return f.fe } +func (f *Func) Warnl(pos src.XPos, msg string, args ...interface{}) { f.fe.Warnl(pos, msg, args...) } +func (f *Func) Logf(msg string, args ...interface{}) { f.fe.Logf(msg, args...) } +func (f *Func) Log() bool { return f.fe.Log() } +func (f *Func) Fatalf(msg string, args ...interface{}) { f.fe.Fatalf(f.Entry.Pos, msg, args...) } // postorder returns the reachable blocks in f in a postorder traversal. func (f *Func) postorder() []*Block { diff --git a/src/cmd/compile/internal/ssa/func_test.go b/src/cmd/compile/internal/ssa/func_test.go index b14da75b1a..eaeb8268c1 100644 --- a/src/cmd/compile/internal/ssa/func_test.go +++ b/src/cmd/compile/internal/ssa/func_test.go @@ -143,8 +143,8 @@ var emptyPass pass = pass{ // returns a fun containing the composed Func. entry must be a name // supplied to one of the Bloc functions. Each of the bloc names and // valu names should be unique across the Fun. -func Fun(c *Config, entry string, blocs ...bloc) fun { - f := NewFunc() +func Fun(c *Config, fe Frontend, entry string, blocs ...bloc) fun { + f := NewFunc(fe) f.Config = c // TODO: Either mark some SSA tests as t.Parallel, // or set up a shared Cache and Reset it between tests. 
@@ -274,7 +274,7 @@ type valu struct { func TestArgs(t *testing.T) { c := testConfig(t) - fun := Fun(c, "entry", + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("a", OpConst64, TypeInt64, 14, nil), Valu("b", OpConst64, TypeInt64, 26, nil), @@ -296,7 +296,7 @@ func TestEquiv(t *testing.T) { equivalentCases := []struct{ f, g fun }{ // simple case { - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("a", OpConst64, TypeInt64, 14, nil), Valu("b", OpConst64, TypeInt64, 26, nil), @@ -305,7 +305,7 @@ func TestEquiv(t *testing.T) { Goto("exit")), Bloc("exit", Exit("mem"))), - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("a", OpConst64, TypeInt64, 14, nil), Valu("b", OpConst64, TypeInt64, 26, nil), @@ -317,7 +317,7 @@ func TestEquiv(t *testing.T) { }, // block order changed { - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("a", OpConst64, TypeInt64, 14, nil), Valu("b", OpConst64, TypeInt64, 26, nil), @@ -326,7 +326,7 @@ func TestEquiv(t *testing.T) { Goto("exit")), Bloc("exit", Exit("mem"))), - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("exit", Exit("mem")), Bloc("entry", @@ -348,26 +348,26 @@ func TestEquiv(t *testing.T) { differentCases := []struct{ f, g fun }{ // different shape { - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Goto("exit")), Bloc("exit", Exit("mem"))), - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Exit("mem"))), }, // value order changed { - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("b", OpConst64, TypeInt64, 26, nil), Valu("a", OpConst64, TypeInt64, 14, nil), Exit("mem"))), - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("a", OpConst64, TypeInt64, 14, nil), @@ -376,12 +376,12 @@ func TestEquiv(t *testing.T) { }, // value auxint different { - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("a", OpConst64, TypeInt64, 14, nil), Exit("mem"))), - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("a", OpConst64, TypeInt64, 26, nil), @@ -389,12 +389,12 @@ func TestEquiv(t *testing.T) { }, // value aux different { - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("a", OpConst64, TypeInt64, 0, 14), Exit("mem"))), - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("a", OpConst64, TypeInt64, 0, 26), @@ -402,14 +402,14 @@ func TestEquiv(t *testing.T) { }, // value args different { - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("a", OpConst64, TypeInt64, 14, nil), Valu("b", OpConst64, TypeInt64, 26, nil), Valu("sum", OpAdd64, TypeInt64, 0, nil, "a", "b"), Exit("mem"))), - Fun(testConfig(t), "entry", + Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("a", 
OpConst64, TypeInt64, 0, nil), @@ -430,7 +430,7 @@ func TestEquiv(t *testing.T) { // TestConstCache ensures that the cache will not return // reused free'd values with a non-matching AuxInt func TestConstCache(t *testing.T) { - f := Fun(testConfig(t), "entry", + f := Fun(testConfig(t), DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Exit("mem"))) diff --git a/src/cmd/compile/internal/ssa/fuse_test.go b/src/cmd/compile/internal/ssa/fuse_test.go index cf21ac468f..803cde50f2 100644 --- a/src/cmd/compile/internal/ssa/fuse_test.go +++ b/src/cmd/compile/internal/ssa/fuse_test.go @@ -8,8 +8,8 @@ import ( func TestFuseEliminatesOneBranch(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -36,8 +36,8 @@ func TestFuseEliminatesOneBranch(t *testing.T) { func TestFuseEliminatesBothBranches(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -69,8 +69,8 @@ func TestFuseEliminatesBothBranches(t *testing.T) { func TestFuseHandlesPhis(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -102,8 +102,8 @@ func TestFuseHandlesPhis(t *testing.T) { } func TestFuseEliminatesEmptyBlocks(t *testing.T) { - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -160,7 +160,7 @@ func BenchmarkFuse(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - fun := Fun(c, "entry", blocks...) + fun := Fun(c, DummyFrontend{b}, "entry", blocks...) fuse(fun.f) } }) diff --git a/src/cmd/compile/internal/ssa/gen/rulegen.go b/src/cmd/compile/internal/ssa/gen/rulegen.go index e6ca5c4f02..bb3291d55f 100644 --- a/src/cmd/compile/internal/ssa/gen/rulegen.go +++ b/src/cmd/compile/internal/ssa/gen/rulegen.go @@ -208,19 +208,19 @@ func genRules(arch arch) { // It's not precise--thus the blank assignments--but it's good enough // to avoid generating needless code and doing pointless nil checks. 
hasb := strings.Contains(body, "b.") - hasconfig := strings.Contains(body, "config.") + hasconfig := strings.Contains(body, "config.") || strings.Contains(body, "config)") hasfe := strings.Contains(body, "fe.") fmt.Fprintf(w, "func rewriteValue%s_%s(v *Value) bool {\n", arch.name, op) if hasb || hasconfig || hasfe { fmt.Fprintln(w, "b := v.Block") fmt.Fprintln(w, "_ = b") } - if hasconfig || hasfe { + if hasconfig { fmt.Fprintln(w, "config := b.Func.Config") fmt.Fprintln(w, "_ = config") } if hasfe { - fmt.Fprintln(w, "fe := config.fe") + fmt.Fprintln(w, "fe := b.Func.fe") fmt.Fprintln(w, "_ = fe") } fmt.Fprint(w, body) @@ -232,7 +232,7 @@ func genRules(arch arch) { fmt.Fprintf(w, "func rewriteBlock%s(b *Block) bool {\n", arch.name) fmt.Fprintln(w, "config := b.Func.Config") fmt.Fprintln(w, "_ = config") - fmt.Fprintln(w, "fe := config.fe") + fmt.Fprintln(w, "fe := b.Func.fe") fmt.Fprintln(w, "_ = fe") fmt.Fprintf(w, "switch b.Kind {\n") ops = nil diff --git a/src/cmd/compile/internal/ssa/lca_test.go b/src/cmd/compile/internal/ssa/lca_test.go index beb33e066e..196978451e 100644 --- a/src/cmd/compile/internal/ssa/lca_test.go +++ b/src/cmd/compile/internal/ssa/lca_test.go @@ -22,8 +22,8 @@ func lcaEqual(f *Func, lca1, lca2 lca) bool { } func testLCAgen(t *testing.T, bg blockGen, size int) { - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", bg(size)...) + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", bg(size)...) CheckFunc(fun.f) if size == 4 { t.Logf(fun.f.String()) diff --git a/src/cmd/compile/internal/ssa/likelyadjust.go b/src/cmd/compile/internal/ssa/likelyadjust.go index 15fecde201..8a2fe1bbd2 100644 --- a/src/cmd/compile/internal/ssa/likelyadjust.go +++ b/src/cmd/compile/internal/ssa/likelyadjust.go @@ -119,7 +119,7 @@ func describePredictionAgrees(b *Block, prediction BranchPrediction) string { } func describeBranchPrediction(f *Func, b *Block, likely, not int8, prediction BranchPrediction) { - f.Config.Warnl(b.Pos, "Branch prediction rule %s < %s%s", + f.Warnl(b.Pos, "Branch prediction rule %s < %s%s", bllikelies[likely-blMin], bllikelies[not-blMin], describePredictionAgrees(b, prediction)) } @@ -194,7 +194,7 @@ func likelyadjust(f *Func) { noprediction = true } if f.pass.debug > 0 && !noprediction { - f.Config.Warnl(b.Pos, "Branch prediction rule stay in loop%s", + f.Warnl(b.Pos, "Branch prediction rule stay in loop%s", describePredictionAgrees(b, prediction)) } @@ -237,7 +237,7 @@ func likelyadjust(f *Func) { } } if f.pass.debug > 2 { - f.Config.Warnl(b.Pos, "BP: Block %s, local=%s, certain=%s", b, bllikelies[local[b.ID]-blMin], bllikelies[certain[b.ID]-blMin]) + f.Warnl(b.Pos, "BP: Block %s, local=%s, certain=%s", b, bllikelies[local[b.ID]-blMin], bllikelies[certain[b.ID]-blMin]) } } diff --git a/src/cmd/compile/internal/ssa/loop_test.go b/src/cmd/compile/internal/ssa/loop_test.go index ddd14c2c01..db1069e823 100644 --- a/src/cmd/compile/internal/ssa/loop_test.go +++ b/src/cmd/compile/internal/ssa/loop_test.go @@ -44,12 +44,13 @@ func TestLoopConditionS390X(t *testing.T) { // done: // c := testConfigS390X(t) - fun := Fun(c, "entry", + fe := DummyFrontend{t} + fun := Fun(c, fe, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("SP", OpSP, TypeUInt64, 0, nil), Valu("ret", OpAddr, TypeInt64Ptr, 0, nil, "SP"), - Valu("N", OpArg, TypeInt64, 0, c.fe.Auto(TypeInt64)), + Valu("N", OpArg, TypeInt64, 0, fe.Auto(TypeInt64)), Valu("starti", OpConst64, TypeInt64, 0, nil), Valu("startsum", OpConst64, TypeInt64, 0, 
nil), Goto("b1")), diff --git a/src/cmd/compile/internal/ssa/loopbce.go b/src/cmd/compile/internal/ssa/loopbce.go index 0f356913ca..a96d98717d 100644 --- a/src/cmd/compile/internal/ssa/loopbce.go +++ b/src/cmd/compile/internal/ssa/loopbce.go @@ -139,9 +139,9 @@ nextb: if f.pass.debug > 1 { if min.Op == OpConst64 { - b.Func.Config.Warnl(b.Pos, "Induction variable with minimum %d and increment %d", min.AuxInt, inc.AuxInt) + b.Func.Warnl(b.Pos, "Induction variable with minimum %d and increment %d", min.AuxInt, inc.AuxInt) } else { - b.Func.Config.Warnl(b.Pos, "Induction variable with non-const minimum and increment %d", inc.AuxInt) + b.Func.Warnl(b.Pos, "Induction variable with non-const minimum and increment %d", inc.AuxInt) } } @@ -205,7 +205,7 @@ func removeBoundsChecks(f *Func, m map[*Value]indVar) { if iv, has := m[ind]; has && sdom.isAncestorEq(iv.entry, b) && isNonNegative(iv.min) { if v.Args[1] == iv.max { if f.pass.debug > 0 { - f.Config.Warnl(b.Pos, "Found redundant %s", v.Op) + f.Warnl(b.Pos, "Found redundant %s", v.Op) } goto simplify } @@ -232,7 +232,7 @@ func removeBoundsChecks(f *Func, m map[*Value]indVar) { if iv, has := m[ind]; has && sdom.isAncestorEq(iv.entry, b) && isNonNegative(iv.min) { if v.Args[1].Op == OpSliceCap && iv.max.Op == OpSliceLen && v.Args[1].Args[0] == iv.max.Args[0] { if f.pass.debug > 0 { - f.Config.Warnl(b.Pos, "Found redundant %s (len promoted to cap)", v.Op) + f.Warnl(b.Pos, "Found redundant %s (len promoted to cap)", v.Op) } goto simplify } @@ -263,7 +263,7 @@ func removeBoundsChecks(f *Func, m map[*Value]indVar) { if max := iv.max.AuxInt + add; 0 <= max && max <= limit { // handle overflow if f.pass.debug > 0 { - f.Config.Warnl(b.Pos, "Found redundant (%s ind %d), ind < %d", v.Op, v.Args[1].AuxInt, iv.max.AuxInt+add) + f.Warnl(b.Pos, "Found redundant (%s ind %d), ind < %d", v.Op, v.Args[1].AuxInt, iv.max.AuxInt+add) } goto simplify } diff --git a/src/cmd/compile/internal/ssa/loopreschedchecks.go b/src/cmd/compile/internal/ssa/loopreschedchecks.go index dda0c86512..b6dfebd612 100644 --- a/src/cmd/compile/internal/ssa/loopreschedchecks.go +++ b/src/cmd/compile/internal/ssa/loopreschedchecks.go @@ -197,7 +197,7 @@ func insertLoopReschedChecks(f *Func) { // if sp < g.limit { goto sched } // goto header - pt := f.Config.Frontend().TypeUintptr() + pt := f.fe.TypeUintptr() g := test.NewValue1(bb.Pos, OpGetG, pt, mem0) sp := test.NewValue0(bb.Pos, OpSP, pt) cmpOp := OpLess64U @@ -206,7 +206,7 @@ func insertLoopReschedChecks(f *Func) { } limaddr := test.NewValue1I(bb.Pos, OpOffPtr, pt, 2*pt.Size(), g) lim := test.NewValue2(bb.Pos, OpLoad, pt, limaddr, mem0) - cmp := test.NewValue2(bb.Pos, cmpOp, f.Config.fe.TypeBool(), sp, lim) + cmp := test.NewValue2(bb.Pos, cmpOp, f.fe.TypeBool(), sp, lim) test.SetControl(cmp) // if true, goto sched @@ -224,7 +224,7 @@ func insertLoopReschedChecks(f *Func) { // sched: // mem1 := call resched (mem0) // goto header - resched := f.Config.fe.Syslook("goschedguarded") + resched := f.fe.Syslook("goschedguarded") mem1 := sched.NewValue1A(bb.Pos, OpStaticCall, TypeMem, resched, mem0) sched.AddEdgeTo(h) headerMemPhi.AddArg(mem1) diff --git a/src/cmd/compile/internal/ssa/nilcheck.go b/src/cmd/compile/internal/ssa/nilcheck.go index c63b7d2200..d01edcc77d 100644 --- a/src/cmd/compile/internal/ssa/nilcheck.go +++ b/src/cmd/compile/internal/ssa/nilcheck.go @@ -122,8 +122,8 @@ func nilcheckelim(f *Func) { // This is a redundant implicit nil check. 
// Logging in the style of the former compiler -- and omit line 1, // which is usually in generated code. - if f.Config.Debug_checknil() && v.Pos.Line() > 1 { - f.Config.Warnl(v.Pos, "removed nil check") + if f.fe.Debug_checknil() && v.Pos.Line() > 1 { + f.Warnl(v.Pos, "removed nil check") } v.reset(OpUnknown) // TODO: f.freeValue(v) @@ -171,8 +171,8 @@ func nilcheckelim2(f *Func) { for i := len(b.Values) - 1; i >= 0; i-- { v := b.Values[i] if opcodeTable[v.Op].nilCheck && unnecessary.contains(v.Args[0].ID) { - if f.Config.Debug_checknil() && v.Pos.Line() > 1 { - f.Config.Warnl(v.Pos, "removed nil check") + if f.fe.Debug_checknil() && v.Pos.Line() > 1 { + f.Warnl(v.Pos, "removed nil check") } v.reset(OpUnknown) continue diff --git a/src/cmd/compile/internal/ssa/nilcheck_test.go b/src/cmd/compile/internal/ssa/nilcheck_test.go index 8f403fbb01..ae78266b82 100644 --- a/src/cmd/compile/internal/ssa/nilcheck_test.go +++ b/src/cmd/compile/internal/ssa/nilcheck_test.go @@ -40,8 +40,8 @@ func benchmarkNilCheckDeep(b *testing.B, depth int) { Bloc("exit", Exit("mem")), ) - c := NewConfig("amd64", DummyFrontend{b}, nil, true) - fun := Fun(c, "entry", blocs...) + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{b}, "entry", blocs...) CheckFunc(fun.f) b.SetBytes(int64(depth)) // helps for eyeballing linearity @@ -64,8 +64,8 @@ func isNilCheck(b *Block) bool { // TestNilcheckSimple verifies that a second repeated nilcheck is removed. func TestNilcheckSimple(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -101,8 +101,8 @@ func TestNilcheckSimple(t *testing.T) { // on the order of the dominees. func TestNilcheckDomOrder(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -137,8 +137,8 @@ func TestNilcheckDomOrder(t *testing.T) { // TestNilcheckAddr verifies that nilchecks of OpAddr constructed values are removed. func TestNilcheckAddr(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -170,8 +170,8 @@ func TestNilcheckAddr(t *testing.T) { // TestNilcheckAddPtr verifies that nilchecks of OpAddPtr constructed values are removed. func TestNilcheckAddPtr(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -205,8 +205,8 @@ func TestNilcheckAddPtr(t *testing.T) { // non-nil are removed. 
func TestNilcheckPhi(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -249,8 +249,8 @@ func TestNilcheckPhi(t *testing.T) { // are removed, but checks of different pointers are not. func TestNilcheckKeepRemove(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -297,8 +297,8 @@ func TestNilcheckKeepRemove(t *testing.T) { // block are *not* removed. func TestNilcheckInFalseBranch(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -348,8 +348,8 @@ func TestNilcheckInFalseBranch(t *testing.T) { // wil remove the generated nil check. func TestNilcheckUser(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), @@ -387,8 +387,8 @@ func TestNilcheckUser(t *testing.T) { // TestNilcheckBug reproduces a bug in nilcheckelim found by compiling math/big func TestNilcheckBug(t *testing.T) { ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing - c := NewConfig("amd64", DummyFrontend{t}, nil, true) - fun := Fun(c, "entry", + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("sb", OpSB, TypeInvalid, 0, nil), diff --git a/src/cmd/compile/internal/ssa/passbm_test.go b/src/cmd/compile/internal/ssa/passbm_test.go index f40690a83a..c431f89ea5 100644 --- a/src/cmd/compile/internal/ssa/passbm_test.go +++ b/src/cmd/compile/internal/ssa/passbm_test.go @@ -33,8 +33,8 @@ func BenchmarkMultiPassBlock(b *testing.B) { benchFnBlock(b, multi, genFunction) // benchFnPass runs passFunc b.N times across a single function. func benchFnPass(b *testing.B, fn passFunc, size int, bg blockGen) { b.ReportAllocs() - c := NewConfig("amd64", DummyFrontend{b}, nil, true) - fun := Fun(c, "entry", bg(size)...) + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{b}, "entry", bg(size)...) CheckFunc(fun.f) b.ResetTimer() for i := 0; i < b.N; i++ { @@ -48,8 +48,8 @@ func benchFnPass(b *testing.B, fn passFunc, size int, bg blockGen) { // benchFnPass runs passFunc across a function with b.N blocks. func benchFnBlock(b *testing.B, fn passFunc, bg blockGen) { b.ReportAllocs() - c := NewConfig("amd64", DummyFrontend{b}, nil, true) - fun := Fun(c, "entry", bg(b.N)...) + c := NewConfig("amd64", nil, true) + fun := Fun(c, DummyFrontend{b}, "entry", bg(b.N)...) 
CheckFunc(fun.f) b.ResetTimer() for i := 0; i < passCount; i++ { diff --git a/src/cmd/compile/internal/ssa/phielim.go b/src/cmd/compile/internal/ssa/phielim.go index 0c85b6fb12..761cb7a392 100644 --- a/src/cmd/compile/internal/ssa/phielim.go +++ b/src/cmd/compile/internal/ssa/phielim.go @@ -63,7 +63,7 @@ func phielimValue(v *Value) bool { v.SetArgs1(w) f := v.Block.Func if f.pass.debug > 0 { - f.Config.Warnl(v.Pos, "eliminated phi") + f.Warnl(v.Pos, "eliminated phi") } return true } diff --git a/src/cmd/compile/internal/ssa/phiopt.go b/src/cmd/compile/internal/ssa/phiopt.go index d1b7cd53dd..60c8e58bd2 100644 --- a/src/cmd/compile/internal/ssa/phiopt.go +++ b/src/cmd/compile/internal/ssa/phiopt.go @@ -81,7 +81,7 @@ func phiopt(f *Func) { v.reset(ops[v.Args[reverse].AuxInt]) v.AddArg(b0.Control) if f.pass.debug > 0 { - f.Config.Warnl(b.Pos, "converted OpPhi to %v", v.Op) + f.Warnl(b.Pos, "converted OpPhi to %v", v.Op) } continue } @@ -97,7 +97,7 @@ func phiopt(f *Func) { v.reset(OpOrB) v.SetArgs2(b0.Control, tmp) if f.pass.debug > 0 { - f.Config.Warnl(b.Pos, "converted OpPhi to %v", v.Op) + f.Warnl(b.Pos, "converted OpPhi to %v", v.Op) } continue } @@ -113,7 +113,7 @@ func phiopt(f *Func) { v.reset(OpAndB) v.SetArgs2(b0.Control, tmp) if f.pass.debug > 0 { - f.Config.Warnl(b.Pos, "converted OpPhi to %v", v.Op) + f.Warnl(b.Pos, "converted OpPhi to %v", v.Op) } continue } @@ -169,6 +169,6 @@ func phioptint(v *Value, b0 *Block, reverse int) { f := b0.Func if f.pass.debug > 0 { - f.Config.Warnl(v.Block.Pos, "converted OpPhi bool -> int%d", v.Type.Size()*8) + f.Warnl(v.Block.Pos, "converted OpPhi bool -> int%d", v.Type.Size()*8) } } diff --git a/src/cmd/compile/internal/ssa/print.go b/src/cmd/compile/internal/ssa/print.go index 01b8083b95..d2a87eb615 100644 --- a/src/cmd/compile/internal/ssa/print.go +++ b/src/cmd/compile/internal/ssa/print.go @@ -62,7 +62,7 @@ func (p stringFuncPrinter) endBlock(b *Block) { func (p stringFuncPrinter) value(v *Value, live bool) { fmt.Fprint(p.w, " ") - //fmt.Fprint(p.w, v.Block.Func.Config.fe.Pos(v.Pos)) + //fmt.Fprint(p.w, v.Block.Func.fe.Pos(v.Pos)) //fmt.Fprint(p.w, ": ") fmt.Fprint(p.w, v.LongString()) if !live { diff --git a/src/cmd/compile/internal/ssa/prove.go b/src/cmd/compile/internal/ssa/prove.go index 37c92ae544..a4473311ba 100644 --- a/src/cmd/compile/internal/ssa/prove.go +++ b/src/cmd/compile/internal/ssa/prove.go @@ -307,7 +307,7 @@ func (ft *factsTable) update(parent *Block, v, w *Value, d domain, r relation) { ft.limitStack = append(ft.limitStack, limitFact{v.ID, old}) ft.limits[v.ID] = lim if v.Block.Func.pass.debug > 2 { - v.Block.Func.Config.Warnl(parent.Pos, "parent=%s, new limits %s %s %s", parent, v, w, lim.String()) + v.Block.Func.Warnl(parent.Pos, "parent=%s, new limits %s %s %s", parent, v, w, lim.String()) } } } @@ -657,7 +657,7 @@ func simplifyBlock(ft *factsTable, b *Block) branch { v.reset(OpConst32) } if b.Func.pass.debug > 0 { - b.Func.Config.Warnl(v.Pos, "Proved slicemask not needed") + b.Func.Warnl(v.Pos, "Proved slicemask not needed") } v.AuxInt = -1 } @@ -672,9 +672,9 @@ func simplifyBlock(ft *factsTable, b *Block) branch { if m == lt|gt { if b.Func.pass.debug > 0 { if b.Func.pass.debug > 1 { - b.Func.Config.Warnl(b.Pos, "Proved boolean %s (%s)", b.Control.Op, b.Control) + b.Func.Warnl(b.Pos, "Proved boolean %s (%s)", b.Control.Op, b.Control) } else { - b.Func.Config.Warnl(b.Pos, "Proved boolean %s", b.Control.Op) + b.Func.Warnl(b.Pos, "Proved boolean %s", b.Control.Op) } } return positive @@ -682,9 +682,9 @@ func 
simplifyBlock(ft *factsTable, b *Block) branch { if m == eq { if b.Func.pass.debug > 0 { if b.Func.pass.debug > 1 { - b.Func.Config.Warnl(b.Pos, "Disproved boolean %s (%s)", b.Control.Op, b.Control) + b.Func.Warnl(b.Pos, "Disproved boolean %s (%s)", b.Control.Op, b.Control) } else { - b.Func.Config.Warnl(b.Pos, "Disproved boolean %s", b.Control.Op) + b.Func.Warnl(b.Pos, "Disproved boolean %s", b.Control.Op) } } return negative @@ -713,9 +713,9 @@ func simplifyBlock(ft *factsTable, b *Block) branch { if m != 0 && tr.r&m == m { if b.Func.pass.debug > 0 { if b.Func.pass.debug > 1 { - b.Func.Config.Warnl(b.Pos, "Proved %s (%s)", c.Op, c) + b.Func.Warnl(b.Pos, "Proved %s (%s)", c.Op, c) } else { - b.Func.Config.Warnl(b.Pos, "Proved %s", c.Op) + b.Func.Warnl(b.Pos, "Proved %s", c.Op) } } return positive @@ -723,9 +723,9 @@ func simplifyBlock(ft *factsTable, b *Block) branch { if m != 0 && ((lt|eq|gt)^tr.r)&m == m { if b.Func.pass.debug > 0 { if b.Func.pass.debug > 1 { - b.Func.Config.Warnl(b.Pos, "Disproved %s (%s)", c.Op, c) + b.Func.Warnl(b.Pos, "Disproved %s (%s)", c.Op, c) } else { - b.Func.Config.Warnl(b.Pos, "Disproved %s", c.Op) + b.Func.Warnl(b.Pos, "Disproved %s", c.Op) } } return negative @@ -742,9 +742,9 @@ func simplifyBlock(ft *factsTable, b *Block) branch { if m != 0 && tr.r&m == m { if b.Func.pass.debug > 0 { if b.Func.pass.debug > 1 { - b.Func.Config.Warnl(b.Pos, "Proved non-negative bounds %s (%s)", c.Op, c) + b.Func.Warnl(b.Pos, "Proved non-negative bounds %s (%s)", c.Op, c) } else { - b.Func.Config.Warnl(b.Pos, "Proved non-negative bounds %s", c.Op) + b.Func.Warnl(b.Pos, "Proved non-negative bounds %s", c.Op) } } return positive diff --git a/src/cmd/compile/internal/ssa/regalloc.go b/src/cmd/compile/internal/ssa/regalloc.go index cf305b027e..03f1f7ce32 100644 --- a/src/cmd/compile/internal/ssa/regalloc.go +++ b/src/cmd/compile/internal/ssa/regalloc.go @@ -469,7 +469,7 @@ func (s *regAllocState) allocValToReg(v *Value, mask regMask, nospill bool, pos // Load v from its spill location. spill := s.makeSpill(v, s.curBlock) if s.f.pass.debug > logSpills { - s.f.Config.Warnl(vi.spill.Pos, "load spill for %v from %v", v, spill) + s.f.Warnl(vi.spill.Pos, "load spill for %v from %v", v, spill) } c = s.curBlock.NewValue1(pos, OpLoadReg, v.Type, spill) } @@ -575,7 +575,7 @@ func (s *regAllocState) init(f *Func) { case "s390x": // nothing to do, R10 & R11 already reserved default: - s.f.Config.fe.Fatalf(src.NoXPos, "arch %s not implemented", s.f.Config.arch) + s.f.fe.Fatalf(src.NoXPos, "arch %s not implemented", s.f.Config.arch) } } if s.f.Config.nacl { @@ -2056,9 +2056,9 @@ func (e *edgeState) findRegFor(typ Type) Location { // Which registers are possibilities. var m regMask if typ.IsFloat() { - m = e.s.compatRegs(e.s.f.Config.fe.TypeFloat64()) + m = e.s.compatRegs(e.s.f.fe.TypeFloat64()) } else { - m = e.s.compatRegs(e.s.f.Config.fe.TypeInt64()) + m = e.s.compatRegs(e.s.f.fe.TypeInt64()) } // Pick a register. In priority order: @@ -2082,8 +2082,8 @@ func (e *edgeState) findRegFor(typ Type) Location { // No register is available. Allocate a temp location to spill a register to. // The type of the slot is immaterial - it will not be live across // any safepoint. Just use a type big enough to hold any register. - typ = e.s.f.Config.fe.TypeInt64() - t := LocalSlot{e.s.f.Config.fe.Auto(typ), typ, 0} + typ = e.s.f.fe.TypeInt64() + t := LocalSlot{e.s.f.fe.Auto(typ), typ, 0} // TODO: reuse these slots. // Pick a register to spill. 
diff --git a/src/cmd/compile/internal/ssa/regalloc_test.go b/src/cmd/compile/internal/ssa/regalloc_test.go index cf8f452d12..55ed6d769e 100644 --- a/src/cmd/compile/internal/ssa/regalloc_test.go +++ b/src/cmd/compile/internal/ssa/regalloc_test.go @@ -8,7 +8,7 @@ import "testing" func TestLiveControlOps(t *testing.T) { c := testConfig(t) - f := Fun(c, "entry", + f := Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem", OpInitMem, TypeMem, 0, nil), Valu("x", OpAMD64MOVLconst, TypeInt8, 1, nil), diff --git a/src/cmd/compile/internal/ssa/rewrite.go b/src/cmd/compile/internal/ssa/rewrite.go index 68bd0ad66c..4ee23b7362 100644 --- a/src/cmd/compile/internal/ssa/rewrite.go +++ b/src/cmd/compile/internal/ssa/rewrite.go @@ -392,12 +392,12 @@ func devirt(v *Value, sym interface{}, offset int64) *obj.LSym { if !ok { return nil } - lsym := f.Config.Frontend().DerefItab(ext.Sym, offset) + lsym := f.fe.DerefItab(ext.Sym, offset) if f.pass.debug > 0 { if lsym != nil { - f.Config.Warnl(v.Pos, "de-virtualizing call") + f.Warnl(v.Pos, "de-virtualizing call") } else { - f.Config.Warnl(v.Pos, "couldn't de-virtualize call") + f.Warnl(v.Pos, "couldn't de-virtualize call") } } return lsym @@ -510,7 +510,7 @@ func noteRule(s string) bool { // cond is true and the rule is fired. func warnRule(cond bool, v *Value, s string) bool { if cond { - v.Block.Func.Config.Warnl(v.Pos, s) + v.Block.Func.Warnl(v.Pos, s) } return true } diff --git a/src/cmd/compile/internal/ssa/rewrite386.go b/src/cmd/compile/internal/ssa/rewrite386.go index 2d0eda6e50..f96b0c63d5 100644 --- a/src/cmd/compile/internal/ssa/rewrite386.go +++ b/src/cmd/compile/internal/ssa/rewrite386.go @@ -4354,7 +4354,7 @@ func rewriteValue386_Op386MOVSDconst(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (MOVSDconst [c]) // cond: config.ctxt.Flag_shared @@ -4843,7 +4843,7 @@ func rewriteValue386_Op386MOVSSconst(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (MOVSSconst [c]) // cond: config.ctxt.Flag_shared @@ -7212,9 +7212,7 @@ func rewriteValue386_Op386NOTL(v *Value) bool { func rewriteValue386_Op386ORL(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (ORL x (MOVLconst [c])) // cond: @@ -9892,9 +9890,7 @@ func rewriteValue386_OpDiv64F(v *Value) bool { func rewriteValue386_OpDiv8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8 x y) // cond: @@ -9915,9 +9911,7 @@ func rewriteValue386_OpDiv8(v *Value) bool { func rewriteValue386_OpDiv8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8u x y) // cond: @@ -11169,9 +11163,7 @@ func rewriteValue386_OpMod32u(v *Value) bool { func rewriteValue386_OpMod8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8 x y) // cond: @@ -11192,9 +11184,7 @@ func rewriteValue386_OpMod8(v *Value) bool { func rewriteValue386_OpMod8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8u x y) // cond: @@ -11217,7 +11207,7 @@ func rewriteValue386_OpMove(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Move [0] _ _ mem) // cond: @@ -11605,7 
+11595,7 @@ func rewriteValue386_OpNeg32F(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neg32F x) // cond: !config.use387 @@ -11641,7 +11631,7 @@ func rewriteValue386_OpNeg64F(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neg64F x) // cond: !config.use387 @@ -12965,7 +12955,7 @@ func rewriteValue386_OpZero(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Zero [0] _ mem) // cond: @@ -13291,7 +13281,7 @@ func rewriteValue386_OpZeromask(v *Value) bool { func rewriteBlock386(b *Block) bool { config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe switch b.Kind { case Block386EQ: diff --git a/src/cmd/compile/internal/ssa/rewriteAMD64.go b/src/cmd/compile/internal/ssa/rewriteAMD64.go index 548d5abfa1..223b470cad 100644 --- a/src/cmd/compile/internal/ssa/rewriteAMD64.go +++ b/src/cmd/compile/internal/ssa/rewriteAMD64.go @@ -6786,9 +6786,7 @@ func rewriteValueAMD64_OpAMD64MOVLstore(v *Value) bool { func rewriteValueAMD64_OpAMD64MOVLstoreconst(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (MOVLstoreconst [sc] {s} (ADDQconst [off] ptr) mem) // cond: ValAndOff(sc).canAdd(off) @@ -6994,9 +6992,7 @@ func rewriteValueAMD64_OpAMD64MOVLstoreconst(v *Value) bool { func rewriteValueAMD64_OpAMD64MOVLstoreconstidx1(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (MOVLstoreconstidx1 [c] {sym} ptr (SHLQconst [2] idx) mem) // cond: @@ -7108,9 +7104,7 @@ func rewriteValueAMD64_OpAMD64MOVLstoreconstidx1(v *Value) bool { func rewriteValueAMD64_OpAMD64MOVLstoreconstidx4(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (MOVLstoreconstidx4 [x] {sym} (ADDQconst [c] ptr) idx mem) // cond: @@ -12043,9 +12037,7 @@ func rewriteValueAMD64_OpAMD64NOTQ(v *Value) bool { func rewriteValueAMD64_OpAMD64ORL(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (ORL x (MOVLconst [c])) // cond: @@ -12911,9 +12903,7 @@ func rewriteValueAMD64_OpAMD64ORLconst(v *Value) bool { func rewriteValueAMD64_OpAMD64ORQ(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (ORQ x (MOVQconst [c])) // cond: is32Bit(c) @@ -17365,9 +17355,7 @@ func rewriteValueAMD64_OpAndB(v *Value) bool { func rewriteValueAMD64_OpAtomicAdd32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (AtomicAdd32 ptr val mem) // cond: @@ -17389,9 +17377,7 @@ func rewriteValueAMD64_OpAtomicAdd32(v *Value) bool { func rewriteValueAMD64_OpAtomicAdd64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (AtomicAdd64 ptr val mem) // cond: @@ -17568,9 +17554,7 @@ func rewriteValueAMD64_OpAtomicOr8(v *Value) bool { func rewriteValueAMD64_OpAtomicStore32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (AtomicStore32 ptr val mem) // cond: @@ -17591,9 +17575,7 @@ func rewriteValueAMD64_OpAtomicStore32(v *Value) bool { func rewriteValueAMD64_OpAtomicStore64(v 
*Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (AtomicStore64 ptr val mem) // cond: @@ -17616,7 +17598,7 @@ func rewriteValueAMD64_OpAtomicStorePtrNoWB(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (AtomicStorePtrNoWB ptr val mem) // cond: config.PtrSize == 8 @@ -17672,9 +17654,7 @@ func rewriteValueAMD64_OpAvg64u(v *Value) bool { func rewriteValueAMD64_OpBitLen32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (BitLen32 x) // cond: @@ -17691,9 +17671,7 @@ func rewriteValueAMD64_OpBitLen32(v *Value) bool { func rewriteValueAMD64_OpBitLen64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (BitLen64 x) // cond: @@ -17952,9 +17930,7 @@ func rewriteValueAMD64_OpConvert(v *Value) bool { func rewriteValueAMD64_OpCtz32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Ctz32 x) // cond: @@ -17976,9 +17952,7 @@ func rewriteValueAMD64_OpCtz32(v *Value) bool { func rewriteValueAMD64_OpCtz64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Ctz64 x) // cond: @@ -18131,9 +18105,7 @@ func rewriteValueAMD64_OpDiv128u(v *Value) bool { func rewriteValueAMD64_OpDiv16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16 x y) // cond: @@ -18152,9 +18124,7 @@ func rewriteValueAMD64_OpDiv16(v *Value) bool { func rewriteValueAMD64_OpDiv16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16u x y) // cond: @@ -18173,9 +18143,7 @@ func rewriteValueAMD64_OpDiv16u(v *Value) bool { func rewriteValueAMD64_OpDiv32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32 x y) // cond: @@ -18207,9 +18175,7 @@ func rewriteValueAMD64_OpDiv32F(v *Value) bool { func rewriteValueAMD64_OpDiv32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32u x y) // cond: @@ -18228,9 +18194,7 @@ func rewriteValueAMD64_OpDiv32u(v *Value) bool { func rewriteValueAMD64_OpDiv64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div64 x y) // cond: @@ -18262,9 +18226,7 @@ func rewriteValueAMD64_OpDiv64F(v *Value) bool { func rewriteValueAMD64_OpDiv64u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div64u x y) // cond: @@ -18283,9 +18245,7 @@ func rewriteValueAMD64_OpDiv64u(v *Value) bool { func rewriteValueAMD64_OpDiv8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8 x y) // cond: @@ -18308,9 +18268,7 @@ func rewriteValueAMD64_OpDiv8(v *Value) bool { func rewriteValueAMD64_OpDiv8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8u x y) // cond: @@ -19822,9 +19780,7 @@ func rewriteValueAMD64_OpLsh8x8(v *Value) bool { func rewriteValueAMD64_OpMod16(v 
*Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16 x y) // cond: @@ -19843,9 +19799,7 @@ func rewriteValueAMD64_OpMod16(v *Value) bool { func rewriteValueAMD64_OpMod16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16u x y) // cond: @@ -19864,9 +19818,7 @@ func rewriteValueAMD64_OpMod16u(v *Value) bool { func rewriteValueAMD64_OpMod32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32 x y) // cond: @@ -19885,9 +19837,7 @@ func rewriteValueAMD64_OpMod32(v *Value) bool { func rewriteValueAMD64_OpMod32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32u x y) // cond: @@ -19906,9 +19856,7 @@ func rewriteValueAMD64_OpMod32u(v *Value) bool { func rewriteValueAMD64_OpMod64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod64 x y) // cond: @@ -19927,9 +19875,7 @@ func rewriteValueAMD64_OpMod64(v *Value) bool { func rewriteValueAMD64_OpMod64u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod64u x y) // cond: @@ -19948,9 +19894,7 @@ func rewriteValueAMD64_OpMod64u(v *Value) bool { func rewriteValueAMD64_OpMod8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8 x y) // cond: @@ -19973,9 +19917,7 @@ func rewriteValueAMD64_OpMod8(v *Value) bool { func rewriteValueAMD64_OpMod8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8u x y) // cond: @@ -20000,7 +19942,7 @@ func rewriteValueAMD64_OpMove(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Move [0] _ _ mem) // cond: @@ -20469,9 +20411,7 @@ func rewriteValueAMD64_OpNeg32(v *Value) bool { func rewriteValueAMD64_OpNeg32F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neg32F x) // cond: @@ -20500,9 +20440,7 @@ func rewriteValueAMD64_OpNeg64(v *Value) bool { func rewriteValueAMD64_OpNeg64F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neg64F x) // cond: @@ -20716,7 +20654,7 @@ func rewriteValueAMD64_OpOffPtr(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (OffPtr [off] ptr) // cond: config.PtrSize == 8 && is32Bit(off) @@ -22195,7 +22133,7 @@ func rewriteValueAMD64_OpZero(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Zero [0] _ mem) // cond: @@ -22574,7 +22512,7 @@ func rewriteValueAMD64_OpZeroExt8to64(v *Value) bool { func rewriteBlockAMD64(b *Block) bool { config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe switch b.Kind { case BlockAMD64EQ: diff --git a/src/cmd/compile/internal/ssa/rewriteARM.go b/src/cmd/compile/internal/ssa/rewriteARM.go index 665eec36c8..1a8e915bf7 100644 --- a/src/cmd/compile/internal/ssa/rewriteARM.go +++ b/src/cmd/compile/internal/ssa/rewriteARM.go @@ -13209,9 +13209,7 @@ func 
rewriteValueARM_OpCvt64Fto32U(v *Value) bool { func rewriteValueARM_OpDiv16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16 x y) // cond: @@ -13232,9 +13230,7 @@ func rewriteValueARM_OpDiv16(v *Value) bool { func rewriteValueARM_OpDiv16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16u x y) // cond: @@ -13257,7 +13253,7 @@ func rewriteValueARM_OpDiv32(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32 x y) // cond: @@ -13328,7 +13324,7 @@ func rewriteValueARM_OpDiv32u(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32u x y) // cond: @@ -13362,9 +13358,7 @@ func rewriteValueARM_OpDiv64F(v *Value) bool { func rewriteValueARM_OpDiv8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8 x y) // cond: @@ -13385,9 +13379,7 @@ func rewriteValueARM_OpDiv8(v *Value) bool { func rewriteValueARM_OpDiv8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8u x y) // cond: @@ -13408,9 +13400,7 @@ func rewriteValueARM_OpDiv8u(v *Value) bool { func rewriteValueARM_OpEq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq16 x y) // cond: @@ -13484,9 +13474,7 @@ func rewriteValueARM_OpEq64F(v *Value) bool { func rewriteValueARM_OpEq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq8 x y) // cond: @@ -13509,9 +13497,7 @@ func rewriteValueARM_OpEq8(v *Value) bool { func rewriteValueARM_OpEqB(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqB x y) // cond: @@ -13548,9 +13534,7 @@ func rewriteValueARM_OpEqPtr(v *Value) bool { func rewriteValueARM_OpGeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16 x y) // cond: @@ -13573,9 +13557,7 @@ func rewriteValueARM_OpGeq16(v *Value) bool { func rewriteValueARM_OpGeq16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16U x y) // cond: @@ -13666,9 +13648,7 @@ func rewriteValueARM_OpGeq64F(v *Value) bool { func rewriteValueARM_OpGeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8 x y) // cond: @@ -13691,9 +13671,7 @@ func rewriteValueARM_OpGeq8(v *Value) bool { func rewriteValueARM_OpGeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8U x y) // cond: @@ -13725,9 +13703,7 @@ func rewriteValueARM_OpGetClosurePtr(v *Value) bool { func rewriteValueARM_OpGreater16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater16 x y) // cond: @@ -13750,9 +13726,7 @@ func rewriteValueARM_OpGreater16(v *Value) bool { func rewriteValueARM_OpGreater16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe 
// match: (Greater16U x y) // cond: @@ -13843,9 +13817,7 @@ func rewriteValueARM_OpGreater64F(v *Value) bool { func rewriteValueARM_OpGreater8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8 x y) // cond: @@ -13868,9 +13840,7 @@ func rewriteValueARM_OpGreater8(v *Value) bool { func rewriteValueARM_OpGreater8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8U x y) // cond: @@ -13984,9 +13954,7 @@ func rewriteValueARM_OpIsSliceInBounds(v *Value) bool { func rewriteValueARM_OpLeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16 x y) // cond: @@ -14009,9 +13977,7 @@ func rewriteValueARM_OpLeq16(v *Value) bool { func rewriteValueARM_OpLeq16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16U x y) // cond: @@ -14102,9 +14068,7 @@ func rewriteValueARM_OpLeq64F(v *Value) bool { func rewriteValueARM_OpLeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq8 x y) // cond: @@ -14127,9 +14091,7 @@ func rewriteValueARM_OpLeq8(v *Value) bool { func rewriteValueARM_OpLeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq8U x y) // cond: @@ -14152,9 +14114,7 @@ func rewriteValueARM_OpLeq8U(v *Value) bool { func rewriteValueARM_OpLess16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16 x y) // cond: @@ -14177,9 +14137,7 @@ func rewriteValueARM_OpLess16(v *Value) bool { func rewriteValueARM_OpLess16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16U x y) // cond: @@ -14270,9 +14228,7 @@ func rewriteValueARM_OpLess64F(v *Value) bool { func rewriteValueARM_OpLess8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8 x y) // cond: @@ -14295,9 +14251,7 @@ func rewriteValueARM_OpLess8(v *Value) bool { func rewriteValueARM_OpLess8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8U x y) // cond: @@ -14443,9 +14397,7 @@ func rewriteValueARM_OpLoad(v *Value) bool { func rewriteValueARM_OpLsh16x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x16 x y) // cond: @@ -14532,9 +14484,7 @@ func rewriteValueARM_OpLsh16x64(v *Value) bool { func rewriteValueARM_OpLsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x8 x y) // cond: @@ -14553,9 +14503,7 @@ func rewriteValueARM_OpLsh16x8(v *Value) bool { func rewriteValueARM_OpLsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x16 x y) // cond: @@ -14642,9 +14590,7 @@ func rewriteValueARM_OpLsh32x64(v *Value) bool { func rewriteValueARM_OpLsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: 
(Lsh32x8 x y) // cond: @@ -14663,9 +14609,7 @@ func rewriteValueARM_OpLsh32x8(v *Value) bool { func rewriteValueARM_OpLsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x16 x y) // cond: @@ -14752,9 +14696,7 @@ func rewriteValueARM_OpLsh8x64(v *Value) bool { func rewriteValueARM_OpLsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x8 x y) // cond: @@ -14773,9 +14715,7 @@ func rewriteValueARM_OpLsh8x8(v *Value) bool { func rewriteValueARM_OpMod16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16 x y) // cond: @@ -14796,9 +14736,7 @@ func rewriteValueARM_OpMod16(v *Value) bool { func rewriteValueARM_OpMod16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16u x y) // cond: @@ -14821,7 +14759,7 @@ func rewriteValueARM_OpMod32(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32 x y) // cond: @@ -14873,7 +14811,7 @@ func rewriteValueARM_OpMod32u(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32u x y) // cond: @@ -14894,9 +14832,7 @@ func rewriteValueARM_OpMod32u(v *Value) bool { func rewriteValueARM_OpMod8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8 x y) // cond: @@ -14917,9 +14853,7 @@ func rewriteValueARM_OpMod8(v *Value) bool { func rewriteValueARM_OpMod8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8u x y) // cond: @@ -14942,7 +14876,7 @@ func rewriteValueARM_OpMove(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Move [0] _ _ mem) // cond: @@ -15348,9 +15282,7 @@ func rewriteValueARM_OpNeg8(v *Value) bool { func rewriteValueARM_OpNeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq16 x y) // cond: @@ -15424,9 +15356,7 @@ func rewriteValueARM_OpNeq64F(v *Value) bool { func rewriteValueARM_OpNeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq8 x y) // cond: @@ -15607,9 +15537,7 @@ func rewriteValueARM_OpRound64F(v *Value) bool { func rewriteValueARM_OpRsh16Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux16 x y) // cond: @@ -15639,9 +15567,7 @@ func rewriteValueARM_OpRsh16Ux16(v *Value) bool { func rewriteValueARM_OpRsh16Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux32 x y) // cond: @@ -15667,9 +15593,7 @@ func rewriteValueARM_OpRsh16Ux32(v *Value) bool { func rewriteValueARM_OpRsh16Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux64 x (Const64 [c])) // cond: uint64(c) < 16 @@ -15713,9 +15637,7 @@ func rewriteValueARM_OpRsh16Ux64(v *Value) bool { func rewriteValueARM_OpRsh16Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - 
_ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux8 x y) // cond: @@ -15736,9 +15658,7 @@ func rewriteValueARM_OpRsh16Ux8(v *Value) bool { func rewriteValueARM_OpRsh16x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x16 x y) // cond: @@ -15765,9 +15685,7 @@ func rewriteValueARM_OpRsh16x16(v *Value) bool { func rewriteValueARM_OpRsh16x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x32 x y) // cond: @@ -15790,9 +15708,7 @@ func rewriteValueARM_OpRsh16x32(v *Value) bool { func rewriteValueARM_OpRsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x64 x (Const64 [c])) // cond: uint64(c) < 16 @@ -15841,9 +15757,7 @@ func rewriteValueARM_OpRsh16x64(v *Value) bool { func rewriteValueARM_OpRsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x8 x y) // cond: @@ -15864,9 +15778,7 @@ func rewriteValueARM_OpRsh16x8(v *Value) bool { func rewriteValueARM_OpRsh32Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux16 x y) // cond: @@ -15953,9 +15865,7 @@ func rewriteValueARM_OpRsh32Ux64(v *Value) bool { func rewriteValueARM_OpRsh32Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux8 x y) // cond: @@ -15974,9 +15884,7 @@ func rewriteValueARM_OpRsh32Ux8(v *Value) bool { func rewriteValueARM_OpRsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x16 x y) // cond: @@ -16059,9 +15967,7 @@ func rewriteValueARM_OpRsh32x64(v *Value) bool { func rewriteValueARM_OpRsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x8 x y) // cond: @@ -16080,9 +15986,7 @@ func rewriteValueARM_OpRsh32x8(v *Value) bool { func rewriteValueARM_OpRsh8Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux16 x y) // cond: @@ -16112,9 +16016,7 @@ func rewriteValueARM_OpRsh8Ux16(v *Value) bool { func rewriteValueARM_OpRsh8Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux32 x y) // cond: @@ -16140,9 +16042,7 @@ func rewriteValueARM_OpRsh8Ux32(v *Value) bool { func rewriteValueARM_OpRsh8Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux64 x (Const64 [c])) // cond: uint64(c) < 8 @@ -16186,9 +16086,7 @@ func rewriteValueARM_OpRsh8Ux64(v *Value) bool { func rewriteValueARM_OpRsh8Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux8 x y) // cond: @@ -16209,9 +16107,7 @@ func rewriteValueARM_OpRsh8Ux8(v *Value) bool { func rewriteValueARM_OpRsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x16 x y) // cond: @@ -16238,9 +16134,7 @@ func rewriteValueARM_OpRsh8x16(v *Value) bool { func 
rewriteValueARM_OpRsh8x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x32 x y) // cond: @@ -16263,9 +16157,7 @@ func rewriteValueARM_OpRsh8x32(v *Value) bool { func rewriteValueARM_OpRsh8x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x64 x (Const64 [c])) // cond: uint64(c) < 8 @@ -16314,9 +16206,7 @@ func rewriteValueARM_OpRsh8x64(v *Value) bool { func rewriteValueARM_OpRsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x8 x y) // cond: @@ -16831,7 +16721,7 @@ func rewriteValueARM_OpZero(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Zero [0] _ mem) // cond: @@ -17112,9 +17002,7 @@ func rewriteValueARM_OpZeroExt8to32(v *Value) bool { func rewriteValueARM_OpZeromask(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Zeromask x) // cond: @@ -17134,7 +17022,7 @@ func rewriteValueARM_OpZeromask(v *Value) bool { func rewriteBlockARM(b *Block) bool { config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe switch b.Kind { case BlockARMEQ: diff --git a/src/cmd/compile/internal/ssa/rewriteARM64.go b/src/cmd/compile/internal/ssa/rewriteARM64.go index 67604f741c..5db99c2745 100644 --- a/src/cmd/compile/internal/ssa/rewriteARM64.go +++ b/src/cmd/compile/internal/ssa/rewriteARM64.go @@ -9509,9 +9509,7 @@ func rewriteValueARM64_OpAvg64u(v *Value) bool { func rewriteValueARM64_OpBitLen64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (BitLen64 x) // cond: @@ -9531,9 +9529,7 @@ func rewriteValueARM64_OpBitLen64(v *Value) bool { func rewriteValueARM64_OpBitRev16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (BitRev16 x) // cond: @@ -9573,9 +9569,7 @@ func rewriteValueARM64_OpBitRev64(v *Value) bool { func rewriteValueARM64_OpBitRev8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (BitRev8 x) // cond: @@ -10006,9 +10000,7 @@ func rewriteValueARM64_OpCvt64to64F(v *Value) bool { func rewriteValueARM64_OpDiv16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16 x y) // cond: @@ -10029,9 +10021,7 @@ func rewriteValueARM64_OpDiv16(v *Value) bool { func rewriteValueARM64_OpDiv16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16u x y) // cond: @@ -10130,9 +10120,7 @@ func rewriteValueARM64_OpDiv64u(v *Value) bool { func rewriteValueARM64_OpDiv8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8 x y) // cond: @@ -10153,9 +10141,7 @@ func rewriteValueARM64_OpDiv8(v *Value) bool { func rewriteValueARM64_OpDiv8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8u x y) // cond: @@ -10176,9 +10162,7 @@ func rewriteValueARM64_OpDiv8u(v *Value) bool { func rewriteValueARM64_OpEq16(v *Value) bool { b := v.Block _ = b - config := 
b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq16 x y) // cond: @@ -10269,9 +10253,7 @@ func rewriteValueARM64_OpEq64F(v *Value) bool { func rewriteValueARM64_OpEq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq8 x y) // cond: @@ -10294,9 +10276,7 @@ func rewriteValueARM64_OpEq8(v *Value) bool { func rewriteValueARM64_OpEqB(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqB x y) // cond: @@ -10335,9 +10315,7 @@ func rewriteValueARM64_OpEqPtr(v *Value) bool { func rewriteValueARM64_OpGeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16 x y) // cond: @@ -10360,9 +10338,7 @@ func rewriteValueARM64_OpGeq16(v *Value) bool { func rewriteValueARM64_OpGeq16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16U x y) // cond: @@ -10487,9 +10463,7 @@ func rewriteValueARM64_OpGeq64U(v *Value) bool { func rewriteValueARM64_OpGeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8 x y) // cond: @@ -10512,9 +10486,7 @@ func rewriteValueARM64_OpGeq8(v *Value) bool { func rewriteValueARM64_OpGeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8U x y) // cond: @@ -10546,9 +10518,7 @@ func rewriteValueARM64_OpGetClosurePtr(v *Value) bool { func rewriteValueARM64_OpGreater16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater16 x y) // cond: @@ -10571,9 +10541,7 @@ func rewriteValueARM64_OpGreater16(v *Value) bool { func rewriteValueARM64_OpGreater16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater16U x y) // cond: @@ -10698,9 +10666,7 @@ func rewriteValueARM64_OpGreater64U(v *Value) bool { func rewriteValueARM64_OpGreater8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8 x y) // cond: @@ -10723,9 +10689,7 @@ func rewriteValueARM64_OpGreater8(v *Value) bool { func rewriteValueARM64_OpGreater8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8U x y) // cond: @@ -10748,9 +10712,7 @@ func rewriteValueARM64_OpGreater8U(v *Value) bool { func rewriteValueARM64_OpHmul32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Hmul32 x y) // cond: @@ -10770,9 +10732,7 @@ func rewriteValueARM64_OpHmul32(v *Value) bool { func rewriteValueARM64_OpHmul32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Hmul32u x y) // cond: @@ -10883,9 +10843,7 @@ func rewriteValueARM64_OpIsSliceInBounds(v *Value) bool { func rewriteValueARM64_OpLeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16 x y) // cond: @@ -10908,9 +10866,7 @@ func rewriteValueARM64_OpLeq16(v *Value) bool { func rewriteValueARM64_OpLeq16U(v *Value) 
bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16U x y) // cond: @@ -11035,9 +10991,7 @@ func rewriteValueARM64_OpLeq64U(v *Value) bool { func rewriteValueARM64_OpLeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq8 x y) // cond: @@ -11060,9 +11014,7 @@ func rewriteValueARM64_OpLeq8(v *Value) bool { func rewriteValueARM64_OpLeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq8U x y) // cond: @@ -11085,9 +11037,7 @@ func rewriteValueARM64_OpLeq8U(v *Value) bool { func rewriteValueARM64_OpLess16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16 x y) // cond: @@ -11110,9 +11060,7 @@ func rewriteValueARM64_OpLess16(v *Value) bool { func rewriteValueARM64_OpLess16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16U x y) // cond: @@ -11237,9 +11185,7 @@ func rewriteValueARM64_OpLess64U(v *Value) bool { func rewriteValueARM64_OpLess8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8 x y) // cond: @@ -11262,9 +11208,7 @@ func rewriteValueARM64_OpLess8(v *Value) bool { func rewriteValueARM64_OpLess8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8U x y) // cond: @@ -11440,9 +11384,7 @@ func rewriteValueARM64_OpLoad(v *Value) bool { func rewriteValueARM64_OpLsh16x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x16 x y) // cond: @@ -11473,9 +11415,7 @@ func rewriteValueARM64_OpLsh16x16(v *Value) bool { func rewriteValueARM64_OpLsh16x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x32 x y) // cond: @@ -11565,9 +11505,7 @@ func rewriteValueARM64_OpLsh16x64(v *Value) bool { func rewriteValueARM64_OpLsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x8 x y) // cond: @@ -11598,9 +11536,7 @@ func rewriteValueARM64_OpLsh16x8(v *Value) bool { func rewriteValueARM64_OpLsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x16 x y) // cond: @@ -11631,9 +11567,7 @@ func rewriteValueARM64_OpLsh32x16(v *Value) bool { func rewriteValueARM64_OpLsh32x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x32 x y) // cond: @@ -11723,9 +11657,7 @@ func rewriteValueARM64_OpLsh32x64(v *Value) bool { func rewriteValueARM64_OpLsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x8 x y) // cond: @@ -11756,9 +11688,7 @@ func rewriteValueARM64_OpLsh32x8(v *Value) bool { func rewriteValueARM64_OpLsh64x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x16 x y) // cond: @@ -11789,9 +11719,7 @@ func rewriteValueARM64_OpLsh64x16(v *Value) bool { 
func rewriteValueARM64_OpLsh64x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x32 x y) // cond: @@ -11881,9 +11809,7 @@ func rewriteValueARM64_OpLsh64x64(v *Value) bool { func rewriteValueARM64_OpLsh64x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x8 x y) // cond: @@ -11914,9 +11840,7 @@ func rewriteValueARM64_OpLsh64x8(v *Value) bool { func rewriteValueARM64_OpLsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x16 x y) // cond: @@ -11947,9 +11871,7 @@ func rewriteValueARM64_OpLsh8x16(v *Value) bool { func rewriteValueARM64_OpLsh8x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x32 x y) // cond: @@ -12039,9 +11961,7 @@ func rewriteValueARM64_OpLsh8x64(v *Value) bool { func rewriteValueARM64_OpLsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x8 x y) // cond: @@ -12072,9 +11992,7 @@ func rewriteValueARM64_OpLsh8x8(v *Value) bool { func rewriteValueARM64_OpMod16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16 x y) // cond: @@ -12095,9 +12013,7 @@ func rewriteValueARM64_OpMod16(v *Value) bool { func rewriteValueARM64_OpMod16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16u x y) // cond: @@ -12170,9 +12086,7 @@ func rewriteValueARM64_OpMod64u(v *Value) bool { func rewriteValueARM64_OpMod8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8 x y) // cond: @@ -12193,9 +12107,7 @@ func rewriteValueARM64_OpMod8(v *Value) bool { func rewriteValueARM64_OpMod8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8u x y) // cond: @@ -12218,7 +12130,7 @@ func rewriteValueARM64_OpMove(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Move [0] _ _ mem) // cond: @@ -12740,9 +12652,7 @@ func rewriteValueARM64_OpNeg8(v *Value) bool { func rewriteValueARM64_OpNeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq16 x y) // cond: @@ -12833,9 +12743,7 @@ func rewriteValueARM64_OpNeq64F(v *Value) bool { func rewriteValueARM64_OpNeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq8 x y) // cond: @@ -12901,9 +12809,7 @@ func rewriteValueARM64_OpNilCheck(v *Value) bool { func rewriteValueARM64_OpNot(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Not x) // cond: @@ -13037,9 +12943,7 @@ func rewriteValueARM64_OpRound64F(v *Value) bool { func rewriteValueARM64_OpRsh16Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux16 x y) // cond: @@ -13072,9 +12976,7 @@ func rewriteValueARM64_OpRsh16Ux16(v *Value) bool { func rewriteValueARM64_OpRsh16Ux32(v *Value) 
bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux32 x y) // cond: @@ -13107,9 +13009,7 @@ func rewriteValueARM64_OpRsh16Ux32(v *Value) bool { func rewriteValueARM64_OpRsh16Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux64 x (MOVDconst [c])) // cond: uint64(c) < 16 @@ -13174,9 +13074,7 @@ func rewriteValueARM64_OpRsh16Ux64(v *Value) bool { func rewriteValueARM64_OpRsh16Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux8 x y) // cond: @@ -13209,9 +13107,7 @@ func rewriteValueARM64_OpRsh16Ux8(v *Value) bool { func rewriteValueARM64_OpRsh16x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x16 x y) // cond: @@ -13243,9 +13139,7 @@ func rewriteValueARM64_OpRsh16x16(v *Value) bool { func rewriteValueARM64_OpRsh16x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x32 x y) // cond: @@ -13277,9 +13171,7 @@ func rewriteValueARM64_OpRsh16x32(v *Value) bool { func rewriteValueARM64_OpRsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x64 x (MOVDconst [c])) // cond: uint64(c) < 16 @@ -13347,9 +13239,7 @@ func rewriteValueARM64_OpRsh16x64(v *Value) bool { func rewriteValueARM64_OpRsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x8 x y) // cond: @@ -13381,9 +13271,7 @@ func rewriteValueARM64_OpRsh16x8(v *Value) bool { func rewriteValueARM64_OpRsh32Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux16 x y) // cond: @@ -13416,9 +13304,7 @@ func rewriteValueARM64_OpRsh32Ux16(v *Value) bool { func rewriteValueARM64_OpRsh32Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux32 x y) // cond: @@ -13451,9 +13337,7 @@ func rewriteValueARM64_OpRsh32Ux32(v *Value) bool { func rewriteValueARM64_OpRsh32Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux64 x (MOVDconst [c])) // cond: uint64(c) < 32 @@ -13518,9 +13402,7 @@ func rewriteValueARM64_OpRsh32Ux64(v *Value) bool { func rewriteValueARM64_OpRsh32Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux8 x y) // cond: @@ -13553,9 +13435,7 @@ func rewriteValueARM64_OpRsh32Ux8(v *Value) bool { func rewriteValueARM64_OpRsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x16 x y) // cond: @@ -13587,9 +13467,7 @@ func rewriteValueARM64_OpRsh32x16(v *Value) bool { func rewriteValueARM64_OpRsh32x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x32 x y) // cond: @@ -13621,9 +13499,7 @@ func rewriteValueARM64_OpRsh32x32(v *Value) bool { func rewriteValueARM64_OpRsh32x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = 
config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x64 x (MOVDconst [c])) // cond: uint64(c) < 32 @@ -13691,9 +13567,7 @@ func rewriteValueARM64_OpRsh32x64(v *Value) bool { func rewriteValueARM64_OpRsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x8 x y) // cond: @@ -13725,9 +13599,7 @@ func rewriteValueARM64_OpRsh32x8(v *Value) bool { func rewriteValueARM64_OpRsh64Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux16 x y) // cond: @@ -13758,9 +13630,7 @@ func rewriteValueARM64_OpRsh64Ux16(v *Value) bool { func rewriteValueARM64_OpRsh64Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux32 x y) // cond: @@ -13850,9 +13720,7 @@ func rewriteValueARM64_OpRsh64Ux64(v *Value) bool { func rewriteValueARM64_OpRsh64Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux8 x y) // cond: @@ -13883,9 +13751,7 @@ func rewriteValueARM64_OpRsh64Ux8(v *Value) bool { func rewriteValueARM64_OpRsh64x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x16 x y) // cond: @@ -13915,9 +13781,7 @@ func rewriteValueARM64_OpRsh64x16(v *Value) bool { func rewriteValueARM64_OpRsh64x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x32 x y) // cond: @@ -14007,9 +13871,7 @@ func rewriteValueARM64_OpRsh64x64(v *Value) bool { func rewriteValueARM64_OpRsh64x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x8 x y) // cond: @@ -14039,9 +13901,7 @@ func rewriteValueARM64_OpRsh64x8(v *Value) bool { func rewriteValueARM64_OpRsh8Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux16 x y) // cond: @@ -14074,9 +13934,7 @@ func rewriteValueARM64_OpRsh8Ux16(v *Value) bool { func rewriteValueARM64_OpRsh8Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux32 x y) // cond: @@ -14109,9 +13967,7 @@ func rewriteValueARM64_OpRsh8Ux32(v *Value) bool { func rewriteValueARM64_OpRsh8Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux64 x (MOVDconst [c])) // cond: uint64(c) < 8 @@ -14176,9 +14032,7 @@ func rewriteValueARM64_OpRsh8Ux64(v *Value) bool { func rewriteValueARM64_OpRsh8Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux8 x y) // cond: @@ -14211,9 +14065,7 @@ func rewriteValueARM64_OpRsh8Ux8(v *Value) bool { func rewriteValueARM64_OpRsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x16 x y) // cond: @@ -14245,9 +14097,7 @@ func rewriteValueARM64_OpRsh8x16(v *Value) bool { func rewriteValueARM64_OpRsh8x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x32 x y) // cond: @@ -14279,9 +14129,7 @@ func 
rewriteValueARM64_OpRsh8x32(v *Value) bool { func rewriteValueARM64_OpRsh8x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x64 x (MOVDconst [c])) // cond: uint64(c) < 8 @@ -14349,9 +14197,7 @@ func rewriteValueARM64_OpRsh8x64(v *Value) bool { func rewriteValueARM64_OpRsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x8 x y) // cond: @@ -14814,7 +14660,7 @@ func rewriteValueARM64_OpZero(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Zero [0] _ mem) // cond: @@ -15208,7 +15054,7 @@ func rewriteValueARM64_OpZeroExt8to64(v *Value) bool { func rewriteBlockARM64(b *Block) bool { config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe switch b.Kind { case BlockARM64EQ: diff --git a/src/cmd/compile/internal/ssa/rewriteMIPS.go b/src/cmd/compile/internal/ssa/rewriteMIPS.go index af3496a91c..5e95c57678 100644 --- a/src/cmd/compile/internal/ssa/rewriteMIPS.go +++ b/src/cmd/compile/internal/ssa/rewriteMIPS.go @@ -684,7 +684,7 @@ func rewriteValueMIPS_OpAtomicAnd8(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (AtomicAnd8 ptr val mem) // cond: !config.BigEndian @@ -858,7 +858,7 @@ func rewriteValueMIPS_OpAtomicOr8(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (AtomicOr8 ptr val mem) // cond: !config.BigEndian @@ -984,9 +984,7 @@ func rewriteValueMIPS_OpAvg32u(v *Value) bool { func rewriteValueMIPS_OpBitLen32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (BitLen32 x) // cond: @@ -1149,9 +1147,7 @@ func rewriteValueMIPS_OpConvert(v *Value) bool { func rewriteValueMIPS_OpCtz32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Ctz32 x) // cond: @@ -1246,9 +1242,7 @@ func rewriteValueMIPS_OpCvt64Fto32F(v *Value) bool { func rewriteValueMIPS_OpDiv16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16 x y) // cond: @@ -1271,9 +1265,7 @@ func rewriteValueMIPS_OpDiv16(v *Value) bool { func rewriteValueMIPS_OpDiv16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16u x y) // cond: @@ -1296,9 +1288,7 @@ func rewriteValueMIPS_OpDiv16u(v *Value) bool { func rewriteValueMIPS_OpDiv32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32 x y) // cond: @@ -1330,9 +1320,7 @@ func rewriteValueMIPS_OpDiv32F(v *Value) bool { func rewriteValueMIPS_OpDiv32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32u x y) // cond: @@ -1364,9 +1352,7 @@ func rewriteValueMIPS_OpDiv64F(v *Value) bool { func rewriteValueMIPS_OpDiv8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8 x y) // cond: @@ -1389,9 +1375,7 @@ func rewriteValueMIPS_OpDiv8(v *Value) bool { func rewriteValueMIPS_OpDiv8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe 
:= b.Func.fe _ = fe // match: (Div8u x y) // cond: @@ -1414,9 +1398,7 @@ func rewriteValueMIPS_OpDiv8u(v *Value) bool { func rewriteValueMIPS_OpEq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq16 x y) // cond: @@ -1440,9 +1422,7 @@ func rewriteValueMIPS_OpEq16(v *Value) bool { func rewriteValueMIPS_OpEq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq32 x y) // cond: @@ -1496,9 +1476,7 @@ func rewriteValueMIPS_OpEq64F(v *Value) bool { func rewriteValueMIPS_OpEq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq8 x y) // cond: @@ -1522,9 +1500,7 @@ func rewriteValueMIPS_OpEq8(v *Value) bool { func rewriteValueMIPS_OpEqB(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqB x y) // cond: @@ -1544,9 +1520,7 @@ func rewriteValueMIPS_OpEqB(v *Value) bool { func rewriteValueMIPS_OpEqPtr(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqPtr x y) // cond: @@ -1566,9 +1540,7 @@ func rewriteValueMIPS_OpEqPtr(v *Value) bool { func rewriteValueMIPS_OpGeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16 x y) // cond: @@ -1592,9 +1564,7 @@ func rewriteValueMIPS_OpGeq16(v *Value) bool { func rewriteValueMIPS_OpGeq16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16U x y) // cond: @@ -1618,9 +1588,7 @@ func rewriteValueMIPS_OpGeq16U(v *Value) bool { func rewriteValueMIPS_OpGeq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq32 x y) // cond: @@ -1657,9 +1625,7 @@ func rewriteValueMIPS_OpGeq32F(v *Value) bool { func rewriteValueMIPS_OpGeq32U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq32U x y) // cond: @@ -1696,9 +1662,7 @@ func rewriteValueMIPS_OpGeq64F(v *Value) bool { func rewriteValueMIPS_OpGeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8 x y) // cond: @@ -1722,9 +1686,7 @@ func rewriteValueMIPS_OpGeq8(v *Value) bool { func rewriteValueMIPS_OpGeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8U x y) // cond: @@ -1757,9 +1719,7 @@ func rewriteValueMIPS_OpGetClosurePtr(v *Value) bool { func rewriteValueMIPS_OpGreater16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater16 x y) // cond: @@ -1780,9 +1740,7 @@ func rewriteValueMIPS_OpGreater16(v *Value) bool { func rewriteValueMIPS_OpGreater16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater16U x y) // cond: @@ -1863,9 +1821,7 @@ func rewriteValueMIPS_OpGreater64F(v *Value) bool { func rewriteValueMIPS_OpGreater8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8 x y) // cond: @@ 
-1886,9 +1842,7 @@ func rewriteValueMIPS_OpGreater8(v *Value) bool { func rewriteValueMIPS_OpGreater8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8U x y) // cond: @@ -1909,9 +1863,7 @@ func rewriteValueMIPS_OpGreater8U(v *Value) bool { func rewriteValueMIPS_OpHmul32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Hmul32 x y) // cond: @@ -1930,9 +1882,7 @@ func rewriteValueMIPS_OpHmul32(v *Value) bool { func rewriteValueMIPS_OpHmul32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Hmul32u x y) // cond: @@ -1979,9 +1929,7 @@ func rewriteValueMIPS_OpIsInBounds(v *Value) bool { func rewriteValueMIPS_OpIsNonNil(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (IsNonNil ptr) // cond: @@ -1999,9 +1947,7 @@ func rewriteValueMIPS_OpIsNonNil(v *Value) bool { func rewriteValueMIPS_OpIsSliceInBounds(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (IsSliceInBounds idx len) // cond: @@ -2021,9 +1967,7 @@ func rewriteValueMIPS_OpIsSliceInBounds(v *Value) bool { func rewriteValueMIPS_OpLeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16 x y) // cond: @@ -2047,9 +1991,7 @@ func rewriteValueMIPS_OpLeq16(v *Value) bool { func rewriteValueMIPS_OpLeq16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16U x y) // cond: @@ -2073,9 +2015,7 @@ func rewriteValueMIPS_OpLeq16U(v *Value) bool { func rewriteValueMIPS_OpLeq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq32 x y) // cond: @@ -2112,9 +2052,7 @@ func rewriteValueMIPS_OpLeq32F(v *Value) bool { func rewriteValueMIPS_OpLeq32U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq32U x y) // cond: @@ -2151,9 +2089,7 @@ func rewriteValueMIPS_OpLeq64F(v *Value) bool { func rewriteValueMIPS_OpLeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq8 x y) // cond: @@ -2177,9 +2113,7 @@ func rewriteValueMIPS_OpLeq8(v *Value) bool { func rewriteValueMIPS_OpLeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq8U x y) // cond: @@ -2203,9 +2137,7 @@ func rewriteValueMIPS_OpLeq8U(v *Value) bool { func rewriteValueMIPS_OpLess16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16 x y) // cond: @@ -2226,9 +2158,7 @@ func rewriteValueMIPS_OpLess16(v *Value) bool { func rewriteValueMIPS_OpLess16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16U x y) // cond: @@ -2309,9 +2239,7 @@ func rewriteValueMIPS_OpLess64F(v *Value) bool { func rewriteValueMIPS_OpLess8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8 x y) // cond: @@ 
-2332,9 +2260,7 @@ func rewriteValueMIPS_OpLess8(v *Value) bool { func rewriteValueMIPS_OpLess8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8U x y) // cond: @@ -2478,9 +2404,7 @@ func rewriteValueMIPS_OpLoad(v *Value) bool { func rewriteValueMIPS_OpLsh16x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x16 x y) // cond: @@ -2511,9 +2435,7 @@ func rewriteValueMIPS_OpLsh16x16(v *Value) bool { func rewriteValueMIPS_OpLsh16x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x32 x y) // cond: @@ -2577,9 +2499,7 @@ func rewriteValueMIPS_OpLsh16x64(v *Value) bool { func rewriteValueMIPS_OpLsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x8 x y) // cond: @@ -2610,9 +2530,7 @@ func rewriteValueMIPS_OpLsh16x8(v *Value) bool { func rewriteValueMIPS_OpLsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x16 x y) // cond: @@ -2643,9 +2561,7 @@ func rewriteValueMIPS_OpLsh32x16(v *Value) bool { func rewriteValueMIPS_OpLsh32x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x32 x y) // cond: @@ -2709,9 +2625,7 @@ func rewriteValueMIPS_OpLsh32x64(v *Value) bool { func rewriteValueMIPS_OpLsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x8 x y) // cond: @@ -2742,9 +2656,7 @@ func rewriteValueMIPS_OpLsh32x8(v *Value) bool { func rewriteValueMIPS_OpLsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x16 x y) // cond: @@ -2775,9 +2687,7 @@ func rewriteValueMIPS_OpLsh8x16(v *Value) bool { func rewriteValueMIPS_OpLsh8x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x32 x y) // cond: @@ -2841,9 +2751,7 @@ func rewriteValueMIPS_OpLsh8x64(v *Value) bool { func rewriteValueMIPS_OpLsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x8 x y) // cond: @@ -5998,9 +5906,7 @@ func rewriteValueMIPS_OpMIPSXORconst(v *Value) bool { func rewriteValueMIPS_OpMod16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16 x y) // cond: @@ -6023,9 +5929,7 @@ func rewriteValueMIPS_OpMod16(v *Value) bool { func rewriteValueMIPS_OpMod16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16u x y) // cond: @@ -6048,9 +5952,7 @@ func rewriteValueMIPS_OpMod16u(v *Value) bool { func rewriteValueMIPS_OpMod32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32 x y) // cond: @@ -6069,9 +5971,7 @@ func rewriteValueMIPS_OpMod32(v *Value) bool { func rewriteValueMIPS_OpMod32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32u x y) // cond: @@ -6090,9 
+5990,7 @@ func rewriteValueMIPS_OpMod32u(v *Value) bool { func rewriteValueMIPS_OpMod8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8 x y) // cond: @@ -6115,9 +6013,7 @@ func rewriteValueMIPS_OpMod8(v *Value) bool { func rewriteValueMIPS_OpMod8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8u x y) // cond: @@ -6142,7 +6038,7 @@ func rewriteValueMIPS_OpMove(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Move [0] _ _ mem) // cond: @@ -6740,9 +6636,7 @@ func rewriteValueMIPS_OpNeg8(v *Value) bool { func rewriteValueMIPS_OpNeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq16 x y) // cond: @@ -6768,9 +6662,7 @@ func rewriteValueMIPS_OpNeq16(v *Value) bool { func rewriteValueMIPS_OpNeq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq32 x y) // cond: @@ -6826,9 +6718,7 @@ func rewriteValueMIPS_OpNeq64F(v *Value) bool { func rewriteValueMIPS_OpNeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq8 x y) // cond: @@ -6867,9 +6757,7 @@ func rewriteValueMIPS_OpNeqB(v *Value) bool { func rewriteValueMIPS_OpNeqPtr(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (NeqPtr x y) // cond: @@ -7019,9 +6907,7 @@ func rewriteValueMIPS_OpRound64F(v *Value) bool { func rewriteValueMIPS_OpRsh16Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux16 x y) // cond: @@ -7054,9 +6940,7 @@ func rewriteValueMIPS_OpRsh16Ux16(v *Value) bool { func rewriteValueMIPS_OpRsh16Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux32 x y) // cond: @@ -7085,9 +6969,7 @@ func rewriteValueMIPS_OpRsh16Ux32(v *Value) bool { func rewriteValueMIPS_OpRsh16Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux64 x (Const64 [c])) // cond: uint32(c) < 16 @@ -7131,9 +7013,7 @@ func rewriteValueMIPS_OpRsh16Ux64(v *Value) bool { func rewriteValueMIPS_OpRsh16Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux8 x y) // cond: @@ -7166,9 +7046,7 @@ func rewriteValueMIPS_OpRsh16Ux8(v *Value) bool { func rewriteValueMIPS_OpRsh16x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x16 x y) // cond: @@ -7200,9 +7078,7 @@ func rewriteValueMIPS_OpRsh16x16(v *Value) bool { func rewriteValueMIPS_OpRsh16x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x32 x y) // cond: @@ -7230,9 +7106,7 @@ func rewriteValueMIPS_OpRsh16x32(v *Value) bool { func rewriteValueMIPS_OpRsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x64 x (Const64 [c])) // cond: uint32(c) < 16 @@ -7281,9 +7155,7 @@ func 
rewriteValueMIPS_OpRsh16x64(v *Value) bool { func rewriteValueMIPS_OpRsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x8 x y) // cond: @@ -7315,9 +7187,7 @@ func rewriteValueMIPS_OpRsh16x8(v *Value) bool { func rewriteValueMIPS_OpRsh32Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux16 x y) // cond: @@ -7348,9 +7218,7 @@ func rewriteValueMIPS_OpRsh32Ux16(v *Value) bool { func rewriteValueMIPS_OpRsh32Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux32 x y) // cond: @@ -7414,9 +7282,7 @@ func rewriteValueMIPS_OpRsh32Ux64(v *Value) bool { func rewriteValueMIPS_OpRsh32Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux8 x y) // cond: @@ -7447,9 +7313,7 @@ func rewriteValueMIPS_OpRsh32Ux8(v *Value) bool { func rewriteValueMIPS_OpRsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x16 x y) // cond: @@ -7479,9 +7343,7 @@ func rewriteValueMIPS_OpRsh32x16(v *Value) bool { func rewriteValueMIPS_OpRsh32x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x32 x y) // cond: @@ -7546,9 +7408,7 @@ func rewriteValueMIPS_OpRsh32x64(v *Value) bool { func rewriteValueMIPS_OpRsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x8 x y) // cond: @@ -7578,9 +7438,7 @@ func rewriteValueMIPS_OpRsh32x8(v *Value) bool { func rewriteValueMIPS_OpRsh8Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux16 x y) // cond: @@ -7613,9 +7471,7 @@ func rewriteValueMIPS_OpRsh8Ux16(v *Value) bool { func rewriteValueMIPS_OpRsh8Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux32 x y) // cond: @@ -7644,9 +7500,7 @@ func rewriteValueMIPS_OpRsh8Ux32(v *Value) bool { func rewriteValueMIPS_OpRsh8Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux64 x (Const64 [c])) // cond: uint32(c) < 8 @@ -7690,9 +7544,7 @@ func rewriteValueMIPS_OpRsh8Ux64(v *Value) bool { func rewriteValueMIPS_OpRsh8Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux8 x y) // cond: @@ -7725,9 +7577,7 @@ func rewriteValueMIPS_OpRsh8Ux8(v *Value) bool { func rewriteValueMIPS_OpRsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x16 x y) // cond: @@ -7759,9 +7609,7 @@ func rewriteValueMIPS_OpRsh8x16(v *Value) bool { func rewriteValueMIPS_OpRsh8x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x32 x y) // cond: @@ -7789,9 +7637,7 @@ func rewriteValueMIPS_OpRsh8x32(v *Value) bool { func rewriteValueMIPS_OpRsh8x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe 
// match: (Rsh8x64 x (Const64 [c])) // cond: uint32(c) < 8 @@ -7840,9 +7686,7 @@ func rewriteValueMIPS_OpRsh8x64(v *Value) bool { func rewriteValueMIPS_OpRsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x8 x y) // cond: @@ -7874,9 +7718,7 @@ func rewriteValueMIPS_OpRsh8x8(v *Value) bool { func rewriteValueMIPS_OpSelect0(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Select0 (Add32carry x y)) // cond: @@ -8096,9 +7938,7 @@ func rewriteValueMIPS_OpSelect0(v *Value) bool { func rewriteValueMIPS_OpSelect1(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Select1 (Add32carry x y)) // cond: @@ -8671,7 +8511,7 @@ func rewriteValueMIPS_OpZero(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Zero [0] _ mem) // cond: @@ -9073,9 +8913,7 @@ func rewriteValueMIPS_OpZeroExt8to32(v *Value) bool { func rewriteValueMIPS_OpZeromask(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Zeromask x) // cond: @@ -9095,7 +8933,7 @@ func rewriteValueMIPS_OpZeromask(v *Value) bool { func rewriteBlockMIPS(b *Block) bool { config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe switch b.Kind { case BlockMIPSEQ: diff --git a/src/cmd/compile/internal/ssa/rewriteMIPS64.go b/src/cmd/compile/internal/ssa/rewriteMIPS64.go index ab1c6ab592..862b1b5d26 100644 --- a/src/cmd/compile/internal/ssa/rewriteMIPS64.go +++ b/src/cmd/compile/internal/ssa/rewriteMIPS64.go @@ -776,9 +776,7 @@ func rewriteValueMIPS64_OpClosureCall(v *Value) bool { func rewriteValueMIPS64_OpCom16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Com16 x) // cond: @@ -796,9 +794,7 @@ func rewriteValueMIPS64_OpCom16(v *Value) bool { func rewriteValueMIPS64_OpCom32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Com32 x) // cond: @@ -816,9 +812,7 @@ func rewriteValueMIPS64_OpCom32(v *Value) bool { func rewriteValueMIPS64_OpCom64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Com64 x) // cond: @@ -836,9 +830,7 @@ func rewriteValueMIPS64_OpCom64(v *Value) bool { func rewriteValueMIPS64_OpCom8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Com8 x) // cond: @@ -1066,9 +1058,7 @@ func rewriteValueMIPS64_OpCvt64to64F(v *Value) bool { func rewriteValueMIPS64_OpDiv16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16 x y) // cond: @@ -1091,9 +1081,7 @@ func rewriteValueMIPS64_OpDiv16(v *Value) bool { func rewriteValueMIPS64_OpDiv16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16u x y) // cond: @@ -1116,9 +1104,7 @@ func rewriteValueMIPS64_OpDiv16u(v *Value) bool { func rewriteValueMIPS64_OpDiv32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32 x y) // cond: @@ -1154,9 +1140,7 @@ func 
rewriteValueMIPS64_OpDiv32F(v *Value) bool { func rewriteValueMIPS64_OpDiv32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32u x y) // cond: @@ -1179,9 +1163,7 @@ func rewriteValueMIPS64_OpDiv32u(v *Value) bool { func rewriteValueMIPS64_OpDiv64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div64 x y) // cond: @@ -1213,9 +1195,7 @@ func rewriteValueMIPS64_OpDiv64F(v *Value) bool { func rewriteValueMIPS64_OpDiv64u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div64u x y) // cond: @@ -1234,9 +1214,7 @@ func rewriteValueMIPS64_OpDiv64u(v *Value) bool { func rewriteValueMIPS64_OpDiv8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8 x y) // cond: @@ -1259,9 +1237,7 @@ func rewriteValueMIPS64_OpDiv8(v *Value) bool { func rewriteValueMIPS64_OpDiv8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8u x y) // cond: @@ -1284,9 +1260,7 @@ func rewriteValueMIPS64_OpDiv8u(v *Value) bool { func rewriteValueMIPS64_OpEq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq16 x y) // cond: @@ -1312,9 +1286,7 @@ func rewriteValueMIPS64_OpEq16(v *Value) bool { func rewriteValueMIPS64_OpEq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq32 x y) // cond: @@ -1357,9 +1329,7 @@ func rewriteValueMIPS64_OpEq32F(v *Value) bool { func rewriteValueMIPS64_OpEq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq64 x y) // cond: @@ -1398,9 +1368,7 @@ func rewriteValueMIPS64_OpEq64F(v *Value) bool { func rewriteValueMIPS64_OpEq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq8 x y) // cond: @@ -1426,9 +1394,7 @@ func rewriteValueMIPS64_OpEq8(v *Value) bool { func rewriteValueMIPS64_OpEqB(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqB x y) // cond: @@ -1450,9 +1416,7 @@ func rewriteValueMIPS64_OpEqB(v *Value) bool { func rewriteValueMIPS64_OpEqPtr(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqPtr x y) // cond: @@ -1474,9 +1438,7 @@ func rewriteValueMIPS64_OpEqPtr(v *Value) bool { func rewriteValueMIPS64_OpGeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16 x y) // cond: @@ -1502,9 +1464,7 @@ func rewriteValueMIPS64_OpGeq16(v *Value) bool { func rewriteValueMIPS64_OpGeq16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16U x y) // cond: @@ -1530,9 +1490,7 @@ func rewriteValueMIPS64_OpGeq16U(v *Value) bool { func rewriteValueMIPS64_OpGeq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq32 x y) // cond: @@ -1575,9 +1533,7 @@ func rewriteValueMIPS64_OpGeq32F(v *Value) 
bool { func rewriteValueMIPS64_OpGeq32U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq32U x y) // cond: @@ -1603,9 +1559,7 @@ func rewriteValueMIPS64_OpGeq32U(v *Value) bool { func rewriteValueMIPS64_OpGeq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq64 x y) // cond: @@ -1644,9 +1598,7 @@ func rewriteValueMIPS64_OpGeq64F(v *Value) bool { func rewriteValueMIPS64_OpGeq64U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq64U x y) // cond: @@ -1668,9 +1620,7 @@ func rewriteValueMIPS64_OpGeq64U(v *Value) bool { func rewriteValueMIPS64_OpGeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8 x y) // cond: @@ -1696,9 +1646,7 @@ func rewriteValueMIPS64_OpGeq8(v *Value) bool { func rewriteValueMIPS64_OpGeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8U x y) // cond: @@ -1733,9 +1681,7 @@ func rewriteValueMIPS64_OpGetClosurePtr(v *Value) bool { func rewriteValueMIPS64_OpGreater16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater16 x y) // cond: @@ -1756,9 +1702,7 @@ func rewriteValueMIPS64_OpGreater16(v *Value) bool { func rewriteValueMIPS64_OpGreater16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater16U x y) // cond: @@ -1779,9 +1723,7 @@ func rewriteValueMIPS64_OpGreater16U(v *Value) bool { func rewriteValueMIPS64_OpGreater32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater32 x y) // cond: @@ -1819,9 +1761,7 @@ func rewriteValueMIPS64_OpGreater32F(v *Value) bool { func rewriteValueMIPS64_OpGreater32U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater32U x y) // cond: @@ -1885,9 +1825,7 @@ func rewriteValueMIPS64_OpGreater64U(v *Value) bool { func rewriteValueMIPS64_OpGreater8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8 x y) // cond: @@ -1908,9 +1846,7 @@ func rewriteValueMIPS64_OpGreater8(v *Value) bool { func rewriteValueMIPS64_OpGreater8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8U x y) // cond: @@ -1931,9 +1867,7 @@ func rewriteValueMIPS64_OpGreater8U(v *Value) bool { func rewriteValueMIPS64_OpHmul32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Hmul32 x y) // cond: @@ -1959,9 +1893,7 @@ func rewriteValueMIPS64_OpHmul32(v *Value) bool { func rewriteValueMIPS64_OpHmul32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Hmul32u x y) // cond: @@ -1987,9 +1919,7 @@ func rewriteValueMIPS64_OpHmul32u(v *Value) bool { func rewriteValueMIPS64_OpHmul64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Hmul64 x y) 
// cond: @@ -2008,9 +1938,7 @@ func rewriteValueMIPS64_OpHmul64(v *Value) bool { func rewriteValueMIPS64_OpHmul64u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Hmul64u x y) // cond: @@ -2057,9 +1985,7 @@ func rewriteValueMIPS64_OpIsInBounds(v *Value) bool { func rewriteValueMIPS64_OpIsNonNil(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (IsNonNil ptr) // cond: @@ -2077,9 +2003,7 @@ func rewriteValueMIPS64_OpIsNonNil(v *Value) bool { func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (IsSliceInBounds idx len) // cond: @@ -2101,9 +2025,7 @@ func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool { func rewriteValueMIPS64_OpLeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16 x y) // cond: @@ -2129,9 +2051,7 @@ func rewriteValueMIPS64_OpLeq16(v *Value) bool { func rewriteValueMIPS64_OpLeq16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16U x y) // cond: @@ -2157,9 +2077,7 @@ func rewriteValueMIPS64_OpLeq16U(v *Value) bool { func rewriteValueMIPS64_OpLeq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq32 x y) // cond: @@ -2202,9 +2120,7 @@ func rewriteValueMIPS64_OpLeq32F(v *Value) bool { func rewriteValueMIPS64_OpLeq32U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq32U x y) // cond: @@ -2230,9 +2146,7 @@ func rewriteValueMIPS64_OpLeq32U(v *Value) bool { func rewriteValueMIPS64_OpLeq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq64 x y) // cond: @@ -2271,9 +2185,7 @@ func rewriteValueMIPS64_OpLeq64F(v *Value) bool { func rewriteValueMIPS64_OpLeq64U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq64U x y) // cond: @@ -2295,9 +2207,7 @@ func rewriteValueMIPS64_OpLeq64U(v *Value) bool { func rewriteValueMIPS64_OpLeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq8 x y) // cond: @@ -2323,9 +2233,7 @@ func rewriteValueMIPS64_OpLeq8(v *Value) bool { func rewriteValueMIPS64_OpLeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq8U x y) // cond: @@ -2351,9 +2259,7 @@ func rewriteValueMIPS64_OpLeq8U(v *Value) bool { func rewriteValueMIPS64_OpLess16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16 x y) // cond: @@ -2374,9 +2280,7 @@ func rewriteValueMIPS64_OpLess16(v *Value) bool { func rewriteValueMIPS64_OpLess16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16U x y) // cond: @@ -2397,9 +2301,7 @@ func rewriteValueMIPS64_OpLess16U(v *Value) bool { func rewriteValueMIPS64_OpLess32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + 
fe := b.Func.fe _ = fe // match: (Less32 x y) // cond: @@ -2437,9 +2339,7 @@ func rewriteValueMIPS64_OpLess32F(v *Value) bool { func rewriteValueMIPS64_OpLess32U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less32U x y) // cond: @@ -2503,9 +2403,7 @@ func rewriteValueMIPS64_OpLess64U(v *Value) bool { func rewriteValueMIPS64_OpLess8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8 x y) // cond: @@ -2526,9 +2424,7 @@ func rewriteValueMIPS64_OpLess8(v *Value) bool { func rewriteValueMIPS64_OpLess8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8U x y) // cond: @@ -2702,9 +2598,7 @@ func rewriteValueMIPS64_OpLoad(v *Value) bool { func rewriteValueMIPS64_OpLsh16x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x16 x y) // cond: @@ -2736,9 +2630,7 @@ func rewriteValueMIPS64_OpLsh16x16(v *Value) bool { func rewriteValueMIPS64_OpLsh16x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x32 x y) // cond: @@ -2770,9 +2662,7 @@ func rewriteValueMIPS64_OpLsh16x32(v *Value) bool { func rewriteValueMIPS64_OpLsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x64 x y) // cond: @@ -2800,9 +2690,7 @@ func rewriteValueMIPS64_OpLsh16x64(v *Value) bool { func rewriteValueMIPS64_OpLsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x8 x y) // cond: @@ -2834,9 +2722,7 @@ func rewriteValueMIPS64_OpLsh16x8(v *Value) bool { func rewriteValueMIPS64_OpLsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x16 x y) // cond: @@ -2868,9 +2754,7 @@ func rewriteValueMIPS64_OpLsh32x16(v *Value) bool { func rewriteValueMIPS64_OpLsh32x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x32 x y) // cond: @@ -2902,9 +2786,7 @@ func rewriteValueMIPS64_OpLsh32x32(v *Value) bool { func rewriteValueMIPS64_OpLsh32x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x64 x y) // cond: @@ -2932,9 +2814,7 @@ func rewriteValueMIPS64_OpLsh32x64(v *Value) bool { func rewriteValueMIPS64_OpLsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x8 x y) // cond: @@ -2966,9 +2846,7 @@ func rewriteValueMIPS64_OpLsh32x8(v *Value) bool { func rewriteValueMIPS64_OpLsh64x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x16 x y) // cond: @@ -3000,9 +2878,7 @@ func rewriteValueMIPS64_OpLsh64x16(v *Value) bool { func rewriteValueMIPS64_OpLsh64x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x32 x y) // cond: @@ -3034,9 +2910,7 @@ func rewriteValueMIPS64_OpLsh64x32(v *Value) bool { func rewriteValueMIPS64_OpLsh64x64(v *Value) bool { b := 
v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x64 x y) // cond: @@ -3064,9 +2938,7 @@ func rewriteValueMIPS64_OpLsh64x64(v *Value) bool { func rewriteValueMIPS64_OpLsh64x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x8 x y) // cond: @@ -3098,9 +2970,7 @@ func rewriteValueMIPS64_OpLsh64x8(v *Value) bool { func rewriteValueMIPS64_OpLsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x16 x y) // cond: @@ -3132,9 +3002,7 @@ func rewriteValueMIPS64_OpLsh8x16(v *Value) bool { func rewriteValueMIPS64_OpLsh8x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x32 x y) // cond: @@ -3166,9 +3034,7 @@ func rewriteValueMIPS64_OpLsh8x32(v *Value) bool { func rewriteValueMIPS64_OpLsh8x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x64 x y) // cond: @@ -3196,9 +3062,7 @@ func rewriteValueMIPS64_OpLsh8x64(v *Value) bool { func rewriteValueMIPS64_OpLsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x8 x y) // cond: @@ -6221,9 +6085,7 @@ func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool { func rewriteValueMIPS64_OpMod16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16 x y) // cond: @@ -6246,9 +6108,7 @@ func rewriteValueMIPS64_OpMod16(v *Value) bool { func rewriteValueMIPS64_OpMod16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16u x y) // cond: @@ -6271,9 +6131,7 @@ func rewriteValueMIPS64_OpMod16u(v *Value) bool { func rewriteValueMIPS64_OpMod32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32 x y) // cond: @@ -6296,9 +6154,7 @@ func rewriteValueMIPS64_OpMod32(v *Value) bool { func rewriteValueMIPS64_OpMod32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32u x y) // cond: @@ -6321,9 +6177,7 @@ func rewriteValueMIPS64_OpMod32u(v *Value) bool { func rewriteValueMIPS64_OpMod64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod64 x y) // cond: @@ -6342,9 +6196,7 @@ func rewriteValueMIPS64_OpMod64(v *Value) bool { func rewriteValueMIPS64_OpMod64u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod64u x y) // cond: @@ -6363,9 +6215,7 @@ func rewriteValueMIPS64_OpMod64u(v *Value) bool { func rewriteValueMIPS64_OpMod8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8 x y) // cond: @@ -6388,9 +6238,7 @@ func rewriteValueMIPS64_OpMod8(v *Value) bool { func rewriteValueMIPS64_OpMod8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8u x y) // cond: @@ -6415,7 +6263,7 @@ func rewriteValueMIPS64_OpMove(v *Value) bool { _ = b config := b.Func.Config _ = 
config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Move [0] _ _ mem) // cond: @@ -6926,9 +6774,7 @@ func rewriteValueMIPS64_OpMove(v *Value) bool { func rewriteValueMIPS64_OpMul16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mul16 x y) // cond: @@ -6947,9 +6793,7 @@ func rewriteValueMIPS64_OpMul16(v *Value) bool { func rewriteValueMIPS64_OpMul32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mul32 x y) // cond: @@ -6981,9 +6825,7 @@ func rewriteValueMIPS64_OpMul32F(v *Value) bool { func rewriteValueMIPS64_OpMul64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mul64 x y) // cond: @@ -7015,9 +6857,7 @@ func rewriteValueMIPS64_OpMul64F(v *Value) bool { func rewriteValueMIPS64_OpMul8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mul8 x y) // cond: @@ -7102,9 +6942,7 @@ func rewriteValueMIPS64_OpNeg8(v *Value) bool { func rewriteValueMIPS64_OpNeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq16 x y) // cond: @@ -7130,9 +6968,7 @@ func rewriteValueMIPS64_OpNeq16(v *Value) bool { func rewriteValueMIPS64_OpNeq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq32 x y) // cond: @@ -7175,9 +7011,7 @@ func rewriteValueMIPS64_OpNeq32F(v *Value) bool { func rewriteValueMIPS64_OpNeq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq64 x y) // cond: @@ -7216,9 +7050,7 @@ func rewriteValueMIPS64_OpNeq64F(v *Value) bool { func rewriteValueMIPS64_OpNeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq8 x y) // cond: @@ -7257,9 +7089,7 @@ func rewriteValueMIPS64_OpNeqB(v *Value) bool { func rewriteValueMIPS64_OpNeqPtr(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (NeqPtr x y) // cond: @@ -7422,9 +7252,7 @@ func rewriteValueMIPS64_OpRound64F(v *Value) bool { func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux16 x y) // cond: @@ -7458,9 +7286,7 @@ func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool { func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux32 x y) // cond: @@ -7494,9 +7320,7 @@ func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool { func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux64 x y) // cond: @@ -7526,9 +7350,7 @@ func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool { func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux8 x y) // cond: @@ -7562,9 +7384,7 @@ func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool { func rewriteValueMIPS64_OpRsh16x16(v *Value) bool { b := v.Block _ = b - 
config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x16 x y) // cond: @@ -7598,9 +7418,7 @@ func rewriteValueMIPS64_OpRsh16x16(v *Value) bool { func rewriteValueMIPS64_OpRsh16x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x32 x y) // cond: @@ -7634,9 +7452,7 @@ func rewriteValueMIPS64_OpRsh16x32(v *Value) bool { func rewriteValueMIPS64_OpRsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x64 x y) // cond: @@ -7666,9 +7482,7 @@ func rewriteValueMIPS64_OpRsh16x64(v *Value) bool { func rewriteValueMIPS64_OpRsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x8 x y) // cond: @@ -7702,9 +7516,7 @@ func rewriteValueMIPS64_OpRsh16x8(v *Value) bool { func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux16 x y) // cond: @@ -7738,9 +7550,7 @@ func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool { func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux32 x y) // cond: @@ -7774,9 +7584,7 @@ func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool { func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux64 x y) // cond: @@ -7806,9 +7614,7 @@ func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool { func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux8 x y) // cond: @@ -7842,9 +7648,7 @@ func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool { func rewriteValueMIPS64_OpRsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x16 x y) // cond: @@ -7878,9 +7682,7 @@ func rewriteValueMIPS64_OpRsh32x16(v *Value) bool { func rewriteValueMIPS64_OpRsh32x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x32 x y) // cond: @@ -7914,9 +7716,7 @@ func rewriteValueMIPS64_OpRsh32x32(v *Value) bool { func rewriteValueMIPS64_OpRsh32x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x64 x y) // cond: @@ -7946,9 +7746,7 @@ func rewriteValueMIPS64_OpRsh32x64(v *Value) bool { func rewriteValueMIPS64_OpRsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x8 x y) // cond: @@ -7982,9 +7780,7 @@ func rewriteValueMIPS64_OpRsh32x8(v *Value) bool { func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux16 x y) // cond: @@ -8016,9 +7812,7 @@ func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool { func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux32 x y) // cond: @@ -8050,9 +7844,7 @@ func 
rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool { func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux64 x y) // cond: @@ -8080,9 +7872,7 @@ func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool { func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux8 x y) // cond: @@ -8114,9 +7904,7 @@ func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool { func rewriteValueMIPS64_OpRsh64x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x16 x y) // cond: @@ -8148,9 +7936,7 @@ func rewriteValueMIPS64_OpRsh64x16(v *Value) bool { func rewriteValueMIPS64_OpRsh64x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x32 x y) // cond: @@ -8182,9 +7968,7 @@ func rewriteValueMIPS64_OpRsh64x32(v *Value) bool { func rewriteValueMIPS64_OpRsh64x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x64 x y) // cond: @@ -8212,9 +7996,7 @@ func rewriteValueMIPS64_OpRsh64x64(v *Value) bool { func rewriteValueMIPS64_OpRsh64x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x8 x y) // cond: @@ -8246,9 +8028,7 @@ func rewriteValueMIPS64_OpRsh64x8(v *Value) bool { func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux16 x y) // cond: @@ -8282,9 +8062,7 @@ func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool { func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux32 x y) // cond: @@ -8318,9 +8096,7 @@ func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool { func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux64 x y) // cond: @@ -8350,9 +8126,7 @@ func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool { func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux8 x y) // cond: @@ -8386,9 +8160,7 @@ func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool { func rewriteValueMIPS64_OpRsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x16 x y) // cond: @@ -8422,9 +8194,7 @@ func rewriteValueMIPS64_OpRsh8x16(v *Value) bool { func rewriteValueMIPS64_OpRsh8x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x32 x y) // cond: @@ -8458,9 +8228,7 @@ func rewriteValueMIPS64_OpRsh8x32(v *Value) bool { func rewriteValueMIPS64_OpRsh8x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x64 x y) // cond: @@ -8490,9 +8258,7 @@ func rewriteValueMIPS64_OpRsh8x64(v *Value) bool { func rewriteValueMIPS64_OpRsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe 
+ fe := b.Func.fe _ = fe // match: (Rsh8x8 x y) // cond: @@ -9310,7 +9076,7 @@ func rewriteValueMIPS64_OpZero(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Zero [0] _ mem) // cond: @@ -9848,7 +9614,7 @@ func rewriteValueMIPS64_OpZeroExt8to64(v *Value) bool { func rewriteBlockMIPS64(b *Block) bool { config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe switch b.Kind { case BlockMIPS64EQ: diff --git a/src/cmd/compile/internal/ssa/rewritePPC64.go b/src/cmd/compile/internal/ssa/rewritePPC64.go index dbe7553a61..d1123e9a94 100644 --- a/src/cmd/compile/internal/ssa/rewritePPC64.go +++ b/src/cmd/compile/internal/ssa/rewritePPC64.go @@ -1143,9 +1143,7 @@ func rewriteValuePPC64_OpConvert(v *Value) bool { func rewriteValuePPC64_OpCvt32Fto32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Cvt32Fto32 x) // cond: @@ -1162,9 +1160,7 @@ func rewriteValuePPC64_OpCvt32Fto32(v *Value) bool { func rewriteValuePPC64_OpCvt32Fto64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Cvt32Fto64 x) // cond: @@ -1193,9 +1189,7 @@ func rewriteValuePPC64_OpCvt32Fto64F(v *Value) bool { func rewriteValuePPC64_OpCvt32to32F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Cvt32to32F x) // cond: @@ -1216,9 +1210,7 @@ func rewriteValuePPC64_OpCvt32to32F(v *Value) bool { func rewriteValuePPC64_OpCvt32to64F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Cvt32to64F x) // cond: @@ -1237,9 +1229,7 @@ func rewriteValuePPC64_OpCvt32to64F(v *Value) bool { func rewriteValuePPC64_OpCvt64Fto32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Cvt64Fto32 x) // cond: @@ -1267,9 +1257,7 @@ func rewriteValuePPC64_OpCvt64Fto32F(v *Value) bool { func rewriteValuePPC64_OpCvt64Fto64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Cvt64Fto64 x) // cond: @@ -1286,9 +1274,7 @@ func rewriteValuePPC64_OpCvt64Fto64(v *Value) bool { func rewriteValuePPC64_OpCvt64to32F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Cvt64to32F x) // cond: @@ -1307,9 +1293,7 @@ func rewriteValuePPC64_OpCvt64to32F(v *Value) bool { func rewriteValuePPC64_OpCvt64to64F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Cvt64to64F x) // cond: @@ -1326,9 +1310,7 @@ func rewriteValuePPC64_OpCvt64to64F(v *Value) bool { func rewriteValuePPC64_OpDiv16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16 x y) // cond: @@ -1349,9 +1331,7 @@ func rewriteValuePPC64_OpDiv16(v *Value) bool { func rewriteValuePPC64_OpDiv16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16u x y) // cond: @@ -1450,9 +1430,7 @@ func rewriteValuePPC64_OpDiv64u(v *Value) bool { func rewriteValuePPC64_OpDiv8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // 
match: (Div8 x y) // cond: @@ -1473,9 +1451,7 @@ func rewriteValuePPC64_OpDiv8(v *Value) bool { func rewriteValuePPC64_OpDiv8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8u x y) // cond: @@ -1496,9 +1472,7 @@ func rewriteValuePPC64_OpDiv8u(v *Value) bool { func rewriteValuePPC64_OpEq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq16 x y) // cond: isSigned(x.Type) && isSigned(y.Type) @@ -1609,9 +1583,7 @@ func rewriteValuePPC64_OpEq64F(v *Value) bool { func rewriteValuePPC64_OpEq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq8 x y) // cond: isSigned(x.Type) && isSigned(y.Type) @@ -1654,9 +1626,7 @@ func rewriteValuePPC64_OpEq8(v *Value) bool { func rewriteValuePPC64_OpEqB(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqB x y) // cond: @@ -1693,9 +1663,7 @@ func rewriteValuePPC64_OpEqPtr(v *Value) bool { func rewriteValuePPC64_OpGeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16 x y) // cond: @@ -1718,9 +1686,7 @@ func rewriteValuePPC64_OpGeq16(v *Value) bool { func rewriteValuePPC64_OpGeq16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16U x y) // cond: @@ -1845,9 +1811,7 @@ func rewriteValuePPC64_OpGeq64U(v *Value) bool { func rewriteValuePPC64_OpGeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8 x y) // cond: @@ -1870,9 +1834,7 @@ func rewriteValuePPC64_OpGeq8(v *Value) bool { func rewriteValuePPC64_OpGeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8U x y) // cond: @@ -1904,9 +1866,7 @@ func rewriteValuePPC64_OpGetClosurePtr(v *Value) bool { func rewriteValuePPC64_OpGreater16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater16 x y) // cond: @@ -1929,9 +1889,7 @@ func rewriteValuePPC64_OpGreater16(v *Value) bool { func rewriteValuePPC64_OpGreater16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater16U x y) // cond: @@ -2056,9 +2014,7 @@ func rewriteValuePPC64_OpGreater64U(v *Value) bool { func rewriteValuePPC64_OpGreater8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8 x y) // cond: @@ -2081,9 +2037,7 @@ func rewriteValuePPC64_OpGreater8(v *Value) bool { func rewriteValuePPC64_OpGreater8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8U x y) // cond: @@ -2223,9 +2177,7 @@ func rewriteValuePPC64_OpIsSliceInBounds(v *Value) bool { func rewriteValuePPC64_OpLeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16 x y) // cond: @@ -2248,9 +2200,7 @@ func rewriteValuePPC64_OpLeq16(v *Value) bool { func rewriteValuePPC64_OpLeq16U(v *Value) bool { b := v.Block _ = b - config := 
b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16U x y) // cond: @@ -2375,9 +2325,7 @@ func rewriteValuePPC64_OpLeq64U(v *Value) bool { func rewriteValuePPC64_OpLeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq8 x y) // cond: @@ -2400,9 +2348,7 @@ func rewriteValuePPC64_OpLeq8(v *Value) bool { func rewriteValuePPC64_OpLeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq8U x y) // cond: @@ -2425,9 +2371,7 @@ func rewriteValuePPC64_OpLeq8U(v *Value) bool { func rewriteValuePPC64_OpLess16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16 x y) // cond: @@ -2450,9 +2394,7 @@ func rewriteValuePPC64_OpLess16(v *Value) bool { func rewriteValuePPC64_OpLess16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16U x y) // cond: @@ -2577,9 +2519,7 @@ func rewriteValuePPC64_OpLess64U(v *Value) bool { func rewriteValuePPC64_OpLess8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8 x y) // cond: @@ -2602,9 +2542,7 @@ func rewriteValuePPC64_OpLess8(v *Value) bool { func rewriteValuePPC64_OpLess8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8U x y) // cond: @@ -2627,9 +2565,7 @@ func rewriteValuePPC64_OpLess8U(v *Value) bool { func rewriteValuePPC64_OpLoad(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Load ptr mem) // cond: (is64BitInt(t) || isPtr(t)) @@ -2788,9 +2724,7 @@ func rewriteValuePPC64_OpLoad(v *Value) bool { func rewriteValuePPC64_OpLsh16x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x16 x y) // cond: @@ -2817,9 +2751,7 @@ func rewriteValuePPC64_OpLsh16x16(v *Value) bool { func rewriteValuePPC64_OpLsh16x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x32 x (Const64 [c])) // cond: uint32(c) < 16 @@ -2882,9 +2814,7 @@ func rewriteValuePPC64_OpLsh16x32(v *Value) bool { func rewriteValuePPC64_OpLsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x64 x (Const64 [c])) // cond: uint64(c) < 16 @@ -2961,9 +2891,7 @@ func rewriteValuePPC64_OpLsh16x64(v *Value) bool { func rewriteValuePPC64_OpLsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x8 x y) // cond: @@ -2990,9 +2918,7 @@ func rewriteValuePPC64_OpLsh16x8(v *Value) bool { func rewriteValuePPC64_OpLsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x16 x y) // cond: @@ -3019,9 +2945,7 @@ func rewriteValuePPC64_OpLsh32x16(v *Value) bool { func rewriteValuePPC64_OpLsh32x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x32 x (Const64 [c])) // cond: uint32(c) < 32 @@ -3084,9 +3008,7 @@ func 
rewriteValuePPC64_OpLsh32x32(v *Value) bool { func rewriteValuePPC64_OpLsh32x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x64 x (Const64 [c])) // cond: uint64(c) < 32 @@ -3163,9 +3085,7 @@ func rewriteValuePPC64_OpLsh32x64(v *Value) bool { func rewriteValuePPC64_OpLsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x8 x y) // cond: @@ -3192,9 +3112,7 @@ func rewriteValuePPC64_OpLsh32x8(v *Value) bool { func rewriteValuePPC64_OpLsh64x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x16 x y) // cond: @@ -3221,9 +3139,7 @@ func rewriteValuePPC64_OpLsh64x16(v *Value) bool { func rewriteValuePPC64_OpLsh64x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x32 x (Const64 [c])) // cond: uint32(c) < 64 @@ -3286,9 +3202,7 @@ func rewriteValuePPC64_OpLsh64x32(v *Value) bool { func rewriteValuePPC64_OpLsh64x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x64 x (Const64 [c])) // cond: uint64(c) < 64 @@ -3365,9 +3279,7 @@ func rewriteValuePPC64_OpLsh64x64(v *Value) bool { func rewriteValuePPC64_OpLsh64x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x8 x y) // cond: @@ -3394,9 +3306,7 @@ func rewriteValuePPC64_OpLsh64x8(v *Value) bool { func rewriteValuePPC64_OpLsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x16 x y) // cond: @@ -3423,9 +3333,7 @@ func rewriteValuePPC64_OpLsh8x16(v *Value) bool { func rewriteValuePPC64_OpLsh8x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x32 x (Const64 [c])) // cond: uint32(c) < 8 @@ -3488,9 +3396,7 @@ func rewriteValuePPC64_OpLsh8x32(v *Value) bool { func rewriteValuePPC64_OpLsh8x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x64 x (Const64 [c])) // cond: uint64(c) < 8 @@ -3567,9 +3473,7 @@ func rewriteValuePPC64_OpLsh8x64(v *Value) bool { func rewriteValuePPC64_OpLsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x8 x y) // cond: @@ -3596,9 +3500,7 @@ func rewriteValuePPC64_OpLsh8x8(v *Value) bool { func rewriteValuePPC64_OpMod16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16 x y) // cond: @@ -3619,9 +3521,7 @@ func rewriteValuePPC64_OpMod16(v *Value) bool { func rewriteValuePPC64_OpMod16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16u x y) // cond: @@ -3642,9 +3542,7 @@ func rewriteValuePPC64_OpMod16u(v *Value) bool { func rewriteValuePPC64_OpMod32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32 x y) // cond: @@ -3667,9 +3565,7 @@ func rewriteValuePPC64_OpMod32(v *Value) bool { func rewriteValuePPC64_OpMod32u(v *Value) bool { b := 
v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32u x y) // cond: @@ -3692,9 +3588,7 @@ func rewriteValuePPC64_OpMod32u(v *Value) bool { func rewriteValuePPC64_OpMod64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod64 x y) // cond: @@ -3717,9 +3611,7 @@ func rewriteValuePPC64_OpMod64(v *Value) bool { func rewriteValuePPC64_OpMod64u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod64u x y) // cond: @@ -3742,9 +3634,7 @@ func rewriteValuePPC64_OpMod64u(v *Value) bool { func rewriteValuePPC64_OpMod8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8 x y) // cond: @@ -3765,9 +3655,7 @@ func rewriteValuePPC64_OpMod8(v *Value) bool { func rewriteValuePPC64_OpMod8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8u x y) // cond: @@ -3790,7 +3678,7 @@ func rewriteValuePPC64_OpMove(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Move [0] _ _ mem) // cond: @@ -4290,9 +4178,7 @@ func rewriteValuePPC64_OpNeg8(v *Value) bool { func rewriteValuePPC64_OpNeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq16 x y) // cond: isSigned(x.Type) && isSigned(y.Type) @@ -4403,9 +4289,7 @@ func rewriteValuePPC64_OpNeq64F(v *Value) bool { func rewriteValuePPC64_OpNeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq8 x y) // cond: isSigned(x.Type) && isSigned(y.Type) @@ -4503,9 +4387,7 @@ func rewriteValuePPC64_OpNot(v *Value) bool { func rewriteValuePPC64_OpOffPtr(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (OffPtr [off] ptr) // cond: @@ -7586,9 +7468,7 @@ func rewriteValuePPC64_OpRound64F(v *Value) bool { func rewriteValuePPC64_OpRsh16Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux16 x y) // cond: @@ -7617,9 +7497,7 @@ func rewriteValuePPC64_OpRsh16Ux16(v *Value) bool { func rewriteValuePPC64_OpRsh16Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux32 x (Const64 [c])) // cond: uint32(c) < 16 @@ -7688,9 +7566,7 @@ func rewriteValuePPC64_OpRsh16Ux32(v *Value) bool { func rewriteValuePPC64_OpRsh16Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux64 x (Const64 [c])) // cond: uint64(c) < 16 @@ -7773,9 +7649,7 @@ func rewriteValuePPC64_OpRsh16Ux64(v *Value) bool { func rewriteValuePPC64_OpRsh16Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux8 x y) // cond: @@ -7804,9 +7678,7 @@ func rewriteValuePPC64_OpRsh16Ux8(v *Value) bool { func rewriteValuePPC64_OpRsh16x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x16 x y) // cond: @@ -7835,9 +7707,7 @@ func 
rewriteValuePPC64_OpRsh16x16(v *Value) bool { func rewriteValuePPC64_OpRsh16x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x32 x (Const64 [c])) // cond: uint32(c) < 16 @@ -7906,9 +7776,7 @@ func rewriteValuePPC64_OpRsh16x32(v *Value) bool { func rewriteValuePPC64_OpRsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x64 x (Const64 [c])) // cond: uint64(c) < 16 @@ -7995,9 +7863,7 @@ func rewriteValuePPC64_OpRsh16x64(v *Value) bool { func rewriteValuePPC64_OpRsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x8 x y) // cond: @@ -8026,9 +7892,7 @@ func rewriteValuePPC64_OpRsh16x8(v *Value) bool { func rewriteValuePPC64_OpRsh32Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux16 x y) // cond: @@ -8055,9 +7919,7 @@ func rewriteValuePPC64_OpRsh32Ux16(v *Value) bool { func rewriteValuePPC64_OpRsh32Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux32 x (Const64 [c])) // cond: uint32(c) < 32 @@ -8120,9 +7982,7 @@ func rewriteValuePPC64_OpRsh32Ux32(v *Value) bool { func rewriteValuePPC64_OpRsh32Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux64 x (Const64 [c])) // cond: uint64(c) < 32 @@ -8199,9 +8059,7 @@ func rewriteValuePPC64_OpRsh32Ux64(v *Value) bool { func rewriteValuePPC64_OpRsh32Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux8 x y) // cond: @@ -8228,9 +8086,7 @@ func rewriteValuePPC64_OpRsh32Ux8(v *Value) bool { func rewriteValuePPC64_OpRsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x16 x y) // cond: @@ -8257,9 +8113,7 @@ func rewriteValuePPC64_OpRsh32x16(v *Value) bool { func rewriteValuePPC64_OpRsh32x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x32 x (Const64 [c])) // cond: uint32(c) < 32 @@ -8322,9 +8176,7 @@ func rewriteValuePPC64_OpRsh32x32(v *Value) bool { func rewriteValuePPC64_OpRsh32x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x64 x (Const64 [c])) // cond: uint64(c) < 32 @@ -8403,9 +8255,7 @@ func rewriteValuePPC64_OpRsh32x64(v *Value) bool { func rewriteValuePPC64_OpRsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x8 x y) // cond: @@ -8432,9 +8282,7 @@ func rewriteValuePPC64_OpRsh32x8(v *Value) bool { func rewriteValuePPC64_OpRsh64Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux16 x y) // cond: @@ -8461,9 +8309,7 @@ func rewriteValuePPC64_OpRsh64Ux16(v *Value) bool { func rewriteValuePPC64_OpRsh64Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux32 x (Const64 [c])) // cond: uint32(c) < 64 @@ -8526,9 
+8372,7 @@ func rewriteValuePPC64_OpRsh64Ux32(v *Value) bool { func rewriteValuePPC64_OpRsh64Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux64 x (Const64 [c])) // cond: uint64(c) < 64 @@ -8605,9 +8449,7 @@ func rewriteValuePPC64_OpRsh64Ux64(v *Value) bool { func rewriteValuePPC64_OpRsh64Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux8 x y) // cond: @@ -8634,9 +8476,7 @@ func rewriteValuePPC64_OpRsh64Ux8(v *Value) bool { func rewriteValuePPC64_OpRsh64x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x16 x y) // cond: @@ -8663,9 +8503,7 @@ func rewriteValuePPC64_OpRsh64x16(v *Value) bool { func rewriteValuePPC64_OpRsh64x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x32 x (Const64 [c])) // cond: uint32(c) < 64 @@ -8728,9 +8566,7 @@ func rewriteValuePPC64_OpRsh64x32(v *Value) bool { func rewriteValuePPC64_OpRsh64x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x64 x (Const64 [c])) // cond: uint64(c) < 64 @@ -8809,9 +8645,7 @@ func rewriteValuePPC64_OpRsh64x64(v *Value) bool { func rewriteValuePPC64_OpRsh64x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x8 x y) // cond: @@ -8838,9 +8672,7 @@ func rewriteValuePPC64_OpRsh64x8(v *Value) bool { func rewriteValuePPC64_OpRsh8Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux16 x y) // cond: @@ -8869,9 +8701,7 @@ func rewriteValuePPC64_OpRsh8Ux16(v *Value) bool { func rewriteValuePPC64_OpRsh8Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux32 x (Const64 [c])) // cond: uint32(c) < 8 @@ -8940,9 +8770,7 @@ func rewriteValuePPC64_OpRsh8Ux32(v *Value) bool { func rewriteValuePPC64_OpRsh8Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux64 x (Const64 [c])) // cond: uint64(c) < 8 @@ -9025,9 +8853,7 @@ func rewriteValuePPC64_OpRsh8Ux64(v *Value) bool { func rewriteValuePPC64_OpRsh8Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux8 x y) // cond: @@ -9056,9 +8882,7 @@ func rewriteValuePPC64_OpRsh8Ux8(v *Value) bool { func rewriteValuePPC64_OpRsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x16 x y) // cond: @@ -9087,9 +8911,7 @@ func rewriteValuePPC64_OpRsh8x16(v *Value) bool { func rewriteValuePPC64_OpRsh8x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x32 x (Const64 [c])) // cond: uint32(c) < 8 @@ -9158,9 +8980,7 @@ func rewriteValuePPC64_OpRsh8x32(v *Value) bool { func rewriteValuePPC64_OpRsh8x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x64 x (Const64 [c])) // cond: uint64(c) < 8 @@ -9247,9 +9067,7 @@ 
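All of the hunks in these generated files are the same mechanical substitution, so the two preamble shapes that rulegen now emits are worth spelling out once. A sketch with placeholder op names (OpExampleA and OpExampleB are not real rules; the generated match/cond/result bodies are elided):

// Most rewrite functions only need frontend services such as types.
func rewriteValuePPC64_OpExampleA(v *Value) bool {
	b := v.Block
	_ = b
	fe := b.Func.fe // was: config := b.Func.Config; fe := config.fe
	_ = fe
	// generated rule bodies use fe exactly as before, e.g. fe.TypeUInt64()
	return false
}

// Functions whose rules still read Config fields (config.PtrSize,
// config.BigEndian, and similar in the generic and dec64 files) keep
// the Config alongside the frontend.
func rewriteValuePPC64_OpExampleB(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	fe := b.Func.fe
	_ = fe
	// generated rule bodies may consult both config and fe
	return false
}

The block-rewrite entry points (rewriteBlockPPC64, rewriteBlockS390X, and so on) always use the second form.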
func rewriteValuePPC64_OpRsh8x64(v *Value) bool { func rewriteValuePPC64_OpRsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x8 x y) // cond: @@ -10121,7 +9939,7 @@ func rewriteValuePPC64_OpZeroExt8to64(v *Value) bool { func rewriteBlockPPC64(b *Block) bool { config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe switch b.Kind { case BlockPPC64EQ: diff --git a/src/cmd/compile/internal/ssa/rewriteS390X.go b/src/cmd/compile/internal/ssa/rewriteS390X.go index 3b8ea93f53..c3a1ff4fb6 100644 --- a/src/cmd/compile/internal/ssa/rewriteS390X.go +++ b/src/cmd/compile/internal/ssa/rewriteS390X.go @@ -887,9 +887,7 @@ func rewriteValueS390X_OpAndB(v *Value) bool { func rewriteValueS390X_OpAtomicAdd32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (AtomicAdd32 ptr val mem) // cond: @@ -911,9 +909,7 @@ func rewriteValueS390X_OpAtomicAdd32(v *Value) bool { func rewriteValueS390X_OpAtomicAdd64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (AtomicAdd64 ptr val mem) // cond: @@ -1105,9 +1101,7 @@ func rewriteValueS390X_OpAvg64u(v *Value) bool { func rewriteValueS390X_OpBitLen64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (BitLen64 x) // cond: @@ -1312,9 +1306,7 @@ func rewriteValueS390X_OpConvert(v *Value) bool { func rewriteValueS390X_OpCtz32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Ctz32 x) // cond: @@ -1345,9 +1337,7 @@ func rewriteValueS390X_OpCtz32(v *Value) bool { func rewriteValueS390X_OpCtz64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Ctz64 x) // cond: @@ -1486,9 +1476,7 @@ func rewriteValueS390X_OpCvt64to64F(v *Value) bool { func rewriteValueS390X_OpDiv16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16 x y) // cond: @@ -1509,9 +1497,7 @@ func rewriteValueS390X_OpDiv16(v *Value) bool { func rewriteValueS390X_OpDiv16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16u x y) // cond: @@ -1532,9 +1518,7 @@ func rewriteValueS390X_OpDiv16u(v *Value) bool { func rewriteValueS390X_OpDiv32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32 x y) // cond: @@ -1566,9 +1550,7 @@ func rewriteValueS390X_OpDiv32F(v *Value) bool { func rewriteValueS390X_OpDiv32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32u x y) // cond: @@ -1626,9 +1608,7 @@ func rewriteValueS390X_OpDiv64u(v *Value) bool { func rewriteValueS390X_OpDiv8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8 x y) // cond: @@ -1649,9 +1629,7 @@ func rewriteValueS390X_OpDiv8(v *Value) bool { func rewriteValueS390X_OpDiv8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8u x y) // cond: @@ -1672,9 +1650,7 @@ func 
rewriteValueS390X_OpDiv8u(v *Value) bool { func rewriteValueS390X_OpEq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq16 x y) // cond: @@ -1703,9 +1679,7 @@ func rewriteValueS390X_OpEq16(v *Value) bool { func rewriteValueS390X_OpEq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq32 x y) // cond: @@ -1730,9 +1704,7 @@ func rewriteValueS390X_OpEq32(v *Value) bool { func rewriteValueS390X_OpEq32F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq32F x y) // cond: @@ -1757,9 +1729,7 @@ func rewriteValueS390X_OpEq32F(v *Value) bool { func rewriteValueS390X_OpEq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq64 x y) // cond: @@ -1784,9 +1754,7 @@ func rewriteValueS390X_OpEq64(v *Value) bool { func rewriteValueS390X_OpEq64F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq64F x y) // cond: @@ -1811,9 +1779,7 @@ func rewriteValueS390X_OpEq64F(v *Value) bool { func rewriteValueS390X_OpEq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq8 x y) // cond: @@ -1842,9 +1808,7 @@ func rewriteValueS390X_OpEq8(v *Value) bool { func rewriteValueS390X_OpEqB(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqB x y) // cond: @@ -1873,9 +1837,7 @@ func rewriteValueS390X_OpEqB(v *Value) bool { func rewriteValueS390X_OpEqPtr(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqPtr x y) // cond: @@ -1900,9 +1862,7 @@ func rewriteValueS390X_OpEqPtr(v *Value) bool { func rewriteValueS390X_OpGeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16 x y) // cond: @@ -1931,9 +1891,7 @@ func rewriteValueS390X_OpGeq16(v *Value) bool { func rewriteValueS390X_OpGeq16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq16U x y) // cond: @@ -1962,9 +1920,7 @@ func rewriteValueS390X_OpGeq16U(v *Value) bool { func rewriteValueS390X_OpGeq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq32 x y) // cond: @@ -1989,9 +1945,7 @@ func rewriteValueS390X_OpGeq32(v *Value) bool { func rewriteValueS390X_OpGeq32F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq32F x y) // cond: @@ -2016,9 +1970,7 @@ func rewriteValueS390X_OpGeq32F(v *Value) bool { func rewriteValueS390X_OpGeq32U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq32U x y) // cond: @@ -2043,9 +1995,7 @@ func rewriteValueS390X_OpGeq32U(v *Value) bool { func rewriteValueS390X_OpGeq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq64 x y) // cond: @@ -2070,9 +2020,7 @@ func rewriteValueS390X_OpGeq64(v *Value) bool { func 
rewriteValueS390X_OpGeq64F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq64F x y) // cond: @@ -2097,9 +2045,7 @@ func rewriteValueS390X_OpGeq64F(v *Value) bool { func rewriteValueS390X_OpGeq64U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq64U x y) // cond: @@ -2124,9 +2070,7 @@ func rewriteValueS390X_OpGeq64U(v *Value) bool { func rewriteValueS390X_OpGeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8 x y) // cond: @@ -2155,9 +2099,7 @@ func rewriteValueS390X_OpGeq8(v *Value) bool { func rewriteValueS390X_OpGeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq8U x y) // cond: @@ -2206,9 +2148,7 @@ func rewriteValueS390X_OpGetG(v *Value) bool { func rewriteValueS390X_OpGreater16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater16 x y) // cond: @@ -2237,9 +2177,7 @@ func rewriteValueS390X_OpGreater16(v *Value) bool { func rewriteValueS390X_OpGreater16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater16U x y) // cond: @@ -2268,9 +2206,7 @@ func rewriteValueS390X_OpGreater16U(v *Value) bool { func rewriteValueS390X_OpGreater32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater32 x y) // cond: @@ -2295,9 +2231,7 @@ func rewriteValueS390X_OpGreater32(v *Value) bool { func rewriteValueS390X_OpGreater32F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater32F x y) // cond: @@ -2322,9 +2256,7 @@ func rewriteValueS390X_OpGreater32F(v *Value) bool { func rewriteValueS390X_OpGreater32U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater32U x y) // cond: @@ -2349,9 +2281,7 @@ func rewriteValueS390X_OpGreater32U(v *Value) bool { func rewriteValueS390X_OpGreater64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater64 x y) // cond: @@ -2376,9 +2306,7 @@ func rewriteValueS390X_OpGreater64(v *Value) bool { func rewriteValueS390X_OpGreater64F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater64F x y) // cond: @@ -2403,9 +2331,7 @@ func rewriteValueS390X_OpGreater64F(v *Value) bool { func rewriteValueS390X_OpGreater64U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater64U x y) // cond: @@ -2430,9 +2356,7 @@ func rewriteValueS390X_OpGreater64U(v *Value) bool { func rewriteValueS390X_OpGreater8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8 x y) // cond: @@ -2461,9 +2385,7 @@ func rewriteValueS390X_OpGreater8(v *Value) bool { func rewriteValueS390X_OpGreater8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater8U x y) // cond: 
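The _ = b, _ = config, and _ = fe assignments that survive every hunk are there because rulegen emits one uniform preamble per function while Go rejects unused local variables; blanking the variables keeps functions whose particular rules never touch them compiling. A standalone illustration of the language rule, unrelated to the compiler itself:

package main

import "fmt"

func preambleStyle() bool {
	helper := 42 // without the next line, an otherwise unused local
	_ = helper   // variable would be a compile error ("declared and not used")
	return false
}

func main() { fmt.Println(preambleStyle()) }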
@@ -2492,9 +2414,7 @@ func rewriteValueS390X_OpGreater8U(v *Value) bool { func rewriteValueS390X_OpHmul32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Hmul32 x y) // cond: @@ -2518,9 +2438,7 @@ func rewriteValueS390X_OpHmul32(v *Value) bool { func rewriteValueS390X_OpHmul32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Hmul32u x y) // cond: @@ -2603,9 +2521,7 @@ func rewriteValueS390X_OpInterCall(v *Value) bool { func rewriteValueS390X_OpIsInBounds(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (IsInBounds idx len) // cond: @@ -2630,9 +2546,7 @@ func rewriteValueS390X_OpIsInBounds(v *Value) bool { func rewriteValueS390X_OpIsNonNil(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (IsNonNil p) // cond: @@ -2656,9 +2570,7 @@ func rewriteValueS390X_OpIsNonNil(v *Value) bool { func rewriteValueS390X_OpIsSliceInBounds(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (IsSliceInBounds idx len) // cond: @@ -2683,9 +2595,7 @@ func rewriteValueS390X_OpIsSliceInBounds(v *Value) bool { func rewriteValueS390X_OpLeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16 x y) // cond: @@ -2714,9 +2624,7 @@ func rewriteValueS390X_OpLeq16(v *Value) bool { func rewriteValueS390X_OpLeq16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq16U x y) // cond: @@ -2745,9 +2653,7 @@ func rewriteValueS390X_OpLeq16U(v *Value) bool { func rewriteValueS390X_OpLeq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq32 x y) // cond: @@ -2772,9 +2678,7 @@ func rewriteValueS390X_OpLeq32(v *Value) bool { func rewriteValueS390X_OpLeq32F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq32F x y) // cond: @@ -2799,9 +2703,7 @@ func rewriteValueS390X_OpLeq32F(v *Value) bool { func rewriteValueS390X_OpLeq32U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq32U x y) // cond: @@ -2826,9 +2728,7 @@ func rewriteValueS390X_OpLeq32U(v *Value) bool { func rewriteValueS390X_OpLeq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq64 x y) // cond: @@ -2853,9 +2753,7 @@ func rewriteValueS390X_OpLeq64(v *Value) bool { func rewriteValueS390X_OpLeq64F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq64F x y) // cond: @@ -2880,9 +2778,7 @@ func rewriteValueS390X_OpLeq64F(v *Value) bool { func rewriteValueS390X_OpLeq64U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq64U x y) // cond: @@ -2907,9 +2803,7 @@ func rewriteValueS390X_OpLeq64U(v *Value) bool { func rewriteValueS390X_OpLeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = 
fe // match: (Leq8 x y) // cond: @@ -2938,9 +2832,7 @@ func rewriteValueS390X_OpLeq8(v *Value) bool { func rewriteValueS390X_OpLeq8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq8U x y) // cond: @@ -2969,9 +2861,7 @@ func rewriteValueS390X_OpLeq8U(v *Value) bool { func rewriteValueS390X_OpLess16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16 x y) // cond: @@ -3000,9 +2890,7 @@ func rewriteValueS390X_OpLess16(v *Value) bool { func rewriteValueS390X_OpLess16U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less16U x y) // cond: @@ -3031,9 +2919,7 @@ func rewriteValueS390X_OpLess16U(v *Value) bool { func rewriteValueS390X_OpLess32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less32 x y) // cond: @@ -3058,9 +2944,7 @@ func rewriteValueS390X_OpLess32(v *Value) bool { func rewriteValueS390X_OpLess32F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less32F x y) // cond: @@ -3085,9 +2969,7 @@ func rewriteValueS390X_OpLess32F(v *Value) bool { func rewriteValueS390X_OpLess32U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less32U x y) // cond: @@ -3112,9 +2994,7 @@ func rewriteValueS390X_OpLess32U(v *Value) bool { func rewriteValueS390X_OpLess64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less64 x y) // cond: @@ -3139,9 +3019,7 @@ func rewriteValueS390X_OpLess64(v *Value) bool { func rewriteValueS390X_OpLess64F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less64F x y) // cond: @@ -3166,9 +3044,7 @@ func rewriteValueS390X_OpLess64F(v *Value) bool { func rewriteValueS390X_OpLess64U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less64U x y) // cond: @@ -3193,9 +3069,7 @@ func rewriteValueS390X_OpLess64U(v *Value) bool { func rewriteValueS390X_OpLess8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8 x y) // cond: @@ -3224,9 +3098,7 @@ func rewriteValueS390X_OpLess8(v *Value) bool { func rewriteValueS390X_OpLess8U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less8U x y) // cond: @@ -3393,9 +3265,7 @@ func rewriteValueS390X_OpLoad(v *Value) bool { func rewriteValueS390X_OpLsh16x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x16 x y) // cond: @@ -3471,9 +3341,7 @@ func rewriteValueS390X_OpLsh16x64(v *Value) bool { func rewriteValueS390X_OpLsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x8 x y) // cond: @@ -3501,9 +3369,7 @@ func rewriteValueS390X_OpLsh16x8(v *Value) bool { func rewriteValueS390X_OpLsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ 
= fe // match: (Lsh32x16 x y) // cond: @@ -3579,9 +3445,7 @@ func rewriteValueS390X_OpLsh32x64(v *Value) bool { func rewriteValueS390X_OpLsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x8 x y) // cond: @@ -3609,9 +3473,7 @@ func rewriteValueS390X_OpLsh32x8(v *Value) bool { func rewriteValueS390X_OpLsh64x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x16 x y) // cond: @@ -3687,9 +3549,7 @@ func rewriteValueS390X_OpLsh64x64(v *Value) bool { func rewriteValueS390X_OpLsh64x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x8 x y) // cond: @@ -3717,9 +3577,7 @@ func rewriteValueS390X_OpLsh64x8(v *Value) bool { func rewriteValueS390X_OpLsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x16 x y) // cond: @@ -3795,9 +3653,7 @@ func rewriteValueS390X_OpLsh8x64(v *Value) bool { func rewriteValueS390X_OpLsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x8 x y) // cond: @@ -3825,9 +3681,7 @@ func rewriteValueS390X_OpLsh8x8(v *Value) bool { func rewriteValueS390X_OpMod16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16 x y) // cond: @@ -3848,9 +3702,7 @@ func rewriteValueS390X_OpMod16(v *Value) bool { func rewriteValueS390X_OpMod16u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod16u x y) // cond: @@ -3871,9 +3723,7 @@ func rewriteValueS390X_OpMod16u(v *Value) bool { func rewriteValueS390X_OpMod32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32 x y) // cond: @@ -3892,9 +3742,7 @@ func rewriteValueS390X_OpMod32(v *Value) bool { func rewriteValueS390X_OpMod32u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod32u x y) // cond: @@ -3939,9 +3787,7 @@ func rewriteValueS390X_OpMod64u(v *Value) bool { func rewriteValueS390X_OpMod8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8 x y) // cond: @@ -3962,9 +3808,7 @@ func rewriteValueS390X_OpMod8(v *Value) bool { func rewriteValueS390X_OpMod8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mod8u x y) // cond: @@ -3985,9 +3829,7 @@ func rewriteValueS390X_OpMod8u(v *Value) bool { func rewriteValueS390X_OpMove(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Move [0] _ _ mem) // cond: @@ -4471,9 +4313,7 @@ func rewriteValueS390X_OpMul8(v *Value) bool { func rewriteValueS390X_OpNeg16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neg16 x) // cond: @@ -4534,9 +4374,7 @@ func rewriteValueS390X_OpNeg64F(v *Value) bool { func rewriteValueS390X_OpNeg8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // 
match: (Neg8 x) // cond: @@ -4553,9 +4391,7 @@ func rewriteValueS390X_OpNeg8(v *Value) bool { func rewriteValueS390X_OpNeq16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq16 x y) // cond: @@ -4584,9 +4420,7 @@ func rewriteValueS390X_OpNeq16(v *Value) bool { func rewriteValueS390X_OpNeq32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq32 x y) // cond: @@ -4611,9 +4445,7 @@ func rewriteValueS390X_OpNeq32(v *Value) bool { func rewriteValueS390X_OpNeq32F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq32F x y) // cond: @@ -4638,9 +4470,7 @@ func rewriteValueS390X_OpNeq32F(v *Value) bool { func rewriteValueS390X_OpNeq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq64 x y) // cond: @@ -4665,9 +4495,7 @@ func rewriteValueS390X_OpNeq64(v *Value) bool { func rewriteValueS390X_OpNeq64F(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq64F x y) // cond: @@ -4692,9 +4520,7 @@ func rewriteValueS390X_OpNeq64F(v *Value) bool { func rewriteValueS390X_OpNeq8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq8 x y) // cond: @@ -4723,9 +4549,7 @@ func rewriteValueS390X_OpNeq8(v *Value) bool { func rewriteValueS390X_OpNeqB(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (NeqB x y) // cond: @@ -4754,9 +4578,7 @@ func rewriteValueS390X_OpNeqB(v *Value) bool { func rewriteValueS390X_OpNeqPtr(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (NeqPtr x y) // cond: @@ -4806,9 +4628,7 @@ func rewriteValueS390X_OpNot(v *Value) bool { func rewriteValueS390X_OpOffPtr(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (OffPtr [off] ptr:(SP)) // cond: @@ -4942,9 +4762,7 @@ func rewriteValueS390X_OpRound64F(v *Value) bool { func rewriteValueS390X_OpRsh16Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux16 x y) // cond: @@ -4974,9 +4792,7 @@ func rewriteValueS390X_OpRsh16Ux16(v *Value) bool { func rewriteValueS390X_OpRsh16Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux32 x y) // cond: @@ -5004,9 +4820,7 @@ func rewriteValueS390X_OpRsh16Ux32(v *Value) bool { func rewriteValueS390X_OpRsh16Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux64 x y) // cond: @@ -5034,9 +4848,7 @@ func rewriteValueS390X_OpRsh16Ux64(v *Value) bool { func rewriteValueS390X_OpRsh16Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux8 x y) // cond: @@ -5066,9 +4878,7 @@ func rewriteValueS390X_OpRsh16Ux8(v *Value) bool { func rewriteValueS390X_OpRsh16x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = 
fe // match: (Rsh16x16 x y) // cond: @@ -5101,9 +4911,7 @@ func rewriteValueS390X_OpRsh16x16(v *Value) bool { func rewriteValueS390X_OpRsh16x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x32 x y) // cond: @@ -5134,9 +4942,7 @@ func rewriteValueS390X_OpRsh16x32(v *Value) bool { func rewriteValueS390X_OpRsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x64 x y) // cond: @@ -5167,9 +4973,7 @@ func rewriteValueS390X_OpRsh16x64(v *Value) bool { func rewriteValueS390X_OpRsh16x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x8 x y) // cond: @@ -5202,9 +5006,7 @@ func rewriteValueS390X_OpRsh16x8(v *Value) bool { func rewriteValueS390X_OpRsh32Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux16 x y) // cond: @@ -5280,9 +5082,7 @@ func rewriteValueS390X_OpRsh32Ux64(v *Value) bool { func rewriteValueS390X_OpRsh32Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux8 x y) // cond: @@ -5310,9 +5110,7 @@ func rewriteValueS390X_OpRsh32Ux8(v *Value) bool { func rewriteValueS390X_OpRsh32x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x16 x y) // cond: @@ -5397,9 +5195,7 @@ func rewriteValueS390X_OpRsh32x64(v *Value) bool { func rewriteValueS390X_OpRsh32x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x8 x y) // cond: @@ -5430,9 +5226,7 @@ func rewriteValueS390X_OpRsh32x8(v *Value) bool { func rewriteValueS390X_OpRsh64Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux16 x y) // cond: @@ -5508,9 +5302,7 @@ func rewriteValueS390X_OpRsh64Ux64(v *Value) bool { func rewriteValueS390X_OpRsh64Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux8 x y) // cond: @@ -5538,9 +5330,7 @@ func rewriteValueS390X_OpRsh64Ux8(v *Value) bool { func rewriteValueS390X_OpRsh64x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x16 x y) // cond: @@ -5625,9 +5415,7 @@ func rewriteValueS390X_OpRsh64x64(v *Value) bool { func rewriteValueS390X_OpRsh64x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x8 x y) // cond: @@ -5658,9 +5446,7 @@ func rewriteValueS390X_OpRsh64x8(v *Value) bool { func rewriteValueS390X_OpRsh8Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux16 x y) // cond: @@ -5690,9 +5476,7 @@ func rewriteValueS390X_OpRsh8Ux16(v *Value) bool { func rewriteValueS390X_OpRsh8Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux32 x y) // cond: @@ -5720,9 +5504,7 @@ func rewriteValueS390X_OpRsh8Ux32(v *Value) bool { func rewriteValueS390X_OpRsh8Ux64(v *Value) bool { b := v.Block _ = b - config 
:= b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux64 x y) // cond: @@ -5750,9 +5532,7 @@ func rewriteValueS390X_OpRsh8Ux64(v *Value) bool { func rewriteValueS390X_OpRsh8Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux8 x y) // cond: @@ -5782,9 +5562,7 @@ func rewriteValueS390X_OpRsh8Ux8(v *Value) bool { func rewriteValueS390X_OpRsh8x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x16 x y) // cond: @@ -5817,9 +5595,7 @@ func rewriteValueS390X_OpRsh8x16(v *Value) bool { func rewriteValueS390X_OpRsh8x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x32 x y) // cond: @@ -5850,9 +5626,7 @@ func rewriteValueS390X_OpRsh8x32(v *Value) bool { func rewriteValueS390X_OpRsh8x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x64 x y) // cond: @@ -5883,9 +5657,7 @@ func rewriteValueS390X_OpRsh8x64(v *Value) bool { func rewriteValueS390X_OpRsh8x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x8 x y) // cond: @@ -14241,9 +14013,7 @@ func rewriteValueS390X_OpS390XMOVWstore(v *Value) bool { func rewriteValueS390X_OpS390XMOVWstoreconst(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (MOVWstoreconst [sc] {s} (ADDconst [off] ptr) mem) // cond: ValAndOff(sc).canAdd(off) @@ -14969,9 +14739,7 @@ func rewriteValueS390X_OpS390XNEGW(v *Value) bool { func rewriteValueS390X_OpS390XNOT(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (NOT x) // cond: true @@ -15009,9 +14777,7 @@ func rewriteValueS390X_OpS390XNOTW(v *Value) bool { func rewriteValueS390X_OpS390XOR(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (OR x (MOVDconst [c])) // cond: isU32Bit(c) @@ -16090,9 +15856,7 @@ func rewriteValueS390X_OpS390XOR(v *Value) bool { func rewriteValueS390X_OpS390XORW(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (ORW x (MOVDconst [c])) // cond: @@ -18826,7 +18590,7 @@ func rewriteValueS390X_OpZeroExt8to64(v *Value) bool { func rewriteBlockS390X(b *Block) bool { config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe switch b.Kind { case BlockS390XEQ: diff --git a/src/cmd/compile/internal/ssa/rewritedec.go b/src/cmd/compile/internal/ssa/rewritedec.go index 7e6acda356..eedf61f536 100644 --- a/src/cmd/compile/internal/ssa/rewritedec.go +++ b/src/cmd/compile/internal/ssa/rewritedec.go @@ -108,7 +108,7 @@ func rewriteValuedec_OpLoad(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Load ptr mem) // cond: t.IsComplex() && t.Size() == 8 @@ -295,7 +295,7 @@ func rewriteValuedec_OpStore(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Store {t} dst (ComplexMake real imag) mem) // cond: t.(Type).Size() == 8 @@ -488,7 +488,7 @@ func rewriteValuedec_OpStringPtr(v *Value) bool { func rewriteBlockdec(b *Block) 
bool { config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe switch b.Kind { } diff --git a/src/cmd/compile/internal/ssa/rewritedec64.go b/src/cmd/compile/internal/ssa/rewritedec64.go index ccb511d2ca..97e4db0fe3 100644 --- a/src/cmd/compile/internal/ssa/rewritedec64.go +++ b/src/cmd/compile/internal/ssa/rewritedec64.go @@ -128,9 +128,7 @@ func rewriteValuedec64(v *Value) bool { func rewriteValuedec64_OpAdd64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Add64 x y) // cond: @@ -173,9 +171,7 @@ func rewriteValuedec64_OpAdd64(v *Value) bool { func rewriteValuedec64_OpAnd64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (And64 x y) // cond: @@ -208,7 +204,7 @@ func rewriteValuedec64_OpArg(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Arg {n} [off]) // cond: is64BitInt(v.Type) && !config.BigEndian && v.Type.IsSigned() @@ -295,9 +291,7 @@ func rewriteValuedec64_OpArg(v *Value) bool { func rewriteValuedec64_OpBitLen64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (BitLen64 x) // cond: @@ -329,9 +323,7 @@ func rewriteValuedec64_OpBitLen64(v *Value) bool { func rewriteValuedec64_OpBswap64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Bswap64 x) // cond: @@ -355,9 +347,7 @@ func rewriteValuedec64_OpBswap64(v *Value) bool { func rewriteValuedec64_OpCom64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Com64 x) // cond: @@ -381,9 +371,7 @@ func rewriteValuedec64_OpCom64(v *Value) bool { func rewriteValuedec64_OpConst64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Const64 [c]) // cond: t.IsSigned() @@ -426,9 +414,7 @@ func rewriteValuedec64_OpConst64(v *Value) bool { func rewriteValuedec64_OpCtz64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Ctz64 x) // cond: @@ -462,9 +448,7 @@ func rewriteValuedec64_OpCtz64(v *Value) bool { func rewriteValuedec64_OpEq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Eq64 x y) // cond: @@ -495,9 +479,7 @@ func rewriteValuedec64_OpEq64(v *Value) bool { func rewriteValuedec64_OpGeq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq64 x y) // cond: @@ -538,9 +520,7 @@ func rewriteValuedec64_OpGeq64(v *Value) bool { func rewriteValuedec64_OpGeq64U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Geq64U x y) // cond: @@ -581,9 +561,7 @@ func rewriteValuedec64_OpGeq64U(v *Value) bool { func rewriteValuedec64_OpGreater64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Greater64 x y) // cond: @@ -624,9 +602,7 @@ func rewriteValuedec64_OpGreater64(v *Value) bool { func rewriteValuedec64_OpGreater64U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := 
b.Func.fe _ = fe // match: (Greater64U x y) // cond: @@ -701,9 +677,7 @@ func rewriteValuedec64_OpInt64Lo(v *Value) bool { func rewriteValuedec64_OpLeq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq64 x y) // cond: @@ -744,9 +718,7 @@ func rewriteValuedec64_OpLeq64(v *Value) bool { func rewriteValuedec64_OpLeq64U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Leq64U x y) // cond: @@ -787,9 +759,7 @@ func rewriteValuedec64_OpLeq64U(v *Value) bool { func rewriteValuedec64_OpLess64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less64 x y) // cond: @@ -830,9 +800,7 @@ func rewriteValuedec64_OpLess64(v *Value) bool { func rewriteValuedec64_OpLess64U(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Less64U x y) // cond: @@ -875,7 +843,7 @@ func rewriteValuedec64_OpLoad(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Load ptr mem) // cond: is64BitInt(t) && !config.BigEndian && t.IsSigned() @@ -978,9 +946,7 @@ func rewriteValuedec64_OpLoad(v *Value) bool { func rewriteValuedec64_OpLsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x64 _ (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -1053,9 +1019,7 @@ func rewriteValuedec64_OpLsh16x64(v *Value) bool { func rewriteValuedec64_OpLsh32x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x64 _ (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -1128,9 +1092,7 @@ func rewriteValuedec64_OpLsh32x64(v *Value) bool { func rewriteValuedec64_OpLsh64x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x16 (Int64Make hi lo) s) // cond: @@ -1181,9 +1143,7 @@ func rewriteValuedec64_OpLsh64x16(v *Value) bool { func rewriteValuedec64_OpLsh64x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x32 (Int64Make hi lo) s) // cond: @@ -1234,9 +1194,7 @@ func rewriteValuedec64_OpLsh64x32(v *Value) bool { func rewriteValuedec64_OpLsh64x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x64 _ (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -1309,9 +1267,7 @@ func rewriteValuedec64_OpLsh64x64(v *Value) bool { func rewriteValuedec64_OpLsh64x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x8 (Int64Make hi lo) s) // cond: @@ -1362,9 +1318,7 @@ func rewriteValuedec64_OpLsh64x8(v *Value) bool { func rewriteValuedec64_OpLsh8x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x64 _ (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -1437,9 +1391,7 @@ func rewriteValuedec64_OpLsh8x64(v *Value) bool { func rewriteValuedec64_OpMul64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mul64 x y) // cond: @@ -1511,9 +1463,7 @@ func 
rewriteValuedec64_OpNeg64(v *Value) bool { func rewriteValuedec64_OpNeq64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Neq64 x y) // cond: @@ -1544,9 +1494,7 @@ func rewriteValuedec64_OpNeq64(v *Value) bool { func rewriteValuedec64_OpOr64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Or64 x y) // cond: @@ -1577,9 +1525,7 @@ func rewriteValuedec64_OpOr64(v *Value) bool { func rewriteValuedec64_OpRsh16Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux64 _ (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -1652,9 +1598,7 @@ func rewriteValuedec64_OpRsh16Ux64(v *Value) bool { func rewriteValuedec64_OpRsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x64 x (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -1730,9 +1674,7 @@ func rewriteValuedec64_OpRsh16x64(v *Value) bool { func rewriteValuedec64_OpRsh32Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux64 _ (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -1805,9 +1747,7 @@ func rewriteValuedec64_OpRsh32Ux64(v *Value) bool { func rewriteValuedec64_OpRsh32x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x64 x (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -1881,9 +1821,7 @@ func rewriteValuedec64_OpRsh32x64(v *Value) bool { func rewriteValuedec64_OpRsh64Ux16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux16 (Int64Make hi lo) s) // cond: @@ -1934,9 +1872,7 @@ func rewriteValuedec64_OpRsh64Ux16(v *Value) bool { func rewriteValuedec64_OpRsh64Ux32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux32 (Int64Make hi lo) s) // cond: @@ -1987,9 +1923,7 @@ func rewriteValuedec64_OpRsh64Ux32(v *Value) bool { func rewriteValuedec64_OpRsh64Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux64 _ (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -2062,9 +1996,7 @@ func rewriteValuedec64_OpRsh64Ux64(v *Value) bool { func rewriteValuedec64_OpRsh64Ux8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux8 (Int64Make hi lo) s) // cond: @@ -2115,9 +2047,7 @@ func rewriteValuedec64_OpRsh64Ux8(v *Value) bool { func rewriteValuedec64_OpRsh64x16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x16 (Int64Make hi lo) s) // cond: @@ -2180,9 +2110,7 @@ func rewriteValuedec64_OpRsh64x16(v *Value) bool { func rewriteValuedec64_OpRsh64x32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x32 (Int64Make hi lo) s) // cond: @@ -2243,9 +2171,7 @@ func rewriteValuedec64_OpRsh64x32(v *Value) bool { func rewriteValuedec64_OpRsh64x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: 
(Rsh64x64 x (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -2328,9 +2254,7 @@ func rewriteValuedec64_OpRsh64x64(v *Value) bool { func rewriteValuedec64_OpRsh64x8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x8 (Int64Make hi lo) s) // cond: @@ -2393,9 +2317,7 @@ func rewriteValuedec64_OpRsh64x8(v *Value) bool { func rewriteValuedec64_OpRsh8Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux64 _ (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -2468,9 +2390,7 @@ func rewriteValuedec64_OpRsh8Ux64(v *Value) bool { func rewriteValuedec64_OpRsh8x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8x64 x (Int64Make (Const32 [c]) _)) // cond: c != 0 @@ -2546,9 +2466,7 @@ func rewriteValuedec64_OpRsh8x64(v *Value) bool { func rewriteValuedec64_OpSignExt16to64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (SignExt16to64 x) // cond: @@ -2565,9 +2483,7 @@ func rewriteValuedec64_OpSignExt16to64(v *Value) bool { func rewriteValuedec64_OpSignExt32to64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (SignExt32to64 x) // cond: @@ -2585,9 +2501,7 @@ func rewriteValuedec64_OpSignExt32to64(v *Value) bool { func rewriteValuedec64_OpSignExt8to64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (SignExt8to64 x) // cond: @@ -2673,9 +2587,7 @@ func rewriteValuedec64_OpStore(v *Value) bool { func rewriteValuedec64_OpSub64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Sub64 x y) // cond: @@ -2767,9 +2679,7 @@ func rewriteValuedec64_OpTrunc64to8(v *Value) bool { func rewriteValuedec64_OpXor64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Xor64 x y) // cond: @@ -2800,9 +2710,7 @@ func rewriteValuedec64_OpXor64(v *Value) bool { func rewriteValuedec64_OpZeroExt16to64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (ZeroExt16to64 x) // cond: @@ -2819,9 +2727,7 @@ func rewriteValuedec64_OpZeroExt16to64(v *Value) bool { func rewriteValuedec64_OpZeroExt32to64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (ZeroExt32to64 x) // cond: @@ -2839,9 +2745,7 @@ func rewriteValuedec64_OpZeroExt32to64(v *Value) bool { func rewriteValuedec64_OpZeroExt8to64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (ZeroExt8to64 x) // cond: @@ -2858,7 +2762,7 @@ func rewriteValuedec64_OpZeroExt8to64(v *Value) bool { func rewriteBlockdec64(b *Block) bool { config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe switch b.Kind { } diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go index edabce6699..eb769020f0 100644 --- a/src/cmd/compile/internal/ssa/rewritegeneric.go +++ b/src/cmd/compile/internal/ssa/rewritegeneric.go @@ -2831,7 +2831,7 @@ func rewriteValuegeneric_OpArg(v 
*Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Arg {n} [off]) // cond: v.Type.IsString() @@ -3192,9 +3192,7 @@ func rewriteValuegeneric_OpCom8(v *Value) bool { func rewriteValuegeneric_OpConstInterface(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (ConstInterface) // cond: @@ -3213,7 +3211,7 @@ func rewriteValuegeneric_OpConstSlice(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (ConstSlice) // cond: config.PtrSize == 4 @@ -3258,7 +3256,7 @@ func rewriteValuegeneric_OpConstString(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (ConstString {s}) // cond: config.PtrSize == 4 && s.(string) == "" @@ -3434,9 +3432,7 @@ func rewriteValuegeneric_OpCvt64Fto32F(v *Value) bool { func rewriteValuegeneric_OpDiv16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16 (Const16 [c]) (Const16 [d])) // cond: d != 0 @@ -3587,7 +3583,7 @@ func rewriteValuegeneric_OpDiv16u(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div16u (Const16 [c]) (Const16 [d])) // cond: d != 0 @@ -3768,7 +3764,7 @@ func rewriteValuegeneric_OpDiv32(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32 (Const32 [c]) (Const32 [d])) // cond: d != 0 @@ -4046,7 +4042,7 @@ func rewriteValuegeneric_OpDiv32u(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div32u (Const32 [c]) (Const32 [d])) // cond: d != 0 @@ -4282,9 +4278,7 @@ func rewriteValuegeneric_OpDiv32u(v *Value) bool { func rewriteValuegeneric_OpDiv64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div64 (Const64 [c]) (Const64 [d])) // cond: d != 0 @@ -4523,7 +4517,7 @@ func rewriteValuegeneric_OpDiv64u(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div64u (Const64 [c]) (Const64 [d])) // cond: d != 0 @@ -4657,9 +4651,7 @@ func rewriteValuegeneric_OpDiv64u(v *Value) bool { func rewriteValuegeneric_OpDiv8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8 (Const8 [c]) (Const8 [d])) // cond: d != 0 @@ -4808,9 +4800,7 @@ func rewriteValuegeneric_OpDiv8(v *Value) bool { func rewriteValuegeneric_OpDiv8u(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Div8u (Const8 [c]) (Const8 [d])) // cond: d != 0 @@ -5285,9 +5275,7 @@ func rewriteValuegeneric_OpEqB(v *Value) bool { func rewriteValuegeneric_OpEqInter(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqInter x y) // cond: @@ -5308,9 +5296,7 @@ func rewriteValuegeneric_OpEqInter(v *Value) bool { func rewriteValuegeneric_OpEqPtr(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqPtr p (ConstNil)) // cond: @@ -5347,9 +5333,7 @@ func rewriteValuegeneric_OpEqPtr(v *Value) bool { func rewriteValuegeneric_OpEqSlice(v *Value) bool { b := v.Block _ = b 
- config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (EqSlice x y) // cond: @@ -6722,9 +6706,7 @@ func rewriteValuegeneric_OpLess8U(v *Value) bool { func rewriteValuegeneric_OpLoad(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Load p1 (Store {t2} p2 x _)) // cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && t1.Size() == t2.(Type).Size() @@ -6989,9 +6971,7 @@ func rewriteValuegeneric_OpLsh16x32(v *Value) bool { func rewriteValuegeneric_OpLsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh16x64 (Const16 [c]) (Const64 [d])) // cond: @@ -7246,9 +7226,7 @@ func rewriteValuegeneric_OpLsh32x32(v *Value) bool { func rewriteValuegeneric_OpLsh32x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh32x64 (Const32 [c]) (Const64 [d])) // cond: @@ -7503,9 +7481,7 @@ func rewriteValuegeneric_OpLsh64x32(v *Value) bool { func rewriteValuegeneric_OpLsh64x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh64x64 (Const64 [c]) (Const64 [d])) // cond: @@ -7760,9 +7736,7 @@ func rewriteValuegeneric_OpLsh8x32(v *Value) bool { func rewriteValuegeneric_OpLsh8x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Lsh8x64 (Const8 [c]) (Const64 [d])) // cond: @@ -8553,9 +8527,7 @@ func rewriteValuegeneric_OpMod8u(v *Value) bool { func rewriteValuegeneric_OpMul16(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mul16 (Const16 [c]) (Const16 [d])) // cond: @@ -8741,9 +8713,7 @@ func rewriteValuegeneric_OpMul16(v *Value) bool { func rewriteValuegeneric_OpMul32(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mul32 (Const32 [c]) (Const32 [d])) // cond: @@ -9054,9 +9024,7 @@ func rewriteValuegeneric_OpMul32F(v *Value) bool { func rewriteValuegeneric_OpMul64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mul64 (Const64 [c]) (Const64 [d])) // cond: @@ -9367,9 +9335,7 @@ func rewriteValuegeneric_OpMul64F(v *Value) bool { func rewriteValuegeneric_OpMul8(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Mul8 (Const8 [c]) (Const8 [d])) // cond: @@ -10115,9 +10081,7 @@ func rewriteValuegeneric_OpNeqB(v *Value) bool { func rewriteValuegeneric_OpNeqInter(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (NeqInter x y) // cond: @@ -10167,9 +10131,7 @@ func rewriteValuegeneric_OpNeqPtr(v *Value) bool { func rewriteValuegeneric_OpNeqSlice(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (NeqSlice x y) // cond: @@ -10192,7 +10154,7 @@ func rewriteValuegeneric_OpNilCheck(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (NilCheck (GetG mem) mem) // cond: @@ -11975,7 +11937,7 @@ func rewriteValuegeneric_OpPtrIndex(v *Value) bool { _ = b config := 
b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (PtrIndex ptr idx) // cond: config.PtrSize == 4 @@ -12130,9 +12092,7 @@ func rewriteValuegeneric_OpRsh16Ux32(v *Value) bool { func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16Ux64 (Const16 [c]) (Const64 [d])) // cond: @@ -12416,9 +12376,7 @@ func rewriteValuegeneric_OpRsh16x32(v *Value) bool { func rewriteValuegeneric_OpRsh16x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh16x64 (Const16 [c]) (Const64 [d])) // cond: @@ -12648,9 +12606,7 @@ func rewriteValuegeneric_OpRsh32Ux32(v *Value) bool { func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32Ux64 (Const32 [c]) (Const64 [d])) // cond: @@ -12963,9 +12919,7 @@ func rewriteValuegeneric_OpRsh32x32(v *Value) bool { func rewriteValuegeneric_OpRsh32x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh32x64 (Const32 [c]) (Const64 [d])) // cond: @@ -13224,9 +13178,7 @@ func rewriteValuegeneric_OpRsh64Ux32(v *Value) bool { func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64Ux64 (Const64 [c]) (Const64 [d])) // cond: @@ -13568,9 +13520,7 @@ func rewriteValuegeneric_OpRsh64x32(v *Value) bool { func rewriteValuegeneric_OpRsh64x64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh64x64 (Const64 [c]) (Const64 [d])) // cond: @@ -13858,9 +13808,7 @@ func rewriteValuegeneric_OpRsh8Ux32(v *Value) bool { func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Rsh8Ux64 (Const8 [c]) (Const64 [d])) // cond: @@ -14720,7 +14668,7 @@ func rewriteValuegeneric_OpStore(v *Value) bool { _ = b config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (Store _ (StructMake0) mem) // cond: @@ -15083,9 +15031,7 @@ func rewriteValuegeneric_OpStringPtr(v *Value) bool { func rewriteValuegeneric_OpStructSelect(v *Value) bool { b := v.Block _ = b - config := b.Func.Config - _ = config - fe := config.fe + fe := b.Func.fe _ = fe // match: (StructSelect (StructMake1 x)) // cond: @@ -17841,7 +17787,7 @@ func rewriteValuegeneric_OpZeroExt8to64(v *Value) bool { func rewriteBlockgeneric(b *Block) bool { config := b.Func.Config _ = config - fe := config.fe + fe := b.Func.fe _ = fe switch b.Kind { case BlockIf: diff --git a/src/cmd/compile/internal/ssa/schedule_test.go b/src/cmd/compile/internal/ssa/schedule_test.go index c541d4865c..fc4409ad25 100644 --- a/src/cmd/compile/internal/ssa/schedule_test.go +++ b/src/cmd/compile/internal/ssa/schedule_test.go @@ -9,7 +9,7 @@ import "testing" func TestSchedule(t *testing.T) { c := testConfig(t) cases := []fun{ - Fun(c, "entry", + Fun(c, DummyFrontend{t}, "entry", Bloc("entry", Valu("mem0", OpInitMem, TypeMem, 0, nil), Valu("ptr", OpConst64, TypeInt64, 0xABCD, nil), @@ -60,7 +60,7 @@ func TestStoreOrder(t *testing.T) { // In the function below, v2 depends on v3 and v4, v4 depends on v3, and v3 depends on store v5. 
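On the test side, Fun now takes the Frontend explicitly, as the schedule_test.go hunks here show; helpers such as makeConstShiftFunc below thread an fe Frontend parameter through for the same reason. A minimal sketch of the updated calling convention (a hypothetical test, assuming the Bloc/Valu/Exit/CheckFunc helpers from the package's test harness):

func TestFrontendPlumbing(t *testing.T) {
	c := testConfig(t)
	fe := DummyFrontend{t}
	fun := Fun(c, fe, "entry",
		Bloc("entry",
			Valu("mem", OpInitMem, TypeMem, 0, nil),
			Exit("mem")))
	CheckFunc(fun.f)
}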
 	// storeOrder did not handle this case correctly.
 	c := testConfig(t)
-	fun := Fun(c, "entry",
+	fun := Fun(c, DummyFrontend{t}, "entry",
 		Bloc("entry",
 			Valu("mem0", OpInitMem, TypeMem, 0, nil),
 			Valu("a", OpAdd64, TypeInt64, 0, nil, "b", "c"), // v2
diff --git a/src/cmd/compile/internal/ssa/shift_test.go b/src/cmd/compile/internal/ssa/shift_test.go
index de4d25a93f..2102612cdb 100644
--- a/src/cmd/compile/internal/ssa/shift_test.go
+++ b/src/cmd/compile/internal/ssa/shift_test.go
@@ -10,28 +10,29 @@ import (
 
 func TestShiftConstAMD64(t *testing.T) {
 	c := testConfig(t)
-	fun := makeConstShiftFunc(c, 18, OpLsh64x64, TypeUInt64)
+	fe := DummyFrontend{t}
+	fun := makeConstShiftFunc(c, fe, 18, OpLsh64x64, TypeUInt64)
 	checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHLQconst: 1, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
 
-	fun = makeConstShiftFunc(c, 66, OpLsh64x64, TypeUInt64)
+	fun = makeConstShiftFunc(c, fe, 66, OpLsh64x64, TypeUInt64)
 	checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHLQconst: 0, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
 
-	fun = makeConstShiftFunc(c, 18, OpRsh64Ux64, TypeUInt64)
+	fun = makeConstShiftFunc(c, fe, 18, OpRsh64Ux64, TypeUInt64)
 	checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHRQconst: 1, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
 
-	fun = makeConstShiftFunc(c, 66, OpRsh64Ux64, TypeUInt64)
+	fun = makeConstShiftFunc(c, fe, 66, OpRsh64Ux64, TypeUInt64)
 	checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SHRQconst: 0, OpAMD64CMPQconst: 0, OpAMD64ANDQconst: 0})
 
-	fun = makeConstShiftFunc(c, 18, OpRsh64x64, TypeInt64)
+	fun = makeConstShiftFunc(c, fe, 18, OpRsh64x64, TypeInt64)
 	checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SARQconst: 1, OpAMD64CMPQconst: 0})
 
-	fun = makeConstShiftFunc(c, 66, OpRsh64x64, TypeInt64)
+	fun = makeConstShiftFunc(c, fe, 66, OpRsh64x64, TypeInt64)
 	checkOpcodeCounts(t, fun.f, map[Op]int{OpAMD64SARQconst: 1, OpAMD64CMPQconst: 0})
 }
 
-func makeConstShiftFunc(c *Config, amount int64, op Op, typ Type) fun {
+func makeConstShiftFunc(c *Config, fe Frontend, amount int64, op Op, typ Type) fun {
 	ptyp := &TypeImpl{Size_: 8, Ptr: true, Name: "ptr"}
-	fun := Fun(c, "entry",
+	fun := Fun(c, fe, "entry",
 		Bloc("entry",
 			Valu("mem", OpInitMem, TypeMem, 0, nil),
 			Valu("SP", OpSP, TypeUInt64, 0, nil),
@@ -76,8 +77,9 @@ func TestShiftToExtensionAMD64(t *testing.T) {
 		{8, OpLsh16x64, OpRsh16x64, TypeInt16},
 	}
 	c := testConfig(t)
+	fe := DummyFrontend{t}
 	for _, tc := range tests {
-		fun := makeShiftExtensionFunc(c, tc.amount, tc.left, tc.right, tc.typ)
+		fun := makeShiftExtensionFunc(c, fe, tc.amount, tc.left, tc.right, tc.typ)
 		checkOpcodeCounts(t, fun.f, ops)
 	}
 }
@@ -87,9 +89,9 @@ func TestShiftToExtensionAMD64(t *testing.T) {
 // (rshift (lshift (Const64 [amount])) (Const64 [amount]))
 //
 // This may be equivalent to a sign or zero extension.
-func makeShiftExtensionFunc(c *Config, amount int64, lshift, rshift Op, typ Type) fun {
+func makeShiftExtensionFunc(c *Config, fe Frontend, amount int64, lshift, rshift Op, typ Type) fun {
 	ptyp := &TypeImpl{Size_: 8, Ptr: true, Name: "ptr"}
-	fun := Fun(c, "entry",
+	fun := Fun(c, fe, "entry",
 		Bloc("entry",
 			Valu("mem", OpInitMem, TypeMem, 0, nil),
 			Valu("SP", OpSP, TypeUInt64, 0, nil),
diff --git a/src/cmd/compile/internal/ssa/shortcircuit.go b/src/cmd/compile/internal/ssa/shortcircuit.go
index e6c274641e..d5dfdefbb8 100644
--- a/src/cmd/compile/internal/ssa/shortcircuit.go
+++ b/src/cmd/compile/internal/ssa/shortcircuit.go
@@ -17,8 +17,8 @@ func shortcircuit(f *Func) {
 	// x = phi(a, ...)
 	//
 	// We can replace the "a" in the phi with the constant true.
-	ct := f.ConstBool(f.Entry.Pos, f.Config.fe.TypeBool(), true)
-	cf := f.ConstBool(f.Entry.Pos, f.Config.fe.TypeBool(), false)
+	ct := f.ConstBool(f.Entry.Pos, f.fe.TypeBool(), true)
+	cf := f.ConstBool(f.Entry.Pos, f.fe.TypeBool(), false)
 	for _, b := range f.Blocks {
 		for _, v := range b.Values {
 			if v.Op != OpPhi {
diff --git a/src/cmd/compile/internal/ssa/shortcircuit_test.go b/src/cmd/compile/internal/ssa/shortcircuit_test.go
index f208801fc1..93033df469 100644
--- a/src/cmd/compile/internal/ssa/shortcircuit_test.go
+++ b/src/cmd/compile/internal/ssa/shortcircuit_test.go
@@ -9,7 +9,7 @@ import "testing"
 
 func TestShortCircuit(t *testing.T) {
 	c := testConfig(t)
-	fun := Fun(c, "entry",
+	fun := Fun(c, DummyFrontend{t}, "entry",
 		Bloc("entry",
 			Valu("mem", OpInitMem, TypeMem, 0, nil),
 			Valu("arg1", OpArg, TypeInt64, 0, nil),
diff --git a/src/cmd/compile/internal/ssa/stackalloc.go b/src/cmd/compile/internal/ssa/stackalloc.go
index 40edfc55c6..6957c8f630 100644
--- a/src/cmd/compile/internal/ssa/stackalloc.go
+++ b/src/cmd/compile/internal/ssa/stackalloc.go
@@ -40,7 +40,7 @@ func newStackAllocState(f *Func) *stackAllocState {
 		return new(stackAllocState)
 	}
 	if s.f != nil {
-		f.Config.Fatalf(src.NoXPos, "newStackAllocState called without previous free")
+		f.fe.Fatalf(src.NoXPos, "newStackAllocState called without previous free")
 	}
 	return s
 }
@@ -246,7 +246,7 @@ func (s *stackAllocState) stackalloc() {
 			// If there is no unused stack slot, allocate a new one.
 			if i == len(locs) {
 				s.nAuto++
-				locs = append(locs, LocalSlot{N: f.Config.fe.Auto(v.Type), Type: v.Type, Off: 0})
+				locs = append(locs, LocalSlot{N: f.fe.Auto(v.Type), Type: v.Type, Off: 0})
 				locations[v.Type] = locs
 			}
 			// Use the stack variable at that index for v.
diff --git a/src/cmd/compile/internal/ssa/stackframe.go b/src/cmd/compile/internal/ssa/stackframe.go
index de32c60eda..08be62a051 100644
--- a/src/cmd/compile/internal/ssa/stackframe.go
+++ b/src/cmd/compile/internal/ssa/stackframe.go
@@ -6,5 +6,5 @@ package ssa
 
 // stackframe calls back into the frontend to assign frame offsets.
 func stackframe(f *Func) {
-	f.Config.fe.AllocFrame(f)
+	f.fe.AllocFrame(f)
 }
diff --git a/src/cmd/compile/internal/ssa/value.go b/src/cmd/compile/internal/ssa/value.go
index 9401ad577d..d5974d4eb2 100644
--- a/src/cmd/compile/internal/ssa/value.go
+++ b/src/cmd/compile/internal/ssa/value.go
@@ -226,7 +226,7 @@ func (v *Value) copyInto(b *Block) *Value {
 func (v *Value) Logf(msg string, args ...interface{}) { v.Block.Logf(msg, args...) }
 func (v *Value) Log() bool { return v.Block.Log() }
 func (v *Value) Fatalf(msg string, args ...interface{}) {
-	v.Block.Func.Config.Fatalf(v.Pos, msg, args...)
+	v.Block.Func.fe.Fatalf(v.Pos, msg, args...)
 }
 
 // isGenericIntConst returns whether v is a generic integer constant.
diff --git a/src/cmd/compile/internal/ssa/writebarrier.go b/src/cmd/compile/internal/ssa/writebarrier.go
index ddc476a676..3447540309 100644
--- a/src/cmd/compile/internal/ssa/writebarrier.go
+++ b/src/cmd/compile/internal/ssa/writebarrier.go
@@ -38,7 +38,7 @@ func needwb(v *Value) bool {
 
 // A sequence of WB stores for many pointer fields of a single type will
 // be emitted together, with a single branch.
 func writebarrier(f *Func) {
-	if !f.Config.fe.UseWriteBarrier() {
+	if !f.fe.UseWriteBarrier() {
 		return
 	}
@@ -88,17 +88,17 @@ func writebarrier(f *Func) {
 			}
 		}
 		if sb == nil {
-			sb = f.Entry.NewValue0(initpos, OpSB, f.Config.fe.TypeUintptr())
+			sb = f.Entry.NewValue0(initpos, OpSB, f.fe.TypeUintptr())
 		}
 		if sp == nil {
-			sp = f.Entry.NewValue0(initpos, OpSP, f.Config.fe.TypeUintptr())
+			sp = f.Entry.NewValue0(initpos, OpSP, f.fe.TypeUintptr())
 		}
-		wbsym := &ExternSymbol{Typ: f.Config.fe.TypeBool(), Sym: f.Config.fe.Syslook("writeBarrier")}
-		wbaddr = f.Entry.NewValue1A(initpos, OpAddr, f.Config.fe.TypeUInt32().PtrTo(), wbsym, sb)
-		writebarrierptr = f.Config.fe.Syslook("writebarrierptr")
-		typedmemmove = f.Config.fe.Syslook("typedmemmove")
-		typedmemclr = f.Config.fe.Syslook("typedmemclr")
-		const0 = f.ConstInt32(initpos, f.Config.fe.TypeUInt32(), 0)
+		wbsym := &ExternSymbol{Typ: f.fe.TypeBool(), Sym: f.fe.Syslook("writeBarrier")}
+		wbaddr = f.Entry.NewValue1A(initpos, OpAddr, f.fe.TypeUInt32().PtrTo(), wbsym, sb)
+		writebarrierptr = f.fe.Syslook("writebarrierptr")
+		typedmemmove = f.fe.Syslook("typedmemmove")
+		typedmemclr = f.fe.Syslook("typedmemclr")
+		const0 = f.ConstInt32(initpos, f.fe.TypeUInt32(), 0)
 
 		// allocate auxiliary data structures for computing store order
 		sset = f.newSparseSet(f.NumValues())
@@ -155,8 +155,8 @@ func writebarrier(f *Func) {
 
 		// set up control flow for write barrier test
 		// load word, test word, avoiding partial register write from load byte.
-		flag := b.NewValue2(pos, OpLoad, f.Config.fe.TypeUInt32(), wbaddr, mem)
-		flag = b.NewValue2(pos, OpNeq32, f.Config.fe.TypeBool(), flag, const0)
+		flag := b.NewValue2(pos, OpLoad, f.fe.TypeUInt32(), wbaddr, mem)
+		flag = b.NewValue2(pos, OpNeq32, f.fe.TypeBool(), flag, const0)
 		b.Kind = BlockIf
 		b.SetControl(flag)
 		b.Likely = BranchUnlikely
@@ -175,7 +175,7 @@ func writebarrier(f *Func) {
 			ptr := w.Args[0]
 			var typ interface{}
 			if w.Op != OpStoreWB {
-				typ = &ExternSymbol{Typ: f.Config.fe.TypeUintptr(), Sym: w.Aux.(Type).Symbol()}
+				typ = &ExternSymbol{Typ: f.fe.TypeUintptr(), Sym: w.Aux.(Type).Symbol()}
 			}
 			pos = w.Pos
 
@@ -208,13 +208,13 @@ func writebarrier(f *Func) {
 	}
 
 	if f.NoWB {
-		f.Config.fe.Error(pos, "write barrier prohibited")
+		f.fe.Error(pos, "write barrier prohibited")
 	}
 	if !f.WBPos.IsKnown() {
 		f.WBPos = pos
 	}
-	if f.Config.fe.Debug_wb() {
-		f.Config.Warnl(pos, "write barrier")
+	if f.fe.Debug_wb() {
+		f.Warnl(pos, "write barrier")
 	}
 }
 
@@ -266,7 +266,7 @@ func wbcall(pos src.XPos, b *Block, fn *obj.LSym, typ interface{}, ptr, val, mem
 		// a function call). Marshaling the args to typedmemmove might clobber the
 		// value we're trying to move.
 		t := val.Type.ElemType()
-		tmp = config.fe.Auto(t)
+		tmp = b.Func.fe.Auto(t)
 		aux := &AutoSymbol{Typ: t, Node: tmp}
 		mem = b.NewValue1A(pos, OpVarDef, TypeMem, tmp, mem)
 		tmpaddr := b.NewValue1A(pos, OpAddr, t.PtrTo(), aux, sp)
@@ -280,7 +280,7 @@ func wbcall(pos src.XPos, b *Block, fn *obj.LSym, typ interface{}, ptr, val, mem
 	off := config.ctxt.FixedFrameSize()
 
 	if typ != nil { // for typedmemmove
-		taddr := b.NewValue1A(pos, OpAddr, config.fe.TypeUintptr(), typ, sb)
+		taddr := b.NewValue1A(pos, OpAddr, b.Func.fe.TypeUintptr(), typ, sb)
 		off = round(off, taddr.Type.Alignment())
 		arg := b.NewValue1I(pos, OpOffPtr, taddr.Type.PtrTo(), off, sp)
 		mem = b.NewValue3A(pos, OpStore, TypeMem, ptr.Type, arg, taddr, mem)
diff --git a/src/cmd/compile/internal/ssa/writebarrier_test.go b/src/cmd/compile/internal/ssa/writebarrier_test.go
index a4e95f9310..41d6de9c13 100644
--- a/src/cmd/compile/internal/ssa/writebarrier_test.go
+++ b/src/cmd/compile/internal/ssa/writebarrier_test.go
@@ -10,7 +10,7 @@ func TestWriteBarrierStoreOrder(t *testing.T) {
 	// Make sure writebarrier phase works even StoreWB ops are not in dependency order
 	c := testConfig(t)
 	ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
-	fun := Fun(c, "entry",
+	fun := Fun(c, DummyFrontend{t}, "entry",
 		Bloc("entry",
 			Valu("start", OpInitMem, TypeMem, 0, nil),
 			Valu("sb", OpSB, TypeInvalid, 0, nil),
@@ -34,7 +34,7 @@ func TestWriteBarrierPhi(t *testing.T) {
 	// See issue #19067.
 	c := testConfig(t)
 	ptrType := &TypeImpl{Size_: 8, Ptr: true, Name: "testptr"} // dummy for testing
-	fun := Fun(c, "entry",
+	fun := Fun(c, DummyFrontend{t}, "entry",
 		Bloc("entry",
 			Valu("start", OpInitMem, TypeMem, 0, nil),
 			Valu("sb", OpSB, TypeInvalid, 0, nil),
-- 
2.48.1
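
For readers skimming the mechanical changes above, here is a minimal, self-contained sketch of the shape of the refactoring. It is not part of the patch: the type and method names (Config, Func, NewFunc, Frontend) follow the patch, but all bodies below are hypothetical stand-ins rather than the compiler's real implementation. The point it illustrates is the one the diff makes over and over: the frontend moves off the shared Config and onto each Func, so passes reach it as f.fe inside the package (or through an accessor from outside) instead of f.Config.fe.

// Sketch only, assuming nothing beyond what the diff shows; bodies are stand-ins.
package main

import "fmt"

// Frontend is the callback interface a Func uses to reach its frontend.
type Frontend interface {
	Logf(msg string, args ...interface{})
}

// printFrontend is a toy Frontend that writes to stdout.
type printFrontend struct{}

func (printFrontend) Logf(msg string, args ...interface{}) {
	fmt.Printf(msg+"\n", args...)
}

// Config keeps only state shared by every function being compiled.
type Config struct {
	arch string
}

// Func owns its frontend; per-function passes use f.fe rather than f.Config.fe.
type Func struct {
	Config *Config
	fe     Frontend
}

// NewFunc returns a new function tied to the given frontend.
func NewFunc(fe Frontend) *Func {
	return &Func{fe: fe}
}

// Frontend exposes the per-function frontend outside the package.
func (f *Func) Frontend() Frontend { return f.fe }

// Logf forwards logging to the per-function frontend.
func (f *Func) Logf(msg string, args ...interface{}) { f.fe.Logf(msg, args...) }

func main() {
	c := &Config{arch: "amd64"}
	f := NewFunc(printFrontend{})
	f.Config = c
	f.Logf("compiling for %s", f.Config.arch)
}

The same split explains the test churn in the diff: helpers such as Fun and makeConstShiftFunc now take a Frontend (DummyFrontend{t} in the tests) alongside the shared *Config, since a Config alone no longer identifies a frontend.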