From: Russ Cox Date: Fri, 20 Nov 2020 01:49:23 +0000 (-0500) Subject: [dev.regabi] cmd/compile: introduce cmd/compile/internal/base [generated] X-Git-Tag: go1.17beta1~1539^2~448 X-Git-Url: http://www.git.cypherpunks.su/?a=commitdiff_plain;h=26b66fd60b258d323d7b8df2c489d5bd292c0809;p=gostls13.git [dev.regabi] cmd/compile: introduce cmd/compile/internal/base [generated] Move Flag, Debug, Ctxt, Exit, and error messages to new package cmd/compile/internal/base. These are the core functionality that everything in gc uses and which otherwise prevent splitting any other code out of gc into different packages. A minor milestone: the compiler source code no longer contains the string "yy". [git-generate] cd src/cmd/compile/internal/gc rf ' mv atExit AtExit mv Ctxt atExitFuncs AtExit Exit base.go mv lineno Pos mv linestr FmtPos mv flusherrors FlushErrors mv yyerror Errorf mv yyerrorl ErrorfAt mv yyerrorv ErrorfVers mv noder.yyerrorpos noder.errorAt mv Warnl WarnfAt mv errorexit ErrorExit mv base.go debug.go flag.go print.go cmd/compile/internal/base ' : # update comments sed -i '' 's/yyerrorl/ErrorfAt/g; s/yyerror/Errorf/g' *.go : # bootstrap.go is not built by default so invisible to rf sed -i '' 's/Fatalf/base.Fatalf/' bootstrap.go goimports -w bootstrap.go : # update cmd/dist to add internal/base cd ../../../dist sed -i '' '/internal.amd64/a\ "cmd/compile/internal/base", ' buildtool.go gofmt -w buildtool.go Change-Id: I59903c7084222d6eaee38823fd222159ba24a31a Reviewed-on: https://go-review.googlesource.com/c/go/+/272250 Trust: Russ Cox Reviewed-by: Matthew Dempsky --- diff --git a/src/cmd/compile/internal/amd64/ggen.go b/src/cmd/compile/internal/amd64/ggen.go index 0c1456f4d0..ec98b8cca1 100644 --- a/src/cmd/compile/internal/amd64/ggen.go +++ b/src/cmd/compile/internal/amd64/ggen.go @@ -5,6 +5,7 @@ package amd64 import ( + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/internal/obj" "cmd/internal/obj/x86" @@ -64,7 +65,7 @@ func zerorange(pp *gc.Progs, p 
*obj.Prog, off, cnt int64, state *uint32) *obj.Pr if cnt%int64(gc.Widthreg) != 0 { // should only happen with nacl if cnt%int64(gc.Widthptr) != 0 { - gc.Fatalf("zerorange count not a multiple of widthptr %d", cnt) + base.Fatalf("zerorange count not a multiple of widthptr %d", cnt) } if *state&ax == 0 { p = pp.Appendpp(p, x86.AMOVQ, obj.TYPE_CONST, 0, 0, obj.TYPE_REG, x86.REG_AX, 0) diff --git a/src/cmd/compile/internal/amd64/ssa.go b/src/cmd/compile/internal/amd64/ssa.go index 1f2d626721..5e3b962076 100644 --- a/src/cmd/compile/internal/amd64/ssa.go +++ b/src/cmd/compile/internal/amd64/ssa.go @@ -8,6 +8,7 @@ import ( "fmt" "math" + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/compile/internal/logopt" "cmd/compile/internal/ssa" @@ -975,7 +976,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { r := v.Reg() // See the comments in cmd/internal/obj/x86/obj6.go // near CanUse1InsnTLS for a detailed explanation of these instructions. - if x86.CanUse1InsnTLS(gc.Ctxt) { + if x86.CanUse1InsnTLS(base.Ctxt) { // MOVQ (TLS), r p := s.Prog(x86.AMOVQ) p.From.Type = obj.TYPE_MEM @@ -1017,7 +1018,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { } p := s.Prog(mov) p.From.Type = obj.TYPE_ADDR - p.From.Offset = -gc.Ctxt.FixedFrameSize() // 0 on amd64, just to be consistent with other architectures + p.From.Offset = -base.Ctxt.FixedFrameSize() // 0 on amd64, just to be consistent with other architectures p.From.Name = obj.NAME_PARAM p.To.Type = obj.TYPE_REG p.To.Reg = v.Reg() @@ -1164,8 +1165,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { if logopt.Enabled() { logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name) } - if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers - gc.Warnl(v.Pos, "generated nil check") + if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers + base.WarnfAt(v.Pos, "generated nil check") } case ssa.OpAMD64MOVBatomicload, ssa.OpAMD64MOVLatomicload, 
ssa.OpAMD64MOVQatomicload: p := s.Prog(v.Op.Asm()) diff --git a/src/cmd/compile/internal/arm/ssa.go b/src/cmd/compile/internal/arm/ssa.go index 82a5172ec7..7d34cc5170 100644 --- a/src/cmd/compile/internal/arm/ssa.go +++ b/src/cmd/compile/internal/arm/ssa.go @@ -9,6 +9,7 @@ import ( "math" "math/bits" + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/compile/internal/logopt" "cmd/compile/internal/ssa" @@ -741,8 +742,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { if logopt.Enabled() { logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name) } - if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers - gc.Warnl(v.Pos, "generated nil check") + if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers + base.WarnfAt(v.Pos, "generated nil check") } case ssa.OpARMLoweredZero: // MOVW.P Rarg2, 4(R1) @@ -849,7 +850,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { // caller's SP is FixedFrameSize below the address of the first arg p := s.Prog(arm.AMOVW) p.From.Type = obj.TYPE_ADDR - p.From.Offset = -gc.Ctxt.FixedFrameSize() + p.From.Offset = -base.Ctxt.FixedFrameSize() p.From.Name = obj.NAME_PARAM p.To.Type = obj.TYPE_REG p.To.Reg = v.Reg() diff --git a/src/cmd/compile/internal/arm64/ssa.go b/src/cmd/compile/internal/arm64/ssa.go index dcbd8f9474..5e6f607708 100644 --- a/src/cmd/compile/internal/arm64/ssa.go +++ b/src/cmd/compile/internal/arm64/ssa.go @@ -7,6 +7,7 @@ package arm64 import ( "math" + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/compile/internal/logopt" "cmd/compile/internal/ssa" @@ -1038,8 +1039,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { if logopt.Enabled() { logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name) } - if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Line==1 in generated wrappers - gc.Warnl(v.Pos, "generated nil check") + if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Line==1 in generated wrappers + base.WarnfAt(v.Pos, 
"generated nil check") } case ssa.OpARM64Equal, ssa.OpARM64NotEqual, @@ -1068,7 +1069,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { // caller's SP is FixedFrameSize below the address of the first arg p := s.Prog(arm64.AMOVD) p.From.Type = obj.TYPE_ADDR - p.From.Offset = -gc.Ctxt.FixedFrameSize() + p.From.Offset = -base.Ctxt.FixedFrameSize() p.From.Name = obj.NAME_PARAM p.To.Type = obj.TYPE_REG p.To.Reg = v.Reg() diff --git a/src/cmd/compile/internal/base/base.go b/src/cmd/compile/internal/base/base.go new file mode 100644 index 0000000000..e26b378472 --- /dev/null +++ b/src/cmd/compile/internal/base/base.go @@ -0,0 +1,28 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package base + +import ( + "os" + + "cmd/internal/obj" +) + +var Ctxt *obj.Link + +var atExitFuncs []func() + +func AtExit(f func()) { + atExitFuncs = append(atExitFuncs, f) +} + +func Exit(code int) { + for i := len(atExitFuncs) - 1; i >= 0; i-- { + f := atExitFuncs[i] + atExitFuncs = atExitFuncs[:i] + f() + } + os.Exit(code) +} diff --git a/src/cmd/compile/internal/gc/debug.go b/src/cmd/compile/internal/base/debug.go similarity index 99% rename from src/cmd/compile/internal/gc/debug.go rename to src/cmd/compile/internal/base/debug.go index 98e6631e5b..45a552a4d9 100644 --- a/src/cmd/compile/internal/gc/debug.go +++ b/src/cmd/compile/internal/base/debug.go @@ -4,7 +4,7 @@ // Debug arguments, set by -d flag. 
-package gc +package base import ( "fmt" diff --git a/src/cmd/compile/internal/gc/flag.go b/src/cmd/compile/internal/base/flag.go similarity index 99% rename from src/cmd/compile/internal/gc/flag.go rename to src/cmd/compile/internal/base/flag.go index 29aac3aa28..aadc70f496 100644 --- a/src/cmd/compile/internal/gc/flag.go +++ b/src/cmd/compile/internal/base/flag.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package gc +package base import ( "encoding/json" @@ -13,7 +13,6 @@ import ( "os" "reflect" "runtime" - "strings" "cmd/internal/objabi" diff --git a/src/cmd/compile/internal/gc/print.go b/src/cmd/compile/internal/base/print.go similarity index 89% rename from src/cmd/compile/internal/gc/print.go rename to src/cmd/compile/internal/base/print.go index 345f433fe4..6831b3ada3 100644 --- a/src/cmd/compile/internal/gc/print.go +++ b/src/cmd/compile/internal/base/print.go @@ -2,16 +2,17 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package gc +package base import ( - "cmd/internal/objabi" - "cmd/internal/src" "fmt" "os" "runtime/debug" "sort" "strings" + + "cmd/internal/objabi" + "cmd/internal/src" ) // An errorMsg is a queued error message, waiting to be printed. @@ -22,7 +23,7 @@ type errorMsg struct { // Pos is the current source position being processed, // printed by Errorf, ErrorfLang, Fatalf, and Warnf. -var lineno src.XPos +var Pos src.XPos var ( errorMsgs []errorMsg @@ -46,7 +47,7 @@ func addErrorMsg(pos src.XPos, format string, args ...interface{}) { // Only add the position if know the position. // See issue golang.org/issue/11361. 
if pos.IsKnown() { - msg = fmt.Sprintf("%v: %s", linestr(pos), msg) + msg = fmt.Sprintf("%v: %s", FmtPos(pos), msg) } errorMsgs = append(errorMsgs, errorMsg{ pos: pos, @@ -55,7 +56,7 @@ func addErrorMsg(pos src.XPos, format string, args ...interface{}) { } // FmtPos formats pos as a file:line string. -func linestr(pos src.XPos) string { +func FmtPos(pos src.XPos) string { if Ctxt == nil { return "???" } @@ -71,7 +72,7 @@ func (x byPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] } // FlushErrors sorts errors seen so far by line number, prints them to stdout, // and empties the errors array. -func flusherrors() { +func FlushErrors() { Ctxt.Bso.Flush() if len(errorMsgs) == 0 { return @@ -101,12 +102,12 @@ func sameline(a, b src.XPos) bool { } // Errorf reports a formatted error at the current line. -func yyerror(format string, args ...interface{}) { - yyerrorl(lineno, format, args...) +func Errorf(format string, args ...interface{}) { + ErrorfAt(Pos, format, args...) } // ErrorfAt reports a formatted error message at pos. -func yyerrorl(pos src.XPos, format string, args ...interface{}) { +func ErrorfAt(pos src.XPos, format string, args ...interface{}) { msg := fmt.Sprintf(format, args...) if strings.HasPrefix(msg, "syntax error") { @@ -134,15 +135,15 @@ func yyerrorl(pos src.XPos, format string, args ...interface{}) { hcrash() if numErrors >= 10 && Flag.LowerE == 0 { - flusherrors() - fmt.Printf("%v: too many errors\n", linestr(pos)) - errorexit() + FlushErrors() + fmt.Printf("%v: too many errors\n", FmtPos(pos)) + ErrorExit() } } // ErrorfVers reports that a language feature (format, args) requires a later version of Go. 
-func yyerrorv(lang string, format string, args ...interface{}) { - yyerror("%s requires %s or later (-lang was set to %s; check go.mod)", fmt.Sprintf(format, args...), lang, Flag.Lang) +func ErrorfVers(lang string, format string, args ...interface{}) { + Errorf("%s requires %s or later (-lang was set to %s; check go.mod)", fmt.Sprintf(format, args...), lang, Flag.Lang) } // UpdateErrorDot is a clumsy hack that rewrites the last error, @@ -163,17 +164,17 @@ func UpdateErrorDot(line string, name, expr string) { // so this should be used only when the user has opted in // to additional output by setting a particular flag. func Warn(format string, args ...interface{}) { - Warnl(lineno, format, args...) + WarnfAt(Pos, format, args...) } // WarnfAt reports a formatted warning at pos. // In general the Go compiler does NOT generate warnings, // so this should be used only when the user has opted in // to additional output by setting a particular flag. -func Warnl(pos src.XPos, format string, args ...interface{}) { +func WarnfAt(pos src.XPos, format string, args ...interface{}) { addErrorMsg(pos, format, args...) if Flag.LowerM != 0 { - flusherrors() + FlushErrors() } } @@ -190,7 +191,7 @@ func Warnl(pos src.XPos, format string, args ...interface{}) { // // If -h has been specified, Fatalf panics to force the usual runtime info dump. func Fatalf(format string, args ...interface{}) { - FatalfAt(lineno, format, args...) + FatalfAt(Pos, format, args...) } // FatalfAt reports a fatal error - an internal problem - at pos and exits. @@ -206,10 +207,10 @@ func Fatalf(format string, args ...interface{}) { // // If -h has been specified, FatalfAt panics to force the usual runtime info dump. func FatalfAt(pos src.XPos, format string, args ...interface{}) { - flusherrors() + FlushErrors() if Debug.Panic != 0 || numErrors == 0 { - fmt.Printf("%v: internal compiler error: ", linestr(pos)) + fmt.Printf("%v: internal compiler error: ", FmtPos(pos)) fmt.Printf(format, args...) 
fmt.Printf("\n") @@ -227,13 +228,13 @@ func FatalfAt(pos src.XPos, format string, args ...interface{}) { } hcrash() - errorexit() + ErrorExit() } // hcrash crashes the compiler when -h is set, to find out where a message is generated. func hcrash() { if Flag.LowerH != 0 { - flusherrors() + FlushErrors() if Flag.LowerO != "" { os.Remove(Flag.LowerO) } @@ -243,8 +244,8 @@ func hcrash() { // ErrorExit handles an error-status exit. // It flushes any pending errors, removes the output file, and exits. -func errorexit() { - flusherrors() +func ErrorExit() { + FlushErrors() if Flag.LowerO != "" { os.Remove(Flag.LowerO) } @@ -254,6 +255,6 @@ func errorexit() { // ExitIfErrors calls ErrorExit if any errors have been reported. func ExitIfErrors() { if Errors() > 0 { - errorexit() + ErrorExit() } } diff --git a/src/cmd/compile/internal/gc/alg.go b/src/cmd/compile/internal/gc/alg.go index 87b905ed59..517aaa4b81 100644 --- a/src/cmd/compile/internal/gc/alg.go +++ b/src/cmd/compile/internal/gc/alg.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/obj" "fmt" @@ -203,7 +204,7 @@ func algtype1(t *types.Type) (AlgKind, *types.Type) { return ret, nil } - Fatalf("algtype1: unexpected type %v", t) + base.Fatalf("algtype1: unexpected type %v", t) return 0, nil } @@ -214,7 +215,7 @@ func genhash(t *types.Type) *obj.LSym { switch algtype(t) { default: // genhash is only called for types that have equality - Fatalf("genhash %v", t) + base.Fatalf("genhash %v", t) case AMEM0: return sysClosure("memhash0") case AMEM8: @@ -282,11 +283,11 @@ func genhash(t *types.Type) *obj.LSym { } sym := typesymprefix(".hash", t) - if Flag.LowerR != 0 { + if base.Flag.LowerR != 0 { fmt.Printf("genhash %v %v %v\n", closure, sym, t) } - lineno = autogeneratedPos // less confusing than end of input + base.Pos = autogeneratedPos // less confusing than end of input dclcontext = PEXTERN // func sym(p *T, h uintptr) uintptr @@ -374,7 +375,7 @@ func 
genhash(t *types.Type) *obj.LSym { r.List.Append(nh) fn.Nbody.Append(r) - if Flag.LowerR != 0 { + if base.Flag.LowerR != 0 { dumplist("genhash body", fn.Nbody) } @@ -387,7 +388,7 @@ func genhash(t *types.Type) *obj.LSym { typecheckslice(fn.Nbody.Slice(), ctxStmt) Curfn = nil - if Debug.DclStack != 0 { + if base.Debug.DclStack != 0 { testdclstack() } @@ -407,7 +408,7 @@ func hashfor(t *types.Type) *Node { switch a, _ := algtype1(t); a { case AMEM: - Fatalf("hashfor with AMEM type") + base.Fatalf("hashfor with AMEM type") case AINTER: sym = Runtimepkg.Lookup("interhash") case ANILINTER: @@ -509,13 +510,13 @@ func geneq(t *types.Type) *obj.LSym { return closure } sym := typesymprefix(".eq", t) - if Flag.LowerR != 0 { + if base.Flag.LowerR != 0 { fmt.Printf("geneq %v\n", t) } // Autogenerate code for equality of structs and arrays. - lineno = autogeneratedPos // less confusing than end of input + base.Pos = autogeneratedPos // less confusing than end of input dclcontext = PEXTERN // func sym(p, q *T) bool @@ -539,7 +540,7 @@ func geneq(t *types.Type) *obj.LSym { // so t must be either an array or a struct. switch t.Etype { default: - Fatalf("geneq %v", t) + base.Fatalf("geneq %v", t) case TARRAY: nelem := t.NumElem() @@ -753,7 +754,7 @@ func geneq(t *types.Type) *obj.LSym { // We should really do a generic CL that shares epilogues across // the board. See #24936. - if Flag.LowerR != 0 { + if base.Flag.LowerR != 0 { dumplist("geneq body", fn.Nbody) } @@ -766,7 +767,7 @@ func geneq(t *types.Type) *obj.LSym { typecheckslice(fn.Nbody.Slice(), ctxStmt) Curfn = nil - if Debug.DclStack != 0 { + if base.Debug.DclStack != 0 { testdclstack() } @@ -859,7 +860,7 @@ func eqstring(s, t *Node) (eqlen, eqmem *Node) { // eqtab must be evaluated before eqdata, and shortcircuiting is required. 
func eqinterface(s, t *Node) (eqtab, eqdata *Node) { if !types.Identical(s.Type, t.Type) { - Fatalf("eqinterface %v %v", s.Type, t.Type) + base.Fatalf("eqinterface %v %v", s.Type, t.Type) } // func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool) // func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool) @@ -949,7 +950,7 @@ func memrun(t *types.Type, start int) (size int64, next int) { // by padding. func ispaddedfield(t *types.Type, i int) bool { if !t.IsStruct() { - Fatalf("ispaddedfield called non-struct %v", t) + base.Fatalf("ispaddedfield called non-struct %v", t) } end := t.Width if i+1 < t.NumFields() { diff --git a/src/cmd/compile/internal/gc/align.go b/src/cmd/compile/internal/gc/align.go index 563bd5030c..a8cbbfd322 100644 --- a/src/cmd/compile/internal/gc/align.go +++ b/src/cmd/compile/internal/gc/align.go @@ -6,6 +6,7 @@ package gc import ( "bytes" + "cmd/compile/internal/base" "cmd/compile/internal/types" "fmt" "sort" @@ -21,7 +22,7 @@ var defercalc int func Rnd(o int64, r int64) int64 { if r < 1 || r > 8 || r&(r-1) != 0 { - Fatalf("rnd %d", r) + base.Fatalf("rnd %d", r) } return (o + r - 1) &^ (r - 1) } @@ -39,7 +40,7 @@ func expandiface(t *types.Type) { case langSupported(1, 14, t.Pkg()) && !explicit && types.Identical(m.Type, prev.Type): return default: - yyerrorl(m.Pos, "duplicate method %s", m.Sym.Name) + base.ErrorfAt(m.Pos, "duplicate method %s", m.Sym.Name) } methods = append(methods, m) } @@ -59,7 +60,7 @@ func expandiface(t *types.Type) { } if !m.Type.IsInterface() { - yyerrorl(m.Pos, "interface contains embedded non-interface %v", m.Type) + base.ErrorfAt(m.Pos, "interface contains embedded non-interface %v", m.Type) m.SetBroke(true) t.SetBroke(true) // Add to fields so that error messages @@ -83,7 +84,7 @@ func expandiface(t *types.Type) { sort.Sort(methcmp(methods)) if int64(len(methods)) >= thearch.MAXWIDTH/int64(Widthptr) { - yyerrorl(typePos(t), "interface too large") + base.ErrorfAt(typePos(t), "interface too large") } for i, m 
:= range methods { m.Offset = int64(i) * int64(Widthptr) @@ -134,7 +135,7 @@ func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 { w := f.Type.Width if w < 0 { - Fatalf("invalid width %d", f.Type.Width) + base.Fatalf("invalid width %d", f.Type.Width) } if w == 0 { lastzero = o @@ -147,7 +148,7 @@ func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 { maxwidth = 1<<31 - 1 } if o >= maxwidth { - yyerrorl(typePos(errtype), "type %L too large", errtype) + base.ErrorfAt(typePos(errtype), "type %L too large", errtype) o = 8 // small but nonzero } } @@ -235,7 +236,7 @@ func reportTypeLoop(t *types.Type) { var l []*types.Type if !findTypeLoop(t, &l) { - Fatalf("failed to find type loop for: %v", t) + base.Fatalf("failed to find type loop for: %v", t) } // Rotate loop so that the earliest type declaration is first. @@ -250,11 +251,11 @@ func reportTypeLoop(t *types.Type) { var msg bytes.Buffer fmt.Fprintf(&msg, "invalid recursive type %v\n", l[0]) for _, t := range l { - fmt.Fprintf(&msg, "\t%v: %v refers to\n", linestr(typePos(t)), t) + fmt.Fprintf(&msg, "\t%v: %v refers to\n", base.FmtPos(typePos(t)), t) t.SetBroke(true) } - fmt.Fprintf(&msg, "\t%v: %v", linestr(typePos(l[0])), l[0]) - yyerrorl(typePos(l[0]), msg.String()) + fmt.Fprintf(&msg, "\t%v: %v", base.FmtPos(typePos(l[0])), l[0]) + base.ErrorfAt(typePos(l[0]), msg.String()) } // dowidth calculates and stores the size and alignment for t. 
@@ -268,7 +269,7 @@ func dowidth(t *types.Type) { return } if Widthptr == 0 { - Fatalf("dowidth without betypeinit") + base.Fatalf("dowidth without betypeinit") } if t == nil { @@ -292,7 +293,7 @@ func dowidth(t *types.Type) { return } t.SetBroke(true) - Fatalf("width not calculated: %v", t) + base.Fatalf("width not calculated: %v", t) } // break infinite recursion if the broken recursive type @@ -304,9 +305,9 @@ func dowidth(t *types.Type) { // defer checkwidth calls until after we're done defercheckwidth() - lno := lineno + lno := base.Pos if asNode(t.Nod) != nil { - lineno = asNode(t.Nod).Pos + base.Pos = asNode(t.Nod).Pos } t.Width = -2 @@ -327,7 +328,7 @@ func dowidth(t *types.Type) { var w int64 switch et { default: - Fatalf("dowidth: unknown type: %v", t) + base.Fatalf("dowidth: unknown type: %v", t) // compiler-specific stuff case TINT8, TUINT8, TBOOL: @@ -378,7 +379,7 @@ func dowidth(t *types.Type) { t1 := t.ChanArgs() dowidth(t1) // just in case if t1.Elem().Width >= 1<<16 { - yyerrorl(typePos(t1), "channel element type too large (>64kB)") + base.ErrorfAt(typePos(t1), "channel element type too large (>64kB)") } w = 1 // anything will do @@ -393,11 +394,11 @@ func dowidth(t *types.Type) { case TANY: // not a real type; should be replaced before use. 
- Fatalf("dowidth any") + base.Fatalf("dowidth any") case TSTRING: if sizeofString == 0 { - Fatalf("early dowidth string") + base.Fatalf("early dowidth string") } w = sizeofString t.Align = uint8(Widthptr) @@ -411,7 +412,7 @@ func dowidth(t *types.Type) { if t.Elem().Width != 0 { cap := (uint64(thearch.MAXWIDTH) - 1) / uint64(t.Elem().Width) if uint64(t.NumElem()) > cap { - yyerrorl(typePos(t), "type %L larger than address space", t) + base.ErrorfAt(typePos(t), "type %L larger than address space", t) } } w = t.NumElem() * t.Elem().Width @@ -427,7 +428,7 @@ func dowidth(t *types.Type) { case TSTRUCT: if t.IsFuncArgStruct() { - Fatalf("dowidth fn struct %v", t) + base.Fatalf("dowidth fn struct %v", t) } w = widstruct(t, t, 0, 1) @@ -447,24 +448,24 @@ func dowidth(t *types.Type) { w = widstruct(t1, t1.Results(), w, Widthreg) t1.Extra.(*types.Func).Argwid = w if w%int64(Widthreg) != 0 { - Warn("bad type %v %d\n", t1, w) + base.Warn("bad type %v %d\n", t1, w) } t.Align = 1 } if Widthptr == 4 && w != int64(int32(w)) { - yyerrorl(typePos(t), "type %v too large", t) + base.ErrorfAt(typePos(t), "type %v too large", t) } t.Width = w if t.Align == 0 { if w == 0 || w > 8 || w&(w-1) != 0 { - Fatalf("invalid alignment for %v", t) + base.Fatalf("invalid alignment for %v", t) } t.Align = uint8(w) } - lineno = lno + base.Pos = lno resumecheckwidth() } @@ -495,7 +496,7 @@ func checkwidth(t *types.Type) { // function arg structs should not be checked // outside of the enclosing function. 
if t.IsFuncArgStruct() { - Fatalf("checkwidth %v", t) + base.Fatalf("checkwidth %v", t) } if defercalc == 0 { diff --git a/src/cmd/compile/internal/gc/bootstrap.go b/src/cmd/compile/internal/gc/bootstrap.go index 967f75a9ac..2e13d6b57a 100644 --- a/src/cmd/compile/internal/gc/bootstrap.go +++ b/src/cmd/compile/internal/gc/bootstrap.go @@ -6,8 +6,11 @@ package gc -import "runtime" +import ( + "cmd/compile/internal/base" + "runtime" +) func startMutexProfiling() { - Fatalf("mutex profiling unavailable in version %v", runtime.Version()) + base.Fatalf("mutex profiling unavailable in version %v", runtime.Version()) } diff --git a/src/cmd/compile/internal/gc/bv.go b/src/cmd/compile/internal/gc/bv.go index e32ab97ad5..d82851e7cb 100644 --- a/src/cmd/compile/internal/gc/bv.go +++ b/src/cmd/compile/internal/gc/bv.go @@ -6,6 +6,8 @@ package gc import ( "math/bits" + + "cmd/compile/internal/base" ) const ( @@ -35,7 +37,7 @@ func bvbulkalloc(nbit int32, count int32) bulkBvec { nword := (nbit + wordBits - 1) / wordBits size := int64(nword) * int64(count) if int64(int32(size*4)) != size*4 { - Fatalf("bvbulkalloc too big: nbit=%d count=%d nword=%d size=%d", nbit, count, nword, size) + base.Fatalf("bvbulkalloc too big: nbit=%d count=%d nword=%d size=%d", nbit, count, nword, size) } return bulkBvec{ words: make([]uint32, size), @@ -52,7 +54,7 @@ func (b *bulkBvec) next() bvec { func (bv1 bvec) Eq(bv2 bvec) bool { if bv1.n != bv2.n { - Fatalf("bvequal: lengths %d and %d are not equal", bv1.n, bv2.n) + base.Fatalf("bvequal: lengths %d and %d are not equal", bv1.n, bv2.n) } for i, x := range bv1.b { if x != bv2.b[i] { @@ -68,7 +70,7 @@ func (dst bvec) Copy(src bvec) { func (bv bvec) Get(i int32) bool { if i < 0 || i >= bv.n { - Fatalf("bvget: index %d is out of bounds with length %d\n", i, bv.n) + base.Fatalf("bvget: index %d is out of bounds with length %d\n", i, bv.n) } mask := uint32(1 << uint(i%wordBits)) return bv.b[i>>wordShift]&mask != 0 @@ -76,7 +78,7 @@ func (bv bvec) Get(i 
int32) bool { func (bv bvec) Set(i int32) { if i < 0 || i >= bv.n { - Fatalf("bvset: index %d is out of bounds with length %d\n", i, bv.n) + base.Fatalf("bvset: index %d is out of bounds with length %d\n", i, bv.n) } mask := uint32(1 << uint(i%wordBits)) bv.b[i/wordBits] |= mask @@ -84,7 +86,7 @@ func (bv bvec) Set(i int32) { func (bv bvec) Unset(i int32) { if i < 0 || i >= bv.n { - Fatalf("bvunset: index %d is out of bounds with length %d\n", i, bv.n) + base.Fatalf("bvunset: index %d is out of bounds with length %d\n", i, bv.n) } mask := uint32(1 << uint(i%wordBits)) bv.b[i/wordBits] &^= mask diff --git a/src/cmd/compile/internal/gc/closure.go b/src/cmd/compile/internal/gc/closure.go index c25a446999..ad255c9c06 100644 --- a/src/cmd/compile/internal/gc/closure.go +++ b/src/cmd/compile/internal/gc/closure.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/syntax" "cmd/compile/internal/types" "cmd/internal/src" @@ -101,7 +102,7 @@ func typecheckclosure(clo *Node, top int) { if !n.Name.Captured() { n.Name.SetCaptured(true) if n.Name.Decldepth == 0 { - Fatalf("typecheckclosure: var %S does not have decldepth assigned", n) + base.Fatalf("typecheckclosure: var %S does not have decldepth assigned", n) } // Ignore assignments to the variable in straightline code @@ -171,8 +172,8 @@ var capturevarscomplete bool // We use value capturing for values <= 128 bytes that are never reassigned // after capturing (effectively constant). 
func capturevars(dcl *Node) { - lno := lineno - lineno = dcl.Pos + lno := base.Pos + base.Pos = dcl.Pos fn := dcl.Func cvars := fn.ClosureVars.Slice() out := cvars[:0] @@ -203,7 +204,7 @@ func capturevars(dcl *Node) { outer = nod(OADDR, outer, nil) } - if Flag.LowerM > 1 { + if base.Flag.LowerM > 1 { var name *types.Sym if v.Name.Curfn != nil && v.Name.Curfn.Func.Nname != nil { name = v.Name.Curfn.Func.Nname.Sym @@ -212,7 +213,7 @@ func capturevars(dcl *Node) { if v.Name.Byval() { how = "value" } - Warnl(v.Pos, "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym, outermost.Name.Addrtaken(), outermost.Name.Assigned(), int32(v.Type.Width)) + base.WarnfAt(v.Pos, "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym, outermost.Name.Addrtaken(), outermost.Name.Assigned(), int32(v.Type.Width)) } outer = typecheck(outer, ctxExpr) @@ -220,14 +221,14 @@ func capturevars(dcl *Node) { } fn.ClosureVars.Set(out) - lineno = lno + base.Pos = lno } // transformclosure is called in a separate phase after escape analysis. // It transform closure bodies to properly reference captured variables. 
func transformclosure(dcl *Node) { - lno := lineno - lineno = dcl.Pos + lno := base.Pos + base.Pos = dcl.Pos fn := dcl.Func if fn.ClosureCalled { @@ -325,7 +326,7 @@ func transformclosure(dcl *Node) { } } - lineno = lno + base.Pos = lno } // hasemptycvars reports whether closure clo has an @@ -337,15 +338,15 @@ func hasemptycvars(clo *Node) bool { // closuredebugruntimecheck applies boilerplate checks for debug flags // and compiling runtime func closuredebugruntimecheck(clo *Node) { - if Debug.Closure > 0 { + if base.Debug.Closure > 0 { if clo.Esc == EscHeap { - Warnl(clo.Pos, "heap closure, captured vars = %v", clo.Func.ClosureVars) + base.WarnfAt(clo.Pos, "heap closure, captured vars = %v", clo.Func.ClosureVars) } else { - Warnl(clo.Pos, "stack closure, captured vars = %v", clo.Func.ClosureVars) + base.WarnfAt(clo.Pos, "stack closure, captured vars = %v", clo.Func.ClosureVars) } } - if Flag.CompilingRuntime && clo.Esc == EscHeap { - yyerrorl(clo.Pos, "heap-allocated closure, not allowed in runtime") + if base.Flag.CompilingRuntime && clo.Esc == EscHeap { + base.ErrorfAt(clo.Pos, "heap-allocated closure, not allowed in runtime") } } @@ -386,8 +387,8 @@ func walkclosure(clo *Node, init *Nodes) *Node { // If no closure vars, don't bother wrapping. if hasemptycvars(clo) { - if Debug.Closure > 0 { - Warnl(clo.Pos, "closure converted to global") + if base.Debug.Closure > 0 { + base.WarnfAt(clo.Pos, "closure converted to global") } return fn.Nname } @@ -423,7 +424,7 @@ func typecheckpartialcall(dot *Node, sym *types.Sym) { break default: - Fatalf("invalid typecheckpartialcall") + base.Fatalf("invalid typecheckpartialcall") } // Create top-level function. @@ -448,13 +449,13 @@ func makepartialcall(dot *Node, t0 *types.Type, meth *types.Sym) *Node { sym.SetUniq(true) savecurfn := Curfn - saveLineNo := lineno + saveLineNo := base.Pos Curfn = nil // Set line number equal to the line number where the method is declared. 
var m *types.Field if lookdot0(meth, rcvrtype, &m, false) == 1 && m.Pos.IsKnown() { - lineno = m.Pos + base.Pos = m.Pos } // Note: !m.Pos.IsKnown() happens for method expressions where // the method is implicitly declared. The Error method of the @@ -512,7 +513,7 @@ func makepartialcall(dot *Node, t0 *types.Type, meth *types.Sym) *Node { sym.Def = asTypesNode(dcl) xtop = append(xtop, dcl) Curfn = savecurfn - lineno = saveLineNo + base.Pos = saveLineNo return dcl } @@ -579,14 +580,14 @@ func walkpartialcall(n *Node, init *Nodes) *Node { // referenced by method value n. func callpartMethod(n *Node) *types.Field { if n.Op != OCALLPART { - Fatalf("expected OCALLPART, got %v", n) + base.Fatalf("expected OCALLPART, got %v", n) } // TODO(mdempsky): Optimize this. If necessary, // makepartialcall could save m for us somewhere. var m *types.Field if lookdot0(n.Right.Sym, n.Left.Type, &m, false) != 1 { - Fatalf("failed to find field for OCALLPART") + base.Fatalf("failed to find field for OCALLPART") } return m diff --git a/src/cmd/compile/internal/gc/const.go b/src/cmd/compile/internal/gc/const.go index e72962124a..98473b4cfb 100644 --- a/src/cmd/compile/internal/gc/const.go +++ b/src/cmd/compile/internal/gc/const.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/src" "fmt" @@ -28,7 +29,7 @@ const ( func (n *Node) ValueInterface() interface{} { switch v := n.Val(); v.Kind() { default: - Fatalf("unexpected constant: %v", v) + base.Fatalf("unexpected constant: %v", v) panic("unreachable") case constant.Bool: return constant.BoolVal(v) @@ -55,7 +56,7 @@ func int64Val(t *types.Type, v constant.Value) int64 { return x } } - Fatalf("%v out of range for %v", v, t) + base.Fatalf("%v out of range for %v", v, t) panic("unreachable") } @@ -63,7 +64,7 @@ func float64Val(v constant.Value) float64 { if x, _ := constant.Float64Val(v); !math.IsInf(x, 0) { return x + 0 // avoid -0 (should not be needed, but be conservative) } - 
Fatalf("bad float64 value: %v", v) + base.Fatalf("bad float64 value: %v", v) panic("unreachable") } @@ -80,7 +81,7 @@ func bigFloatVal(v constant.Value) *big.Float { case *big.Rat: f.SetRat(u) default: - Fatalf("unexpected: %v", u) + base.Fatalf("unexpected: %v", u) } return f } @@ -89,11 +90,11 @@ func bigFloatVal(v constant.Value) *big.Float { // n must be an integer or rune constant. func (n *Node) Int64Val() int64 { if !Isconst(n, constant.Int) { - Fatalf("Int64Val(%v)", n) + base.Fatalf("Int64Val(%v)", n) } x, ok := constant.Int64Val(n.Val()) if !ok { - Fatalf("Int64Val(%v)", n) + base.Fatalf("Int64Val(%v)", n) } return x } @@ -114,11 +115,11 @@ func (n *Node) CanInt64() bool { // n must be an integer or rune constant. func (n *Node) Uint64Val() uint64 { if !Isconst(n, constant.Int) { - Fatalf("Uint64Val(%v)", n) + base.Fatalf("Uint64Val(%v)", n) } x, ok := constant.Uint64Val(n.Val()) if !ok { - Fatalf("Uint64Val(%v)", n) + base.Fatalf("Uint64Val(%v)", n) } return x } @@ -127,7 +128,7 @@ func (n *Node) Uint64Val() uint64 { // n must be a boolean constant. func (n *Node) BoolVal() bool { if !Isconst(n, constant.Bool) { - Fatalf("BoolVal(%v)", n) + base.Fatalf("BoolVal(%v)", n) } return constant.BoolVal(n.Val()) } @@ -136,7 +137,7 @@ func (n *Node) BoolVal() bool { // n must be a string constant. func (n *Node) StringVal() string { if !Isconst(n, constant.String) { - Fatalf("StringVal(%v)", n) + base.Fatalf("StringVal(%v)", n) } return constant.StringVal(n.Val()) } @@ -150,7 +151,7 @@ func roundFloat(v constant.Value, sz int64) constant.Value { f, _ := constant.Float64Val(v) return makeFloat64(f) } - Fatalf("unexpected size: %v", sz) + base.Fatalf("unexpected size: %v", sz) panic("unreachable") } @@ -169,7 +170,7 @@ func truncfltlit(v constant.Value, t *types.Type) constant.Value { // truncate Real and Imag parts of Mpcplx to 32-bit or 64-bit // precision, according to type; return truncated value. 
In case of -// overflow, calls yyerror but does not truncate the input value. +// overflow, calls Errorf but does not truncate the input value. func trunccmplxlit(v constant.Value, t *types.Type) constant.Value { if t.IsUntyped() || overflow(v, t) { // If there was overflow, simply continuing would set the @@ -199,10 +200,10 @@ func defaultlit(n *Node, t *types.Type) *Node { return convlit1(n, t, false, nil // message. func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Node { if explicit && t == nil { - Fatalf("explicit conversion missing type") + base.Fatalf("explicit conversion missing type") } if t != nil && t.IsUntyped() { - Fatalf("bad conversion to untyped: %v", t) + base.Fatalf("bad conversion to untyped: %v", t) } if n == nil || n.Type == nil { @@ -223,10 +224,10 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod // Nil is technically not a constant, so handle it specially. if n.Type.Etype == TNIL { if n.Op != ONIL { - Fatalf("unexpected op: %v (%v)", n, n.Op) + base.Fatalf("unexpected op: %v (%v)", n, n.Op) } if t == nil { - yyerror("use of untyped nil") + base.Errorf("use of untyped nil") n.SetDiag(true) n.Type = nil return n @@ -247,7 +248,7 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod switch n.Op { default: - Fatalf("unexpected untyped expression: %v", n) + base.Fatalf("unexpected untyped expression: %v", n) case OLITERAL: v := convertVal(n.Val(), t, explicit) @@ -287,7 +288,7 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod return n } if !types.Identical(n.Left.Type, n.Right.Type) { - yyerror("invalid operation: %v (mismatched types %v and %v)", n, n.Left.Type, n.Right.Type) + base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, n.Left.Type, n.Right.Type) n.Type = nil return n } @@ -306,7 +307,7 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod n.Left = convlit1(n.Left, t, 
explicit, nil) n.Type = n.Left.Type if n.Type != nil && !n.Type.IsInteger() { - yyerror("invalid operation: %v (shift of type %v)", n, n.Type) + base.Errorf("invalid operation: %v (shift of type %v)", n, n.Type) n.Type = nil } return n @@ -315,11 +316,11 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod if !n.Diag() { if !t.Broke() { if explicit { - yyerror("cannot convert %L to type %v", n, t) + base.Errorf("cannot convert %L to type %v", n, t) } else if context != nil { - yyerror("cannot use %L as type %v in %s", n, t, context()) + base.Errorf("cannot use %L as type %v in %s", n, t, context()) } else { - yyerror("cannot use %L as type %v", n, t) + base.Errorf("cannot use %L as type %v", n, t) } } n.SetDiag(true) @@ -395,7 +396,7 @@ func tocplx(v constant.Value) constant.Value { func toflt(v constant.Value) constant.Value { if v.Kind() == constant.Complex { if constant.Sign(constant.Imag(v)) != 0 { - yyerror("constant %v truncated to real", v) + base.Errorf("constant %v truncated to real", v) } v = constant.Real(v) } @@ -406,7 +407,7 @@ func toflt(v constant.Value) constant.Value { func toint(v constant.Value) constant.Value { if v.Kind() == constant.Complex { if constant.Sign(constant.Imag(v)) != 0 { - yyerror("constant %v truncated to integer", v) + base.Errorf("constant %v truncated to integer", v) } v = constant.Real(v) } @@ -426,14 +427,14 @@ func toint(v constant.Value) constant.Value { // (See issue #11371). 
f := bigFloatVal(v) if f.MantExp(nil) > 2*Mpprec { - yyerror("integer too large") + base.Errorf("integer too large") } else { var t big.Float t.Parse(fmt.Sprint(v), 0) if t.IsInt() { - yyerror("constant truncated to integer") + base.Errorf("constant truncated to integer") } else { - yyerror("constant %v truncated to integer", v) + base.Errorf("constant %v truncated to integer", v) } } @@ -470,7 +471,7 @@ func doesoverflow(v constant.Value, t *types.Type) bool { ft := floatForComplex(t) return doesoverflow(constant.Real(v), ft) || doesoverflow(constant.Imag(v), ft) } - Fatalf("doesoverflow: %v, %v", v, t) + base.Fatalf("doesoverflow: %v, %v", v, t) panic("unreachable") } @@ -483,11 +484,11 @@ func overflow(v constant.Value, t *types.Type) bool { return false } if v.Kind() == constant.Int && constant.BitLen(v) > Mpprec { - yyerror("integer too large") + base.Errorf("integer too large") return true } if doesoverflow(v, t) { - yyerror("constant %v overflows %v", vconv(v, 0), t) + base.Errorf("constant %v overflows %v", vconv(v, 0), t) return true } return false @@ -568,12 +569,12 @@ func evalConst(n *Node) *Node { // check for divisor underflow in complex division (see issue 20227) if op == ODIV && n.Type.IsComplex() && constant.Sign(square(constant.Real(rval))) == 0 && constant.Sign(square(constant.Imag(rval))) == 0 { - yyerror("complex division by zero") + base.Errorf("complex division by zero") n.Type = nil return n } if (op == ODIV || op == OMOD) && constant.Sign(rval) == 0 { - yyerror("division by zero") + base.Errorf("division by zero") n.Type = nil return n } @@ -596,7 +597,7 @@ func evalConst(n *Node) *Node { const shiftBound = 1023 - 1 + 52 s, ok := constant.Uint64Val(nr.Val()) if !ok || s > shiftBound { - yyerror("invalid shift count %v", nr) + base.Errorf("invalid shift count %v", nr) n.Type = nil break } @@ -702,7 +703,7 @@ func makeInt(i *big.Int) constant.Value { func makeFloat64(f float64) constant.Value { if math.IsInf(f, 0) { - Fatalf("infinity is not 
a valid constant") + base.Fatalf("infinity is not a valid constant") } v := constant.MakeFloat64(f) v = constant.ToFloat(v) // workaround #42641 (MakeFloat64(0).Kind() returns Int, not Float) @@ -732,7 +733,7 @@ var overflowNames = [...]string{ func origConst(n *Node, v constant.Value) *Node { lno := setlineno(n) v = convertVal(v, n.Type, false) - lineno = lno + base.Pos = lno switch v.Kind() { case constant.Int: @@ -743,9 +744,9 @@ func origConst(n *Node, v constant.Value) *Node { case constant.Unknown: what := overflowNames[n.Op] if what == "" { - Fatalf("unexpected overflow: %v", n.Op) + base.Fatalf("unexpected overflow: %v", n.Op) } - yyerrorl(n.Pos, "constant %v overflow", what) + base.ErrorfAt(n.Pos, "constant %v overflow", what) n.Type = nil return n } @@ -760,7 +761,7 @@ func origConst(n *Node, v constant.Value) *Node { func assertRepresents(t *types.Type, v constant.Value) { if !represents(t, v) { - Fatalf("%v does not represent %v", t, v) + base.Fatalf("%v does not represent %v", t, v) } } @@ -780,7 +781,7 @@ func represents(t *types.Type, v constant.Value) bool { return t.IsComplex() } - Fatalf("unexpected constant kind: %v", v) + base.Fatalf("unexpected constant kind: %v", v) panic("unreachable") } @@ -815,7 +816,7 @@ func idealType(ct constant.Kind) *types.Type { case constant.Complex: return types.UntypedComplex } - Fatalf("unexpected Ctype: %v", ct) + base.Fatalf("unexpected Ctype: %v", ct) return nil } @@ -876,7 +877,7 @@ func mixUntyped(t1, t2 *types.Type) *types.Type { case types.UntypedComplex: return 3 } - Fatalf("bad type %v", t) + base.Fatalf("bad type %v", t) panic("unreachable") } @@ -906,7 +907,7 @@ func defaultType(t *types.Type) *types.Type { return types.Types[TCOMPLEX128] } - Fatalf("bad type %v", t) + base.Fatalf("bad type %v", t) return nil } @@ -1023,7 +1024,7 @@ func (s *constSet) add(pos src.XPos, n *Node, what, where string) { return } if n.Type.IsUntyped() { - Fatalf("%v is untyped", n) + base.Fatalf("%v is untyped", n) } // 
Consts are only duplicates if they have the same value and @@ -1059,9 +1060,9 @@ func (s *constSet) add(pos src.XPos, n *Node, what, where string) { } if prevPos, isDup := s.m[k]; isDup { - yyerrorl(pos, "duplicate %s %s in %s\n\tprevious %s at %v", + base.ErrorfAt(pos, "duplicate %s %s in %s\n\tprevious %s at %v", what, nodeAndVal(n), where, - what, linestr(prevPos)) + what, base.FmtPos(prevPos)) } else { s.m[k] = pos } diff --git a/src/cmd/compile/internal/gc/dcl.go b/src/cmd/compile/internal/gc/dcl.go index 3f193e3a01..63a52a9f36 100644 --- a/src/cmd/compile/internal/gc/dcl.go +++ b/src/cmd/compile/internal/gc/dcl.go @@ -6,6 +6,7 @@ package gc import ( "bytes" + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/obj" "cmd/internal/src" @@ -20,7 +21,7 @@ var externdcl []*Node func testdclstack() { if !types.IsDclstackValid() { - Fatalf("mark left on the dclstack") + base.Fatalf("mark left on the dclstack") } } @@ -31,7 +32,7 @@ func redeclare(pos src.XPos, s *types.Sym, where string) { if pkg == nil { pkg = s.Pkg } - yyerrorl(pos, "%v redeclared %s\n"+ + base.ErrorfAt(pos, "%v redeclared %s\n"+ "\tprevious declaration during import %q", s, where, pkg.Path) } else { prevPos := s.Lastlineno @@ -44,8 +45,8 @@ func redeclare(pos src.XPos, s *types.Sym, where string) { pos, prevPos = prevPos, pos } - yyerrorl(pos, "%v redeclared %s\n"+ - "\tprevious declaration at %v", s, where, linestr(prevPos)) + base.ErrorfAt(pos, "%v redeclared %s\n"+ + "\tprevious declaration at %v", s, where, base.FmtPos(prevPos)) } } @@ -71,22 +72,22 @@ func declare(n *Node, ctxt Class) { // kludgy: typecheckok means we're past parsing. Eg genwrapper may declare out of package names later. 
if !inimport && !typecheckok && s.Pkg != localpkg { - yyerrorl(n.Pos, "cannot declare name %v", s) + base.ErrorfAt(n.Pos, "cannot declare name %v", s) } gen := 0 if ctxt == PEXTERN { if s.Name == "init" { - yyerrorl(n.Pos, "cannot declare init - must be func") + base.ErrorfAt(n.Pos, "cannot declare init - must be func") } if s.Name == "main" && s.Pkg.Name == "main" { - yyerrorl(n.Pos, "cannot declare main - must be func") + base.ErrorfAt(n.Pos, "cannot declare main - must be func") } externdcl = append(externdcl, n) } else { if Curfn == nil && ctxt == PAUTO { - lineno = n.Pos - Fatalf("automatic outside function") + base.Pos = n.Pos + base.Fatalf("automatic outside function") } if Curfn != nil && ctxt != PFUNC { Curfn.Func.Dcl = append(Curfn.Func.Dcl, n) @@ -115,7 +116,7 @@ func declare(n *Node, ctxt Class) { } s.Block = types.Block - s.Lastlineno = lineno + s.Lastlineno = base.Pos s.Def = asTypesNode(n) n.Name.Vargen = int32(gen) n.SetClass(ctxt) @@ -128,7 +129,7 @@ func declare(n *Node, ctxt Class) { func addvar(n *Node, t *types.Type, ctxt Class) { if n == nil || n.Sym == nil || (n.Op != ONAME && n.Op != ONONAME) || t == nil { - Fatalf("addvar: n=%v t=%v nil", n, t) + base.Fatalf("addvar: n=%v t=%v nil", n, t) } n.Op = ONAME @@ -165,7 +166,7 @@ func variter(vl []*Node, t *Node, el []*Node) []*Node { var e *Node if doexpr { if len(el) == 0 { - yyerror("assignment mismatch: %d variables but %d values", len(vl), nel) + base.Errorf("assignment mismatch: %d variables but %d values", len(vl), nel) break } e = el[0] @@ -189,7 +190,7 @@ func variter(vl []*Node, t *Node, el []*Node) []*Node { } if len(el) != 0 { - yyerror("assignment mismatch: %d variables but %d values", len(vl), nel) + base.Errorf("assignment mismatch: %d variables but %d values", len(vl), nel) } return init } @@ -197,7 +198,7 @@ func variter(vl []*Node, t *Node, el []*Node) []*Node { // newnoname returns a new ONONAME Node associated with symbol s. 
func newnoname(s *types.Sym) *Node { if s == nil { - Fatalf("newnoname nil") + base.Fatalf("newnoname nil") } n := nod(ONONAME, nil, nil) n.Sym = s @@ -208,7 +209,7 @@ func newnoname(s *types.Sym) *Node { // newfuncnamel generates a new name node for a function or method. func newfuncnamel(pos src.XPos, s *types.Sym, fn *Func) *Node { if fn.Nname != nil { - Fatalf("newfuncnamel - already have name") + base.Fatalf("newfuncnamel - already have name") } n := newnamel(pos, s) n.Func = fn @@ -304,7 +305,7 @@ func importName(sym *types.Sym) *Node { n := oldname(sym) if !types.IsExported(sym.Name) && sym.Pkg != localpkg { n.SetDiag(true) - yyerror("cannot refer to unexported name %s.%s", sym.Pkg.Name, sym.Name) + base.Errorf("cannot refer to unexported name %s.%s", sym.Pkg.Name, sym.Name) } return n } @@ -336,13 +337,13 @@ func colasdefn(left []*Node, defn *Node) { continue } if !colasname(n) { - yyerrorl(defn.Pos, "non-name %v on left side of :=", n) + base.ErrorfAt(defn.Pos, "non-name %v on left side of :=", n) nerr++ continue } if !n.Sym.Uniq() { - yyerrorl(defn.Pos, "%v repeated on left side of :=", n.Sym) + base.ErrorfAt(defn.Pos, "%v repeated on left side of :=", n.Sym) n.SetDiag(true) nerr++ continue @@ -362,7 +363,7 @@ func colasdefn(left []*Node, defn *Node) { } if nnew == 0 && nerr == 0 { - yyerrorl(defn.Pos, "no new variables on left side of :=") + base.ErrorfAt(defn.Pos, "no new variables on left side of :=") } } @@ -370,11 +371,11 @@ func colasdefn(left []*Node, defn *Node) { // interface field declaration. 
func ifacedcl(n *Node) { if n.Op != ODCLFIELD || n.Left == nil { - Fatalf("ifacedcl") + base.Fatalf("ifacedcl") } if n.Sym.IsBlank() { - yyerror("methods must have a unique non-blank name") + base.Errorf("methods must have a unique non-blank name") } } @@ -399,7 +400,7 @@ func funchdr(n *Node) { func funcargs(nt *Node) { if nt.Op != OTFUNC { - Fatalf("funcargs %v", nt.Op) + base.Fatalf("funcargs %v", nt.Op) } // re-start the variable generation number @@ -449,7 +450,7 @@ func funcargs(nt *Node) { func funcarg(n *Node, ctxt Class) { if n.Op != ODCLFIELD { - Fatalf("funcarg %v", n.Op) + base.Fatalf("funcarg %v", n.Op) } if n.Sym == nil { return @@ -469,7 +470,7 @@ func funcarg(n *Node, ctxt Class) { // used functype directly to parse the function's type. func funcargs2(t *types.Type) { if t.Etype != TFUNC { - Fatalf("funcargs2 %v", t) + base.Fatalf("funcargs2 %v", t) } for _, f := range t.Recvs().Fields().Slice() { @@ -522,23 +523,23 @@ func checkembeddedtype(t *types.Type) { if t.Sym == nil && t.IsPtr() { t = t.Elem() if t.IsInterface() { - yyerror("embedded type cannot be a pointer to interface") + base.Errorf("embedded type cannot be a pointer to interface") } } if t.IsPtr() || t.IsUnsafePtr() { - yyerror("embedded type cannot be a pointer") + base.Errorf("embedded type cannot be a pointer") } else if t.Etype == TFORW && !t.ForwardType().Embedlineno.IsKnown() { - t.ForwardType().Embedlineno = lineno + t.ForwardType().Embedlineno = base.Pos } } func structfield(n *Node) *types.Field { - lno := lineno - lineno = n.Pos + lno := base.Pos + base.Pos = n.Pos if n.Op != ODCLFIELD { - Fatalf("structfield: oops %v\n", n) + base.Fatalf("structfield: oops %v\n", n) } if n.Left != nil { @@ -556,7 +557,7 @@ func structfield(n *Node) *types.Field { f.Note = constant.StringVal(n.Val()) } - lineno = lno + base.Pos = lno return f } @@ -570,7 +571,7 @@ func checkdupfields(what string, fss ...[]*types.Field) { continue } if seen[f.Sym] { - yyerrorl(f.Pos, "duplicate %s %s", what, 
f.Sym.Name) + base.ErrorfAt(f.Pos, "duplicate %s %s", what, f.Sym.Name) continue } seen[f.Sym] = true @@ -631,15 +632,15 @@ func tofunargsfield(fields []*types.Field, funarg types.Funarg) *types.Type { } func interfacefield(n *Node) *types.Field { - lno := lineno - lineno = n.Pos + lno := base.Pos + base.Pos = n.Pos if n.Op != ODCLFIELD { - Fatalf("interfacefield: oops %v\n", n) + base.Fatalf("interfacefield: oops %v\n", n) } if n.HasVal() { - yyerror("interface method cannot have annotation") + base.Errorf("interface method cannot have annotation") } // MethodSpec = MethodName Signature | InterfaceTypeName . @@ -655,7 +656,7 @@ func interfacefield(n *Node) *types.Field { f := types.NewField(n.Pos, n.Sym, n.Type) - lineno = lno + base.Pos = lno return f } @@ -774,13 +775,13 @@ func methodSym(recv *types.Type, msym *types.Sym) *types.Sym { // start with a letter, number, or period. func methodSymSuffix(recv *types.Type, msym *types.Sym, suffix string) *types.Sym { if msym.IsBlank() { - Fatalf("blank method name") + base.Fatalf("blank method name") } rsym := recv.Sym if recv.IsPtr() { if rsym != nil { - Fatalf("declared pointer receiver type: %v", recv) + base.Fatalf("declared pointer receiver type: %v", recv) } rsym = recv.Elem().Sym } @@ -824,13 +825,13 @@ func methodSymSuffix(recv *types.Type, msym *types.Sym, suffix string) *types.Sy // Returns a pointer to the existing or added Field; or nil if there's an error. 
func addmethod(n *Node, msym *types.Sym, t *types.Type, local, nointerface bool) *types.Field { if msym == nil { - Fatalf("no method symbol") + base.Fatalf("no method symbol") } // get parent type sym rf := t.Recv() // ptr to this structure if rf == nil { - yyerror("missing receiver") + base.Errorf("missing receiver") return nil } @@ -840,7 +841,7 @@ func addmethod(n *Node, msym *types.Sym, t *types.Type, local, nointerface bool) t := pa if t != nil && t.IsPtr() { if t.Sym != nil { - yyerror("invalid receiver type %v (%v is a pointer type)", pa, t) + base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t) return nil } t = t.Elem() @@ -850,21 +851,21 @@ func addmethod(n *Node, msym *types.Sym, t *types.Type, local, nointerface bool) case t == nil || t.Broke(): // rely on typecheck having complained before case t.Sym == nil: - yyerror("invalid receiver type %v (%v is not a defined type)", pa, t) + base.Errorf("invalid receiver type %v (%v is not a defined type)", pa, t) case t.IsPtr(): - yyerror("invalid receiver type %v (%v is a pointer type)", pa, t) + base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t) case t.IsInterface(): - yyerror("invalid receiver type %v (%v is an interface type)", pa, t) + base.Errorf("invalid receiver type %v (%v is an interface type)", pa, t) default: // Should have picked off all the reasons above, // but just in case, fall back to generic error. 
- yyerror("invalid receiver type %v (%L / %L)", pa, pa, t) + base.Errorf("invalid receiver type %v (%L / %L)", pa, pa, t) } return nil } if local && mt.Sym.Pkg != localpkg { - yyerror("cannot define new methods on non-local type %v", mt) + base.Errorf("cannot define new methods on non-local type %v", mt) return nil } @@ -875,7 +876,7 @@ func addmethod(n *Node, msym *types.Sym, t *types.Type, local, nointerface bool) if mt.IsStruct() { for _, f := range mt.Fields().Slice() { if f.Sym == msym { - yyerror("type %v has both field and method named %v", mt, msym) + base.Errorf("type %v has both field and method named %v", mt, msym) f.SetBroke(true) return nil } @@ -889,12 +890,12 @@ func addmethod(n *Node, msym *types.Sym, t *types.Type, local, nointerface bool) // types.Identical only checks that incoming and result parameters match, // so explicitly check that the receiver parameters match too. if !types.Identical(t, f.Type) || !types.Identical(t.Recv().Type, f.Type.Recv().Type) { - yyerror("method redeclared: %v.%v\n\t%v\n\t%v", mt, msym, f.Type, t) + base.Errorf("method redeclared: %v.%v\n\t%v\n\t%v", mt, msym, f.Type, t) } return f } - f := types.NewField(lineno, msym, t) + f := types.NewField(base.Pos, msym, t) f.Nname = asTypesNode(n.Func.Nname) f.SetNointerface(nointerface) @@ -923,7 +924,7 @@ func funcsym(s *types.Sym) *types.Sym { // When dynamically linking, the necessary function // symbols will be created explicitly with makefuncsym. // See the makefuncsym comment for details. - if !Ctxt.Flag_dynlink && !existed { + if !base.Ctxt.Flag_dynlink && !existed { funcsyms = append(funcsyms, s) } funcsymsmu.Unlock() @@ -940,13 +941,13 @@ func funcsym(s *types.Sym) *types.Sym { // So instead, when dynamic linking, we only create // the s·f stubs in s's package. 
func makefuncsym(s *types.Sym) { - if !Ctxt.Flag_dynlink { - Fatalf("makefuncsym dynlink") + if !base.Ctxt.Flag_dynlink { + base.Fatalf("makefuncsym dynlink") } if s.IsBlank() { return } - if Flag.CompilingRuntime && (s.Name == "getg" || s.Name == "getclosureptr" || s.Name == "getcallerpc" || s.Name == "getcallersp") { + if base.Flag.CompilingRuntime && (s.Name == "getg" || s.Name == "getclosureptr" || s.Name == "getcallerpc" || s.Name == "getcallersp") { // runtime.getg(), getclosureptr(), getcallerpc(), and // getcallersp() are not real functions and so do not // get funcsyms. @@ -960,7 +961,7 @@ func makefuncsym(s *types.Sym) { // setNodeNameFunc marks a node as a function. func setNodeNameFunc(n *Node) { if n.Op != ONAME || n.Class() != Pxxx { - Fatalf("expected ONAME/Pxxx node, got %v", n) + base.Fatalf("expected ONAME/Pxxx node, got %v", n) } n.SetClass(PFUNC) @@ -969,11 +970,11 @@ func setNodeNameFunc(n *Node) { func dclfunc(sym *types.Sym, tfn *Node) *Node { if tfn.Op != OTFUNC { - Fatalf("expected OTFUNC node, got %v", tfn) + base.Fatalf("expected OTFUNC node, got %v", tfn) } fn := nod(ODCLFUNC, nil, nil) - fn.Func.Nname = newfuncnamel(lineno, sym, fn.Func) + fn.Func.Nname = newfuncnamel(base.Pos, sym, fn.Func) fn.Func.Nname.Name.Defn = fn fn.Func.Nname.Name.Param.Ntype = tfn setNodeNameFunc(fn.Func.Nname) @@ -1045,10 +1046,10 @@ func (c *nowritebarrierrecChecker) findExtraCalls(n *Node) bool { case OCLOSURE: callee = arg.Func.Decl default: - Fatalf("expected ONAME or OCLOSURE node, got %+v", arg) + base.Fatalf("expected ONAME or OCLOSURE node, got %+v", arg) } if callee.Op != ODCLFUNC { - Fatalf("expected ODCLFUNC node, got %+v", callee) + base.Fatalf("expected ODCLFUNC node, got %+v", callee) } c.extraCalls[c.curfn] = append(c.extraCalls[c.curfn], nowritebarrierrecCall{callee, n.Pos}) return true @@ -1064,7 +1065,7 @@ func (c *nowritebarrierrecChecker) findExtraCalls(n *Node) bool { // This can be called concurrently for different from Nodes. 
func (c *nowritebarrierrecChecker) recordCall(from *Node, to *obj.LSym, pos src.XPos) { if from.Op != ODCLFUNC { - Fatalf("expected ODCLFUNC, got %v", from) + base.Fatalf("expected ODCLFUNC, got %v", from) } // We record this information on the *Func so this is // concurrent-safe. @@ -1105,7 +1106,7 @@ func (c *nowritebarrierrecChecker) check() { } // Check go:nowritebarrier functions. if n.Func.Pragma&Nowritebarrier != 0 && n.Func.WBPos.IsKnown() { - yyerrorl(n.Func.WBPos, "write barrier prohibited") + base.ErrorfAt(n.Func.WBPos, "write barrier prohibited") } } @@ -1133,10 +1134,10 @@ func (c *nowritebarrierrecChecker) check() { var err bytes.Buffer call := funcs[fn] for call.target != nil { - fmt.Fprintf(&err, "\n\t%v: called by %v", linestr(call.lineno), call.target.Func.Nname) + fmt.Fprintf(&err, "\n\t%v: called by %v", base.FmtPos(call.lineno), call.target.Func.Nname) call = funcs[call.target] } - yyerrorl(fn.Func.WBPos, "write barrier prohibited by caller; %v%s", fn.Func.Nname, err.String()) + base.ErrorfAt(fn.Func.WBPos, "write barrier prohibited by caller; %v%s", fn.Func.Nname, err.String()) continue } diff --git a/src/cmd/compile/internal/gc/dump.go b/src/cmd/compile/internal/gc/dump.go index 29eb1c1e48..56dc474465 100644 --- a/src/cmd/compile/internal/gc/dump.go +++ b/src/cmd/compile/internal/gc/dump.go @@ -9,6 +9,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/src" "fmt" @@ -146,7 +147,7 @@ func (p *dumper) dump(x reflect.Value, depth int) { x = reflect.ValueOf(v.Slice()) case src.XPos: - p.printf("%s", linestr(v)) + p.printf("%s", base.FmtPos(v)) return case *types.Node: diff --git a/src/cmd/compile/internal/gc/dwinl.go b/src/cmd/compile/internal/gc/dwinl.go index edde7a4cc5..5da2871748 100644 --- a/src/cmd/compile/internal/gc/dwinl.go +++ b/src/cmd/compile/internal/gc/dwinl.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/internal/dwarf" "cmd/internal/obj" "cmd/internal/src" @@ 
-26,8 +27,8 @@ type varPos struct { func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls { var inlcalls dwarf.InlCalls - if Debug.DwarfInl != 0 { - Ctxt.Logf("assembling DWARF inlined routine info for %v\n", fnsym.Name) + if base.Debug.DwarfInl != 0 { + base.Ctxt.Logf("assembling DWARF inlined routine info for %v\n", fnsym.Name) } // This maps inline index (from Ctxt.InlTree) to index in inlcalls.Calls @@ -106,7 +107,7 @@ func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls { } m = makePreinlineDclMap(fnsym) } else { - ifnlsym := Ctxt.InlTree.InlinedFunction(int(ii - 1)) + ifnlsym := base.Ctxt.InlTree.InlinedFunction(int(ii - 1)) m = makePreinlineDclMap(ifnlsym) } @@ -181,7 +182,7 @@ func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls { } // Debugging - if Debug.DwarfInl != 0 { + if base.Debug.DwarfInl != 0 { dumpInlCalls(inlcalls) dumpInlVars(dwVars) } @@ -205,15 +206,15 @@ func assembleInlines(fnsym *obj.LSym, dwVars []*dwarf.Var) dwarf.InlCalls { // abstract function DIE for an inlined routine imported from a // previously compiled package. 
func genAbstractFunc(fn *obj.LSym) { - ifn := Ctxt.DwFixups.GetPrecursorFunc(fn) + ifn := base.Ctxt.DwFixups.GetPrecursorFunc(fn) if ifn == nil { - Ctxt.Diag("failed to locate precursor fn for %v", fn) + base.Ctxt.Diag("failed to locate precursor fn for %v", fn) return } - if Debug.DwarfInl != 0 { - Ctxt.Logf("DwarfAbstractFunc(%v)\n", fn.Name) + if base.Debug.DwarfInl != 0 { + base.Ctxt.Logf("DwarfAbstractFunc(%v)\n", fn.Name) } - Ctxt.DwarfAbstractFunc(ifn, fn, Ctxt.Pkgpath) + base.Ctxt.DwarfAbstractFunc(ifn, fn, base.Ctxt.Pkgpath) } // Undo any versioning performed when a name was written @@ -235,7 +236,7 @@ func makePreinlineDclMap(fnsym *obj.LSym) map[varPos]int { dcl := preInliningDcls(fnsym) m := make(map[varPos]int) for i, n := range dcl { - pos := Ctxt.InnermostPos(n.Pos) + pos := base.Ctxt.InnermostPos(n.Pos) vp := varPos{ DeclName: unversion(n.Sym.Name), DeclFile: pos.RelFilename(), @@ -243,7 +244,7 @@ func makePreinlineDclMap(fnsym *obj.LSym) map[varPos]int { DeclCol: pos.Col(), } if _, found := m[vp]; found { - Fatalf("child dcl collision on symbol %s within %v\n", n.Sym.Name, fnsym.Name) + base.Fatalf("child dcl collision on symbol %s within %v\n", n.Sym.Name, fnsym.Name) } m[vp] = i } @@ -260,17 +261,17 @@ func insertInlCall(dwcalls *dwarf.InlCalls, inlIdx int, imap map[int]int) int { // is one. We do this first so that parents appear before their // children in the resulting table. 
parCallIdx := -1 - parInlIdx := Ctxt.InlTree.Parent(inlIdx) + parInlIdx := base.Ctxt.InlTree.Parent(inlIdx) if parInlIdx >= 0 { parCallIdx = insertInlCall(dwcalls, parInlIdx, imap) } // Create new entry for this inline - inlinedFn := Ctxt.InlTree.InlinedFunction(inlIdx) - callXPos := Ctxt.InlTree.CallPos(inlIdx) - absFnSym := Ctxt.DwFixups.AbsFuncDwarfSym(inlinedFn) - pb := Ctxt.PosTable.Pos(callXPos).Base() - callFileSym := Ctxt.Lookup(pb.SymFilename()) + inlinedFn := base.Ctxt.InlTree.InlinedFunction(inlIdx) + callXPos := base.Ctxt.InlTree.CallPos(inlIdx) + absFnSym := base.Ctxt.DwFixups.AbsFuncDwarfSym(inlinedFn) + pb := base.Ctxt.PosTable.Pos(callXPos).Base() + callFileSym := base.Ctxt.Lookup(pb.SymFilename()) ic := dwarf.InlCall{ InlIndex: inlIdx, CallFile: callFileSym, @@ -298,7 +299,7 @@ func insertInlCall(dwcalls *dwarf.InlCalls, inlIdx int, imap map[int]int) int { // the index for a node from the inlined body of D will refer to the // call to D from C. Whew. func posInlIndex(xpos src.XPos) int { - pos := Ctxt.PosTable.Pos(xpos) + pos := base.Ctxt.PosTable.Pos(xpos) if b := pos.Base(); b != nil { ii := b.InliningIndex() if ii >= 0 { @@ -324,7 +325,7 @@ func addRange(calls []dwarf.InlCall, start, end int64, ii int, imap map[int]int) // Append range to correct inlined call callIdx, found := imap[ii] if !found { - Fatalf("can't find inlIndex %d in imap for prog at %d\n", ii, start) + base.Fatalf("can't find inlIndex %d in imap for prog at %d\n", ii, start) } call := &calls[callIdx] call.Ranges = append(call.Ranges, dwarf.Range{Start: start, End: end}) @@ -332,23 +333,23 @@ func addRange(calls []dwarf.InlCall, start, end int64, ii int, imap map[int]int) func dumpInlCall(inlcalls dwarf.InlCalls, idx, ilevel int) { for i := 0; i < ilevel; i++ { - Ctxt.Logf(" ") + base.Ctxt.Logf(" ") } ic := inlcalls.Calls[idx] - callee := Ctxt.InlTree.InlinedFunction(ic.InlIndex) - Ctxt.Logf(" %d: II:%d (%s) V: (", idx, ic.InlIndex, callee.Name) + callee := 
base.Ctxt.InlTree.InlinedFunction(ic.InlIndex) + base.Ctxt.Logf(" %d: II:%d (%s) V: (", idx, ic.InlIndex, callee.Name) for _, f := range ic.InlVars { - Ctxt.Logf(" %v", f.Name) + base.Ctxt.Logf(" %v", f.Name) } - Ctxt.Logf(" ) C: (") + base.Ctxt.Logf(" ) C: (") for _, k := range ic.Children { - Ctxt.Logf(" %v", k) + base.Ctxt.Logf(" %v", k) } - Ctxt.Logf(" ) R:") + base.Ctxt.Logf(" ) R:") for _, r := range ic.Ranges { - Ctxt.Logf(" [%d,%d)", r.Start, r.End) + base.Ctxt.Logf(" [%d,%d)", r.Start, r.End) } - Ctxt.Logf("\n") + base.Ctxt.Logf("\n") for _, k := range ic.Children { dumpInlCall(inlcalls, k, ilevel+1) } @@ -373,7 +374,7 @@ func dumpInlVars(dwvars []*dwarf.Var) { if dwv.IsInAbstract { ia = 1 } - Ctxt.Logf("V%d: %s CI:%d II:%d IA:%d %s\n", i, dwv.Name, dwv.ChildIndex, dwv.InlIndex-1, ia, typ) + base.Ctxt.Logf("V%d: %s CI:%d II:%d IA:%d %s\n", i, dwv.Name, dwv.ChildIndex, dwv.InlIndex-1, ia, typ) } } @@ -410,7 +411,7 @@ func checkInlCall(funcName string, inlCalls dwarf.InlCalls, funcSize int64, idx, // Callee ic := inlCalls.Calls[idx] - callee := Ctxt.InlTree.InlinedFunction(ic.InlIndex).Name + callee := base.Ctxt.InlTree.InlinedFunction(ic.InlIndex).Name calleeRanges := ic.Ranges // Caller @@ -418,14 +419,14 @@ func checkInlCall(funcName string, inlCalls dwarf.InlCalls, funcSize int64, idx, parentRanges := []dwarf.Range{dwarf.Range{Start: int64(0), End: funcSize}} if parentIdx != -1 { pic := inlCalls.Calls[parentIdx] - caller = Ctxt.InlTree.InlinedFunction(pic.InlIndex).Name + caller = base.Ctxt.InlTree.InlinedFunction(pic.InlIndex).Name parentRanges = pic.Ranges } // Callee ranges contained in caller ranges? 
c, m := rangesContainsAll(parentRanges, calleeRanges) if !c { - Fatalf("** malformed inlined routine range in %s: caller %s callee %s II=%d %s\n", funcName, caller, callee, idx, m) + base.Fatalf("** malformed inlined routine range in %s: caller %s callee %s II=%d %s\n", funcName, caller, callee, idx, m) } // Now visit kids diff --git a/src/cmd/compile/internal/gc/embed.go b/src/cmd/compile/internal/gc/embed.go index 5559d62813..f6c1b7cdcc 100644 --- a/src/cmd/compile/internal/gc/embed.go +++ b/src/cmd/compile/internal/gc/embed.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/syntax" "cmd/compile/internal/types" "cmd/internal/obj" @@ -43,30 +44,30 @@ func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []Pragma pos := embeds[0].Pos if !haveEmbed { - p.yyerrorpos(pos, "invalid go:embed: missing import \"embed\"") + p.errorAt(pos, "invalid go:embed: missing import \"embed\"") return exprs } - if Flag.Cfg.Embed.Patterns == nil { - p.yyerrorpos(pos, "invalid go:embed: build system did not supply embed configuration") + if base.Flag.Cfg.Embed.Patterns == nil { + p.errorAt(pos, "invalid go:embed: build system did not supply embed configuration") return exprs } if len(names) > 1 { - p.yyerrorpos(pos, "go:embed cannot apply to multiple vars") + p.errorAt(pos, "go:embed cannot apply to multiple vars") return exprs } if len(exprs) > 0 { - p.yyerrorpos(pos, "go:embed cannot apply to var with initializer") + p.errorAt(pos, "go:embed cannot apply to var with initializer") return exprs } if typ == nil { // Should not happen, since len(exprs) == 0 now. 
- p.yyerrorpos(pos, "go:embed cannot apply to var without type") + p.errorAt(pos, "go:embed cannot apply to var without type") return exprs } kind := embedKindApprox(typ) if kind == embedUnknown { - p.yyerrorpos(pos, "go:embed cannot apply to var of type %v", typ) + p.errorAt(pos, "go:embed cannot apply to var of type %v", typ) return exprs } @@ -75,13 +76,13 @@ func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []Pragma var list []string for _, e := range embeds { for _, pattern := range e.Patterns { - files, ok := Flag.Cfg.Embed.Patterns[pattern] + files, ok := base.Flag.Cfg.Embed.Patterns[pattern] if !ok { - p.yyerrorpos(e.Pos, "invalid go:embed: build system did not map pattern: %s", pattern) + p.errorAt(e.Pos, "invalid go:embed: build system did not map pattern: %s", pattern) } for _, file := range files { - if Flag.Cfg.Embed.Files[file] == "" { - p.yyerrorpos(e.Pos, "invalid go:embed: build system did not map file: %s", file) + if base.Flag.Cfg.Embed.Files[file] == "" { + p.errorAt(e.Pos, "invalid go:embed: build system did not map file: %s", file) continue } if !have[file] { @@ -103,7 +104,7 @@ func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []Pragma if kind == embedString || kind == embedBytes { if len(list) > 1 { - p.yyerrorpos(pos, "invalid go:embed: multiple files for type %v", typ) + p.errorAt(pos, "invalid go:embed: multiple files for type %v", typ) return exprs } } @@ -129,7 +130,7 @@ func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []Pragma // can't tell whether "string" and "byte" really mean "string" and "byte". // The result must be confirmed later, after type checking, using embedKind. 
func embedKindApprox(typ *Node) int { - if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && Ctxt.Pkgpath == "embed")) { + if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && base.Ctxt.Pkgpath == "embed")) { return embedFiles } // These are not guaranteed to match only string and []byte - @@ -147,7 +148,7 @@ func embedKindApprox(typ *Node) int { // embedKind determines the kind of embedding variable. func embedKind(typ *types.Type) int { - if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && Ctxt.Pkgpath == "embed")) { + if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && base.Ctxt.Pkgpath == "embed")) { return embedFiles } if typ == types.Types[TSTRING] { @@ -194,13 +195,13 @@ func initEmbed(v *Node) { files := v.Name.Param.EmbedFiles() switch kind := embedKind(v.Type); kind { case embedUnknown: - yyerrorl(v.Pos, "go:embed cannot apply to var of type %v", v.Type) + base.ErrorfAt(v.Pos, "go:embed cannot apply to var of type %v", v.Type) case embedString, embedBytes: file := files[0] - fsym, size, err := fileStringSym(v.Pos, Flag.Cfg.Embed.Files[file], kind == embedString, nil) + fsym, size, err := fileStringSym(v.Pos, base.Flag.Cfg.Embed.Files[file], kind == embedString, nil) if err != nil { - yyerrorl(v.Pos, "embed %s: %v", file, err) + base.ErrorfAt(v.Pos, "embed %s: %v", file, err) } sym := v.Sym.Linksym() off := 0 @@ -211,7 +212,7 @@ func initEmbed(v *Node) { } case embedFiles: - slicedata := Ctxt.Lookup(`"".` + v.Sym.Name + `.files`) + slicedata := base.Ctxt.Lookup(`"".` + v.Sym.Name + `.files`) off := 0 // []files pointed at by Files off = dsymptr(slicedata, off, slicedata, 3*Widthptr) // []file, pointing just past slice @@ -234,13 +235,13 @@ func initEmbed(v *Node) { off = duintptr(slicedata, off, 0) off += hashSize } else { - fsym, size, err := 
fileStringSym(v.Pos, Flag.Cfg.Embed.Files[file], true, hash) + fsym, size, err := fileStringSym(v.Pos, base.Flag.Cfg.Embed.Files[file], true, hash) if err != nil { - yyerrorl(v.Pos, "embed %s: %v", file, err) + base.ErrorfAt(v.Pos, "embed %s: %v", file, err) } off = dsymptr(slicedata, off, fsym, 0) // data string off = duintptr(slicedata, off, uint64(size)) - off = int(slicedata.WriteBytes(Ctxt, int64(off), hash)) + off = int(slicedata.WriteBytes(base.Ctxt, int64(off), hash)) } } ggloblsym(slicedata, int32(off), obj.RODATA|obj.LOCAL) diff --git a/src/cmd/compile/internal/gc/esc.go b/src/cmd/compile/internal/gc/esc.go index 74b85e1ae8..5cf8c4a1c6 100644 --- a/src/cmd/compile/internal/gc/esc.go +++ b/src/cmd/compile/internal/gc/esc.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "fmt" ) @@ -263,11 +264,11 @@ func addrescapes(n *Node) { Curfn = Curfn.Func.Decl panic("can't happen") } - ln := lineno - lineno = Curfn.Pos + ln := base.Pos + base.Pos = Curfn.Pos moveToHeap(n) Curfn = oldfn - lineno = ln + base.Pos = ln // ODOTPTR has already been introduced, // so these are the non-pointer ODOT and OINDEX. @@ -283,15 +284,15 @@ func addrescapes(n *Node) { // moveToHeap records the parameter or local variable n as moved to the heap. func moveToHeap(n *Node) { - if Flag.LowerR != 0 { + if base.Flag.LowerR != 0 { Dump("MOVE", n) } - if Flag.CompilingRuntime { - yyerror("%v escapes to heap, not allowed in runtime", n) + if base.Flag.CompilingRuntime { + base.Errorf("%v escapes to heap, not allowed in runtime", n) } if n.Class() == PAUTOHEAP { Dump("n", n) - Fatalf("double move to heap") + base.Fatalf("double move to heap") } // Allocate a local stack variable to hold the pointer to the heap copy. @@ -311,7 +312,7 @@ func moveToHeap(n *Node) { // the function. 
if n.Class() == PPARAM || n.Class() == PPARAMOUT { if n.Xoffset == BADWIDTH { - Fatalf("addrescapes before param assignment") + base.Fatalf("addrescapes before param assignment") } // We rewrite n below to be a heap variable (indirection of heapaddr). @@ -350,7 +351,7 @@ func moveToHeap(n *Node) { } } if !found { - Fatalf("cannot find %v in local variable list", n) + base.Fatalf("cannot find %v in local variable list", n) } Curfn.Func.Dcl = append(Curfn.Func.Dcl, n) } @@ -360,8 +361,8 @@ func moveToHeap(n *Node) { n.Xoffset = 0 n.Name.Param.Heapaddr = heapaddr n.Esc = EscHeap - if Flag.LowerM != 0 { - Warnl(n.Pos, "moved to heap: %v", n) + if base.Flag.LowerM != 0 { + base.WarnfAt(n.Pos, "moved to heap: %v", n) } } @@ -390,8 +391,8 @@ func (e *Escape) paramTag(fn *Node, narg int, f *types.Field) string { // but we are reusing the ability to annotate an individual function // argument and pass those annotations along to importing code. if f.Type.IsUintptr() { - if Flag.LowerM != 0 { - Warnl(f.Pos, "assuming %v is unsafe uintptr", name()) + if base.Flag.LowerM != 0 { + base.WarnfAt(f.Pos, "assuming %v is unsafe uintptr", name()) } return unsafeUintptrTag } @@ -405,12 +406,12 @@ func (e *Escape) paramTag(fn *Node, narg int, f *types.Field) string { // External functions are assumed unsafe, unless // //go:noescape is given before the declaration. 
if fn.Func.Pragma&Noescape != 0 { - if Flag.LowerM != 0 && f.Sym != nil { - Warnl(f.Pos, "%v does not escape", name()) + if base.Flag.LowerM != 0 && f.Sym != nil { + base.WarnfAt(f.Pos, "%v does not escape", name()) } } else { - if Flag.LowerM != 0 && f.Sym != nil { - Warnl(f.Pos, "leaking param: %v", name()) + if base.Flag.LowerM != 0 && f.Sym != nil { + base.WarnfAt(f.Pos, "leaking param: %v", name()) } esc.AddHeap(0) } @@ -420,15 +421,15 @@ func (e *Escape) paramTag(fn *Node, narg int, f *types.Field) string { if fn.Func.Pragma&UintptrEscapes != 0 { if f.Type.IsUintptr() { - if Flag.LowerM != 0 { - Warnl(f.Pos, "marking %v as escaping uintptr", name()) + if base.Flag.LowerM != 0 { + base.WarnfAt(f.Pos, "marking %v as escaping uintptr", name()) } return uintptrEscapesTag } if f.IsDDD() && f.Type.Elem().IsUintptr() { // final argument is ...uintptr. - if Flag.LowerM != 0 { - Warnl(f.Pos, "marking %v as escaping ...uintptr", name()) + if base.Flag.LowerM != 0 { + base.WarnfAt(f.Pos, "marking %v as escaping ...uintptr", name()) } return uintptrEscapesTag } @@ -449,22 +450,22 @@ func (e *Escape) paramTag(fn *Node, narg int, f *types.Field) string { esc := loc.paramEsc esc.Optimize() - if Flag.LowerM != 0 && !loc.escapes { + if base.Flag.LowerM != 0 && !loc.escapes { if esc.Empty() { - Warnl(f.Pos, "%v does not escape", name()) + base.WarnfAt(f.Pos, "%v does not escape", name()) } if x := esc.Heap(); x >= 0 { if x == 0 { - Warnl(f.Pos, "leaking param: %v", name()) + base.WarnfAt(f.Pos, "leaking param: %v", name()) } else { // TODO(mdempsky): Mention level=x like below? 
- Warnl(f.Pos, "leaking param content: %v", name()) + base.WarnfAt(f.Pos, "leaking param content: %v", name()) } } for i := 0; i < numEscResults; i++ { if x := esc.Result(i); x >= 0 { res := fn.Type.Results().Field(i).Sym - Warnl(f.Pos, "leaking param: %v to result %v level=%d", name(), res, x) + base.WarnfAt(f.Pos, "leaking param: %v to result %v level=%d", name(), res, x) } } } diff --git a/src/cmd/compile/internal/gc/escape.go b/src/cmd/compile/internal/gc/escape.go index 27645fb888..aaf768d85a 100644 --- a/src/cmd/compile/internal/gc/escape.go +++ b/src/cmd/compile/internal/gc/escape.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/logopt" "cmd/compile/internal/types" "cmd/internal/src" @@ -180,7 +181,7 @@ func escFmt(n *Node, short bool) string { func escapeFuncs(fns []*Node, recursive bool) { for _, fn := range fns { if fn.Op != ODCLFUNC { - Fatalf("unexpected node: %v", fn) + base.Fatalf("unexpected node: %v", fn) } } @@ -202,10 +203,10 @@ func escapeFuncs(fns []*Node, recursive bool) { func (e *Escape) initFunc(fn *Node) { if fn.Op != ODCLFUNC || fn.Esc != EscFuncUnknown { - Fatalf("unexpected node: %v", fn) + base.Fatalf("unexpected node: %v", fn) } fn.Esc = EscFuncPlanned - if Flag.LowerM > 3 { + if base.Flag.LowerM > 3 { Dump("escAnalyze", fn) } @@ -279,18 +280,18 @@ func (e *Escape) stmt(n *Node) { lno := setlineno(n) defer func() { - lineno = lno + base.Pos = lno }() - if Flag.LowerM > 2 { - fmt.Printf("%v:[%d] %v stmt: %v\n", linestr(lineno), e.loopDepth, funcSym(e.curfn), n) + if base.Flag.LowerM > 2 { + fmt.Printf("%v:[%d] %v stmt: %v\n", base.FmtPos(base.Pos), e.loopDepth, funcSym(e.curfn), n) } e.stmts(n.Ninit) switch n.Op { default: - Fatalf("unexpected stmt: %v", n) + base.Fatalf("unexpected stmt: %v", n) case ODCLCONST, ODCLTYPE, OEMPTY, OFALL, OINLMARK: // nop @@ -310,16 +311,16 @@ func (e *Escape) stmt(n *Node) { case OLABEL: switch asNode(n.Sym.Label) { case nonlooping: - if Flag.LowerM > 2 { - 
fmt.Printf("%v:%v non-looping label\n", linestr(lineno), n) + if base.Flag.LowerM > 2 { + fmt.Printf("%v:%v non-looping label\n", base.FmtPos(base.Pos), n) } case looping: - if Flag.LowerM > 2 { - fmt.Printf("%v: %v looping label\n", linestr(lineno), n) + if base.Flag.LowerM > 2 { + fmt.Printf("%v: %v looping label\n", base.FmtPos(base.Pos), n) } e.loopDepth++ default: - Fatalf("label missing tag") + base.Fatalf("label missing tag") } n.Sym.Label = nil @@ -460,7 +461,7 @@ func (e *Escape) exprSkipInit(k EscHole, n *Node) { lno := setlineno(n) defer func() { - lineno = lno + base.Pos = lno }() uintptrEscapesHack := k.uintptrEscapesHack @@ -474,7 +475,7 @@ func (e *Escape) exprSkipInit(k EscHole, n *Node) { switch n.Op { default: - Fatalf("unexpected expr: %v", n) + base.Fatalf("unexpected expr: %v", n) case OLITERAL, ONIL, OGETG, OCLOSUREVAR, OTYPE, OMETHEXPR: // nop @@ -653,7 +654,7 @@ func (e *Escape) exprSkipInit(k EscHole, n *Node) { // for conversions from an unsafe.Pointer. func (e *Escape) unsafeValue(k EscHole, n *Node) { if n.Type.Etype != TUINTPTR { - Fatalf("unexpected type %v for %v", n.Type, n) + base.Fatalf("unexpected type %v for %v", n.Type, n) } e.stmts(n.Ninit) @@ -711,7 +712,7 @@ func (e *Escape) addr(n *Node) EscHole { switch n.Op { default: - Fatalf("unexpected addr: %v", n) + base.Fatalf("unexpected addr: %v", n) case ONAME: if n.Class() == PEXTERN { break @@ -752,8 +753,8 @@ func (e *Escape) addrs(l Nodes) []EscHole { func (e *Escape) assign(dst, src *Node, why string, where *Node) { // Filter out some no-op assignments for escape analysis. 
ignore := dst != nil && src != nil && isSelfAssign(dst, src) - if ignore && Flag.LowerM != 0 { - Warnl(where.Pos, "%v ignoring self-assignment in %S", funcSym(e.curfn), where) + if ignore && base.Flag.LowerM != 0 { + base.WarnfAt(where.Pos, "%v ignoring self-assignment in %S", funcSym(e.curfn), where) } k := e.addr(dst) @@ -797,7 +798,7 @@ func (e *Escape) call(ks []EscHole, call, where *Node) { switch call.Op { default: - Fatalf("unexpected call op: %v", call.Op) + base.Fatalf("unexpected call op: %v", call.Op) case OCALLFUNC, OCALLMETH, OCALLINTER: fixVariadicCall(call) @@ -936,7 +937,7 @@ func (e *Escape) tagHole(ks []EscHole, fn *Node, param *types.Field) EscHole { func (e *Escape) inMutualBatch(fn *Node) bool { if fn.Name.Defn != nil && fn.Name.Defn.Esc < EscFuncTagged { if fn.Name.Defn.Esc == EscFuncUnknown { - Fatalf("graph inconsistency") + base.Fatalf("graph inconsistency") } return true } @@ -964,9 +965,9 @@ type EscNote struct { func (k EscHole) note(where *Node, why string) EscHole { if where == nil || why == "" { - Fatalf("note: missing where/why") + base.Fatalf("note: missing where/why") } - if Flag.LowerM >= 2 || logopt.Enabled() { + if base.Flag.LowerM >= 2 || logopt.Enabled() { k.notes = &EscNote{ next: k.notes, where: where, @@ -979,7 +980,7 @@ func (k EscHole) note(where *Node, why string) EscHole { func (k EscHole) shift(delta int) EscHole { k.derefs += delta if k.derefs < -1 { - Fatalf("derefs underflow: %v", k.derefs) + base.Fatalf("derefs underflow: %v", k.derefs) } return k } @@ -1016,7 +1017,7 @@ func (e *Escape) teeHole(ks ...EscHole) EscHole { // *ltmp" and "l2 = ltmp" and return "ltmp = &_" // instead. 
if k.derefs < 0 { - Fatalf("teeHole: negative derefs") + base.Fatalf("teeHole: negative derefs") } e.flow(k, loc) @@ -1054,7 +1055,7 @@ func canonicalNode(n *Node) *Node { if n != nil && n.Op == ONAME && n.Name.IsClosureVar() { n = n.Name.Defn if n.Name.IsClosureVar() { - Fatalf("still closure var") + base.Fatalf("still closure var") } } @@ -1063,10 +1064,10 @@ func canonicalNode(n *Node) *Node { func (e *Escape) newLoc(n *Node, transient bool) *EscLocation { if e.curfn == nil { - Fatalf("e.curfn isn't set") + base.Fatalf("e.curfn isn't set") } if n != nil && n.Type != nil && n.Type.NotInHeap() { - yyerrorl(n.Pos, "%v is incomplete (or unallocatable); stack allocation disallowed", n.Type) + base.ErrorfAt(n.Pos, "%v is incomplete (or unallocatable); stack allocation disallowed", n.Type) } n = canonicalNode(n) @@ -1079,11 +1080,11 @@ func (e *Escape) newLoc(n *Node, transient bool) *EscLocation { e.allLocs = append(e.allLocs, loc) if n != nil { if n.Op == ONAME && n.Name.Curfn != e.curfn { - Fatalf("curfn mismatch: %v != %v", n.Name.Curfn, e.curfn) + base.Fatalf("curfn mismatch: %v != %v", n.Name.Curfn, e.curfn) } if n.HasOpt() { - Fatalf("%v already has a location", n) + base.Fatalf("%v already has a location", n) } n.SetOpt(loc) @@ -1112,9 +1113,9 @@ func (e *Escape) flow(k EscHole, src *EscLocation) { return } if dst.escapes && k.derefs < 0 { // dst = &src - if Flag.LowerM >= 2 || logopt.Enabled() { - pos := linestr(src.n.Pos) - if Flag.LowerM >= 2 { + if base.Flag.LowerM >= 2 || logopt.Enabled() { + pos := base.FmtPos(src.n.Pos) + if base.Flag.LowerM >= 2 { fmt.Printf("%s: %v escapes to heap:\n", pos, src.n) } explanation := e.explainFlow(pos, dst, src, k.derefs, k.notes, []*logopt.LoggedOpt{}) @@ -1214,9 +1215,9 @@ func (e *Escape) walkOne(root *EscLocation, walkgen uint32, enqueue func(*EscLoc // that value flow for tagging the function // later. 
if l.isName(PPARAM) { - if (logopt.Enabled() || Flag.LowerM >= 2) && !l.escapes { - if Flag.LowerM >= 2 { - fmt.Printf("%s: parameter %v leaks to %s with derefs=%d:\n", linestr(l.n.Pos), l.n, e.explainLoc(root), derefs) + if (logopt.Enabled() || base.Flag.LowerM >= 2) && !l.escapes { + if base.Flag.LowerM >= 2 { + fmt.Printf("%s: parameter %v leaks to %s with derefs=%d:\n", base.FmtPos(l.n.Pos), l.n, e.explainLoc(root), derefs) } explanation := e.explainPath(root, l) if logopt.Enabled() { @@ -1231,9 +1232,9 @@ func (e *Escape) walkOne(root *EscLocation, walkgen uint32, enqueue func(*EscLoc // outlives it, then l needs to be heap // allocated. if addressOf && !l.escapes { - if logopt.Enabled() || Flag.LowerM >= 2 { - if Flag.LowerM >= 2 { - fmt.Printf("%s: %v escapes to heap:\n", linestr(l.n.Pos), l.n) + if logopt.Enabled() || base.Flag.LowerM >= 2 { + if base.Flag.LowerM >= 2 { + fmt.Printf("%s: %v escapes to heap:\n", base.FmtPos(l.n.Pos), l.n) } explanation := e.explainPath(root, l) if logopt.Enabled() { @@ -1265,12 +1266,12 @@ func (e *Escape) walkOne(root *EscLocation, walkgen uint32, enqueue func(*EscLoc // explainPath prints an explanation of how src flows to the walk root. func (e *Escape) explainPath(root, src *EscLocation) []*logopt.LoggedOpt { visited := make(map[*EscLocation]bool) - pos := linestr(src.n.Pos) + pos := base.FmtPos(src.n.Pos) var explanation []*logopt.LoggedOpt for { // Prevent infinite loop. 
if visited[src] { - if Flag.LowerM >= 2 { + if base.Flag.LowerM >= 2 { fmt.Printf("%s: warning: truncated explanation due to assignment cycle; see golang.org/issue/35518\n", pos) } break @@ -1279,7 +1280,7 @@ func (e *Escape) explainPath(root, src *EscLocation) []*logopt.LoggedOpt { dst := src.dst edge := &dst.edges[src.dstEdgeIdx] if edge.src != src { - Fatalf("path inconsistency: %v != %v", edge.src, src) + base.Fatalf("path inconsistency: %v != %v", edge.src, src) } explanation = e.explainFlow(pos, dst, src, edge.derefs, edge.notes, explanation) @@ -1298,7 +1299,7 @@ func (e *Escape) explainFlow(pos string, dst, srcloc *EscLocation, derefs int, n if derefs >= 0 { ops = strings.Repeat("*", derefs) } - print := Flag.LowerM >= 2 + print := base.Flag.LowerM >= 2 flow := fmt.Sprintf(" flow: %s = %s%v:", e.explainLoc(dst), ops, e.explainLoc(srcloc)) if print { @@ -1316,7 +1317,7 @@ func (e *Escape) explainFlow(pos string, dst, srcloc *EscLocation, derefs int, n for note := notes; note != nil; note = note.next { if print { - fmt.Printf("%s: from %v (%v) at %s\n", pos, note.where, note.why, linestr(note.where.Pos)) + fmt.Printf("%s: from %v (%v) at %s\n", pos, note.where, note.why, base.FmtPos(note.where.Pos)) } if logopt.Enabled() { explanation = append(explanation, logopt.NewLoggedOpt(note.where.Pos, "escflow", "escape", e.curfn.funcname(), @@ -1394,7 +1395,7 @@ func (e *Escape) outlives(l, other *EscLocation) bool { // containsClosure reports whether c is a closure contained within f. func containsClosure(f, c *Node) bool { if f.Op != ODCLFUNC || c.Op != ODCLFUNC { - Fatalf("bad containsClosure: %v, %v", f, c) + base.Fatalf("bad containsClosure: %v, %v", f, c) } // Common case. 
@@ -1452,8 +1453,8 @@ func (e *Escape) finish(fns []*Node) { if loc.escapes { if n.Op != ONAME { - if Flag.LowerM != 0 { - Warnl(n.Pos, "%S escapes to heap", n) + if base.Flag.LowerM != 0 { + base.WarnfAt(n.Pos, "%S escapes to heap", n) } if logopt.Enabled() { logopt.LogOpt(n.Pos, "escape", "escape", e.curfn.funcname()) @@ -1462,8 +1463,8 @@ func (e *Escape) finish(fns []*Node) { n.Esc = EscHeap addrescapes(n) } else { - if Flag.LowerM != 0 && n.Op != ONAME { - Warnl(n.Pos, "%S does not escape", n) + if base.Flag.LowerM != 0 && n.Op != ONAME { + base.WarnfAt(n.Pos, "%S does not escape", n) } n.Esc = EscNone if loc.transient { @@ -1516,7 +1517,7 @@ func (l *EscLeaks) add(i, derefs int) { func (l *EscLeaks) set(i, derefs int) { v := derefs + 1 if v < 0 { - Fatalf("invalid derefs count: %v", derefs) + base.Fatalf("invalid derefs count: %v", derefs) } if v > math.MaxUint8 { v = math.MaxUint8 diff --git a/src/cmd/compile/internal/gc/export.go b/src/cmd/compile/internal/gc/export.go index 48f77fa182..1fa64fbe44 100644 --- a/src/cmd/compile/internal/gc/export.go +++ b/src/cmd/compile/internal/gc/export.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/bio" "cmd/internal/src" @@ -14,7 +15,7 @@ import ( func exportf(bout *bio.Writer, format string, args ...interface{}) { fmt.Fprintf(bout, format, args...) - if Debug.Export != 0 { + if base.Debug.Export != 0 { fmt.Printf(format, args...) 
} } @@ -28,7 +29,7 @@ func exportsym(n *Node) { } n.Sym.SetOnExportList(true) - if Flag.E != 0 { + if base.Flag.E != 0 { fmt.Printf("export symbol %v\n", n.Sym) } @@ -53,7 +54,7 @@ func autoexport(n *Node, ctxt Class) { if types.IsExported(n.Sym.Name) || initname(n.Sym.Name) { exportsym(n) } - if Flag.AsmHdr != "" && !n.Sym.Asm() { + if base.Flag.AsmHdr != "" && !n.Sym.Asm() { n.Sym.SetAsm(true) asmlist = append(asmlist, n) } @@ -67,8 +68,8 @@ func dumpexport(bout *bio.Writer) { size := bout.Offset() - off exportf(bout, "\n$$\n") - if Debug.Export != 0 { - fmt.Printf("BenchmarkExportSize:%s 1 %d bytes\n", Ctxt.Pkgpath, size) + if base.Debug.Export != 0 { + fmt.Printf("BenchmarkExportSize:%s 1 %d bytes\n", base.Ctxt.Pkgpath, size) } } @@ -80,7 +81,7 @@ func importsym(ipkg *types.Pkg, s *types.Sym, op Op) *Node { // is declarations for Runtimepkg, which are populated // by loadsys instead. if s.Pkg != Runtimepkg { - Fatalf("missing ONONAME for %v\n", s) + base.Fatalf("missing ONONAME for %v\n", s) } n = dclname(s) @@ -88,7 +89,7 @@ func importsym(ipkg *types.Pkg, s *types.Sym, op Op) *Node { s.Importdef = ipkg } if n.Op != ONONAME && n.Op != op { - redeclare(lineno, s, fmt.Sprintf("during import %q", ipkg.Path)) + redeclare(base.Pos, s, fmt.Sprintf("during import %q", ipkg.Path)) } return n } @@ -111,7 +112,7 @@ func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *types.Type { t := n.Type if t == nil { - Fatalf("importtype %v", s) + base.Fatalf("importtype %v", s) } return t } @@ -122,7 +123,7 @@ func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op Op, ctxt Class, t n := importsym(ipkg, s, op) if n.Op != ONONAME { if n.Op == op && (n.Class() != ctxt || !types.Identical(n.Type, t)) { - redeclare(lineno, s, fmt.Sprintf("during import %q", ipkg.Path)) + redeclare(base.Pos, s, fmt.Sprintf("during import %q", ipkg.Path)) } return nil } @@ -147,7 +148,7 @@ func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val n.SetVal(val) - if 
Flag.E != 0 { + if base.Flag.E != 0 { fmt.Printf("import const %v %L = %v\n", s, t, val) } } @@ -162,7 +163,7 @@ func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) { n.Func = new(Func) - if Flag.E != 0 { + if base.Flag.E != 0 { fmt.Printf("import func %v%S\n", s, t) } } @@ -175,7 +176,7 @@ func importvar(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) { return } - if Flag.E != 0 { + if base.Flag.E != 0 { fmt.Printf("import var %v %L\n", s, t) } } @@ -188,15 +189,15 @@ func importalias(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) { return } - if Flag.E != 0 { + if base.Flag.E != 0 { fmt.Printf("import type %v = %L\n", s, t) } } func dumpasmhdr() { - b, err := bio.Create(Flag.AsmHdr) + b, err := bio.Create(base.Flag.AsmHdr) if err != nil { - Fatalf("%v", err) + base.Fatalf("%v", err) } fmt.Fprintf(b, "// generated by compile -asmhdr from package %s\n\n", localpkg.Name) for _, n := range asmlist { diff --git a/src/cmd/compile/internal/gc/fmt.go b/src/cmd/compile/internal/gc/fmt.go index 51e139e319..9248eb22aa 100644 --- a/src/cmd/compile/internal/gc/fmt.go +++ b/src/cmd/compile/internal/gc/fmt.go @@ -6,6 +6,7 @@ package gc import ( "bytes" + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/src" "fmt" @@ -47,7 +48,7 @@ func fmtFlag(s fmt.State, verb rune) FmtFlag { flag |= FmtSign } if s.Flag(' ') { - Fatalf("FmtUnsigned in format string") + base.Fatalf("FmtUnsigned in format string") } if _, ok := s.Precision(); ok { flag |= FmtComma @@ -313,7 +314,7 @@ func (m fmtMode) prepareArgs(args []interface{}) { case int32, int64, string, types.EType, constant.Value: // OK: printing these types doesn't depend on mode default: - Fatalf("mode.prepareArgs type %T", arg) + base.Fatalf("mode.prepareArgs type %T", arg) } } } @@ -339,14 +340,14 @@ func (n *Node) jconv(s fmt.State, flag FmtFlag) { short := flag&FmtShort != 0 // Useful to see which nodes in an AST printout are actually identical - if 
Debug.DumpPtrs != 0 { + if base.Debug.DumpPtrs != 0 { fmt.Fprintf(s, " p(%p)", n) } if !short && n.Name != nil && n.Name.Vargen != 0 { fmt.Fprintf(s, " g(%d)", n.Name.Vargen) } - if Debug.DumpPtrs != 0 && !short && n.Name != nil && n.Name.Defn != nil { + if base.Debug.DumpPtrs != 0 && !short && n.Name != nil && n.Name.Defn != nil { // Useful to see where Defn is set and what node it points to fmt.Fprintf(s, " defn(%p)", n.Name.Defn) } @@ -817,7 +818,7 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited case mt.Hiter: b.WriteString("map.iter[") default: - Fatalf("unknown internal map type") + base.Fatalf("unknown internal map type") } tconv2(b, m.Key(), 0, mode, visited) b.WriteByte(']') @@ -1416,7 +1417,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) { case OSLICEHEADER: if n.List.Len() != 2 { - Fatalf("bad OSLICEHEADER list length %d", n.List.Len()) + base.Fatalf("bad OSLICEHEADER list length %d", n.List.Len()) } mode.Fprintf(s, "sliceheader{%v,%v,%v}", n.Left, n.List.First(), n.List.Second()) @@ -1806,7 +1807,7 @@ func (n *Node) nconv(s fmt.State, flag FmtFlag, mode fmtMode) { dumpdepth-- default: - Fatalf("unhandled %%N mode: %d", mode) + base.Fatalf("unhandled %%N mode: %d", mode) } } diff --git a/src/cmd/compile/internal/gc/gen.go b/src/cmd/compile/internal/gc/gen.go index d882d6d672..a70bddca81 100644 --- a/src/cmd/compile/internal/gc/gen.go +++ b/src/cmd/compile/internal/gc/gen.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/obj" "cmd/internal/src" @@ -52,14 +53,14 @@ func autotmpname(n int) string { // make a new Node off the books func tempAt(pos src.XPos, curfn *Node, t *types.Type) *Node { if curfn == nil { - Fatalf("no curfn for tempAt") + base.Fatalf("no curfn for tempAt") } if curfn.Op == OCLOSURE { Dump("tempAt", curfn) - Fatalf("adding tempAt to wrong closure function") + base.Fatalf("adding tempAt to wrong closure function") } if t == nil 
{ - Fatalf("tempAt called with nil type") + base.Fatalf("tempAt called with nil type") } s := &types.Sym{ @@ -82,5 +83,5 @@ func tempAt(pos src.XPos, curfn *Node, t *types.Type) *Node { } func temp(t *types.Type) *Node { - return tempAt(lineno, Curfn, t) + return tempAt(base.Pos, Curfn, t) } diff --git a/src/cmd/compile/internal/gc/go.go b/src/cmd/compile/internal/gc/go.go index 947dae476b..e9ff5aeb13 100644 --- a/src/cmd/compile/internal/gc/go.go +++ b/src/cmd/compile/internal/gc/go.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/ssa" "cmd/compile/internal/types" "cmd/internal/obj" @@ -39,7 +40,7 @@ var ( // isRuntimePkg reports whether p is package runtime. func isRuntimePkg(p *types.Pkg) bool { - if Flag.CompilingRuntime && p == localpkg { + if base.Flag.CompilingRuntime && p == localpkg { return true } return p.Path == "runtime" @@ -48,7 +49,7 @@ func isRuntimePkg(p *types.Pkg) bool { // isReflectPkg reports whether p is package reflect. func isReflectPkg(p *types.Pkg) bool { if p == localpkg { - return Ctxt.Pkgpath == "reflect" + return base.Ctxt.Pkgpath == "reflect" } return p.Path == "reflect" } @@ -182,8 +183,6 @@ var instrumenting bool // Whether we are tracking lexical scopes for DWARF. var trackScopes bool -var Ctxt *obj.Link - var nodfp *Node var autogeneratedPos src.XPos diff --git a/src/cmd/compile/internal/gc/gsubr.go b/src/cmd/compile/internal/gc/gsubr.go index 00d425a77c..92a3611cb7 100644 --- a/src/cmd/compile/internal/gc/gsubr.go +++ b/src/cmd/compile/internal/gc/gsubr.go @@ -31,6 +31,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/ssa" "cmd/internal/obj" "cmd/internal/objabi" @@ -57,8 +58,8 @@ type Progs struct { // worker indicates which of the backend workers will use the Progs. 
func newProgs(fn *Node, worker int) *Progs { pp := new(Progs) - if Ctxt.CanReuseProgs() { - sz := len(sharedProgArray) / Flag.LowerC + if base.Ctxt.CanReuseProgs() { + sz := len(sharedProgArray) / base.Flag.LowerC pp.progcache = sharedProgArray[sz*worker : sz*(worker+1)] } pp.curfn = fn @@ -83,19 +84,19 @@ func (pp *Progs) NewProg() *obj.Prog { } else { p = new(obj.Prog) } - p.Ctxt = Ctxt + p.Ctxt = base.Ctxt return p } // Flush converts from pp to machine code. func (pp *Progs) Flush() { plist := &obj.Plist{Firstpc: pp.Text, Curfn: pp.curfn} - obj.Flushplist(Ctxt, plist, pp.NewProg, Ctxt.Pkgpath) + obj.Flushplist(base.Ctxt, plist, pp.NewProg, base.Ctxt.Pkgpath) } // Free clears pp and any associated resources. func (pp *Progs) Free() { - if Ctxt.CanReuseProgs() { + if base.Ctxt.CanReuseProgs() { // Clear progs to enable GC and avoid abuse. s := pp.progcache[:pp.cacheidx] for i := range s { @@ -133,8 +134,8 @@ func (pp *Progs) Prog(as obj.As) *obj.Prog { pp.clearp(pp.next) p.Link = pp.next - if !pp.pos.IsKnown() && Flag.K != 0 { - Warn("prog: unknown position (line 0)") + if !pp.pos.IsKnown() && base.Flag.K != 0 { + base.Warn("prog: unknown position (line 0)") } p.As = as @@ -174,7 +175,7 @@ func (pp *Progs) Appendpp(p *obj.Prog, as obj.As, ftype obj.AddrType, freg int16 func (pp *Progs) settext(fn *Node) { if pp.Text != nil { - Fatalf("Progs.settext called twice") + base.Fatalf("Progs.settext called twice") } ptxt := pp.Prog(obj.ATEXT) pp.Text = ptxt @@ -193,7 +194,7 @@ func (pp *Progs) settext(fn *Node) { // called for both functions with bodies and functions without bodies. func (f *Func) initLSym(hasBody bool) { if f.lsym != nil { - Fatalf("Func.initLSym called twice") + base.Fatalf("Func.initLSym called twice") } if nam := f.Nname; !nam.isBlank() { @@ -215,7 +216,7 @@ func (f *Func) initLSym(hasBody bool) { // using the expected ABI. 
want := obj.ABIInternal if f.lsym.ABI() != want { - Fatalf("function symbol %s has the wrong ABI %v, expected %v", f.lsym.Name, f.lsym.ABI(), want) + base.Fatalf("function symbol %s has the wrong ABI %v, expected %v", f.lsym.Name, f.lsym.ABI(), want) } } @@ -249,7 +250,7 @@ func (f *Func) initLSym(hasBody bool) { } asym.SetABI(aliasABI) asym.Set(obj.AttrDuplicateOK, true) - Ctxt.ABIAliases = append(Ctxt.ABIAliases, asym) + base.Ctxt.ABIAliases = append(base.Ctxt.ABIAliases, asym) } } @@ -278,14 +279,14 @@ func (f *Func) initLSym(hasBody bool) { // Clumsy but important. // See test/recover.go for test cases and src/reflect/value.go // for the actual functions being considered. - if Ctxt.Pkgpath == "reflect" { + if base.Ctxt.Pkgpath == "reflect" { switch f.Nname.Sym.Name { case "callReflect", "callMethod": flag |= obj.WRAPPER } } - Ctxt.InitTextSym(f.lsym, flag) + base.Ctxt.InitTextSym(f.lsym, flag) } func ggloblnod(nam *Node) { @@ -298,7 +299,7 @@ func ggloblnod(nam *Node) { if nam.Type != nil && !nam.Type.HasPointers() { flags |= obj.NOPTR } - Ctxt.Globl(s, nam.Type.Width, flags) + base.Ctxt.Globl(s, nam.Type.Width, flags) if nam.Name.LibfuzzerExtraCounter() { s.Type = objabi.SLIBFUZZER_EXTRA_COUNTER } @@ -315,7 +316,7 @@ func ggloblsym(s *obj.LSym, width int32, flags int16) { s.Set(obj.AttrLocal, true) flags &^= obj.LOCAL } - Ctxt.Globl(s, int64(width), int(flags)) + base.Ctxt.Globl(s, int64(width), int(flags)) } func Addrconst(a *obj.Addr, v int64) { @@ -326,7 +327,7 @@ func Addrconst(a *obj.Addr, v int64) { func Patch(p *obj.Prog, to *obj.Prog) { if p.To.Type != obj.TYPE_BRANCH { - Fatalf("patch: not a branch") + base.Fatalf("patch: not a branch") } p.To.SetTarget(to) p.To.Offset = to.Pc diff --git a/src/cmd/compile/internal/gc/iexport.go b/src/cmd/compile/internal/gc/iexport.go index 447f938a0a..246a057ade 100644 --- a/src/cmd/compile/internal/gc/iexport.go +++ b/src/cmd/compile/internal/gc/iexport.go @@ -204,6 +204,7 @@ package gc import ( "bufio" "bytes" + 
"cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/goobj" "cmd/internal/src" @@ -266,7 +267,7 @@ func iexport(out *bufio.Writer) { p.typIndex[pt] = uint64(i) } if len(p.typIndex) > predeclReserved { - Fatalf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved) + base.Fatalf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved) } // Initialize work queue with exported declarations. @@ -304,8 +305,8 @@ func iexport(out *bufio.Writer) { // Add fingerprint (used by linker object file). // Attach this to the end, so tools (e.g. gcimporter) don't care. - copy(Ctxt.Fingerprint[:], h.Sum(nil)[:]) - out.Write(Ctxt.Fingerprint[:]) + copy(base.Ctxt.Fingerprint[:], h.Sum(nil)[:]) + out.Write(base.Ctxt.Fingerprint[:]) } // writeIndex writes out an object index. mainIndex indicates whether @@ -394,7 +395,7 @@ func (p *iexporter) stringOff(s string) uint64 { // pushDecl adds n to the declaration work queue, if not already present. func (p *iexporter) pushDecl(n *Node) { if n.Sym == nil || asNode(n.Sym.Def) != n && n.Op != OTYPE { - Fatalf("weird Sym: %v, %v", n, n.Sym) + base.Fatalf("weird Sym: %v, %v", n, n.Sym) } // Don't export predeclared declarations. @@ -437,7 +438,7 @@ func (p *iexporter) doDecl(n *Node) { case PFUNC: if n.IsMethod() { - Fatalf("unexpected method: %v", n) + base.Fatalf("unexpected method: %v", n) } // Function. 
@@ -447,7 +448,7 @@ func (p *iexporter) doDecl(n *Node) { w.funcExt(n) default: - Fatalf("unexpected class: %v, %v", n, n.Class()) + base.Fatalf("unexpected class: %v, %v", n, n.Class()) } case OLITERAL: @@ -503,7 +504,7 @@ func (p *iexporter) doDecl(n *Node) { } default: - Fatalf("unexpected node: %v", n) + base.Fatalf("unexpected node: %v", n) } p.declIndex[n] = w.flush() @@ -523,7 +524,7 @@ func (p *iexporter) doInline(f *Node) { } func (w *exportWriter) pos(pos src.XPos) { - p := Ctxt.PosTable.Pos(pos) + p := base.Ctxt.PosTable.Pos(pos) file := p.Base().AbsFilename() line := int64(p.RelLine()) column := int64(p.RelCol()) @@ -579,7 +580,7 @@ func (w *exportWriter) qualifiedIdent(n *Node) { func (w *exportWriter) selector(s *types.Sym) { if w.currPkg == nil { - Fatalf("missing currPkg") + base.Fatalf("missing currPkg") } // Method selectors are rewritten into method symbols (of the @@ -594,7 +595,7 @@ func (w *exportWriter) selector(s *types.Sym) { pkg = localpkg } if s.Pkg != pkg { - Fatalf("package mismatch in selector: %v in package %q, but want %q", s, s.Pkg.Path, pkg.Path) + base.Fatalf("package mismatch in selector: %v in package %q, but want %q", s, s.Pkg.Path, pkg.Path) } } @@ -633,7 +634,7 @@ func (w *exportWriter) startType(k itag) { func (w *exportWriter) doTyp(t *types.Type) { if t.Sym != nil { if t.Sym.Pkg == builtinpkg || t.Sym.Pkg == unsafepkg { - Fatalf("builtin type missing from typIndex: %v", t) + base.Fatalf("builtin type missing from typIndex: %v", t) } w.startType(definedType) @@ -710,7 +711,7 @@ func (w *exportWriter) doTyp(t *types.Type) { } default: - Fatalf("unexpected type: %v", t) + base.Fatalf("unexpected type: %v", t) } } @@ -773,7 +774,7 @@ func constTypeOf(typ *types.Type) constant.Kind { return constant.Complex } - Fatalf("unexpected constant type: %v", typ) + base.Fatalf("unexpected constant type: %v", typ) return 0 } @@ -851,7 +852,7 @@ func (w *exportWriter) mpint(x constant.Value, typ *types.Type) { negative := constant.Sign(x) 
< 0 if !signed && negative { - Fatalf("negative unsigned integer; type %v, value %v", typ, x) + base.Fatalf("negative unsigned integer; type %v, value %v", typ, x) } b := constant.Bytes(x) // little endian @@ -860,10 +861,10 @@ func (w *exportWriter) mpint(x constant.Value, typ *types.Type) { } if len(b) > 0 && b[0] == 0 { - Fatalf("leading zeros") + base.Fatalf("leading zeros") } if uint(len(b)) > maxBytes { - Fatalf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x) + base.Fatalf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x) } maxSmall := 256 - maxBytes @@ -900,7 +901,7 @@ func (w *exportWriter) mpint(x constant.Value, typ *types.Type) { } } if n < maxSmall || n >= 256 { - Fatalf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n) + base.Fatalf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n) } w.data.WriteByte(byte(n)) @@ -916,7 +917,7 @@ func (w *exportWriter) mpint(x constant.Value, typ *types.Type) { func (w *exportWriter) mpfloat(v constant.Value, typ *types.Type) { f := bigFloatVal(v) if f.IsInf() { - Fatalf("infinite constant") + base.Fatalf("infinite constant") } // Break into f = mant × 2**exp, with 0.5 <= mant < 1. 
@@ -930,7 +931,7 @@ func (w *exportWriter) mpfloat(v constant.Value, typ *types.Type) { manti, acc := mant.Int(nil) if acc != big.Exact { - Fatalf("mantissa scaling failed for %f (%s)", f, acc) + base.Fatalf("mantissa scaling failed for %f (%s)", f, acc) } w.mpint(makeInt(manti), typ) if manti.Sign() != 0 { @@ -1158,7 +1159,7 @@ func (w *exportWriter) stmt(n *Node) { w.string(n.Sym.Name) default: - Fatalf("exporter: CANNOT EXPORT: %v\nPlease notify gri@\n", n.Op) + base.Fatalf("exporter: CANNOT EXPORT: %v\nPlease notify gri@\n", n.Op) } } @@ -1169,7 +1170,7 @@ func (w *exportWriter) caseList(sw *Node) { w.uint64(uint64(len(cases))) for _, cas := range cases { if cas.Op != OCASE { - Fatalf("expected OCASE, got %v", cas) + base.Fatalf("expected OCASE, got %v", cas) } w.pos(cas.Pos) w.stmtList(cas.List) @@ -1207,7 +1208,7 @@ func (w *exportWriter) expr(n *Node) { // (somewhat closely following the structure of exprfmt in fmt.go) case ONIL: if !n.Type.HasNil() { - Fatalf("unexpected type for nil: %v", n.Type) + base.Fatalf("unexpected type for nil: %v", n.Type) } if n.Orig != nil && n.Orig != n { w.expr(n.Orig) @@ -1256,7 +1257,7 @@ func (w *exportWriter) expr(n *Node) { var s *types.Sym if n.Left != nil { if n.Left.Op != ONONAME { - Fatalf("expected ONONAME, got %v", n.Left) + base.Fatalf("expected ONONAME, got %v", n.Left) } s = n.Left.Sym } @@ -1365,7 +1366,7 @@ func (w *exportWriter) expr(n *Node) { if op == OAPPEND { w.bool(n.IsDDD()) } else if n.IsDDD() { - Fatalf("exporter: unexpected '...' with %v call", op) + base.Fatalf("exporter: unexpected '...' 
with %v call", op) } case OCALL, OCALLFUNC, OCALLMETH, OCALLINTER, OGETG: @@ -1419,7 +1420,7 @@ func (w *exportWriter) expr(n *Node) { // has already been replaced with literals default: - Fatalf("cannot export %v (%d) node\n"+ + base.Fatalf("cannot export %v (%d) node\n"+ "\t==> please file an issue and assign to gri@", n.Op, int(n.Op)) } } @@ -1484,18 +1485,18 @@ func (w *exportWriter) localIdent(s *types.Sym, v int32) { // TODO(mdempsky): Fix autotmp hack. if i := strings.LastIndex(name, "."); i >= 0 && !strings.HasPrefix(name, ".autotmp_") { - Fatalf("unexpected dot in identifier: %v", name) + base.Fatalf("unexpected dot in identifier: %v", name) } if v > 0 { if strings.Contains(name, "·") { - Fatalf("exporter: unexpected · in symbol name") + base.Fatalf("exporter: unexpected · in symbol name") } name = fmt.Sprintf("%s·%d", name, v) } if !types.IsExported(name) && s.Pkg != w.currPkg { - Fatalf("weird package in name: %v => %v, not %q", s, name, w.currPkg.Path) + base.Fatalf("weird package in name: %v => %v, not %q", s, name, w.currPkg.Path) } w.string(name) diff --git a/src/cmd/compile/internal/gc/iimport.go b/src/cmd/compile/internal/gc/iimport.go index a8a84b8cbc..cc0209ed03 100644 --- a/src/cmd/compile/internal/gc/iimport.go +++ b/src/cmd/compile/internal/gc/iimport.go @@ -8,6 +8,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/bio" "cmd/internal/goobj" @@ -60,7 +61,7 @@ func expandInline(fn *Node) { r := importReaderFor(fn, inlineImporter) if r == nil { - Fatalf("missing import reader for %v", fn) + base.Fatalf("missing import reader for %v", fn) } r.doInline(fn) @@ -83,8 +84,8 @@ type intReader struct { func (r *intReader) int64() int64 { i, err := binary.ReadVarint(r.Reader) if err != nil { - yyerror("import %q: read error: %v", r.pkg.Path, err) - errorexit() + base.Errorf("import %q: read error: %v", r.pkg.Path, err) + base.ErrorExit() } return i } @@ -92,8 +93,8 @@ func (r *intReader) int64() int64 { func 
(r *intReader) uint64() uint64 { i, err := binary.ReadUvarint(r.Reader) if err != nil { - yyerror("import %q: read error: %v", r.pkg.Path, err) - errorexit() + base.Errorf("import %q: read error: %v", r.pkg.Path, err) + base.ErrorExit() } return i } @@ -103,8 +104,8 @@ func iimport(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType) version := ird.uint64() if version != iexportVersion { - yyerror("import %q: unknown export format version %d", pkg.Path, version) - errorexit() + base.Errorf("import %q: unknown export format version %d", pkg.Path, version) + base.ErrorExit() } sLen := ird.uint64() @@ -115,8 +116,8 @@ func iimport(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType) // returning individual substrings very efficiently. data, err := mapFile(in.File(), in.Offset(), int64(sLen+dLen)) if err != nil { - yyerror("import %q: mapping input: %v", pkg.Path, err) - errorexit() + base.Errorf("import %q: mapping input: %v", pkg.Path, err) + base.ErrorExit() } stringData := data[:sLen] declData := data[sLen:] @@ -152,10 +153,10 @@ func iimport(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType) pkg.Lookup("_").Def = asTypesNode(nblank) } else { if pkg.Name != pkgName { - Fatalf("conflicting package names %v and %v for path %q", pkg.Name, pkgName, pkg.Path) + base.Fatalf("conflicting package names %v and %v for path %q", pkg.Name, pkgName, pkg.Path) } if pkg.Height != pkgHeight { - Fatalf("conflicting package heights %v and %v for path %q", pkg.Height, pkgHeight, pkg.Path) + base.Fatalf("conflicting package heights %v and %v for path %q", pkg.Height, pkgHeight, pkg.Path) } } @@ -171,7 +172,7 @@ func iimport(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType) // Create stub declaration. If used, this will // be overwritten by expandDecl. 
if s.Def != nil { - Fatalf("unexpected definition for %v: %v", s, asNode(s.Def)) + base.Fatalf("unexpected definition for %v: %v", s, asNode(s.Def)) } s.Def = asTypesNode(npos(src.NoXPos, dclname(s))) } @@ -195,8 +196,8 @@ func iimport(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType) // Fingerprint. _, err = io.ReadFull(in, fingerprint[:]) if err != nil { - yyerror("import %s: error reading fingerprint", pkg.Path) - errorexit() + base.Errorf("import %s: error reading fingerprint", pkg.Path) + base.ErrorExit() } return fingerprint } @@ -218,7 +219,7 @@ func (p *iimporter) stringAt(off uint64) string { slen, n := binary.Uvarint(x[:n]) if n <= 0 { - Fatalf("varint failed") + base.Fatalf("varint failed") } spos := off + uint64(n) return p.stringData[spos : spos+slen] @@ -281,7 +282,7 @@ func (r *importReader) setPkg() { func (r *importReader) doDecl(n *Node) { if n.Op != ONONAME { - Fatalf("doDecl: unexpected Op for %v: %v", n.Sym, n.Op) + base.Fatalf("doDecl: unexpected Op for %v: %v", n.Sym, n.Op) } tag := r.byte() @@ -352,7 +353,7 @@ func (r *importReader) doDecl(n *Node) { r.varExt(n) default: - Fatalf("unexpected tag: %v", tag) + base.Fatalf("unexpected tag: %v", tag) } } @@ -372,7 +373,7 @@ func (p *importReader) value(typ *types.Type) constant.Value { return makeComplex(p.float(typ), p.float(typ)) } - Fatalf("unexpected value type: %v", typ) + base.Fatalf("unexpected value type: %v", typ) panic("unreachable") } @@ -405,7 +406,7 @@ func (p *importReader) mpint(x *big.Int, typ *types.Type) { v = -(n &^ 1) >> 1 } if v < 1 || uint(v) > maxBytes { - Fatalf("weird decoding: %v, %v => %v", n, signed, v) + base.Fatalf("weird decoding: %v, %v => %v", n, signed, v) } b := make([]byte, v) p.Read(b) @@ -462,10 +463,10 @@ func (r *importReader) pos() src.XPos { } if r.prevBase == nil { - Fatalf("missing posbase") + base.Fatalf("missing posbase") } pos := src.MakePos(r.prevBase, uint(r.prevLine), uint(r.prevColumn)) - return Ctxt.PosTable.XPos(pos) + return 
base.Ctxt.PosTable.XPos(pos) } func (r *importReader) typ() *types.Type { @@ -476,7 +477,7 @@ func (p *iimporter) typAt(off uint64) *types.Type { t, ok := p.typCache[off] if !ok { if off < predeclReserved { - Fatalf("predeclared type missing from cache: %d", off) + base.Fatalf("predeclared type missing from cache: %d", off) } t = p.newReader(off-predeclReserved, nil).typ1() p.typCache[off] = t @@ -487,7 +488,7 @@ func (p *iimporter) typAt(off uint64) *types.Type { func (r *importReader) typ1() *types.Type { switch k := r.kind(); k { default: - Fatalf("unexpected kind tag in %q: %v", r.p.ipkg.Path, k) + base.Fatalf("unexpected kind tag in %q: %v", r.p.ipkg.Path, k) return nil case definedType: @@ -502,7 +503,7 @@ func (r *importReader) typ1() *types.Type { expandDecl(n) } if n.Op != OTYPE { - Fatalf("expected OTYPE, got %v: %v, %v", n.Op, n.Sym, n) + base.Fatalf("expected OTYPE, got %v: %v, %v", n.Op, n.Sym, n) } return n.Type case pointerType: @@ -610,7 +611,7 @@ func (r *importReader) bool() bool { func (r *importReader) int64() int64 { n, err := binary.ReadVarint(r) if err != nil { - Fatalf("readVarint: %v", err) + base.Fatalf("readVarint: %v", err) } return n } @@ -618,7 +619,7 @@ func (r *importReader) int64() int64 { func (r *importReader) uint64() uint64 { n, err := binary.ReadUvarint(r) if err != nil { - Fatalf("readVarint: %v", err) + base.Fatalf("readVarint: %v", err) } return n } @@ -626,7 +627,7 @@ func (r *importReader) uint64() uint64 { func (r *importReader) byte() byte { x, err := r.ReadByte() if err != nil { - Fatalf("declReader.ReadByte: %v", err) + base.Fatalf("declReader.ReadByte: %v", err) } return x } @@ -674,7 +675,7 @@ func (r *importReader) symIdx(s *types.Sym) { idx := int32(r.int64()) if idx != -1 { if s.Linkname != "" { - Fatalf("bad index for linknamed symbol: %v %d\n", lsym, idx) + base.Fatalf("bad index for linknamed symbol: %v %d\n", lsym, idx) } lsym.SymIdx = idx lsym.Set(obj.AttrIndexed, true) @@ -695,7 +696,7 @@ var typeSymIdx = 
make(map[*types.Type][2]int64) func (r *importReader) doInline(n *Node) { if len(n.Func.Inl.Body) != 0 { - Fatalf("%v already has inline body", n) + base.Fatalf("%v already has inline body", n) } funchdr(n) @@ -714,8 +715,8 @@ func (r *importReader) doInline(n *Node) { importlist = append(importlist, n) - if Flag.E > 0 && Flag.LowerM > 2 { - if Flag.LowerM > 3 { + if base.Flag.E > 0 && base.Flag.LowerM > 2 { + if base.Flag.LowerM > 3 { fmt.Printf("inl body for %v %#v: %+v\n", n, n.Type, asNodes(n.Func.Inl.Body)) } else { fmt.Printf("inl body for %v %#v: %v\n", n, n.Type, asNodes(n.Func.Inl.Body)) @@ -793,7 +794,7 @@ func (r *importReader) exprList() []*Node { func (r *importReader) expr() *Node { n := r.node() if n != nil && n.Op == OBLOCK { - Fatalf("unexpected block node: %v", n) + base.Fatalf("unexpected block node: %v", n) } return n } @@ -854,11 +855,11 @@ func (r *importReader) node() *Node { case OSTRUCTLIT: // TODO(mdempsky): Export position information for OSTRUCTKEY nodes. - savedlineno := lineno - lineno = r.pos() - n := nodl(lineno, OCOMPLIT, nil, typenod(r.typ())) + savedlineno := base.Pos + base.Pos = r.pos() + n := nodl(base.Pos, OCOMPLIT, nil, typenod(r.typ())) n.List.Set(r.elemList()) // special handling of field names - lineno = savedlineno + base.Pos = savedlineno return n // case OARRAYLIT, OSLICELIT, OMAPLIT: @@ -1070,7 +1071,7 @@ func (r *importReader) node() *Node { return nil default: - Fatalf("cannot import %v (%d) node\n"+ + base.Fatalf("cannot import %v (%d) node\n"+ "\t==> please file an issue and assign to gri@", op, int(op)) panic("unreachable") // satisfy compiler } diff --git a/src/cmd/compile/internal/gc/init.go b/src/cmd/compile/internal/gc/init.go index c3b66a2ad2..9319faf6a0 100644 --- a/src/cmd/compile/internal/gc/init.go +++ b/src/cmd/compile/internal/gc/init.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/obj" ) @@ -44,7 +45,7 @@ func fninit(n []*Node) { // Make a 
function that contains all the initialization statements. if len(nf) > 0 { - lineno = nf[0].Pos // prolog/epilog gets line number of first init stmt + base.Pos = nf[0].Pos // prolog/epilog gets line number of first init stmt initializers := lookup("init") fn := dclfunc(initializers, nod(OTFUNC, nil, nil)) for _, dcl := range initTodo.Func.Dcl { @@ -67,7 +68,7 @@ func fninit(n []*Node) { // We only generate temps using initTodo if there // are package-scope initialization statements, so // something's weird if we get here. - Fatalf("initTodo still has declarations") + base.Fatalf("initTodo still has declarations") } initTodo = nil diff --git a/src/cmd/compile/internal/gc/initorder.go b/src/cmd/compile/internal/gc/initorder.go index ecbfc5631a..f553a3f057 100644 --- a/src/cmd/compile/internal/gc/initorder.go +++ b/src/cmd/compile/internal/gc/initorder.go @@ -8,6 +8,8 @@ import ( "bytes" "container/heap" "fmt" + + "cmd/compile/internal/base" ) // Package initialization @@ -89,7 +91,7 @@ func initOrder(l []*Node) []*Node { case ODCLCONST, ODCLFUNC, ODCLTYPE: // nop default: - Fatalf("unexpected package-level statement: %v", n) + base.Fatalf("unexpected package-level statement: %v", n) } } @@ -104,10 +106,10 @@ func initOrder(l []*Node) []*Node { // confused us and there might not be // a loop. Let the user fix those // first. - ExitIfErrors() + base.ExitIfErrors() findInitLoopAndExit(firstLHS(n), new([]*Node)) - Fatalf("initialization unfinished, but failed to identify loop") + base.Fatalf("initialization unfinished, but failed to identify loop") } } } @@ -115,7 +117,7 @@ func initOrder(l []*Node) []*Node { // Invariant consistency check. If this is non-zero, then we // should have found a cycle above. 
if len(o.blocking) != 0 { - Fatalf("expected empty map: %v", o.blocking) + base.Fatalf("expected empty map: %v", o.blocking) } return s.out @@ -123,7 +125,7 @@ func initOrder(l []*Node) []*Node { func (o *InitOrder) processAssign(n *Node) { if n.Initorder() != InitNotStarted || n.Xoffset != BADWIDTH { - Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset) + base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset) } n.SetInitorder(InitPending) @@ -154,7 +156,7 @@ func (o *InitOrder) flushReady(initialize func(*Node)) { for o.ready.Len() != 0 { n := heap.Pop(&o.ready).(*Node) if n.Initorder() != InitPending || n.Xoffset != 0 { - Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset) + base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset) } initialize(n) @@ -238,8 +240,8 @@ func reportInitLoopAndExit(l []*Node) { } fmt.Fprintf(&msg, "\t%v: %v", l[0].Line(), l[0]) - yyerrorl(l[0].Pos, msg.String()) - errorexit() + base.ErrorfAt(l[0].Pos, msg.String()) + base.ErrorExit() } // collectDeps returns all of the package-level functions and @@ -256,7 +258,7 @@ func collectDeps(n *Node, transitive bool) NodeSet { case ODCLFUNC: d.inspectList(n.Nbody) default: - Fatalf("unexpected Op: %v", n.Op) + base.Fatalf("unexpected Op: %v", n.Op) } return d.seen } @@ -347,6 +349,6 @@ func firstLHS(n *Node) *Node { return n.List.First() } - Fatalf("unexpected Op: %v", n.Op) + base.Fatalf("unexpected Op: %v", n.Op) return nil } diff --git a/src/cmd/compile/internal/gc/inl.go b/src/cmd/compile/internal/gc/inl.go index fc467dd95a..d71ea9b5ed 100644 --- a/src/cmd/compile/internal/gc/inl.go +++ b/src/cmd/compile/internal/gc/inl.go @@ -27,6 +27,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/logopt" "cmd/compile/internal/types" "cmd/internal/obj" @@ -60,7 +61,7 @@ func fnpkg(fn *Node) *types.Pkg { rcvr = rcvr.Elem() } if rcvr.Sym == nil { - Fatalf("receiver with no sym: [%v] %L (%v)", fn.Sym, fn, rcvr) + 
base.Fatalf("receiver with no sym: [%v] %L (%v)", fn.Sym, fn, rcvr) } return rcvr.Sym.Pkg } @@ -86,7 +87,7 @@ func typecheckinl(fn *Node) { return // typecheckinl on local function } - if Flag.LowerM > 2 || Debug.Export != 0 { + if base.Flag.LowerM > 2 || base.Debug.Export != 0 { fmt.Printf("typecheck import [%v] %L { %#v }\n", fn.Sym, fn, asNodes(fn.Func.Inl.Body)) } @@ -103,7 +104,7 @@ func typecheckinl(fn *Node) { fn.Func.Inl.Dcl = append(fn.Func.Inl.Dcl, fn.Func.Dcl...) fn.Func.Dcl = nil - lineno = lno + base.Pos = lno } // Caninl determines whether fn is inlineable. @@ -111,17 +112,17 @@ func typecheckinl(fn *Node) { // fn and ->nbody will already have been typechecked. func caninl(fn *Node) { if fn.Op != ODCLFUNC { - Fatalf("caninl %v", fn) + base.Fatalf("caninl %v", fn) } if fn.Func.Nname == nil { - Fatalf("caninl no nname %+v", fn) + base.Fatalf("caninl no nname %+v", fn) } var reason string // reason, if any, that the function was not inlined - if Flag.LowerM > 1 || logopt.Enabled() { + if base.Flag.LowerM > 1 || logopt.Enabled() { defer func() { if reason != "" { - if Flag.LowerM > 1 { + if base.Flag.LowerM > 1 { fmt.Printf("%v: cannot inline %v: %s\n", fn.Line(), fn.Func.Nname, reason) } if logopt.Enabled() { @@ -138,13 +139,13 @@ func caninl(fn *Node) { } // If marked "go:norace" and -race compilation, don't inline. - if Flag.Race && fn.Func.Pragma&Norace != 0 { + if base.Flag.Race && fn.Func.Pragma&Norace != 0 { reason = "marked go:norace with -race compilation" return } // If marked "go:nocheckptr" and -d checkptr compilation, don't inline. 
- if Debug.Checkptr != 0 && fn.Func.Pragma&NoCheckPtr != 0 { + if base.Debug.Checkptr != 0 && fn.Func.Pragma&NoCheckPtr != 0 { reason = "marked go:nocheckptr" return } @@ -179,7 +180,7 @@ func caninl(fn *Node) { } if fn.Typecheck() == 0 { - Fatalf("caninl on non-typechecked function %v", fn) + base.Fatalf("caninl on non-typechecked function %v", fn) } n := fn.Func.Nname @@ -189,7 +190,7 @@ func caninl(fn *Node) { defer n.Func.SetInlinabilityChecked(true) cc := int32(inlineExtraCallCost) - if Flag.LowerL == 4 { + if base.Flag.LowerL == 4 { cc = 1 // this appears to yield better performance than 0. } @@ -222,9 +223,9 @@ func caninl(fn *Node) { Body: inlcopylist(fn.Nbody.Slice()), } - if Flag.LowerM > 1 { + if base.Flag.LowerM > 1 { fmt.Printf("%v: can inline %#v with cost %d as: %#v { %#v }\n", fn.Line(), n, inlineMaxBudget-visitor.budget, fn.Type, asNodes(n.Func.Inl.Body)) - } else if Flag.LowerM != 0 { + } else if base.Flag.LowerM != 0 { fmt.Printf("%v: can inline %v\n", fn.Line(), n) } if logopt.Enabled() { @@ -239,10 +240,10 @@ func inlFlood(n *Node) { return } if n.Op != ONAME || n.Class() != PFUNC { - Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op, n.Class()) + base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op, n.Class()) } if n.Func == nil { - Fatalf("inlFlood: missing Func on %v", n) + base.Fatalf("inlFlood: missing Func on %v", n) } if n.Func.Inl == nil { return @@ -286,7 +287,7 @@ func inlFlood(n *Node) { // // When we do, we'll probably want: // inlFlood(n.Func.Closure.Func.Nname) - Fatalf("unexpected closure in inlinable function") + base.Fatalf("unexpected closure in inlinable function") } return true }) @@ -352,7 +353,7 @@ func (v *hairyVisitor) visit(n *Node) bool { case OCALLMETH: t := n.Left.Type if t == nil { - Fatalf("no function type for [%p] %+v\n", n.Left, n.Left) + base.Fatalf("no function type for [%p] %+v\n", n.Left, n.Left) } if isRuntimePkg(n.Left.Sym.Pkg) { fn := n.Left.Sym.Name @@ -413,7 +414,7 @@ func (v *hairyVisitor) visit(n 
*Node) bool { case OBREAK, OCONTINUE: if n.Sym != nil { // Should have short-circuited due to labeledControl above. - Fatalf("unexpected labeled break/continue: %v", n) + base.Fatalf("unexpected labeled break/continue: %v", n) } case OIF: @@ -433,7 +434,7 @@ func (v *hairyVisitor) visit(n *Node) bool { v.budget-- // When debugging, don't stop early, to get full cost of inlining this function - if v.budget < 0 && Flag.LowerM < 2 && !logopt.Enabled() { + if v.budget < 0 && base.Flag.LowerM < 2 && !logopt.Enabled() { return true } @@ -465,7 +466,7 @@ func inlcopy(n *Node) *Node { m := n.copy() if n.Op != OCALLPART && m.Func != nil { - Fatalf("unexpected Func: %v", m) + base.Fatalf("unexpected Func: %v", m) } m.Left = inlcopy(n.Left) m.Right = inlcopy(n.Right) @@ -517,7 +518,7 @@ func inlcalls(fn *Node) { inlMap := make(map[*Node]bool) fn = inlnode(fn, maxCost, inlMap) if fn != Curfn { - Fatalf("inlnode replaced curfn") + base.Fatalf("inlnode replaced curfn") } Curfn = savefn } @@ -548,7 +549,7 @@ func inlconv2expr(n *Node) *Node { // statements. func inlconv2list(n *Node) []*Node { if n.Op != OINLCALL || n.Rlist.Len() == 0 { - Fatalf("inlconv2list %+v\n", n) + base.Fatalf("inlconv2list %+v\n", n) } s := n.Rlist.Slice() @@ -595,7 +596,7 @@ func inlnode(n *Node, maxCost int32, inlMap map[*Node]bool) *Node { case OCALLMETH: // Prevent inlining some reflect.Value methods when using checkptr, // even when package reflect was compiled without it (#35073). 
- if s := n.Left.Sym; Debug.Checkptr != 0 && isReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") { + if s := n.Left.Sym; base.Debug.Checkptr != 0 && isReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") { return n } } @@ -676,7 +677,7 @@ func inlnode(n *Node, maxCost int32, inlMap map[*Node]bool) *Node { switch n.Op { case OCALLFUNC: - if Flag.LowerM > 3 { + if base.Flag.LowerM > 3 { fmt.Printf("%v:call to func %+v\n", n.Line(), n.Left) } if isIntrinsicCall(n) { @@ -687,19 +688,19 @@ func inlnode(n *Node, maxCost int32, inlMap map[*Node]bool) *Node { } case OCALLMETH: - if Flag.LowerM > 3 { + if base.Flag.LowerM > 3 { fmt.Printf("%v:call to meth %L\n", n.Line(), n.Left.Right) } // typecheck should have resolved ODOTMETH->type, whose nname points to the actual function. if n.Left.Type == nil { - Fatalf("no function type for [%p] %+v\n", n.Left, n.Left) + base.Fatalf("no function type for [%p] %+v\n", n.Left, n.Left) } n = mkinlcall(n, n.Left.MethodName(), maxCost, inlMap) } - lineno = lno + base.Pos = lno return n } @@ -767,12 +768,12 @@ FindRHS: break FindRHS } } - Fatalf("%v missing from LHS of %v", n, defn) + base.Fatalf("%v missing from LHS of %v", n, defn) default: return nil } if rhs == nil { - Fatalf("RHS is nil: %v", defn) + base.Fatalf("RHS is nil: %v", defn) } unsafe, _ := reassigned(n) @@ -791,7 +792,7 @@ FindRHS: // TODO: handle initial declaration not including an assignment and followed by a single assignment? 
func reassigned(n *Node) (bool, *Node) { if n.Op != ONAME { - Fatalf("reassigned %v", n) + base.Fatalf("reassigned %v", n) } // no way to reliably check for no-reassignment of globals, assume it can be if n.Name.Curfn == nil { @@ -869,7 +870,7 @@ func inlParam(t *types.Field, as *Node, inlvars map[*Node]*Node) *Node { inlvar := inlvars[n] if inlvar == nil { - Fatalf("missing inlvar for %v", n) + base.Fatalf("missing inlvar for %v", n) } as.Ninit.Append(nod(ODCL, inlvar, nil)) inlvar.Name.Defn = as @@ -922,7 +923,7 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { } if inlMap[fn] { - if Flag.LowerM > 1 { + if base.Flag.LowerM > 1 { fmt.Printf("%v: cannot inline %v into %v: repeated recursive cycle\n", n.Line(), fn, Curfn.funcname()) } return n @@ -931,17 +932,17 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { defer func() { inlMap[fn] = false }() - if Debug.TypecheckInl == 0 { + if base.Debug.TypecheckInl == 0 { typecheckinl(fn) } // We have a function node, and it has an inlineable body. 
- if Flag.LowerM > 1 { + if base.Flag.LowerM > 1 { fmt.Printf("%v: inlining call to %v %#v { %#v }\n", n.Line(), fn.Sym, fn.Type, asNodes(fn.Func.Inl.Body)) - } else if Flag.LowerM != 0 { + } else if base.Flag.LowerM != 0 { fmt.Printf("%v: inlining call to %v\n", n.Line(), fn) } - if Flag.LowerM > 2 { + if base.Flag.LowerM > 2 { fmt.Printf("%v: Before inlining: %+v\n", n.Line(), n) } @@ -962,7 +963,7 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { callee = callee.Left } if callee.Op != ONAME && callee.Op != OCLOSURE && callee.Op != OMETHEXPR { - Fatalf("unexpected callee expression: %v", callee) + base.Fatalf("unexpected callee expression: %v", callee) } } @@ -986,7 +987,7 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { // the reassigned check via some sort of copy propagation this would most // likely need to be changed to a loop to walk up to the correct Param if o == nil || (o.Name.Curfn != Curfn && o.Name.Curfn.Func.OClosure != Curfn) { - Fatalf("%v: unresolvable capture %v %v\n", n.Line(), fn, v) + base.Fatalf("%v: unresolvable capture %v %v\n", n.Line(), fn, v) } if v.Name.Byval() { @@ -1022,11 +1023,11 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { // this never actually happens. We currently // perform inlining before escape analysis, so // nothing should have moved to the heap yet. - Fatalf("impossible: %v", ln) + base.Fatalf("impossible: %v", ln) } inlf := typecheck(inlvar(ln), ctxExpr) inlvars[ln] = inlf - if Flag.GenDwarfInl > 0 { + if base.Flag.GenDwarfInl > 0 { if ln.Class() == PPARAM { inlf.Name.SetInlFormal(true) } else { @@ -1064,7 +1065,7 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { m = retvar(t, i) } - if Flag.GenDwarfInl > 0 { + if base.Flag.GenDwarfInl > 0 { // Don't update the src.Pos on a return variable if it // was manufactured by the inliner (e.g. "~R2"); such vars // were not part of the original callee. 
@@ -1083,7 +1084,7 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { as.SetColas(true) if n.Op == OCALLMETH { if n.Left.Left == nil { - Fatalf("method call without receiver: %+v", n) + base.Fatalf("method call without receiver: %+v", n) } as.Rlist.Append(n.Left.Left) } @@ -1150,10 +1151,10 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { inlgen++ parent := -1 - if b := Ctxt.PosTable.Pos(n.Pos).Base(); b != nil { + if b := base.Ctxt.PosTable.Pos(n.Pos).Base(); b != nil { parent = b.InliningIndex() } - newIndex := Ctxt.InlTree.Add(parent, n.Pos, fn.Sym.Linksym()) + newIndex := base.Ctxt.InlTree.Add(parent, n.Pos, fn.Sym.Linksym()) // Add an inline mark just before the inlined body. // This mark is inline in the code so that it's a reasonable spot @@ -1165,9 +1166,9 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { inlMark.Xoffset = int64(newIndex) ninit.Append(inlMark) - if Flag.GenDwarfInl > 0 { + if base.Flag.GenDwarfInl > 0 { if !fn.Sym.Linksym().WasInlined() { - Ctxt.DwFixups.SetPrecursorFunc(fn.Sym.Linksym(), fn) + base.Ctxt.DwFixups.SetPrecursorFunc(fn.Sym.Linksym(), fn) fn.Sym.Linksym().Set(obj.AttrWasInlined, true) } } @@ -1188,7 +1189,7 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { typecheckslice(body, ctxStmt) - if Flag.GenDwarfInl > 0 { + if base.Flag.GenDwarfInl > 0 { for _, v := range inlfvars { v.Pos = subst.updatedPos(v.Pos) } @@ -1216,7 +1217,7 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { } } - if Flag.LowerM > 2 { + if base.Flag.LowerM > 2 { fmt.Printf("%v: After inlining %+v\n\n", call.Line(), call) } @@ -1227,7 +1228,7 @@ func mkinlcall(n, fn *Node, maxCost int32, inlMap map[*Node]bool) *Node { // PAUTO's in the calling functions, and link them off of the // PPARAM's, PAUTOS and PPARAMOUTs of the called function. 
func inlvar(var_ *Node) *Node { - if Flag.LowerM > 3 { + if base.Flag.LowerM > 3 { fmt.Printf("inlvar %+v\n", var_) } @@ -1310,13 +1311,13 @@ func (subst *inlsubst) node(n *Node) *Node { switch n.Op { case ONAME: if inlvar := subst.inlvars[n]; inlvar != nil { // These will be set during inlnode - if Flag.LowerM > 2 { + if base.Flag.LowerM > 2 { fmt.Printf("substituting name %+v -> %+v\n", n, inlvar) } return inlvar } - if Flag.LowerM > 2 { + if base.Flag.LowerM > 2 { fmt.Printf("not substituting name %+v\n", n) } return n @@ -1382,7 +1383,7 @@ func (subst *inlsubst) node(n *Node) *Node { m.Ninit.Set(nil) if n.Op == OCLOSURE { - Fatalf("cannot inline function containing closure: %+v", n) + base.Fatalf("cannot inline function containing closure: %+v", n) } m.Left = subst.node(n.Left) @@ -1396,7 +1397,7 @@ func (subst *inlsubst) node(n *Node) *Node { } func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos { - pos := Ctxt.PosTable.Pos(xpos) + pos := base.Ctxt.PosTable.Pos(xpos) oldbase := pos.Base() // can be nil newbase := subst.bases[oldbase] if newbase == nil { @@ -1404,7 +1405,7 @@ func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos { subst.bases[oldbase] = newbase } pos.SetBase(newbase) - return Ctxt.PosTable.XPos(pos) + return base.Ctxt.PosTable.XPos(pos) } func pruneUnusedAutos(ll []*Node, vis *hairyVisitor) []*Node { @@ -1449,22 +1450,22 @@ func devirtualizeCall(call *Node) { x = typecheck(x, ctxExpr|ctxCallee) switch x.Op { case ODOTMETH: - if Flag.LowerM != 0 { - Warnl(call.Pos, "devirtualizing %v to %v", call.Left, typ) + if base.Flag.LowerM != 0 { + base.WarnfAt(call.Pos, "devirtualizing %v to %v", call.Left, typ) } call.Op = OCALLMETH call.Left = x case ODOTINTER: // Promoted method from embedded interface-typed field (#42279). 
- if Flag.LowerM != 0 { - Warnl(call.Pos, "partially devirtualizing %v to %v", call.Left, typ) + if base.Flag.LowerM != 0 { + base.WarnfAt(call.Pos, "partially devirtualizing %v to %v", call.Left, typ) } call.Op = OCALLINTER call.Left = x default: // TODO(mdempsky): Turn back into Fatalf after more testing. - if Flag.LowerM != 0 { - Warnl(call.Pos, "failed to devirtualize %v (%v)", x, x.Op) + if base.Flag.LowerM != 0 { + base.WarnfAt(call.Pos, "failed to devirtualize %v (%v)", x, x.Op) } return } diff --git a/src/cmd/compile/internal/gc/lex.go b/src/cmd/compile/internal/gc/lex.go index f01891f365..30ef4d0eb2 100644 --- a/src/cmd/compile/internal/gc/lex.go +++ b/src/cmd/compile/internal/gc/lex.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/syntax" "cmd/internal/objabi" "cmd/internal/src" @@ -13,7 +14,7 @@ import ( ) func makePos(b *src.PosBase, line, col uint) src.XPos { - return Ctxt.PosTable.XPos(src.MakePos(b, line, col)) + return base.Ctxt.PosTable.XPos(src.MakePos(b, line, col)) } func isSpace(c rune) bool { diff --git a/src/cmd/compile/internal/gc/main.go b/src/cmd/compile/internal/gc/main.go index 2794ba3694..c66139027a 100644 --- a/src/cmd/compile/internal/gc/main.go +++ b/src/cmd/compile/internal/gc/main.go @@ -9,6 +9,7 @@ package gc import ( "bufio" "bytes" + "cmd/compile/internal/base" "cmd/compile/internal/logopt" "cmd/compile/internal/ssa" "cmd/compile/internal/types" @@ -35,13 +36,13 @@ import ( ) func hidePanic() { - if Debug.Panic == 0 && Errors() > 0 { + if base.Debug.Panic == 0 && base.Errors() > 0 { // If we've already complained about things // in the program, don't bother complaining // about a panic too; let the user clean up // the code and try again. 
if err := recover(); err != nil { - errorexit() + base.ErrorExit() } } } @@ -61,16 +62,16 @@ func Main(archInit func(*Arch)) { archInit(&thearch) - Ctxt = obj.Linknew(thearch.LinkArch) - Ctxt.DiagFunc = yyerror - Ctxt.DiagFlush = flusherrors - Ctxt.Bso = bufio.NewWriter(os.Stdout) + base.Ctxt = obj.Linknew(thearch.LinkArch) + base.Ctxt.DiagFunc = base.Errorf + base.Ctxt.DiagFlush = base.FlushErrors + base.Ctxt.Bso = bufio.NewWriter(os.Stdout) // UseBASEntries is preferred because it shaves about 2% off build time, but LLDB, dsymutil, and dwarfdump // on Darwin don't support it properly, especially since macOS 10.14 (Mojave). This is exposed as a flag // to allow testing with LLVM tools on Linux, and to help with reporting this bug to the LLVM project. // See bugs 31188 and 21945 (CLs 170638, 98075, 72371). - Ctxt.UseBASEntries = Ctxt.Headtype != objabi.Hdarwin + base.Ctxt.UseBASEntries = base.Ctxt.Headtype != objabi.Hdarwin localpkg = types.NewPkg("", "") localpkg.Prefix = "\"\"" @@ -112,15 +113,15 @@ func Main(archInit func(*Arch)) { // pseudo-package used for methods with anonymous receivers gopkg = types.NewPkg("go", "") - DebugSSA = ssa.PhaseOption - ParseFlags() + base.DebugSSA = ssa.PhaseOption + base.ParseFlags() // Record flags that affect the build result. (And don't // record flags that don't, since that would cause spurious // changes in the binary.) recordFlags("B", "N", "l", "msan", "race", "shared", "dynlink", "dwarflocationlists", "dwarfbasentries", "smallframes", "spectre") - if !enableTrace && Flag.LowerT { + if !enableTrace && base.Flag.LowerT { log.Fatalf("compiler not built with support for -t") } @@ -128,59 +129,59 @@ func Main(archInit func(*Arch)) { // default: inlining on. 
(Flag.LowerL == 1) // -l: inlining off (Flag.LowerL == 0) // -l=2, -l=3: inlining on again, with extra debugging (Flag.LowerL > 1) - if Flag.LowerL <= 1 { - Flag.LowerL = 1 - Flag.LowerL + if base.Flag.LowerL <= 1 { + base.Flag.LowerL = 1 - base.Flag.LowerL } - if Flag.SmallFrames { + if base.Flag.SmallFrames { maxStackVarSize = 128 * 1024 maxImplicitStackVarSize = 16 * 1024 } - if Flag.Dwarf { - Ctxt.DebugInfo = debuginfo - Ctxt.GenAbstractFunc = genAbstractFunc - Ctxt.DwFixups = obj.NewDwarfFixupTable(Ctxt) + if base.Flag.Dwarf { + base.Ctxt.DebugInfo = debuginfo + base.Ctxt.GenAbstractFunc = genAbstractFunc + base.Ctxt.DwFixups = obj.NewDwarfFixupTable(base.Ctxt) } else { // turn off inline generation if no dwarf at all - Flag.GenDwarfInl = 0 - Ctxt.Flag_locationlists = false + base.Flag.GenDwarfInl = 0 + base.Ctxt.Flag_locationlists = false } - if Ctxt.Flag_locationlists && len(Ctxt.Arch.DWARFRegisters) == 0 { - log.Fatalf("location lists requested but register mapping not available on %v", Ctxt.Arch.Name) + if base.Ctxt.Flag_locationlists && len(base.Ctxt.Arch.DWARFRegisters) == 0 { + log.Fatalf("location lists requested but register mapping not available on %v", base.Ctxt.Arch.Name) } checkLang() - if Flag.SymABIs != "" { - readSymABIs(Flag.SymABIs, Ctxt.Pkgpath) + if base.Flag.SymABIs != "" { + readSymABIs(base.Flag.SymABIs, base.Ctxt.Pkgpath) } if ispkgin(omit_pkgs) { - Flag.Race = false - Flag.MSan = false + base.Flag.Race = false + base.Flag.MSan = false } - thearch.LinkArch.Init(Ctxt) + thearch.LinkArch.Init(base.Ctxt) startProfile() - if Flag.Race { + if base.Flag.Race { racepkg = types.NewPkg("runtime/race", "") } - if Flag.MSan { + if base.Flag.MSan { msanpkg = types.NewPkg("runtime/msan", "") } - if Flag.Race || Flag.MSan { + if base.Flag.Race || base.Flag.MSan { instrumenting = true } - if Flag.Dwarf { - dwarf.EnableLogging(Debug.DwarfInl != 0) + if base.Flag.Dwarf { + dwarf.EnableLogging(base.Debug.DwarfInl != 0) } - if Debug.SoftFloat != 0 { + if 
base.Debug.SoftFloat != 0 { thearch.SoftFloat = true } - if Flag.JSON != "" { // parse version,destination from json logging optimization. - logopt.LogJsonOption(Flag.JSON) + if base.Flag.JSON != "" { // parse version,destination from json logging optimization. + logopt.LogJsonOption(base.Flag.JSON) } ssaDump = os.Getenv("GOSSAFUNC") @@ -197,7 +198,7 @@ func Main(archInit func(*Arch)) { } } - trackScopes = Flag.Dwarf + trackScopes = base.Flag.Dwarf Widthptr = thearch.LinkArch.PtrSize Widthreg = thearch.LinkArch.RegSize @@ -207,7 +208,7 @@ func Main(archInit func(*Arch)) { // would lead to import cycles) types.Widthptr = Widthptr types.Dowidth = dowidth - types.Fatalf = Fatalf + types.Fatalf = base.Fatalf types.Sconv = func(s *types.Sym, flag, mode int) string { return sconv(s, FmtFlag(flag), fmtMode(mode)) } @@ -226,7 +227,7 @@ func Main(archInit func(*Arch)) { types.FmtLeft = int(FmtLeft) types.FmtUnsigned = int(FmtUnsigned) types.FErr = int(FErr) - types.Ctxt = Ctxt + types.Ctxt = base.Ctxt initUniverse() @@ -288,10 +289,10 @@ func Main(archInit func(*Arch)) { if n.Op == ODCLFUNC { Curfn = n decldepth = 1 - errorsBefore := Errors() + errorsBefore := base.Errors() typecheckslice(Curfn.Nbody.Slice(), ctxStmt) checkreturn(Curfn) - if Errors() > errorsBefore { + if base.Errors() > errorsBefore { Curfn.Nbody.Set(nil) // type errors; do not compile } // Now that we've checked whether n terminates, @@ -304,7 +305,7 @@ func Main(archInit func(*Arch)) { // check past phase 9 isn't sufficient, as we may exit with other errors // before then, thus skipping map key errors. 
checkMapKeys() - ExitIfErrors() + base.ExitIfErrors() timings.AddEvent(fcount, "funcs") @@ -322,11 +323,11 @@ func Main(archInit func(*Arch)) { } capturevarscomplete = true Curfn = nil - ExitIfErrors() + base.ExitIfErrors() // Phase 5: Inlining timings.Start("fe", "inlining") - if Debug.TypecheckInl != 0 { + if base.Debug.TypecheckInl != 0 { // Typecheck imported function bodies if Debug.l > 1, // otherwise lazily when used or re-exported. for _, n := range importlist { @@ -334,10 +335,10 @@ func Main(archInit func(*Arch)) { typecheckinl(n) } } - ExitIfErrors() + base.ExitIfErrors() } - if Flag.LowerL != 0 { + if base.Flag.LowerL != 0 { // Find functions that can be inlined and clone them before walk expands them. visitBottomUp(xtop, func(list []*Node, recursive bool) { numfns := numNonClosures(list) @@ -348,7 +349,7 @@ func Main(archInit func(*Arch)) { // across more than one function. caninl(n) } else { - if Flag.LowerM > 1 { + if base.Flag.LowerM > 1 { fmt.Printf("%v: cannot inline %v: recursive\n", n.Line(), n.Func.Nname) } } @@ -379,7 +380,7 @@ func Main(archInit func(*Arch)) { // checking. This must happen before transformclosure. // We'll do the final check after write barriers are // inserted. - if Flag.CompilingRuntime { + if base.Flag.CompilingRuntime { nowritebarrierrecCheck = newNowritebarrierrecChecker() } @@ -430,10 +431,10 @@ func Main(archInit func(*Arch)) { // Finalize DWARF inline routine DIEs, then explicitly turn off // DWARF inlining gen so as to avoid problems with generated // method wrappers. - if Ctxt.DwFixups != nil { - Ctxt.DwFixups.Finalize(Ctxt.Pkgpath, Debug.DwarfInl != 0) - Ctxt.DwFixups = nil - Flag.GenDwarfInl = 0 + if base.Ctxt.DwFixups != nil { + base.Ctxt.DwFixups.Finalize(base.Ctxt.Pkgpath, base.Debug.DwarfInl != 0) + base.Ctxt.DwFixups = nil + base.Flag.GenDwarfInl = 0 } // Phase 9: Check external declarations. 
@@ -446,14 +447,14 @@ func Main(archInit func(*Arch)) { // Check the map keys again, since we typechecked the external // declarations. checkMapKeys() - ExitIfErrors() + base.ExitIfErrors() // Write object data to disk. timings.Start("be", "dumpobj") dumpdata() - Ctxt.NumberSyms() + base.Ctxt.NumberSyms() dumpobj() - if Flag.AsmHdr != "" { + if base.Flag.AsmHdr != "" { dumpasmhdr() } @@ -463,27 +464,27 @@ func Main(archInit func(*Arch)) { }) for _, large := range largeStackFrames { if large.callee != 0 { - yyerrorl(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args + %d MB callee", large.locals>>20, large.args>>20, large.callee>>20) + base.ErrorfAt(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args + %d MB callee", large.locals>>20, large.args>>20, large.callee>>20) } else { - yyerrorl(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args", large.locals>>20, large.args>>20) + base.ErrorfAt(large.pos, "stack frame too large (>1GB): %d MB locals + %d MB args", large.locals>>20, large.args>>20) } } if len(funcStack) != 0 { - Fatalf("funcStack is non-empty: %v", len(funcStack)) + base.Fatalf("funcStack is non-empty: %v", len(funcStack)) } if len(compilequeue) != 0 { - Fatalf("%d uncompiled functions", len(compilequeue)) + base.Fatalf("%d uncompiled functions", len(compilequeue)) } - logopt.FlushLoggedOpts(Ctxt, Ctxt.Pkgpath) - ExitIfErrors() + logopt.FlushLoggedOpts(base.Ctxt, base.Ctxt.Pkgpath) + base.ExitIfErrors() - flusherrors() + base.FlushErrors() timings.Stop() - if Flag.Bench != "" { - if err := writebench(Flag.Bench); err != nil { + if base.Flag.Bench != "" { + if err := writebench(base.Flag.Bench); err != nil { log.Fatalf("cannot write benchmark data: %v", err) } } @@ -510,7 +511,7 @@ func writebench(filename string) error { fmt.Fprintln(&buf, "commit:", objabi.Version) fmt.Fprintln(&buf, "goos:", runtime.GOOS) fmt.Fprintln(&buf, "goarch:", runtime.GOARCH) - timings.Write(&buf, 
"BenchmarkCompile:"+Ctxt.Pkgpath+":") + timings.Write(&buf, "BenchmarkCompile:"+base.Ctxt.Pkgpath+":") n, err := f.Write(buf.Bytes()) if err != nil { @@ -622,12 +623,12 @@ func islocalname(name string) bool { func findpkg(name string) (file string, ok bool) { if islocalname(name) { - if Flag.NoLocalImports { + if base.Flag.NoLocalImports { return "", false } - if Flag.Cfg.PackageFile != nil { - file, ok = Flag.Cfg.PackageFile[name] + if base.Flag.Cfg.PackageFile != nil { + file, ok = base.Flag.Cfg.PackageFile[name] return file, ok } @@ -649,16 +650,16 @@ func findpkg(name string) (file string, ok bool) { // don't want to see "encoding/../encoding/base64" // as different from "encoding/base64". if q := path.Clean(name); q != name { - yyerror("non-canonical import path %q (should be %q)", name, q) + base.Errorf("non-canonical import path %q (should be %q)", name, q) return "", false } - if Flag.Cfg.PackageFile != nil { - file, ok = Flag.Cfg.PackageFile[name] + if base.Flag.Cfg.PackageFile != nil { + file, ok = base.Flag.Cfg.PackageFile[name] return file, ok } - for _, dir := range Flag.Cfg.ImportDirs { + for _, dir := range base.Flag.Cfg.ImportDirs { file = fmt.Sprintf("%s/%s.a", dir, name) if _, err := os.Stat(file); err == nil { return file, true @@ -672,13 +673,13 @@ func findpkg(name string) (file string, ok bool) { if objabi.GOROOT != "" { suffix := "" suffixsep := "" - if Flag.InstallSuffix != "" { + if base.Flag.InstallSuffix != "" { suffixsep = "_" - suffix = Flag.InstallSuffix - } else if Flag.Race { + suffix = base.Flag.InstallSuffix + } else if base.Flag.Race { suffixsep = "_" suffix = "race" - } else if Flag.MSan { + } else if base.Flag.MSan { suffixsep = "_" suffix = "msan" } @@ -715,7 +716,7 @@ func loadsys() { case varTag: importvar(Runtimepkg, src.NoXPos, sym, typ) default: - Fatalf("unhandled declaration tag %v", d.tag) + base.Fatalf("unhandled declaration tag %v", d.tag) } } @@ -729,13 +730,13 @@ var myheight int func importfile(f constant.Value) 
*types.Pkg { if f.Kind() != constant.String { - yyerror("import path must be a string") + base.Errorf("import path must be a string") return nil } path_ := constant.StringVal(f) if len(path_) == 0 { - yyerror("import path is empty") + base.Errorf("import path is empty") return nil } @@ -748,16 +749,16 @@ func importfile(f constant.Value) *types.Pkg { // the main package, just as we reserve the import // path "math" to identify the standard math package. if path_ == "main" { - yyerror("cannot import \"main\"") - errorexit() + base.Errorf("cannot import \"main\"") + base.ErrorExit() } - if Ctxt.Pkgpath != "" && path_ == Ctxt.Pkgpath { - yyerror("import %q while compiling that package (import cycle)", path_) - errorexit() + if base.Ctxt.Pkgpath != "" && path_ == base.Ctxt.Pkgpath { + base.Errorf("import %q while compiling that package (import cycle)", path_) + base.ErrorExit() } - if mapped, ok := Flag.Cfg.ImportMap[path_]; ok { + if mapped, ok := base.Flag.Cfg.ImportMap[path_]; ok { path_ = mapped } @@ -767,13 +768,13 @@ func importfile(f constant.Value) *types.Pkg { if islocalname(path_) { if path_[0] == '/' { - yyerror("import path cannot be absolute path") + base.Errorf("import path cannot be absolute path") return nil } - prefix := Ctxt.Pathname - if Flag.D != "" { - prefix = Flag.D + prefix := base.Ctxt.Pathname + if base.Flag.D != "" { + prefix = base.Flag.D } path_ = path.Join(prefix, path_) @@ -784,8 +785,8 @@ func importfile(f constant.Value) *types.Pkg { file, found := findpkg(path_) if !found { - yyerror("can't find import: %q", path_) - errorexit() + base.Errorf("can't find import: %q", path_) + base.ErrorExit() } importpkg := types.NewPkg(path_, "") @@ -797,48 +798,48 @@ func importfile(f constant.Value) *types.Pkg { imp, err := bio.Open(file) if err != nil { - yyerror("can't open import: %q: %v", path_, err) - errorexit() + base.Errorf("can't open import: %q: %v", path_, err) + base.ErrorExit() } defer imp.Close() // check object header p, err := 
imp.ReadString('\n') if err != nil { - yyerror("import %s: reading input: %v", file, err) - errorexit() + base.Errorf("import %s: reading input: %v", file, err) + base.ErrorExit() } if p == "!\n" { // package archive // package export block should be first sz := arsize(imp.Reader, "__.PKGDEF") if sz <= 0 { - yyerror("import %s: not a package file", file) - errorexit() + base.Errorf("import %s: not a package file", file) + base.ErrorExit() } p, err = imp.ReadString('\n') if err != nil { - yyerror("import %s: reading input: %v", file, err) - errorexit() + base.Errorf("import %s: reading input: %v", file, err) + base.ErrorExit() } } if !strings.HasPrefix(p, "go object ") { - yyerror("import %s: not a go object file: %s", file, p) - errorexit() + base.Errorf("import %s: not a go object file: %s", file, p) + base.ErrorExit() } q := fmt.Sprintf("%s %s %s %s\n", objabi.GOOS, objabi.GOARCH, objabi.Version, objabi.Expstring()) if p[10:] != q { - yyerror("import %s: object is [%s] expected [%s]", file, p[10:], q) - errorexit() + base.Errorf("import %s: object is [%s] expected [%s]", file, p[10:], q) + base.ErrorExit() } // process header lines for { p, err = imp.ReadString('\n') if err != nil { - yyerror("import %s: reading input: %v", file, err) - errorexit() + base.Errorf("import %s: reading input: %v", file, err) + base.ErrorExit() } if p == "\n" { break // header ends with blank line @@ -870,41 +871,41 @@ func importfile(f constant.Value) *types.Pkg { var fingerprint goobj.FingerprintType switch c { case '\n': - yyerror("cannot import %s: old export format no longer supported (recompile library)", path_) + base.Errorf("cannot import %s: old export format no longer supported (recompile library)", path_) return nil case 'B': - if Debug.Export != 0 { + if base.Debug.Export != 0 { fmt.Printf("importing %s (%s)\n", path_, file) } imp.ReadByte() // skip \n after $$B c, err = imp.ReadByte() if err != nil { - yyerror("import %s: reading input: %v", file, err) - errorexit() + 
base.Errorf("import %s: reading input: %v", file, err) + base.ErrorExit() } // Indexed format is distinguished by an 'i' byte, // whereas previous export formats started with 'c', 'd', or 'v'. if c != 'i' { - yyerror("import %s: unexpected package format byte: %v", file, c) - errorexit() + base.Errorf("import %s: unexpected package format byte: %v", file, c) + base.ErrorExit() } fingerprint = iimport(importpkg, imp) default: - yyerror("no import in %q", path_) - errorexit() + base.Errorf("no import in %q", path_) + base.ErrorExit() } // assume files move (get installed) so don't record the full path - if Flag.Cfg.PackageFile != nil { + if base.Flag.Cfg.PackageFile != nil { // If using a packageFile map, assume path_ can be recorded directly. - Ctxt.AddImport(path_, fingerprint) + base.Ctxt.AddImport(path_, fingerprint) } else { // For file "/Users/foo/go/pkg/darwin_amd64/math.a" record "math.a". - Ctxt.AddImport(file[len(file)-len(path_)-len(".a"):], fingerprint) + base.Ctxt.AddImport(file[len(file)-len(path_)-len(".a"):], fingerprint) } if importpkg.Height >= myheight { @@ -926,21 +927,21 @@ func pkgnotused(lineno src.XPos, path string, name string) { elem = elem[i+1:] } if name == "" || elem == name { - yyerrorl(lineno, "imported and not used: %q", path) + base.ErrorfAt(lineno, "imported and not used: %q", path) } else { - yyerrorl(lineno, "imported and not used: %q as %s", path, name) + base.ErrorfAt(lineno, "imported and not used: %q as %s", path, name) } } func mkpackage(pkgname string) { if localpkg.Name == "" { if pkgname == "_" { - yyerror("invalid package name _") + base.Errorf("invalid package name _") } localpkg.Name = pkgname } else { if pkgname != localpkg.Name { - yyerror("package %s; expected %s", pkgname, localpkg.Name) + base.Errorf("package %s; expected %s", pkgname, localpkg.Name) } } } @@ -964,7 +965,7 @@ func clearImports() { // leave s->block set to cause redeclaration // errors if a conflicting top-level name is // introduced by a different 
file. - if !n.Name.Used() && SyntaxErrors() == 0 { + if !n.Name.Used() && base.SyntaxErrors() == 0 { unused = append(unused, importedPkg{n.Pos, n.Name.Pkg.Path, s.Name}) } s.Def = nil @@ -973,7 +974,7 @@ func clearImports() { if IsAlias(s) { // throw away top-level name left over // from previous import . "x" - if n.Name != nil && n.Name.Pack != nil && !n.Name.Pack.Name.Used() && SyntaxErrors() == 0 { + if n.Name != nil && n.Name.Pack != nil && !n.Name.Pack.Name.Used() && base.SyntaxErrors() == 0 { unused = append(unused, importedPkg{n.Name.Pack.Pos, n.Name.Pack.Name.Pkg.Path, ""}) n.Name.Pack.Name.SetUsed(true) } @@ -995,7 +996,7 @@ func IsAlias(sym *types.Sym) bool { // recordFlags records the specified command-line flags to be placed // in the DWARF info. func recordFlags(flags ...string) { - if Ctxt.Pkgpath == "" { + if base.Ctxt.Pkgpath == "" { // We can't record the flags if we don't know what the // package name is. return @@ -1038,24 +1039,24 @@ func recordFlags(flags ...string) { if cmd.Len() == 0 { return } - s := Ctxt.Lookup(dwarf.CUInfoPrefix + "producer." + Ctxt.Pkgpath) + s := base.Ctxt.Lookup(dwarf.CUInfoPrefix + "producer." + base.Ctxt.Pkgpath) s.Type = objabi.SDWARFCUINFO // Sometimes (for example when building tests) we can link // together two package main archives. So allow dups. s.Set(obj.AttrDuplicateOK, true) - Ctxt.Data = append(Ctxt.Data, s) + base.Ctxt.Data = append(base.Ctxt.Data, s) s.P = cmd.Bytes()[1:] } // recordPackageName records the name of the package being // compiled, so that the linker can save it in the compile unit's DIE. func recordPackageName() { - s := Ctxt.Lookup(dwarf.CUInfoPrefix + "packagename." + Ctxt.Pkgpath) + s := base.Ctxt.Lookup(dwarf.CUInfoPrefix + "packagename." + base.Ctxt.Pkgpath) s.Type = objabi.SDWARFCUINFO // Sometimes (for example when building tests) we can link // together two package main archives. So allow dups. 
s.Set(obj.AttrDuplicateOK, true) - Ctxt.Data = append(Ctxt.Data, s) + base.Ctxt.Data = append(base.Ctxt.Data, s) s.P = []byte(localpkg.Name) } @@ -1099,23 +1100,23 @@ func langSupported(major, minor int, pkg *types.Pkg) bool { // checkLang verifies that the -lang flag holds a valid value, and // exits if not. It initializes data used by langSupported. func checkLang() { - if Flag.Lang == "" { + if base.Flag.Lang == "" { return } var err error - langWant, err = parseLang(Flag.Lang) + langWant, err = parseLang(base.Flag.Lang) if err != nil { - log.Fatalf("invalid value %q for -lang: %v", Flag.Lang, err) + log.Fatalf("invalid value %q for -lang: %v", base.Flag.Lang, err) } - if def := currentLang(); Flag.Lang != def { + if def := currentLang(); base.Flag.Lang != def { defVers, err := parseLang(def) if err != nil { log.Fatalf("internal error parsing default lang %q: %v", def, err) } if langWant.major > defVers.major || (langWant.major == defVers.major && langWant.minor > defVers.minor) { - log.Fatalf("invalid value %q for -lang: max known version is %q", Flag.Lang, def) + log.Fatalf("invalid value %q for -lang: max known version is %q", base.Flag.Lang, def) } } } diff --git a/src/cmd/compile/internal/gc/noder.go b/src/cmd/compile/internal/gc/noder.go index 2d3da884a2..6dae2cd0a4 100644 --- a/src/cmd/compile/internal/gc/noder.go +++ b/src/cmd/compile/internal/gc/noder.go @@ -16,6 +16,7 @@ import ( "unicode" "unicode/utf8" + "cmd/compile/internal/base" "cmd/compile/internal/syntax" "cmd/compile/internal/types" "cmd/internal/obj" @@ -59,15 +60,15 @@ func parseFiles(filenames []string) uint { var lines uint for _, p := range noders { for e := range p.err { - p.yyerrorpos(e.Pos, "%s", e.Msg) + p.errorAt(e.Pos, "%s", e.Msg) } p.node() lines += p.file.Lines p.file = nil // release memory - if SyntaxErrors() != 0 { - errorexit() + if base.SyntaxErrors() != 0 { + base.ErrorExit() } // Always run testdclstack here, even when debug_dclstack is not set, as a sanity measure. 
testdclstack() @@ -111,20 +112,20 @@ func (p *noder) makeSrcPosBase(b0 *syntax.PosBase) *src.PosBase { } func (p *noder) makeXPos(pos syntax.Pos) (_ src.XPos) { - return Ctxt.PosTable.XPos(src.MakePos(p.makeSrcPosBase(pos.Base()), pos.Line(), pos.Col())) + return base.Ctxt.PosTable.XPos(src.MakePos(p.makeSrcPosBase(pos.Base()), pos.Line(), pos.Col())) } -func (p *noder) yyerrorpos(pos syntax.Pos, format string, args ...interface{}) { - yyerrorl(p.makeXPos(pos), format, args...) +func (p *noder) errorAt(pos syntax.Pos, format string, args ...interface{}) { + base.ErrorfAt(p.makeXPos(pos), format, args...) } // TODO(gri) Can we eliminate fileh in favor of absFilename? func fileh(name string) string { - return objabi.AbsFile("", name, Flag.TrimPath) + return objabi.AbsFile("", name, base.Flag.TrimPath) } func absFilename(name string) string { - return objabi.AbsFile(Ctxt.Pathname, name, Flag.TrimPath) + return objabi.AbsFile(base.Ctxt.Pathname, name, base.Flag.TrimPath) } // noder transforms package syntax's AST into a Node tree. @@ -162,8 +163,8 @@ func (p *noder) funcBody(fn *Node, block *syntax.BlockStmt) { } fn.Nbody.Set(body) - lineno = p.makeXPos(block.Rbrace) - fn.Func.Endlineno = lineno + base.Pos = p.makeXPos(block.Rbrace) + fn.Func.Endlineno = base.Pos } funcbody() @@ -193,7 +194,7 @@ func (p *noder) closeScope(pos syntax.Pos) { // no variables were declared in this scope, so we can retract it. 
if int(p.scope) != len(Curfn.Func.Parents) { - Fatalf("scope tracking inconsistency, no variables declared but scopes were not retracted") + base.Fatalf("scope tracking inconsistency, no variables declared but scopes were not retracted") } p.scope = Curfn.Func.Parents[p.scope-1] @@ -258,7 +259,7 @@ func (p *noder) node() { for _, n := range p.linknames { if !p.importedUnsafe { - p.yyerrorpos(n.pos, "//go:linkname only allowed in Go files that import \"unsafe\"") + p.errorAt(n.pos, "//go:linkname only allowed in Go files that import \"unsafe\"") continue } s := lookup(n.local) @@ -267,10 +268,10 @@ func (p *noder) node() { } else { // Use the default object symbol name if the // user didn't provide one. - if Ctxt.Pkgpath == "" { - p.yyerrorpos(n.pos, "//go:linkname requires linkname argument or -p compiler flag") + if base.Ctxt.Pkgpath == "" { + p.errorAt(n.pos, "//go:linkname requires linkname argument or -p compiler flag") } else { - s.Linkname = objabi.PathToPrefix(Ctxt.Pkgpath) + "." + n.local + s.Linkname = objabi.PathToPrefix(base.Ctxt.Pkgpath) + "." + n.local } } } @@ -288,7 +289,7 @@ func (p *noder) node() { } pragcgobuf = append(pragcgobuf, p.pragcgobuf...) - lineno = src.NoXPos + base.Pos = src.NoXPos clearImports() } @@ -332,8 +333,8 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) { ipkg := importfile(p.basicLit(imp.Path)) if ipkg == nil { - if Errors() == 0 { - Fatalf("phase error in import") + if base.Errors() == 0 { + base.Fatalf("phase error in import") } return } @@ -363,7 +364,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) { importdot(ipkg, pack) return case "init": - yyerrorl(pack.Pos, "cannot import package as init - init must be a func") + base.ErrorfAt(pack.Pos, "cannot import package as init - init must be a func") return case "_": return @@ -393,7 +394,7 @@ func (p *noder) varDecl(decl *syntax.VarDecl) []*Node { // so at that point it hasn't seen the imports. // We're left to check now, just before applying the //go:embed lines. 
for _, e := range pragma.Embeds { - p.yyerrorpos(e.Pos, "//go:embed only allowed in Go files that import \"embed\"") + p.errorAt(e.Pos, "//go:embed only allowed in Go files that import \"embed\"") } } else { exprs = varEmbed(p, names, typ, exprs, pragma.Embeds) @@ -437,7 +438,7 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []*Node { cs.typ, cs.values = typ, values } else { if typ != nil { - yyerror("const declaration cannot have type without expression") + base.Errorf("const declaration cannot have type without expression") } typ, values = cs.typ, cs.values } @@ -445,7 +446,7 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []*Node { nn := make([]*Node, 0, len(names)) for i, n := range names { if i >= len(values) { - yyerror("missing value in const declaration") + base.Errorf("missing value in const declaration") break } v := values[i] @@ -464,7 +465,7 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []*Node { } if len(values) > len(names) { - yyerror("extra expression in const declaration") + base.Errorf("extra expression in const declaration") } cs.iota++ @@ -493,7 +494,7 @@ func (p *noder) typeDecl(decl *syntax.TypeDecl) *Node { nod := p.nod(decl, ODCLTYPE, n, nil) if param.Alias() && !langSupported(1, 9, localpkg) { - yyerrorl(nod.Pos, "type aliases only supported as of -lang=go1.9") + base.ErrorfAt(nod.Pos, "type aliases only supported as of -lang=go1.9") } return nod } @@ -521,13 +522,13 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) *Node { if name.Name == "init" { name = renameinit() if t.List.Len() > 0 || t.Rlist.Len() > 0 { - yyerrorl(f.Pos, "func init must have no arguments and no return values") + base.ErrorfAt(f.Pos, "func init must have no arguments and no return values") } } if localpkg.Name == "main" && name.Name == "main" { if t.List.Len() > 0 || t.Rlist.Len() > 0 { - yyerrorl(f.Pos, "func main must have no arguments and no return values") + base.ErrorfAt(f.Pos, "func main must have 
no arguments and no return values") } } } else { @@ -542,7 +543,7 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) *Node { if pragma, ok := fun.Pragma.(*Pragma); ok { f.Func.Pragma = pragma.Flag & FuncPragmas if pragma.Flag&Systemstack != 0 && pragma.Flag&Nosplit != 0 { - yyerrorl(f.Pos, "go:nosplit and go:systemstack cannot be combined") + base.ErrorfAt(f.Pos, "go:nosplit and go:systemstack cannot be combined") } pragma.Flag &^= FuncPragmas p.checkUnused(pragma) @@ -556,10 +557,10 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) *Node { if fun.Body != nil { if f.Func.Pragma&Noescape != 0 { - yyerrorl(f.Pos, "can only use //go:noescape with external func implementations") + base.ErrorfAt(f.Pos, "can only use //go:noescape with external func implementations") } } else { - if Flag.Complete || strings.HasPrefix(f.funcname(), "init.") { + if base.Flag.Complete || strings.HasPrefix(f.funcname(), "init.") { // Linknamed functions are allowed to have no body. Hopefully // the linkname target has a body. See issue 23311. isLinknamed := false @@ -570,7 +571,7 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) *Node { } } if !isLinknamed { - yyerrorl(f.Pos, "missing function body") + base.ErrorfAt(f.Pos, "missing function body") } } } @@ -610,13 +611,13 @@ func (p *noder) param(param *syntax.Field, dddOk, final bool) *Node { if typ.Op == ODDD { if !dddOk { // We mark these as syntax errors to get automatic elimination - // of multiple such errors per line (see yyerrorl in subr.go). - yyerror("syntax error: cannot use ... in receiver or result parameter list") + // of multiple such errors per line (see ErrorfAt in subr.go). + base.Errorf("syntax error: cannot use ... in receiver or result parameter list") } else if !final { if param.Name == nil { - yyerror("syntax error: cannot use ... with non-final parameter") + base.Errorf("syntax error: cannot use ... with non-final parameter") } else { - p.yyerrorpos(param.Name.Pos(), "syntax error: cannot use ... 
with non-final parameter %s", param.Name.Value) + p.errorAt(param.Name.Pos(), "syntax error: cannot use ... with non-final parameter %s", param.Name.Value) } } typ.Op = OTARRAY @@ -670,7 +671,7 @@ func (p *noder) expr(expr syntax.Expr) *Node { l[i] = p.wrapname(expr.ElemList[i], e) } n.List.Set(l) - lineno = p.makeXPos(expr.Rbrace) + base.Pos = p.makeXPos(expr.Rbrace) return n case *syntax.KeyValueExpr: // use position of expr.Key rather than of expr (which has position of ':') @@ -752,7 +753,7 @@ func (p *noder) expr(expr syntax.Expr) *Node { if expr.Lhs != nil { n.Left = p.declName(expr.Lhs) if n.Left.isBlank() { - yyerror("invalid variable name %v in type switch", n.Left) + base.Errorf("invalid variable name %v in type switch", n.Left) } } return n @@ -916,12 +917,12 @@ func (p *noder) packname(expr syntax.Expr) *types.Sym { name := p.name(expr.X.(*syntax.Name)) def := asNode(name.Def) if def == nil { - yyerror("undefined: %v", name) + base.Errorf("undefined: %v", name) return name } var pkg *types.Pkg if def.Op != OPACK { - yyerror("%v is not a package", name) + base.Errorf("%v is not a package", name) pkg = localpkg } else { def.Name.SetUsed(true) @@ -1026,7 +1027,7 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) *Node { op = OCONTINUE case syntax.Fallthrough: if !fallOK { - yyerror("fallthrough statement out of place") + base.Errorf("fallthrough statement out of place") } op = OFALL case syntax.Goto: @@ -1066,7 +1067,7 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) *Node { break } if asNode(ln.Sym.Def) != ln { - yyerror("%s is shadowed during return", ln.Sym.Name) + base.Errorf("%s is shadowed during return", ln.Sym.Name) } } } @@ -1107,7 +1108,7 @@ func (p *noder) assignList(expr syntax.Expr, defn *Node, colas bool) []*Node { name, ok := expr.(*syntax.Name) if !ok { - p.yyerrorpos(expr.Pos(), "non-name %v on left side of :=", p.expr(expr)) + p.errorAt(expr.Pos(), "non-name %v on left side of :=", p.expr(expr)) newOrErr = true continue 
} @@ -1118,7 +1119,7 @@ func (p *noder) assignList(expr syntax.Expr, defn *Node, colas bool) []*Node { } if seen[sym] { - p.yyerrorpos(expr.Pos(), "%v repeated on left side of :=", sym) + p.errorAt(expr.Pos(), "%v repeated on left side of :=", sym) newOrErr = true continue } @@ -1138,7 +1139,7 @@ func (p *noder) assignList(expr syntax.Expr, defn *Node, colas bool) []*Node { } if !newOrErr { - yyerrorl(defn.Pos, "no new variables on left side of :=") + base.ErrorfAt(defn.Pos, "no new variables on left side of :=") } return res } @@ -1256,10 +1257,10 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *Node, rbrace n.Nbody.Set(p.stmtsFall(body, true)) if l := n.Nbody.Len(); l > 0 && n.Nbody.Index(l-1).Op == OFALL { if tswitch != nil { - yyerror("cannot fallthrough in type switch") + base.Errorf("cannot fallthrough in type switch") } if i+1 == len(clauses) { - yyerror("cannot fallthrough final case in switch") + base.Errorf("cannot fallthrough final case in switch") } } @@ -1378,7 +1379,7 @@ func checkLangCompat(lit *syntax.BasicLit) { } // len(s) > 2 if strings.Contains(s, "_") { - yyerrorv("go1.13", "underscores in numeric literals") + base.ErrorfVers("go1.13", "underscores in numeric literals") return } if s[0] != '0' { @@ -1386,15 +1387,15 @@ func checkLangCompat(lit *syntax.BasicLit) { } radix := s[1] if radix == 'b' || radix == 'B' { - yyerrorv("go1.13", "binary literals") + base.ErrorfVers("go1.13", "binary literals") return } if radix == 'o' || radix == 'O' { - yyerrorv("go1.13", "0o/0O-style octal literals") + base.ErrorfVers("go1.13", "0o/0O-style octal literals") return } if lit.Kind != syntax.IntLit && (radix == 'x' || radix == 'X') { - yyerrorv("go1.13", "hexadecimal floating-point literals") + base.ErrorfVers("go1.13", "hexadecimal floating-point literals") } } @@ -1415,7 +1416,7 @@ func (p *noder) basicLit(lit *syntax.BasicLit) constant.Value { v := constant.MakeFromLiteral(lit.Value, tokenForLitKind[lit.Kind], 0) if v.Kind() == 
constant.Unknown { // TODO(mdempsky): Better error message? - p.yyerrorpos(lit.Pos(), "malformed constant: %s", lit.Value) + p.errorAt(lit.Pos(), "malformed constant: %s", lit.Value) } // go/constant uses big.Rat by default, which is more precise, but @@ -1474,7 +1475,7 @@ func (p *noder) nodSym(orig syntax.Node, op Op, left *Node, sym *types.Sym) *Nod func (p *noder) pos(n syntax.Node) src.XPos { // TODO(gri): orig.Pos() should always be known - fix package syntax - xpos := lineno + xpos := base.Pos if pos := n.Pos(); pos.IsKnown() { xpos = p.makeXPos(pos) } @@ -1483,7 +1484,7 @@ func (p *noder) pos(n syntax.Node) src.XPos { func (p *noder) setlineno(n syntax.Node) { if n != nil { - lineno = p.pos(n) + base.Pos = p.pos(n) } } @@ -1525,12 +1526,12 @@ type PragmaEmbed struct { func (p *noder) checkUnused(pragma *Pragma) { for _, pos := range pragma.Pos { if pos.Flag&pragma.Flag != 0 { - p.yyerrorpos(pos.Pos, "misplaced compiler directive") + p.errorAt(pos.Pos, "misplaced compiler directive") } } if len(pragma.Embeds) > 0 { for _, e := range pragma.Embeds { - p.yyerrorpos(e.Pos, "misplaced go:embed directive") + p.errorAt(e.Pos, "misplaced go:embed directive") } } } @@ -1619,7 +1620,7 @@ func (p *noder) pragma(pos syntax.Pos, blankLine bool, text string, old syntax.P // For security, we disallow //go:cgo_* directives other // than cgo_import_dynamic outside cgo-generated files. // Exception: they are allowed in the standard library, for runtime and syscall. 
- if !isCgoGeneratedFile(pos) && !Flag.Std { + if !isCgoGeneratedFile(pos) && !base.Flag.Std { p.error(syntax.Error{Pos: pos, Msg: fmt.Sprintf("//%s only allowed in cgo-generated code", text)}) } p.pragcgo(pos, text) @@ -1631,10 +1632,10 @@ func (p *noder) pragma(pos syntax.Pos, blankLine bool, text string, old syntax.P } flag := pragmaFlag(verb) const runtimePragmas = Systemstack | Nowritebarrier | Nowritebarrierrec | Yeswritebarrierrec - if !Flag.CompilingRuntime && flag&runtimePragmas != 0 { + if !base.Flag.CompilingRuntime && flag&runtimePragmas != 0 { p.error(syntax.Error{Pos: pos, Msg: fmt.Sprintf("//%s only allowed in runtime", verb)}) } - if flag == 0 && !allowedStdPragmas[verb] && Flag.Std { + if flag == 0 && !allowedStdPragmas[verb] && base.Flag.Std { p.error(syntax.Error{Pos: pos, Msg: fmt.Sprintf("//%s is not allowed in the standard library", verb)}) } pragma.Flag |= flag diff --git a/src/cmd/compile/internal/gc/obj.go b/src/cmd/compile/internal/gc/obj.go index 170d997cd6..6c659c91c7 100644 --- a/src/cmd/compile/internal/gc/obj.go +++ b/src/cmd/compile/internal/gc/obj.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/bio" "cmd/internal/obj" @@ -47,20 +48,20 @@ const ( ) func dumpobj() { - if Flag.LinkObj == "" { - dumpobj1(Flag.LowerO, modeCompilerObj|modeLinkerObj) + if base.Flag.LinkObj == "" { + dumpobj1(base.Flag.LowerO, modeCompilerObj|modeLinkerObj) return } - dumpobj1(Flag.LowerO, modeCompilerObj) - dumpobj1(Flag.LinkObj, modeLinkerObj) + dumpobj1(base.Flag.LowerO, modeCompilerObj) + dumpobj1(base.Flag.LinkObj, modeLinkerObj) } func dumpobj1(outfile string, mode int) { bout, err := bio.Create(outfile) if err != nil { - flusherrors() + base.FlushErrors() fmt.Printf("can't create %s: %v\n", outfile, err) - errorexit() + base.ErrorExit() } defer bout.Close() bout.WriteString("!\n") @@ -79,8 +80,8 @@ func dumpobj1(outfile string, mode int) { func printObjHeader(bout *bio.Writer) { 
fmt.Fprintf(bout, "go object %s %s %s %s\n", objabi.GOOS, objabi.GOARCH, objabi.Version, objabi.Expstring()) - if Flag.BuildID != "" { - fmt.Fprintf(bout, "build id %q\n", Flag.BuildID) + if base.Flag.BuildID != "" { + fmt.Fprintf(bout, "build id %q\n", base.Flag.BuildID) } if localpkg.Name == "main" { fmt.Fprintf(bout, "main\n") @@ -169,13 +170,13 @@ func dumpdata() { addGCLocals() if exportlistLen != len(exportlist) { - Fatalf("exportlist changed after compile functions loop") + base.Fatalf("exportlist changed after compile functions loop") } if ptabsLen != len(ptabs) { - Fatalf("ptabs changed after compile functions loop") + base.Fatalf("ptabs changed after compile functions loop") } if itabsLen != len(itabs) { - Fatalf("itabs changed after compile functions loop") + base.Fatalf("itabs changed after compile functions loop") } } @@ -187,18 +188,18 @@ func dumpLinkerObj(bout *bio.Writer) { fmt.Fprintf(bout, "\n$$\n\n$$\n\n") fmt.Fprintf(bout, "\n$$ // cgo\n") if err := json.NewEncoder(bout).Encode(pragcgobuf); err != nil { - Fatalf("serializing pragcgobuf: %v", err) + base.Fatalf("serializing pragcgobuf: %v", err) } fmt.Fprintf(bout, "\n$$\n\n") } fmt.Fprintf(bout, "\n!\n") - obj.WriteObjFile(Ctxt, bout) + obj.WriteObjFile(base.Ctxt, bout) } func addptabs() { - if !Ctxt.Flag_dynlink || localpkg.Name != "main" { + if !base.Ctxt.Flag_dynlink || localpkg.Name != "main" { return } for _, exportn := range exportlist { @@ -228,7 +229,7 @@ func addptabs() { func dumpGlobal(n *Node) { if n.Type == nil { - Fatalf("external %v nil type\n", n) + base.Fatalf("external %v nil type\n", n) } if n.Class() == PFUNC { return @@ -261,7 +262,7 @@ func dumpGlobalConst(n *Node) { return } } - Ctxt.DwarfIntConst(Ctxt.Pkgpath, n.Sym.Name, typesymname(t), int64Val(t, v)) + base.Ctxt.DwarfIntConst(base.Ctxt.Pkgpath, n.Sym.Name, typesymname(t), int64Val(t, v)) } func dumpglobls() { @@ -293,7 +294,7 @@ func dumpglobls() { // This is done during the sequential phase after compilation, since 
// global symbols can't be declared during parallel compilation. func addGCLocals() { - for _, s := range Ctxt.Text { + for _, s := range base.Ctxt.Text { fn := s.Func() if fn == nil { continue @@ -316,9 +317,9 @@ func addGCLocals() { func duintxx(s *obj.LSym, off int, v uint64, wid int) int { if off&(wid-1) != 0 { - Fatalf("duintxxLSym: misaligned: v=%d wid=%d off=%d", v, wid, off) + base.Fatalf("duintxxLSym: misaligned: v=%d wid=%d off=%d", v, wid, off) } - s.WriteInt(Ctxt, int64(off), wid, int64(v)) + s.WriteInt(base.Ctxt, int64(off), wid, int64(v)) return off + wid } @@ -369,7 +370,7 @@ func stringsym(pos src.XPos, s string) (data *obj.LSym) { symname = strconv.Quote(s) } - symdata := Ctxt.Lookup(stringSymPrefix + symname) + symdata := base.Ctxt.Lookup(stringSymPrefix + symname) if !symdata.OnList() { off := dstringdata(symdata, 0, s, pos, "string") ggloblsym(symdata, int32(off), obj.DUPOK|obj.RODATA|obj.LOCAL) @@ -447,7 +448,7 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj. var symdata *obj.LSym if readonly { symname := fmt.Sprintf(stringSymPattern, size, sum) - symdata = Ctxt.Lookup(stringSymPrefix + symname) + symdata = base.Ctxt.Lookup(stringSymPrefix + symname) if !symdata.OnList() { info := symdata.NewFileInfo() info.Name = file @@ -489,7 +490,7 @@ func slicedata(pos src.XPos, s string) *Node { func slicebytes(nam *Node, s string) { if nam.Op != ONAME { - Fatalf("slicebytes %v", nam) + base.Fatalf("slicebytes %v", nam) } slicesym(nam, slicedata(nam.Pos, s), int64(len(s))) } @@ -499,29 +500,29 @@ func dstringdata(s *obj.LSym, off int, t string, pos src.XPos, what string) int // causing a cryptic error message by the linker. Check for oversize objects here // and provide a useful error message instead. 
if int64(len(t)) > 2e9 { - yyerrorl(pos, "%v with length %v is too big", what, len(t)) + base.ErrorfAt(pos, "%v with length %v is too big", what, len(t)) return 0 } - s.WriteString(Ctxt, int64(off), len(t), t) + s.WriteString(base.Ctxt, int64(off), len(t), t) return off + len(t) } func dsymptr(s *obj.LSym, off int, x *obj.LSym, xoff int) int { off = int(Rnd(int64(off), int64(Widthptr))) - s.WriteAddr(Ctxt, int64(off), Widthptr, x, int64(xoff)) + s.WriteAddr(base.Ctxt, int64(off), Widthptr, x, int64(xoff)) off += Widthptr return off } func dsymptrOff(s *obj.LSym, off int, x *obj.LSym) int { - s.WriteOff(Ctxt, int64(off), x, 0) + s.WriteOff(base.Ctxt, int64(off), x, 0) off += 4 return off } func dsymptrWeakOff(s *obj.LSym, off int, x *obj.LSym) int { - s.WriteWeakOff(Ctxt, int64(off), x, 0) + s.WriteWeakOff(base.Ctxt, int64(off), x, 0) off += 4 return off } @@ -532,79 +533,79 @@ func slicesym(n, arr *Node, lencap int64) { s := n.Sym.Linksym() off := n.Xoffset if arr.Op != ONAME { - Fatalf("slicesym non-name arr %v", arr) + base.Fatalf("slicesym non-name arr %v", arr) } - s.WriteAddr(Ctxt, off, Widthptr, arr.Sym.Linksym(), arr.Xoffset) - s.WriteInt(Ctxt, off+sliceLenOffset, Widthptr, lencap) - s.WriteInt(Ctxt, off+sliceCapOffset, Widthptr, lencap) + s.WriteAddr(base.Ctxt, off, Widthptr, arr.Sym.Linksym(), arr.Xoffset) + s.WriteInt(base.Ctxt, off+sliceLenOffset, Widthptr, lencap) + s.WriteInt(base.Ctxt, off+sliceCapOffset, Widthptr, lencap) } // addrsym writes the static address of a to n. a must be an ONAME. // Neither n nor a is modified. 
func addrsym(n, a *Node) { if n.Op != ONAME { - Fatalf("addrsym n op %v", n.Op) + base.Fatalf("addrsym n op %v", n.Op) } if n.Sym == nil { - Fatalf("addrsym nil n sym") + base.Fatalf("addrsym nil n sym") } if a.Op != ONAME { - Fatalf("addrsym a op %v", a.Op) + base.Fatalf("addrsym a op %v", a.Op) } s := n.Sym.Linksym() - s.WriteAddr(Ctxt, n.Xoffset, Widthptr, a.Sym.Linksym(), a.Xoffset) + s.WriteAddr(base.Ctxt, n.Xoffset, Widthptr, a.Sym.Linksym(), a.Xoffset) } // pfuncsym writes the static address of f to n. f must be a global function. // Neither n nor f is modified. func pfuncsym(n, f *Node) { if n.Op != ONAME { - Fatalf("pfuncsym n op %v", n.Op) + base.Fatalf("pfuncsym n op %v", n.Op) } if n.Sym == nil { - Fatalf("pfuncsym nil n sym") + base.Fatalf("pfuncsym nil n sym") } if f.Class() != PFUNC { - Fatalf("pfuncsym class not PFUNC %d", f.Class()) + base.Fatalf("pfuncsym class not PFUNC %d", f.Class()) } s := n.Sym.Linksym() - s.WriteAddr(Ctxt, n.Xoffset, Widthptr, funcsym(f.Sym).Linksym(), f.Xoffset) + s.WriteAddr(base.Ctxt, n.Xoffset, Widthptr, funcsym(f.Sym).Linksym(), f.Xoffset) } // litsym writes the static literal c to n. // Neither n nor c is modified. 
func litsym(n, c *Node, wid int) { if n.Op != ONAME { - Fatalf("litsym n op %v", n.Op) + base.Fatalf("litsym n op %v", n.Op) } if n.Sym == nil { - Fatalf("litsym nil n sym") + base.Fatalf("litsym nil n sym") } if !types.Identical(n.Type, c.Type) { - Fatalf("litsym: type mismatch: %v has type %v, but %v has type %v", n, n.Type, c, c.Type) + base.Fatalf("litsym: type mismatch: %v has type %v, but %v has type %v", n, n.Type, c, c.Type) } if c.Op == ONIL { return } if c.Op != OLITERAL { - Fatalf("litsym c op %v", c.Op) + base.Fatalf("litsym c op %v", c.Op) } s := n.Sym.Linksym() switch u := c.Val(); u.Kind() { case constant.Bool: i := int64(obj.Bool2int(constant.BoolVal(u))) - s.WriteInt(Ctxt, n.Xoffset, wid, i) + s.WriteInt(base.Ctxt, n.Xoffset, wid, i) case constant.Int: - s.WriteInt(Ctxt, n.Xoffset, wid, int64Val(n.Type, u)) + s.WriteInt(base.Ctxt, n.Xoffset, wid, int64Val(n.Type, u)) case constant.Float: f, _ := constant.Float64Val(u) switch n.Type.Etype { case TFLOAT32: - s.WriteFloat32(Ctxt, n.Xoffset, float32(f)) + s.WriteFloat32(base.Ctxt, n.Xoffset, float32(f)) case TFLOAT64: - s.WriteFloat64(Ctxt, n.Xoffset, f) + s.WriteFloat64(base.Ctxt, n.Xoffset, f) } case constant.Complex: @@ -612,20 +613,20 @@ func litsym(n, c *Node, wid int) { im, _ := constant.Float64Val(constant.Imag(u)) switch n.Type.Etype { case TCOMPLEX64: - s.WriteFloat32(Ctxt, n.Xoffset, float32(re)) - s.WriteFloat32(Ctxt, n.Xoffset+4, float32(im)) + s.WriteFloat32(base.Ctxt, n.Xoffset, float32(re)) + s.WriteFloat32(base.Ctxt, n.Xoffset+4, float32(im)) case TCOMPLEX128: - s.WriteFloat64(Ctxt, n.Xoffset, re) - s.WriteFloat64(Ctxt, n.Xoffset+8, im) + s.WriteFloat64(base.Ctxt, n.Xoffset, re) + s.WriteFloat64(base.Ctxt, n.Xoffset+8, im) } case constant.String: i := constant.StringVal(u) symdata := stringsym(n.Pos, i) - s.WriteAddr(Ctxt, n.Xoffset, Widthptr, symdata, 0) - s.WriteInt(Ctxt, n.Xoffset+int64(Widthptr), Widthptr, int64(len(i))) + s.WriteAddr(base.Ctxt, n.Xoffset, Widthptr, symdata, 0) + 
s.WriteInt(base.Ctxt, n.Xoffset+int64(Widthptr), Widthptr, int64(len(i))) default: - Fatalf("litsym unhandled OLITERAL %v", c) + base.Fatalf("litsym unhandled OLITERAL %v", c) } } diff --git a/src/cmd/compile/internal/gc/order.go b/src/cmd/compile/internal/gc/order.go index 90c08b1b75..3b0f316696 100644 --- a/src/cmd/compile/internal/gc/order.go +++ b/src/cmd/compile/internal/gc/order.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/src" "fmt" @@ -50,7 +51,7 @@ type Order struct { // Order rewrites fn.Nbody to apply the ordering constraints // described in the comment at the top of the file. func order(fn *Node) { - if Flag.W > 1 { + if base.Flag.W > 1 { s := fmt.Sprintf("\nbefore order %v", fn.Func.Nname.Sym) dumplist(s, fn.Nbody) } @@ -181,7 +182,7 @@ func (o *Order) safeExpr(n *Node) *Node { return typecheck(a, ctxExpr) default: - Fatalf("order.safeExpr %v", n.Op) + base.Fatalf("order.safeExpr %v", n.Op) return nil // not reached } } @@ -210,7 +211,7 @@ func (o *Order) addrTemp(n *Node) *Node { var s InitSchedule s.staticassign(vstat, n) if s.out != nil { - Fatalf("staticassign of const generated code: %+v", n) + base.Fatalf("staticassign of const generated code: %+v", n) } vstat = typecheck(vstat, ctxExpr) return vstat @@ -323,7 +324,7 @@ func (o *Order) stmtList(l Nodes) { // and rewrites it to: // m = OMAKESLICECOPY([]T, x, s); nil func orderMakeSliceCopy(s []*Node) { - if Flag.N != 0 || instrumenting { + if base.Flag.N != 0 || instrumenting { return } @@ -384,7 +385,7 @@ func orderMakeSliceCopy(s []*Node) { // edge inserts coverage instrumentation for libfuzzer. func (o *Order) edge() { - if Debug.Libfuzzer == 0 { + if base.Debug.Libfuzzer == 0 { return } @@ -450,7 +451,7 @@ func (o *Order) init(n *Node) { // For concurrency safety, don't mutate potentially shared nodes. // First, ensure that no work is required here. 
if n.Ninit.Len() > 0 { - Fatalf("order.init shared node with ninit") + base.Fatalf("order.init shared node with ninit") } return } @@ -463,7 +464,7 @@ func (o *Order) init(n *Node) { func (o *Order) call(n *Node) { if n.Ninit.Len() > 0 { // Caller should have already called o.init(n). - Fatalf("%v with unexpected ninit", n.Op) + base.Fatalf("%v with unexpected ninit", n.Op) } // Builtin functions. @@ -526,7 +527,7 @@ func (o *Order) call(n *Node) { func (o *Order) mapAssign(n *Node) { switch n.Op { default: - Fatalf("order.mapAssign %v", n.Op) + base.Fatalf("order.mapAssign %v", n.Op) case OAS, OASOP: if n.Left.Op == OINDEXMAP { @@ -582,7 +583,7 @@ func (o *Order) stmt(n *Node) { switch n.Op { default: - Fatalf("order.stmt %v", n.Op) + base.Fatalf("order.stmt %v", n.Op) case OVARKILL, OVARLIVE, OINLMARK: o.out = append(o.out, n) @@ -659,7 +660,7 @@ func (o *Order) stmt(n *Node) { _ = mapKeyReplaceStrConv(r.Right) r.Right = o.mapKeyTemp(r.Left.Type, r.Right) default: - Fatalf("order.stmt: %v", r.Op) + base.Fatalf("order.stmt: %v", r.Op) } o.okAs2(n) @@ -776,7 +777,7 @@ func (o *Order) stmt(n *Node) { orderBody := true switch n.Type.Etype { default: - Fatalf("order.stmt range %v", n.Type) + base.Fatalf("order.stmt range %v", n.Type) case TARRAY, TSLICE: if n.List.Len() < 2 || n.List.Second().isBlank() { @@ -843,7 +844,7 @@ func (o *Order) stmt(n *Node) { for _, n2 := range n.List.Slice() { if n2.Op != OCASE { - Fatalf("order select case %v", n2.Op) + base.Fatalf("order select case %v", n2.Op) } r := n2.Left setlineno(n2) @@ -851,7 +852,7 @@ func (o *Order) stmt(n *Node) { // Append any new body prologue to ninit. // The next loop will insert ninit into nbody. 
if n2.Ninit.Len() != 0 { - Fatalf("order select ninit") + base.Fatalf("order select ninit") } if r == nil { continue @@ -859,7 +860,7 @@ func (o *Order) stmt(n *Node) { switch r.Op { default: Dump("select case", r) - Fatalf("unknown op in select %v", r.Op) + base.Fatalf("unknown op in select %v", r.Op) // If this is case x := <-ch or case x, y := <-ch, the case has // the ODCL nodes to declare x and y. We want to delay that @@ -881,7 +882,7 @@ func (o *Order) stmt(n *Node) { if r.Ninit.Len() != 0 { dumplist("ninit", r.Ninit) - Fatalf("ninit on select recv") + base.Fatalf("ninit on select recv") } // case x = <-c @@ -943,7 +944,7 @@ func (o *Order) stmt(n *Node) { case OSEND: if r.Ninit.Len() != 0 { dumplist("ninit", r.Ninit) - Fatalf("ninit on select send") + base.Fatalf("ninit on select send") } // case c <- x @@ -998,7 +999,7 @@ func (o *Order) stmt(n *Node) { // For now just clean all the temporaries at the end. // In practice that's fine. case OSWITCH: - if Debug.Libfuzzer != 0 && !hasDefaultCase(n) { + if base.Debug.Libfuzzer != 0 && !hasDefaultCase(n) { // Add empty "default:" case for instrumentation. 
n.List.Append(nod(OCASE, nil, nil)) } @@ -1007,7 +1008,7 @@ func (o *Order) stmt(n *Node) { n.Left = o.expr(n.Left, nil) for _, ncas := range n.List.Slice() { if ncas.Op != OCASE { - Fatalf("order switch case %v", ncas.Op) + base.Fatalf("order switch case %v", ncas.Op) } o.exprListInPlace(ncas.List) orderBlock(&ncas.Nbody, o.free) @@ -1017,13 +1018,13 @@ func (o *Order) stmt(n *Node) { o.cleanTemp(t) } - lineno = lno + base.Pos = lno } func hasDefaultCase(n *Node) bool { for _, ncas := range n.List.Slice() { if ncas.Op != OCASE { - Fatalf("expected case, found %v", ncas.Op) + base.Fatalf("expected case, found %v", ncas.Op) } if ncas.List.Len() == 0 { return true @@ -1330,7 +1331,7 @@ func (o *Order) expr(n, lhs *Node) *Node { var dynamics []*Node for _, r := range entries { if r.Op != OKEY { - Fatalf("OMAPLIT entry not OKEY: %v\n", r) + base.Fatalf("OMAPLIT entry not OKEY: %v\n", r) } if !isStaticCompositeLiteral(r.Left) || !isStaticCompositeLiteral(r.Right) { @@ -1369,7 +1370,7 @@ func (o *Order) expr(n, lhs *Node) *Node { } } - lineno = lno + base.Pos = lno return n } diff --git a/src/cmd/compile/internal/gc/pgen.go b/src/cmd/compile/internal/gc/pgen.go index 19a24a3235..f10599dc28 100644 --- a/src/cmd/compile/internal/gc/pgen.go +++ b/src/cmd/compile/internal/gc/pgen.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/ssa" "cmd/compile/internal/types" "cmd/internal/dwarf" @@ -29,7 +30,7 @@ func emitptrargsmap(fn *Node) { if fn.funcname() == "_" || fn.Func.Nname.Sym.Linkname != "" { return } - lsym := Ctxt.Lookup(fn.Func.lsym.Name + ".args_stackmap") + lsym := base.Ctxt.Lookup(fn.Func.lsym.Name + ".args_stackmap") nptr := int(fn.Type.ArgWidth() / int64(Widthptr)) bv := bvalloc(int32(nptr) * 2) @@ -164,7 +165,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) { dowidth(n.Type) w := n.Type.Width if w >= thearch.MAXWIDTH || w < 0 { - Fatalf("bad width") + base.Fatalf("bad width") } if w == 0 && lastHasPtr { // Pad between a 
pointer-containing object and a zero-sized object. @@ -193,12 +194,12 @@ func (s *ssafn) AllocFrame(f *ssa.Func) { func funccompile(fn *Node) { if Curfn != nil { - Fatalf("funccompile %v inside %v", fn.Func.Nname.Sym, Curfn.Func.Nname.Sym) + base.Fatalf("funccompile %v inside %v", fn.Func.Nname.Sym, Curfn.Func.Nname.Sym) } if fn.Type == nil { - if Errors() == 0 { - Fatalf("funccompile missing type") + if base.Errors() == 0 { + base.Fatalf("funccompile missing type") } return } @@ -223,9 +224,9 @@ func funccompile(fn *Node) { } func compile(fn *Node) { - errorsBefore := Errors() + errorsBefore := base.Errors() order(fn) - if Errors() > errorsBefore { + if base.Errors() > errorsBefore { return } @@ -235,7 +236,7 @@ func compile(fn *Node) { fn.Func.initLSym(true) walk(fn) - if Errors() > errorsBefore { + if base.Errors() > errorsBefore { return } if instrumenting { @@ -265,7 +266,7 @@ func compile(fn *Node) { // Also make sure we allocate a linker symbol // for the stack object data, for the same reason. 
if fn.Func.lsym.Func().StackObjects == nil { - fn.Func.lsym.Func().StackObjects = Ctxt.Lookup(fn.Func.lsym.Name + ".stkobj") + fn.Func.lsym.Func().StackObjects = base.Ctxt.Lookup(fn.Func.lsym.Name + ".stkobj") } } } @@ -291,7 +292,7 @@ func compilenow(fn *Node) bool { if fn.IsMethod() && isInlinableButNotInlined(fn) { return false } - return Flag.LowerC == 1 && Debug.CompileLater == 0 + return base.Flag.LowerC == 1 && base.Debug.CompileLater == 0 } // isInlinableButNotInlined returns true if 'fn' was marked as an @@ -373,9 +374,9 @@ func compileFunctions() { }) } var wg sync.WaitGroup - Ctxt.InParallel = true - c := make(chan *Node, Flag.LowerC) - for i := 0; i < Flag.LowerC; i++ { + base.Ctxt.InParallel = true + c := make(chan *Node, base.Flag.LowerC) + for i := 0; i < base.Flag.LowerC; i++ { wg.Add(1) go func(worker int) { for fn := range c { @@ -390,7 +391,7 @@ func compileFunctions() { close(c) compilequeue = nil wg.Wait() - Ctxt.InParallel = false + base.Ctxt.InParallel = false sizeCalculationDisabled = false } } @@ -399,7 +400,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S fn := curfn.(*Node) if fn.Func.Nname != nil { if expect := fn.Func.Nname.Sym.Linksym(); fnsym != expect { - Fatalf("unexpected fnsym: %v != %v", fnsym, expect) + base.Fatalf("unexpected fnsym: %v != %v", fnsym, expect) } } @@ -442,7 +443,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S if !n.Name.Used() { // Text == nil -> generating abstract function if fnsym.Func().Text != nil { - Fatalf("debuginfo unused node (AllocFrame should truncate fn.Func.Dcl)") + base.Fatalf("debuginfo unused node (AllocFrame should truncate fn.Func.Dcl)") } continue } @@ -481,7 +482,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S scopes := assembleScopes(fnsym, fn, dwarfVars, varScopes) var inlcalls dwarf.InlCalls - if Flag.GenDwarfInl > 0 { + if base.Flag.GenDwarfInl > 0 { inlcalls = 
assembleInlines(fnsym, dwarfVars) } return scopes, inlcalls @@ -533,7 +534,7 @@ func createSimpleVar(fnsym *obj.LSym, n *Node) *dwarf.Var { switch n.Class() { case PAUTO: abbrev = dwarf.DW_ABRV_AUTO - if Ctxt.FixedFrameSize() == 0 { + if base.Ctxt.FixedFrameSize() == 0 { offs -= int64(Widthptr) } if objabi.Framepointer_enabled || objabi.GOARCH == "arm64" { @@ -543,15 +544,15 @@ func createSimpleVar(fnsym *obj.LSym, n *Node) *dwarf.Var { case PPARAM, PPARAMOUT: abbrev = dwarf.DW_ABRV_PARAM - offs += Ctxt.FixedFrameSize() + offs += base.Ctxt.FixedFrameSize() default: - Fatalf("createSimpleVar unexpected class %v for node %v", n.Class(), n) + base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class(), n) } typename := dwarf.InfoPrefix + typesymname(n.Type) delete(fnsym.Func().Autot, ngotype(n).Linksym()) inlIndex := 0 - if Flag.GenDwarfInl > 1 { + if base.Flag.GenDwarfInl > 1 { if n.Name.InlFormal() || n.Name.InlLocal() { inlIndex = posInlIndex(n.Pos) + 1 if n.Name.InlFormal() { @@ -559,14 +560,14 @@ func createSimpleVar(fnsym *obj.LSym, n *Node) *dwarf.Var { } } } - declpos := Ctxt.InnermostPos(declPos(n)) + declpos := base.Ctxt.InnermostPos(declPos(n)) return &dwarf.Var{ Name: n.Sym.Name, IsReturnValue: n.Class() == PPARAMOUT, IsInlFormal: n.Name.InlFormal(), Abbrev: abbrev, StackOffset: int32(offs), - Type: Ctxt.Lookup(typename), + Type: base.Ctxt.Lookup(typename), DeclFile: declpos.RelFilename(), DeclLine: declpos.RelLine(), DeclCol: declpos.Col(), @@ -608,7 +609,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *Func, apDecls []*Node) var vars []*dwarf.Var var decls []*Node var selected map[*Node]bool - if Ctxt.Flag_locationlists && Ctxt.Flag_optimize && fn.DebugInfo != nil && complexOK { + if base.Ctxt.Flag_locationlists && base.Ctxt.Flag_optimize && fn.DebugInfo != nil && complexOK { decls, vars, selected = createComplexVars(fnsym, fn) } else { decls, vars, selected = createSimpleVars(fnsym, apDecls) @@ -672,7 +673,7 @@ func 
createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *Func, apDecls []*Node) } } inlIndex := 0 - if Flag.GenDwarfInl > 1 { + if base.Flag.GenDwarfInl > 1 { if n.Name.InlFormal() || n.Name.InlLocal() { inlIndex = posInlIndex(n.Pos) + 1 if n.Name.InlFormal() { @@ -680,13 +681,13 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *Func, apDecls []*Node) } } } - declpos := Ctxt.InnermostPos(n.Pos) + declpos := base.Ctxt.InnermostPos(n.Pos) vars = append(vars, &dwarf.Var{ Name: n.Sym.Name, IsReturnValue: isReturnValue, Abbrev: abbrev, StackOffset: int32(n.Xoffset), - Type: Ctxt.Lookup(typename), + Type: base.Ctxt.Lookup(typename), DeclFile: declpos.RelFilename(), DeclLine: declpos.RelLine(), DeclCol: declpos.Col(), @@ -707,7 +708,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *Func, apDecls []*Node) // names of the variables may have been "versioned" to avoid conflicts // with local vars; disregard this versioning when sorting. func preInliningDcls(fnsym *obj.LSym) []*Node { - fn := Ctxt.DwFixups.GetPrecursorFunc(fnsym).(*Node) + fn := base.Ctxt.DwFixups.GetPrecursorFunc(fnsym).(*Node) var rdcl []*Node for _, n := range fn.Func.Inl.Dcl { c := n.Sym.Name[0] @@ -729,7 +730,7 @@ func stackOffset(slot ssa.LocalSlot) int32 { var off int64 switch n.Class() { case PAUTO: - if Ctxt.FixedFrameSize() == 0 { + if base.Ctxt.FixedFrameSize() == 0 { off -= int64(Widthptr) } if objabi.Framepointer_enabled || objabi.GOARCH == "arm64" { @@ -737,7 +738,7 @@ func stackOffset(slot ssa.LocalSlot) int32 { off -= int64(Widthptr) } case PPARAM, PPARAMOUT: - off += Ctxt.FixedFrameSize() + off += base.Ctxt.FixedFrameSize() } return int32(off + n.Xoffset + slot.Off) } @@ -761,7 +762,7 @@ func createComplexVar(fnsym *obj.LSym, fn *Func, varID ssa.VarID) *dwarf.Var { delete(fnsym.Func().Autot, gotype) typename := dwarf.InfoPrefix + gotype.Name[len("type."):] inlIndex := 0 - if Flag.GenDwarfInl > 1 { + if base.Flag.GenDwarfInl > 1 { if n.Name.InlFormal() || n.Name.InlLocal() { 
inlIndex = posInlIndex(n.Pos) + 1 if n.Name.InlFormal() { @@ -769,13 +770,13 @@ func createComplexVar(fnsym *obj.LSym, fn *Func, varID ssa.VarID) *dwarf.Var { } } } - declpos := Ctxt.InnermostPos(n.Pos) + declpos := base.Ctxt.InnermostPos(n.Pos) dvar := &dwarf.Var{ Name: n.Sym.Name, IsReturnValue: n.Class() == PPARAMOUT, IsInlFormal: n.Name.InlFormal(), Abbrev: abbrev, - Type: Ctxt.Lookup(typename), + Type: base.Ctxt.Lookup(typename), // The stack offset is used as a sorting key, so for decomposed // variables just give it the first one. It's not used otherwise. // This won't work well if the first slot hasn't been assigned a stack @@ -790,7 +791,7 @@ func createComplexVar(fnsym *obj.LSym, fn *Func, varID ssa.VarID) *dwarf.Var { list := debug.LocationLists[varID] if len(list) != 0 { dvar.PutLocationList = func(listSym, startPC dwarf.Sym) { - debug.PutLocationList(list, Ctxt, listSym.(*obj.LSym), startPC.(*obj.LSym)) + debug.PutLocationList(list, base.Ctxt, listSym.(*obj.LSym), startPC.(*obj.LSym)) } } return dvar diff --git a/src/cmd/compile/internal/gc/plive.go b/src/cmd/compile/internal/gc/plive.go index 5f4af06b80..da2298480a 100644 --- a/src/cmd/compile/internal/gc/plive.go +++ b/src/cmd/compile/internal/gc/plive.go @@ -15,6 +15,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/ssa" "cmd/compile/internal/types" "cmd/internal/obj" @@ -226,7 +227,7 @@ func getvariables(fn *Node) ([]*Node, map[*Node]int32) { func (lv *Liveness) initcache() { if lv.cache.initialized { - Fatalf("liveness cache initialized twice") + base.Fatalf("liveness cache initialized twice") return } lv.cache.initialized = true @@ -341,7 +342,7 @@ func affectedNode(v *ssa.Value) (*Node, ssa.SymEffect) { case *Node: return a, e default: - Fatalf("weird aux: %s", v.LongString()) + base.Fatalf("weird aux: %s", v.LongString()) return nil, e } } @@ -406,7 +407,7 @@ func (lv *Liveness) blockEffects(b *ssa.Block) *BlockEffects { // on future calls with the same type t. 
func onebitwalktype1(t *types.Type, off int64, bv bvec) { if t.Align > 0 && off&int64(t.Align-1) != 0 { - Fatalf("onebitwalktype1: invalid initial alignment: type %v has alignment %d, but offset is %v", t, t.Align, off) + base.Fatalf("onebitwalktype1: invalid initial alignment: type %v has alignment %d, but offset is %v", t, t.Align, off) } if !t.HasPointers() { // Note: this case ensures that pointers to go:notinheap types @@ -417,14 +418,14 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) { switch t.Etype { case TPTR, TUNSAFEPTR, TFUNC, TCHAN, TMAP: if off&int64(Widthptr-1) != 0 { - Fatalf("onebitwalktype1: invalid alignment, %v", t) + base.Fatalf("onebitwalktype1: invalid alignment, %v", t) } bv.Set(int32(off / int64(Widthptr))) // pointer case TSTRING: // struct { byte *str; intgo len; } if off&int64(Widthptr-1) != 0 { - Fatalf("onebitwalktype1: invalid alignment, %v", t) + base.Fatalf("onebitwalktype1: invalid alignment, %v", t) } bv.Set(int32(off / int64(Widthptr))) //pointer in first slot @@ -433,7 +434,7 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) { // or, when isnilinter(t)==true: // struct { Type *type; void *data; } if off&int64(Widthptr-1) != 0 { - Fatalf("onebitwalktype1: invalid alignment, %v", t) + base.Fatalf("onebitwalktype1: invalid alignment, %v", t) } // The first word of an interface is a pointer, but we don't // treat it as such. 
@@ -452,7 +453,7 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) { case TSLICE: // struct { byte *array; uintgo len; uintgo cap; } if off&int64(Widthptr-1) != 0 { - Fatalf("onebitwalktype1: invalid TARRAY alignment, %v", t) + base.Fatalf("onebitwalktype1: invalid TARRAY alignment, %v", t) } bv.Set(int32(off / int64(Widthptr))) // pointer in first slot (BitsPointer) @@ -473,7 +474,7 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) { } default: - Fatalf("onebitwalktype1: unexpected type, %v", t) + base.Fatalf("onebitwalktype1: unexpected type, %v", t) } } @@ -509,7 +510,7 @@ func allUnsafe(f *ssa.Func) bool { // go:nosplit functions are similar. Since safe points used to // be coupled with stack checks, go:nosplit often actually // means "no safe points in this function". - return Flag.CompilingRuntime || f.NoSplit + return base.Flag.CompilingRuntime || f.NoSplit } // markUnsafePoints finds unsafe points and computes lv.unsafePoints. @@ -791,7 +792,7 @@ func (lv *Liveness) epilogue() { if n.Class() == PPARAMOUT { if n.Name.IsOutputParamHeapAddr() { // Just to be paranoid. Heap addresses are PAUTOs. - Fatalf("variable %v both output param and heap output param", n) + base.Fatalf("variable %v both output param and heap output param", n) } if n.Name.Param.Heapaddr != nil { // If this variable moved to the heap, then @@ -816,7 +817,7 @@ func (lv *Liveness) epilogue() { livedefer.Set(int32(i)) // It was already marked as Needzero when created. if !n.Name.Needzero() { - Fatalf("all pointer-containing defer arg slots should have Needzero set") + base.Fatalf("all pointer-containing defer arg slots should have Needzero set") } } } @@ -878,7 +879,7 @@ func (lv *Liveness) epilogue() { if b == lv.f.Entry { if index != 0 { - Fatalf("bad index for entry point: %v", index) + base.Fatalf("bad index for entry point: %v", index) } // Check to make sure only input variables are live. 
@@ -889,7 +890,7 @@ func (lv *Liveness) epilogue() { if n.Class() == PPARAM { continue // ok } - Fatalf("bad live variable at entry of %v: %L", lv.fn.Func.Nname, n) + base.Fatalf("bad live variable at entry of %v: %L", lv.fn.Func.Nname, n) } // Record live variables. @@ -966,7 +967,7 @@ func (lv *Liveness) compact(b *ssa.Block) { } func (lv *Liveness) showlive(v *ssa.Value, live bvec) { - if Flag.Live == 0 || lv.fn.funcname() == "init" || strings.HasPrefix(lv.fn.funcname(), ".") { + if base.Flag.Live == 0 || lv.fn.funcname() == "init" || strings.HasPrefix(lv.fn.funcname(), ".") { return } if !(v == nil || v.Op.IsCall()) { @@ -1002,7 +1003,7 @@ func (lv *Liveness) showlive(v *ssa.Value, live bvec) { } } - Warnl(pos, s) + base.WarnfAt(pos, s) } func (lv *Liveness) printbvec(printed bool, name string, live bvec) bool { @@ -1088,7 +1089,7 @@ func (lv *Liveness) printDebug() { if b == lv.f.Entry { live := lv.stackMaps[0] - fmt.Printf("(%s) function entry\n", linestr(lv.fn.Func.Nname.Pos)) + fmt.Printf("(%s) function entry\n", base.FmtPos(lv.fn.Func.Nname.Pos)) fmt.Printf("\tlive=") printed = false for j, n := range lv.vars { @@ -1105,7 +1106,7 @@ func (lv *Liveness) printDebug() { } for _, v := range b.Values { - fmt.Printf("(%s) %v\n", linestr(v.Pos), v.LongString()) + fmt.Printf("(%s) %v\n", base.FmtPos(v.Pos), v.LongString()) pcdata := lv.livenessMap.Get(v) @@ -1214,7 +1215,7 @@ func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) { // These symbols will be added to Ctxt.Data by addGCLocals // after parallel compilation is done. 
makeSym := func(tmpSym *obj.LSym) *obj.LSym { - return Ctxt.LookupInit(fmt.Sprintf("gclocals·%x", md5.Sum(tmpSym.P)), func(lsym *obj.LSym) { + return base.Ctxt.LookupInit(fmt.Sprintf("gclocals·%x", md5.Sum(tmpSym.P)), func(lsym *obj.LSym) { lsym.P = tmpSym.P lsym.Set(obj.AttrContentAddressable, true) }) @@ -1235,7 +1236,7 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap { lv.prologue() lv.solve() lv.epilogue() - if Flag.Live > 0 { + if base.Flag.Live > 0 { lv.showlive(nil, lv.stackMaps[0]) for _, b := range f.Blocks { for _, val := range b.Values { @@ -1245,7 +1246,7 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap { } } } - if Flag.Live >= 2 { + if base.Flag.Live >= 2 { lv.printDebug() } diff --git a/src/cmd/compile/internal/gc/racewalk.go b/src/cmd/compile/internal/gc/racewalk.go index 733d19c024..20b4bc583b 100644 --- a/src/cmd/compile/internal/gc/racewalk.go +++ b/src/cmd/compile/internal/gc/racewalk.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/src" "cmd/internal/sys" @@ -47,9 +48,9 @@ var omit_pkgs = []string{ var norace_inst_pkgs = []string{"sync", "sync/atomic"} func ispkgin(pkgs []string) bool { - if Ctxt.Pkgpath != "" { + if base.Ctxt.Pkgpath != "" { for _, p := range pkgs { - if Ctxt.Pkgpath == p { + if base.Ctxt.Pkgpath == p { return true } } @@ -63,13 +64,13 @@ func instrument(fn *Node) { return } - if !Flag.Race || !ispkgin(norace_inst_pkgs) { + if !base.Flag.Race || !ispkgin(norace_inst_pkgs) { fn.Func.SetInstrumentBody(true) } - if Flag.Race { - lno := lineno - lineno = src.NoXPos + if base.Flag.Race { + lno := base.Pos + base.Pos = src.NoXPos if thearch.LinkArch.Arch.Family != sys.AMD64 { fn.Func.Enter.Prepend(mkcall("racefuncenterfp", nil, nil)) @@ -88,6 +89,6 @@ func instrument(fn *Node) { fn.Func.Enter.Prepend(mkcall("racefuncenter", nil, nil, nodpc)) fn.Func.Exit.Append(mkcall("racefuncexit", nil, nil)) } - lineno = lno + base.Pos = lno } } diff --git 
a/src/cmd/compile/internal/gc/range.go b/src/cmd/compile/internal/gc/range.go index 44776e988e..568c5138ec 100644 --- a/src/cmd/compile/internal/gc/range.go +++ b/src/cmd/compile/internal/gc/range.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/sys" "unicode/utf8" @@ -61,7 +62,7 @@ func typecheckrangeExpr(n *Node) { toomany := false switch t.Etype { default: - yyerrorl(n.Pos, "cannot range over %L", n.Right) + base.ErrorfAt(n.Pos, "cannot range over %L", n.Right) return case TARRAY, TSLICE: @@ -74,7 +75,7 @@ func typecheckrangeExpr(n *Node) { case TCHAN: if !t.ChanDir().CanRecv() { - yyerrorl(n.Pos, "invalid operation: range %v (receive from send-only type %v)", n.Right, n.Right.Type) + base.ErrorfAt(n.Pos, "invalid operation: range %v (receive from send-only type %v)", n.Right, n.Right.Type) return } @@ -90,7 +91,7 @@ func typecheckrangeExpr(n *Node) { } if n.List.Len() > 2 || toomany { - yyerrorl(n.Pos, "too many variables in range") + base.ErrorfAt(n.Pos, "too many variables in range") } var v1, v2 *Node @@ -117,7 +118,7 @@ func typecheckrangeExpr(n *Node) { v1.Type = t1 } else if v1.Type != nil { if op, why := assignop(t1, v1.Type); op == OXXX { - yyerrorl(n.Pos, "cannot assign type %v to %L in range%s", t1, v1, why) + base.ErrorfAt(n.Pos, "cannot assign type %v to %L in range%s", t1, v1, why) } } checkassign(n, v1) @@ -128,7 +129,7 @@ func typecheckrangeExpr(n *Node) { v2.Type = t2 } else if v2.Type != nil { if op, why := assignop(t2, v2.Type); op == OXXX { - yyerrorl(n.Pos, "cannot assign type %v to %L in range%s", t2, v2, why) + base.ErrorfAt(n.Pos, "cannot assign type %v to %L in range%s", t2, v2, why) } } checkassign(n, v2) @@ -160,7 +161,7 @@ func walkrange(n *Node) *Node { m := n.Right lno := setlineno(m) n = mapClear(m) - lineno = lno + base.Pos = lno return n } @@ -196,7 +197,7 @@ func walkrange(n *Node) *Node { } if v1 == nil && v2 != nil { - Fatalf("walkrange: v2 != nil while v1 == nil") 
+ base.Fatalf("walkrange: v2 != nil while v1 == nil") } // n.List has no meaning anymore, clear it @@ -211,11 +212,11 @@ func walkrange(n *Node) *Node { var init []*Node switch t.Etype { default: - Fatalf("walkrange") + base.Fatalf("walkrange") case TARRAY, TSLICE: if arrayClear(n, v1, v2, a) { - lineno = lno + base.Pos = lno return n } @@ -454,7 +455,7 @@ func walkrange(n *Node) *Node { n = walkstmt(n) - lineno = lno + base.Pos = lno return n } @@ -466,7 +467,7 @@ func walkrange(n *Node) *Node { // // where == for keys of map m is reflexive. func isMapClear(n *Node) bool { - if Flag.N != 0 || instrumenting { + if base.Flag.N != 0 || instrumenting { return false } @@ -533,7 +534,7 @@ func mapClear(m *Node) *Node { // // Parameters are as in walkrange: "for v1, v2 = range a". func arrayClear(n, v1, v2, a *Node) bool { - if Flag.N != 0 || instrumenting { + if base.Flag.N != 0 || instrumenting { return false } diff --git a/src/cmd/compile/internal/gc/reflect.go b/src/cmd/compile/internal/gc/reflect.go index 11ccc15a25..456903e7d7 100644 --- a/src/cmd/compile/internal/gc/reflect.go +++ b/src/cmd/compile/internal/gc/reflect.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/gcprog" "cmd/internal/obj" @@ -131,52 +132,52 @@ func bmap(t *types.Type) *types.Type { // Check invariants that map code depends on. 
if !IsComparable(t.Key()) { - Fatalf("unsupported map key type for %v", t) + base.Fatalf("unsupported map key type for %v", t) } if BUCKETSIZE < 8 { - Fatalf("bucket size too small for proper alignment") + base.Fatalf("bucket size too small for proper alignment") } if keytype.Align > BUCKETSIZE { - Fatalf("key align too big for %v", t) + base.Fatalf("key align too big for %v", t) } if elemtype.Align > BUCKETSIZE { - Fatalf("elem align too big for %v", t) + base.Fatalf("elem align too big for %v", t) } if keytype.Width > MAXKEYSIZE { - Fatalf("key size to large for %v", t) + base.Fatalf("key size to large for %v", t) } if elemtype.Width > MAXELEMSIZE { - Fatalf("elem size to large for %v", t) + base.Fatalf("elem size to large for %v", t) } if t.Key().Width > MAXKEYSIZE && !keytype.IsPtr() { - Fatalf("key indirect incorrect for %v", t) + base.Fatalf("key indirect incorrect for %v", t) } if t.Elem().Width > MAXELEMSIZE && !elemtype.IsPtr() { - Fatalf("elem indirect incorrect for %v", t) + base.Fatalf("elem indirect incorrect for %v", t) } if keytype.Width%int64(keytype.Align) != 0 { - Fatalf("key size not a multiple of key align for %v", t) + base.Fatalf("key size not a multiple of key align for %v", t) } if elemtype.Width%int64(elemtype.Align) != 0 { - Fatalf("elem size not a multiple of elem align for %v", t) + base.Fatalf("elem size not a multiple of elem align for %v", t) } if bucket.Align%keytype.Align != 0 { - Fatalf("bucket align not multiple of key align %v", t) + base.Fatalf("bucket align not multiple of key align %v", t) } if bucket.Align%elemtype.Align != 0 { - Fatalf("bucket align not multiple of elem align %v", t) + base.Fatalf("bucket align not multiple of elem align %v", t) } if keys.Offset%int64(keytype.Align) != 0 { - Fatalf("bad alignment of keys in bmap for %v", t) + base.Fatalf("bad alignment of keys in bmap for %v", t) } if elems.Offset%int64(elemtype.Align) != 0 { - Fatalf("bad alignment of elems in bmap for %v", t) + base.Fatalf("bad alignment 
of elems in bmap for %v", t) } // Double-check that overflow field is final memory in struct, // with no padding at end. if overflow.Offset != bucket.Width-int64(Widthptr) { - Fatalf("bad offset of overflow in bmap for %v", t) + base.Fatalf("bad offset of overflow in bmap for %v", t) } t.MapType().Bucket = bucket @@ -227,7 +228,7 @@ func hmap(t *types.Type) *types.Type { // The size of hmap should be 48 bytes on 64 bit // and 28 bytes on 32 bit platforms. if size := int64(8 + 5*Widthptr); hmap.Width != size { - Fatalf("hmap size not correct: got %d, want %d", hmap.Width, size) + base.Fatalf("hmap size not correct: got %d, want %d", hmap.Width, size) } t.MapType().Hmap = hmap @@ -288,7 +289,7 @@ func hiter(t *types.Type) *types.Type { hiter.SetFields(fields) dowidth(hiter) if hiter.Width != int64(12*Widthptr) { - Fatalf("hash_iter size not correct %d %d", hiter.Width, 12*Widthptr) + base.Fatalf("hash_iter size not correct %d %d", hiter.Width, 12*Widthptr) } t.MapType().Hiter = hiter hiter.StructType().Map = t @@ -391,10 +392,10 @@ func methods(t *types.Type) []*Sig { var ms []*Sig for _, f := range mt.AllMethods().Slice() { if !f.IsMethod() { - Fatalf("non-method on %v method %v %v\n", mt, f.Sym, f) + base.Fatalf("non-method on %v method %v %v\n", mt, f.Sym, f) } if f.Type.Recv() == nil { - Fatalf("receiver with no type on %v method %v %v\n", mt, f.Sym, f) + base.Fatalf("receiver with no type on %v method %v %v\n", mt, f.Sym, f) } if f.Nointerface() { continue @@ -450,12 +451,12 @@ func imethods(t *types.Type) []*Sig { continue } if f.Sym.IsBlank() { - Fatalf("unexpected blank symbol in interface method set") + base.Fatalf("unexpected blank symbol in interface method set") } if n := len(methods); n > 0 { last := methods[n-1] if !last.name.Less(f.Sym) { - Fatalf("sigcmp vs sortinter %v %v", last.name, f.Sym) + base.Fatalf("sigcmp vs sortinter %v %v", last.name, f.Sym) } } @@ -488,17 +489,17 @@ func dimportpath(p *types.Pkg) { // If we are compiling the runtime 
package, there are two runtime packages around // -- localpkg and Runtimepkg. We don't want to produce import path symbols for // both of them, so just produce one for localpkg. - if Ctxt.Pkgpath == "runtime" && p == Runtimepkg { + if base.Ctxt.Pkgpath == "runtime" && p == Runtimepkg { return } str := p.Path if p == localpkg { // Note: myimportpath != "", or else dgopkgpath won't call dimportpath. - str = Ctxt.Pkgpath + str = base.Ctxt.Pkgpath } - s := Ctxt.Lookup("type..importpath." + p.Prefix + ".") + s := base.Ctxt.Lookup("type..importpath." + p.Prefix + ".") ot := dnameData(s, 0, str, "", nil, false) ggloblsym(s, int32(ot), obj.DUPOK|obj.RODATA) s.Set(obj.AttrContentAddressable, true) @@ -510,13 +511,13 @@ func dgopkgpath(s *obj.LSym, ot int, pkg *types.Pkg) int { return duintptr(s, ot, 0) } - if pkg == localpkg && Ctxt.Pkgpath == "" { + if pkg == localpkg && base.Ctxt.Pkgpath == "" { // If we don't know the full import path of the package being compiled // (i.e. -p was not passed on the compiler command line), emit a reference to // type..importpath.""., which the linker will rewrite using the correct import path. // Every package that imports this one directly defines the symbol. // See also https://groups.google.com/forum/#!topic/golang-dev/myb9s53HxGQ. - ns := Ctxt.Lookup(`type..importpath."".`) + ns := base.Ctxt.Lookup(`type..importpath."".`) return dsymptr(s, ot, ns, 0) } @@ -529,13 +530,13 @@ func dgopkgpathOff(s *obj.LSym, ot int, pkg *types.Pkg) int { if pkg == nil { return duint32(s, ot, 0) } - if pkg == localpkg && Ctxt.Pkgpath == "" { + if pkg == localpkg && base.Ctxt.Pkgpath == "" { // If we don't know the full import path of the package being compiled // (i.e. -p was not passed on the compiler command line), emit a reference to // type..importpath.""., which the linker will rewrite using the correct import path. // Every package that imports this one directly defines the symbol. 
// See also https://groups.google.com/forum/#!topic/golang-dev/myb9s53HxGQ. - ns := Ctxt.Lookup(`type..importpath."".`) + ns := base.Ctxt.Lookup(`type..importpath."".`) return dsymptrOff(s, ot, ns) } @@ -546,7 +547,7 @@ func dgopkgpathOff(s *obj.LSym, ot int, pkg *types.Pkg) int { // dnameField dumps a reflect.name for a struct field. func dnameField(lsym *obj.LSym, ot int, spkg *types.Pkg, ft *types.Field) int { if !types.IsExported(ft.Sym.Name) && ft.Sym.Pkg != spkg { - Fatalf("package mismatch for %v", ft.Sym) + base.Fatalf("package mismatch for %v", ft.Sym) } nsym := dname(ft.Sym.Name, ft.Note, nil, types.IsExported(ft.Sym.Name)) return dsymptr(lsym, ot, nsym, 0) @@ -555,10 +556,10 @@ func dnameField(lsym *obj.LSym, ot int, spkg *types.Pkg, ft *types.Field) int { // dnameData writes the contents of a reflect.name into s at offset ot. func dnameData(s *obj.LSym, ot int, name, tag string, pkg *types.Pkg, exported bool) int { if len(name) > 1<<16-1 { - Fatalf("name too long: %s", name) + base.Fatalf("name too long: %s", name) } if len(tag) > 1<<16-1 { - Fatalf("tag too long: %s", tag) + base.Fatalf("tag too long: %s", tag) } // Encode name and tag. See reflect/type.go for details. 
@@ -586,7 +587,7 @@ func dnameData(s *obj.LSym, ot int, name, tag string, pkg *types.Pkg, exported b copy(tb[2:], tag) } - ot = int(s.WriteBytes(Ctxt, int64(ot), b)) + ot = int(s.WriteBytes(base.Ctxt, int64(ot), b)) if pkg != nil { ot = dgopkgpathOff(s, ot, pkg) @@ -623,7 +624,7 @@ func dname(name, tag string, pkg *types.Pkg, exported bool) *obj.LSym { sname = fmt.Sprintf(`%s"".%d`, sname, dnameCount) dnameCount++ } - s := Ctxt.Lookup(sname) + s := base.Ctxt.Lookup(sname) if len(s.P) > 0 { return s } @@ -643,7 +644,7 @@ func dextratype(lsym *obj.LSym, ot int, t *types.Type, dataAdd int) int { } noff := int(Rnd(int64(ot), int64(Widthptr))) if noff != ot { - Fatalf("unexpected alignment in dextratype for %v", t) + base.Fatalf("unexpected alignment in dextratype for %v", t) } for _, a := range m { @@ -655,11 +656,11 @@ func dextratype(lsym *obj.LSym, ot int, t *types.Type, dataAdd int) int { dataAdd += uncommonSize(t) mcount := len(m) if mcount != int(uint16(mcount)) { - Fatalf("too many methods on %v: %d", t, mcount) + base.Fatalf("too many methods on %v: %d", t, mcount) } xcount := sort.Search(mcount, func(i int) bool { return !types.IsExported(m[i].name.Name) }) if dataAdd != int(uint32(dataAdd)) { - Fatalf("methods are too far away on %v: %d", t, dataAdd) + base.Fatalf("methods are too far away on %v: %d", t, dataAdd) } ot = duint16(lsym, ot, uint16(mcount)) @@ -788,7 +789,7 @@ func typeptrdata(t *types.Type) int64 { return lastPtrField.Offset + typeptrdata(lastPtrField.Type) default: - Fatalf("typeptrdata: unexpected type, %v", t) + base.Fatalf("typeptrdata: unexpected type, %v", t) return 0 } } @@ -888,7 +889,7 @@ func dcommontype(lsym *obj.LSym, t *types.Type) int { i = 1 } if i&(i-1) != 0 { - Fatalf("invalid alignment %d for %v", t.Align, t) + base.Fatalf("invalid alignment %d for %v", t.Align, t) } ot = duint8(lsym, ot, t.Align) // align ot = duint8(lsym, ot, t.Align) // fieldAlign @@ -979,7 +980,7 @@ func typesymprefix(prefix string, t *types.Type) 
*types.Sym { func typenamesym(t *types.Type) *types.Sym { if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() { - Fatalf("typenamesym %v", t) + base.Fatalf("typenamesym %v", t) } s := typesym(t) signatmu.Lock() @@ -1006,7 +1007,7 @@ func typename(t *types.Type) *Node { func itabname(t, itype *types.Type) *Node { if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() || !itype.IsInterface() || itype.IsEmptyInterface() { - Fatalf("itabname(%v, %v)", t, itype) + base.Fatalf("itabname(%v, %v)", t, itype) } s := itabpkg.Lookup(t.ShortString() + "," + itype.ShortString()) if s.Def == nil { @@ -1065,7 +1066,7 @@ func isreflexive(t *types.Type) bool { return true default: - Fatalf("bad type for map key: %v", t) + base.Fatalf("bad type for map key: %v", t) return false } } @@ -1095,7 +1096,7 @@ func needkeyupdate(t *types.Type) bool { return false default: - Fatalf("bad type for map key: %v", t) + base.Fatalf("bad type for map key: %v", t) return true } } @@ -1135,7 +1136,7 @@ func formalType(t *types.Type) *types.Type { func dtypesym(t *types.Type) *obj.LSym { t = formalType(t) if t.IsUntyped() { - Fatalf("dtypesym %v", t) + base.Fatalf("dtypesym %v", t) } s := typesym(t) @@ -1158,7 +1159,7 @@ func dtypesym(t *types.Type) *obj.LSym { dupok = obj.DUPOK } - if Ctxt.Pkgpath != "runtime" || (tbase != types.Types[tbase.Etype] && tbase != types.Bytetype && tbase != types.Runetype && tbase != types.Errortype) { // int, float, etc + if base.Ctxt.Pkgpath != "runtime" || (tbase != types.Types[tbase.Etype] && tbase != types.Bytetype && tbase != types.Runetype && tbase != types.Errortype) { // int, float, etc // named types from other files are defined only by those files if tbase.Sym != nil && tbase.Sym.Pkg != localpkg { if i, ok := typeSymIdx[tbase]; ok { @@ -1377,7 +1378,7 @@ func dtypesym(t *types.Type) *obj.LSym { ot = dsymptr(lsym, ot, dtypesym(f.Type), 0) offsetAnon := uint64(f.Offset) << 1 if offsetAnon>>1 != uint64(f.Offset) { - Fatalf("%v: bad field offset 
for %s", t, f.Sym.Name) + base.Fatalf("%v: bad field offset for %s", t, f.Sym.Name) } if f.Embedded != 0 { offsetAnon |= 1 @@ -1394,7 +1395,7 @@ func dtypesym(t *types.Type) *obj.LSym { // // When buildmode=shared, all types are in typelinks so the // runtime can deduplicate type pointers. - keep := Ctxt.Flag_dynlink + keep := base.Ctxt.Flag_dynlink if !keep && t.Sym == nil { // For an unnamed type, we only need the link if the type can // be created at run time by reflect.PtrTo and similar @@ -1471,7 +1472,7 @@ func genfun(t, it *types.Type) []*obj.LSym { } if len(sigs) != 0 { - Fatalf("incomplete itab") + base.Fatalf("incomplete itab") } return out @@ -1572,7 +1573,7 @@ func dumptabs() { // process ptabs if localpkg.Name == "main" && len(ptabs) > 0 { ot := 0 - s := Ctxt.Lookup("go.plugin.tabs") + s := base.Ctxt.Lookup("go.plugin.tabs") for _, p := range ptabs { // Dump ptab symbol into go.pluginsym package. // @@ -1591,7 +1592,7 @@ func dumptabs() { ggloblsym(s, int32(ot), int16(obj.RODATA)) ot = 0 - s = Ctxt.Lookup("go.plugin.exports") + s = base.Ctxt.Lookup("go.plugin.exports") for _, p := range ptabs { ot = dsymptr(s, ot, p.s.Linksym(), 0) } @@ -1613,7 +1614,7 @@ func dumpbasictypes() { // so this is as good as any. // another possible choice would be package main, // but using runtime means fewer copies in object files. - if Ctxt.Pkgpath == "runtime" { + if base.Ctxt.Pkgpath == "runtime" { for i := types.EType(1); i <= TBOOL; i++ { dtypesym(types.NewPtr(types.Types[i])) } @@ -1629,10 +1630,10 @@ func dumpbasictypes() { // add paths for runtime and main, which 6l imports implicitly. 
dimportpath(Runtimepkg) - if Flag.Race { + if base.Flag.Race { dimportpath(racepkg) } - if Flag.MSan { + if base.Flag.MSan { dimportpath(msanpkg) } dimportpath(types.NewPkg("main", "")) @@ -1767,7 +1768,7 @@ func fillptrmask(t *types.Type, ptrmask []byte) { func dgcprog(t *types.Type) (*obj.LSym, int64) { dowidth(t) if t.Width == BADWIDTH { - Fatalf("dgcprog: %v badwidth", t) + base.Fatalf("dgcprog: %v badwidth", t) } lsym := typesymprefix(".gcprog", t).Linksym() var p GCProg @@ -1776,7 +1777,7 @@ func dgcprog(t *types.Type) (*obj.LSym, int64) { offset := p.w.BitIndex() * int64(Widthptr) p.end() if ptrdata := typeptrdata(t); offset < ptrdata || offset > t.Width { - Fatalf("dgcprog: %v: offset=%d but ptrdata=%d size=%d", t, offset, ptrdata, t.Width) + base.Fatalf("dgcprog: %v: offset=%d but ptrdata=%d size=%d", t, offset, ptrdata, t.Width) } return lsym, offset } @@ -1791,7 +1792,7 @@ func (p *GCProg) init(lsym *obj.LSym) { p.lsym = lsym p.symoff = 4 // first 4 bytes hold program length p.w.Init(p.writeByte) - if Debug.GCProg > 0 { + if base.Debug.GCProg > 0 { fmt.Fprintf(os.Stderr, "compile: start GCProg for %v\n", lsym) p.w.Debug(os.Stderr) } @@ -1805,7 +1806,7 @@ func (p *GCProg) end() { p.w.End() duint32(p.lsym, 0, uint32(p.symoff-4)) ggloblsym(p.lsym, int32(p.symoff), obj.DUPOK|obj.RODATA|obj.LOCAL) - if Debug.GCProg > 0 { + if base.Debug.GCProg > 0 { fmt.Fprintf(os.Stderr, "compile: end GCProg for %v\n", p.lsym) } } @@ -1821,7 +1822,7 @@ func (p *GCProg) emit(t *types.Type, offset int64) { } switch t.Etype { default: - Fatalf("GCProg.emit: unexpected type %v", t) + base.Fatalf("GCProg.emit: unexpected type %v", t) case TSTRING: p.w.Ptr(offset / int64(Widthptr)) @@ -1836,7 +1837,7 @@ func (p *GCProg) emit(t *types.Type, offset int64) { case TARRAY: if t.NumElem() == 0 { // should have been handled by haspointers check above - Fatalf("GCProg.emit: empty array") + base.Fatalf("GCProg.emit: empty array") } // Flatten array-of-array-of-array to just a big array by 
multiplying counts. @@ -1869,7 +1870,7 @@ func (p *GCProg) emit(t *types.Type, offset int64) { // size bytes of zeros. func zeroaddr(size int64) *Node { if size >= 1<<31 { - Fatalf("map elem too big %d", size) + base.Fatalf("map elem too big %d", size) } if zerosize < size { zerosize = size diff --git a/src/cmd/compile/internal/gc/scope.go b/src/cmd/compile/internal/gc/scope.go index e66b859e10..ace1d6bd9c 100644 --- a/src/cmd/compile/internal/gc/scope.go +++ b/src/cmd/compile/internal/gc/scope.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/internal/dwarf" "cmd/internal/obj" "cmd/internal/src" @@ -13,7 +14,7 @@ import ( // See golang.org/issue/20390. func xposBefore(p, q src.XPos) bool { - return Ctxt.PosTable.Pos(p).Before(Ctxt.PosTable.Pos(q)) + return base.Ctxt.PosTable.Pos(p).Before(base.Ctxt.PosTable.Pos(q)) } func findScope(marks []Mark, pos src.XPos) ScopeID { diff --git a/src/cmd/compile/internal/gc/select.go b/src/cmd/compile/internal/gc/select.go index 8e6b15af53..8d4c8d2be1 100644 --- a/src/cmd/compile/internal/gc/select.go +++ b/src/cmd/compile/internal/gc/select.go @@ -4,7 +4,10 @@ package gc -import "cmd/compile/internal/types" +import ( + "cmd/compile/internal/base" + "cmd/compile/internal/types" +) // select func typecheckselect(sel *Node) { @@ -14,18 +17,18 @@ func typecheckselect(sel *Node) { for _, ncase := range sel.List.Slice() { if ncase.Op != OCASE { setlineno(ncase) - Fatalf("typecheckselect %v", ncase.Op) + base.Fatalf("typecheckselect %v", ncase.Op) } if ncase.List.Len() == 0 { // default if def != nil { - yyerrorl(ncase.Pos, "multiple defaults in select (first at %v)", def.Line()) + base.ErrorfAt(ncase.Pos, "multiple defaults in select (first at %v)", def.Line()) } else { def = ncase } } else if ncase.List.Len() > 1 { - yyerrorl(ncase.Pos, "select cases cannot be lists") + base.ErrorfAt(ncase.Pos, "select cases cannot be lists") } else { ncase.List.SetFirst(typecheck(ncase.List.First(), ctxStmt)) n := 
ncase.List.First() @@ -41,7 +44,7 @@ func typecheckselect(sel *Node) { // on the same line). This matches the approach before 1.10. pos = ncase.Pos } - yyerrorl(pos, "select case must be receive, send or assign recv") + base.ErrorfAt(pos, "select case must be receive, send or assign recv") // convert x = <-c into OSELRECV(x, <-c). // remove implicit conversions; the eventual assignment @@ -52,7 +55,7 @@ func typecheckselect(sel *Node) { } if n.Right.Op != ORECV { - yyerrorl(n.Pos, "select assignment must have receive on right hand side") + base.ErrorfAt(n.Pos, "select assignment must have receive on right hand side") break } @@ -61,7 +64,7 @@ func typecheckselect(sel *Node) { // convert x, ok = <-c into OSELRECV2(x, <-c) with ntest=ok case OAS2RECV: if n.Right.Op != ORECV { - yyerrorl(n.Pos, "select assignment must have receive on right hand side") + base.ErrorfAt(n.Pos, "select assignment must have receive on right hand side") break } @@ -84,13 +87,13 @@ func typecheckselect(sel *Node) { typecheckslice(ncase.Nbody.Slice(), ctxStmt) } - lineno = lno + base.Pos = lno } func walkselect(sel *Node) { lno := setlineno(sel) if sel.Nbody.Len() != 0 { - Fatalf("double walkselect") + base.Fatalf("double walkselect") } init := sel.Ninit.Slice() @@ -102,12 +105,12 @@ func walkselect(sel *Node) { sel.Nbody.Set(init) walkstmtlist(sel.Nbody.Slice()) - lineno = lno + base.Pos = lno } func walkselectcases(cases *Nodes) []*Node { ncas := cases.Len() - sellineno := lineno + sellineno := base.Pos // optimization: zero-case select if ncas == 0 { @@ -125,7 +128,7 @@ func walkselectcases(cases *Nodes) []*Node { n.Ninit.Set(nil) switch n.Op { default: - Fatalf("select %v", n.Op) + base.Fatalf("select %v", n.Op) case OSEND: // already ok @@ -202,7 +205,7 @@ func walkselectcases(cases *Nodes) []*Node { r.Ninit.Set(cas.Ninit.Slice()) switch n.Op { default: - Fatalf("select %v", n.Op) + base.Fatalf("select %v", n.Op) case OSEND: // if selectnbsend(c, v) { body } else { default body } @@ 
-245,7 +248,7 @@ func walkselectcases(cases *Nodes) []*Node { var init []*Node // generate sel-struct - lineno = sellineno + base.Pos = sellineno selv := temp(types.NewArray(scasetype(), int64(ncas))) r := nod(OAS, selv, nil) r = typecheck(r, ctxStmt) @@ -255,7 +258,7 @@ func walkselectcases(cases *Nodes) []*Node { order := temp(types.NewArray(types.Types[TUINT16], 2*int64(ncas))) var pc0, pcs *Node - if Flag.Race { + if base.Flag.Race { pcs = temp(types.NewArray(types.Types[TUINTPTR], int64(ncas))) pc0 = typecheck(nod(OADDR, nod(OINDEX, pcs, nodintconst(0)), nil), ctxExpr) } else { @@ -278,7 +281,7 @@ func walkselectcases(cases *Nodes) []*Node { var c, elem *Node switch n.Op { default: - Fatalf("select %v", n.Op) + base.Fatalf("select %v", n.Op) case OSEND: i = nsends nsends++ @@ -308,17 +311,17 @@ func walkselectcases(cases *Nodes) []*Node { // TODO(mdempsky): There should be a cleaner way to // handle this. - if Flag.Race { + if base.Flag.Race { r = mkcall("selectsetpc", nil, nil, nod(OADDR, nod(OINDEX, pcs, nodintconst(int64(i))), nil)) init = append(init, r) } } if nsends+nrecvs != ncas { - Fatalf("walkselectcases: miscount: %v + %v != %v", nsends, nrecvs, ncas) + base.Fatalf("walkselectcases: miscount: %v + %v != %v", nsends, nrecvs, ncas) } // run the select - lineno = sellineno + base.Pos = sellineno chosen := temp(types.Types[TINT]) recvOK := temp(types.Types[TBOOL]) r = nod(OAS2, nil, nil) @@ -331,7 +334,7 @@ func walkselectcases(cases *Nodes) []*Node { // selv and order are no longer alive after selectgo. 
init = append(init, nod(OVARKILL, selv, nil)) init = append(init, nod(OVARKILL, order, nil)) - if Flag.Race { + if base.Flag.Race { init = append(init, nod(OVARKILL, pcs, nil)) } diff --git a/src/cmd/compile/internal/gc/sinit.go b/src/cmd/compile/internal/gc/sinit.go index 1f89baa3c0..219435d6de 100644 --- a/src/cmd/compile/internal/gc/sinit.go +++ b/src/cmd/compile/internal/gc/sinit.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/obj" "fmt" @@ -40,7 +41,7 @@ func (s *InitSchedule) append(n *Node) { // staticInit adds an initialization statement n to the schedule. func (s *InitSchedule) staticInit(n *Node) { if !s.tryStaticInit(n) { - if Flag.Percent != 0 { + if base.Flag.Percent != 0 { Dump("nonstatic", n) } s.append(n) @@ -62,7 +63,7 @@ func (s *InitSchedule) tryStaticInit(n *Node) bool { return true } lno := setlineno(n) - defer func() { lineno = lno }() + defer func() { base.Pos = lno }() return s.staticassign(n.Left, n.Right) } @@ -256,8 +257,8 @@ func (s *InitSchedule) staticassign(l *Node, r *Node) bool { case OCLOSURE: if hasemptycvars(r) { - if Debug.Closure > 0 { - Warnl(r.Pos, "closure converted to global") + if base.Debug.Closure > 0 { + base.WarnfAt(r.Pos, "closure converted to global") } // Closures with no captured variables are globals, // so the assignment can be done at link time. 
@@ -462,7 +463,7 @@ func isStaticCompositeLiteral(n *Node) bool { case OSTRUCTLIT: for _, r := range n.List.Slice() { if r.Op != OSTRUCTKEY { - Fatalf("isStaticCompositeLiteral: rhs not OSTRUCTKEY: %v", r) + base.Fatalf("isStaticCompositeLiteral: rhs not OSTRUCTKEY: %v", r) } if !isStaticCompositeLiteral(r.Left) { return false @@ -517,7 +518,7 @@ func fixedlit(ctxt initContext, kind initKind, n *Node, var_ *Node, init *Nodes) if r.Op == OKEY { k = indexconst(r.Left) if k < 0 { - Fatalf("fixedlit: invalid index %v", r.Left) + base.Fatalf("fixedlit: invalid index %v", r.Left) } r = r.Right } @@ -531,7 +532,7 @@ func fixedlit(ctxt initContext, kind initKind, n *Node, var_ *Node, init *Nodes) case OSTRUCTLIT: splitnode = func(r *Node) (*Node, *Node) { if r.Op != OSTRUCTKEY { - Fatalf("fixedlit: rhs not OSTRUCTKEY: %v", r) + base.Fatalf("fixedlit: rhs not OSTRUCTKEY: %v", r) } if r.Sym.IsBlank() || isBlank { return nblank, r.Left @@ -540,7 +541,7 @@ func fixedlit(ctxt initContext, kind initKind, n *Node, var_ *Node, init *Nodes) return nodSym(ODOT, var_, r.Sym), r.Left } default: - Fatalf("fixedlit bad op: %v", n.Op) + base.Fatalf("fixedlit bad op: %v", n.Op) } for _, r := range n.List.Slice() { @@ -578,7 +579,7 @@ func fixedlit(ctxt initContext, kind initKind, n *Node, var_ *Node, init *Nodes) a = walkstmt(a) init.Append(a) default: - Fatalf("fixedlit: bad kind %d", kind) + base.Fatalf("fixedlit: bad kind %d", kind) } } @@ -610,7 +611,7 @@ func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) { var_ = typecheck(var_, ctxExpr|ctxAssign) nam := stataddr(var_) if nam == nil || nam.Class() != PEXTERN { - Fatalf("slicelit: %v", var_) + base.Fatalf("slicelit: %v", var_) } slicesym(nam, vstat, t.NumElem()) return @@ -709,7 +710,7 @@ func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) { if value.Op == OKEY { index = indexconst(value.Left) if index < 0 { - Fatalf("slicelit: invalid index %v", value.Left) + base.Fatalf("slicelit: invalid index %v", 
value.Left) } value = value.Right } @@ -770,7 +771,7 @@ func maplit(n *Node, m *Node, init *Nodes) { // All remaining entries are static. Double-check that. for _, r := range entries { if !isStaticCompositeLiteral(r.Left) || !isStaticCompositeLiteral(r.Right) { - Fatalf("maplit: entry is not a literal: %v", r) + base.Fatalf("maplit: entry is not a literal: %v", r) } } @@ -868,7 +869,7 @@ func anylit(n *Node, var_ *Node, init *Nodes) { t := n.Type switch n.Op { default: - Fatalf("anylit: not lit, op=%v node=%v", n.Op, n) + base.Fatalf("anylit: not lit, op=%v node=%v", n.Op, n) case ONAME, OMETHEXPR: a := nod(OAS, var_, n) @@ -877,7 +878,7 @@ func anylit(n *Node, var_ *Node, init *Nodes) { case OPTRLIT: if !t.IsPtr() { - Fatalf("anylit: not ptr") + base.Fatalf("anylit: not ptr") } var r *Node @@ -905,7 +906,7 @@ func anylit(n *Node, var_ *Node, init *Nodes) { case OSTRUCTLIT, OARRAYLIT: if !t.IsStruct() && !t.IsArray() { - Fatalf("anylit: not struct/array") + base.Fatalf("anylit: not struct/array") } if var_.isSimpleName() && n.List.Len() > 4 { @@ -951,7 +952,7 @@ func anylit(n *Node, var_ *Node, init *Nodes) { case OMAPLIT: if !t.IsMap() { - Fatalf("anylit: not map") + base.Fatalf("anylit: not map") } maplit(n, var_, init) } @@ -1052,7 +1053,7 @@ func (s *InitSchedule) initplan(n *Node) { s.initplans[n] = p switch n.Op { default: - Fatalf("initplan") + base.Fatalf("initplan") case OARRAYLIT, OSLICELIT: var k int64 @@ -1060,7 +1061,7 @@ func (s *InitSchedule) initplan(n *Node) { if a.Op == OKEY { k = indexconst(a.Left) if k < 0 { - Fatalf("initplan arraylit: invalid index %v", a.Left) + base.Fatalf("initplan arraylit: invalid index %v", a.Left) } a = a.Right } @@ -1071,7 +1072,7 @@ func (s *InitSchedule) initplan(n *Node) { case OSTRUCTLIT: for _, a := range n.List.Slice() { if a.Op != OSTRUCTKEY { - Fatalf("initplan structlit") + base.Fatalf("initplan structlit") } if a.Sym.IsBlank() { continue @@ -1082,7 +1083,7 @@ func (s *InitSchedule) initplan(n *Node) { case 
OMAPLIT: for _, a := range n.List.Slice() { if a.Op != OKEY { - Fatalf("initplan maplit") + base.Fatalf("initplan maplit") } s.addvalue(p, -1, a.Right) } @@ -1155,12 +1156,12 @@ func isvaluelit(n *Node) bool { func genAsStatic(as *Node) { if as.Left.Type == nil { - Fatalf("genAsStatic as.Left not typechecked") + base.Fatalf("genAsStatic as.Left not typechecked") } nam := stataddr(as.Left) if nam == nil || (nam.Class() != PEXTERN && as.Left != nblank) { - Fatalf("genAsStatic: lhs %v", as.Left) + base.Fatalf("genAsStatic: lhs %v", as.Left) } switch { @@ -1169,6 +1170,6 @@ func genAsStatic(as *Node) { case (as.Right.Op == ONAME || as.Right.Op == OMETHEXPR) && as.Right.Class() == PFUNC: pfuncsym(nam, as.Right) default: - Fatalf("genAsStatic: rhs %v", as.Right) + base.Fatalf("genAsStatic: rhs %v", as.Right) } } diff --git a/src/cmd/compile/internal/gc/ssa.go b/src/cmd/compile/internal/gc/ssa.go index f06f08e6ab..e892a01da0 100644 --- a/src/cmd/compile/internal/gc/ssa.go +++ b/src/cmd/compile/internal/gc/ssa.go @@ -15,6 +15,7 @@ import ( "bufio" "bytes" + "cmd/compile/internal/base" "cmd/compile/internal/ssa" "cmd/compile/internal/types" "cmd/internal/obj" @@ -60,10 +61,10 @@ func initssaconfig() { _ = types.NewPtr(types.Types[TINT64]) // *int64 _ = types.NewPtr(types.Errortype) // *error types.NewPtrCacheEnabled = false - ssaConfig = ssa.NewConfig(thearch.LinkArch.Name, *types_, Ctxt, Flag.N == 0) + ssaConfig = ssa.NewConfig(thearch.LinkArch.Name, *types_, base.Ctxt, base.Flag.N == 0) ssaConfig.SoftFloat = thearch.SoftFloat - ssaConfig.Race = Flag.Race - ssaCaches = make([]ssa.Cache, Flag.LowerC) + ssaConfig.Race = base.Flag.Race + ssaCaches = make([]ssa.Cache, base.Flag.LowerC) // Set up some runtime functions we'll need to call. 
assertE2I = sysfunc("assertE2I") @@ -240,7 +241,7 @@ func dvarint(x *obj.LSym, off int, v int64) int { // - Size of the argument // - Offset of where argument should be placed in the args frame when making call func (s *state) emitOpenDeferInfo() { - x := Ctxt.Lookup(s.curfn.Func.lsym.Name + ".opendefer") + x := base.Ctxt.Lookup(s.curfn.Func.lsym.Name + ".opendefer") s.curfn.Func.lsym.Func().OpenCodedDeferInfo = x off := 0 @@ -291,7 +292,7 @@ func buildssa(fn *Node, worker int) *ssa.Func { name := fn.funcname() printssa := false if ssaDump != "" { // match either a simple name e.g. "(*Reader).Reset", or a package.name e.g. "compress/gzip.(*Reader).Reset" - printssa = name == ssaDump || Ctxt.Pkgpath+"."+name == ssaDump + printssa = name == ssaDump || base.Ctxt.Pkgpath+"."+name == ssaDump } var astBuf *bytes.Buffer if printssa { @@ -342,7 +343,7 @@ func buildssa(fn *Node, worker int) *ssa.Func { if printssa { ssaDF := ssaDumpFile if ssaDir != "" { - ssaDF = filepath.Join(ssaDir, Ctxt.Pkgpath+"."+name+".html") + ssaDF = filepath.Join(ssaDir, base.Ctxt.Pkgpath+"."+name+".html") ssaD := filepath.Dir(ssaDF) os.MkdirAll(ssaD, 0755) } @@ -358,9 +359,9 @@ func buildssa(fn *Node, worker int) *ssa.Func { s.fwdVars = map[*Node]*ssa.Value{} s.startmem = s.entryNewValue0(ssa.OpInitMem, types.TypeMem) - s.hasOpenDefers = Flag.N == 0 && s.hasdefer && !s.curfn.Func.OpenCodedDeferDisallowed() + s.hasOpenDefers = base.Flag.N == 0 && s.hasdefer && !s.curfn.Func.OpenCodedDeferDisallowed() switch { - case s.hasOpenDefers && (Ctxt.Flag_shared || Ctxt.Flag_dynlink) && thearch.LinkArch.Name == "386": + case s.hasOpenDefers && (base.Ctxt.Flag_shared || base.Ctxt.Flag_dynlink) && thearch.LinkArch.Name == "386": // Don't support open-coded defers for 386 ONLY when using shared // libraries, because there is extra code (added by rewriteToUseGot()) // preceding the deferreturn/ret code that is generated by gencallret() @@ -478,7 +479,7 @@ func buildssa(fn *Node, worker int) *ssa.Func { func 
dumpSourcesColumn(writer *ssa.HTMLWriter, fn *Node) { // Read sources of target function fn. - fname := Ctxt.PosTable.Pos(fn.Pos).Filename() + fname := base.Ctxt.PosTable.Pos(fn.Pos).Filename() targetFn, err := readFuncLines(fname, fn.Pos.Line(), fn.Func.Endlineno.Line()) if err != nil { writer.Logf("cannot read sources for function %v: %v", fn, err) @@ -494,7 +495,7 @@ func dumpSourcesColumn(writer *ssa.HTMLWriter, fn *Node) { } else { elno = fi.Name.Defn.Func.Endlineno } - fname := Ctxt.PosTable.Pos(fi.Pos).Filename() + fname := base.Ctxt.PosTable.Pos(fi.Pos).Filename() fnLines, err := readFuncLines(fname, fi.Pos.Line(), elno.Line()) if err != nil { writer.Logf("cannot read sources for inlined function %v: %v", fi, err) @@ -752,8 +753,8 @@ func (s *state) pushLine(line src.XPos) { // the frontend may emit node with line number missing, // use the parent line number in this case. line = s.peekPos() - if Flag.K != 0 { - Warn("buildssa: unknown position (line 0)") + if base.Flag.K != 0 { + base.Warn("buildssa: unknown position (line 0)") } } else { s.lastPos = line @@ -988,13 +989,13 @@ func (s *state) instrument(t *types.Type, addr *ssa.Value, wr bool) { var fn *obj.LSym needWidth := false - if Flag.MSan { + if base.Flag.MSan { fn = msanread if wr { fn = msanwrite } needWidth = true - } else if Flag.Race && t.NumComponents(types.CountBlankFields) > 1 { + } else if base.Flag.Race && t.NumComponents(types.CountBlankFields) > 1 { // for composite objects we have to write every address // because a write might happen to any subobject. // composites with only one element don't have subobjects, though. @@ -1003,7 +1004,7 @@ func (s *state) instrument(t *types.Type, addr *ssa.Value, wr bool) { fn = racewriterange } needWidth = true - } else if Flag.Race { + } else if base.Flag.Race { // for non-composite objects we can write just the start // address, as any write must write the first byte. 
fn = raceread @@ -1090,7 +1091,7 @@ func (s *state) stmt(n *Node) { case OCALLMETH, OCALLINTER: s.callResult(n, callNormal) if n.Op == OCALLFUNC && n.Left.Op == ONAME && n.Left.Class() == PFUNC { - if fn := n.Left.Sym.Name; Flag.CompilingRuntime && fn == "throw" || + if fn := n.Left.Sym.Name; base.Flag.CompilingRuntime && fn == "throw" || n.Left.Sym.Pkg == Runtimepkg && (fn == "throwinit" || fn == "gopanic" || fn == "panicwrap" || fn == "block" || fn == "panicmakeslicelen" || fn == "panicmakeslicecap") { m := s.mem() b := s.endBlock() @@ -1102,7 +1103,7 @@ func (s *state) stmt(n *Node) { } } case ODEFER: - if Debug.Defer > 0 { + if base.Debug.Defer > 0 { var defertype string if s.hasOpenDefers { defertype = "open-coded" @@ -1111,7 +1112,7 @@ func (s *state) stmt(n *Node) { } else { defertype = "heap-allocated" } - Warnl(n.Pos, "%s defer", defertype) + base.WarnfAt(n.Pos, "%s defer", defertype) } if s.hasOpenDefers { s.openDeferRecord(n.Left) @@ -1225,20 +1226,20 @@ func (s *state) stmt(n *Node) { // Check whether we're writing the result of an append back to the same slice. // If so, we handle it specially to avoid write barriers on the fast // (non-growth) path. - if !samesafeexpr(n.Left, rhs.List.First()) || Flag.N != 0 { + if !samesafeexpr(n.Left, rhs.List.First()) || base.Flag.N != 0 { break } // If the slice can be SSA'd, it'll be on the stack, // so there will be no write barriers, // so there's no need to attempt to prevent them. 
if s.canSSA(n.Left) { - if Debug.Append > 0 { // replicating old diagnostic message - Warnl(n.Pos, "append: len-only update (in local slice)") + if base.Debug.Append > 0 { // replicating old diagnostic message + base.WarnfAt(n.Pos, "append: len-only update (in local slice)") } break } - if Debug.Append > 0 { - Warnl(n.Pos, "append: len-only update") + if base.Debug.Append > 0 { + base.WarnfAt(n.Pos, "append: len-only update") } s.append(rhs, true) return @@ -1814,7 +1815,7 @@ func floatForComplex(t *types.Type) *types.Type { case TCOMPLEX128: return types.Types[TFLOAT64] } - Fatalf("unexpected type: %v", t) + base.Fatalf("unexpected type: %v", t) return nil } @@ -1825,7 +1826,7 @@ func complexForFloat(t *types.Type) *types.Type { case TFLOAT64: return types.Types[TCOMPLEX128] } - Fatalf("unexpected type: %v", t) + base.Fatalf("unexpected type: %v", t) return nil } @@ -4130,9 +4131,9 @@ func findIntrinsic(sym *types.Sym) intrinsicBuilder { } pkg := sym.Pkg.Path if sym.Pkg == localpkg { - pkg = Ctxt.Pkgpath + pkg = base.Ctxt.Pkgpath } - if Flag.Race && pkg == "sync/atomic" { + if base.Flag.Race && pkg == "sync/atomic" { // The race detector needs to be able to intercept these calls. // We can't intrinsify them. 
return nil @@ -4172,7 +4173,7 @@ func (s *state) intrinsicCall(n *Node) *ssa.Value { if x.Op == ssa.OpSelect0 || x.Op == ssa.OpSelect1 { x = x.Args[0] } - Warnl(n.Pos, "intrinsic substitution for %v with %s", n.Left.Sym.Name, x.LongString()) + base.WarnfAt(n.Pos, "intrinsic substitution for %v with %s", n.Left.Sym.Name, x.LongString()) } return v } @@ -4240,7 +4241,7 @@ func (s *state) openDeferRecord(n *Node) { } } else if n.Op == OCALLMETH { if fn.Op != ODOTMETH { - Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", fn) + base.Fatalf("OCALLMETH: n.Left not an ODOTMETH: %v", fn) } closureVal := s.getMethodClosure(fn) // We must always store the function value in a stack slot for the @@ -4250,7 +4251,7 @@ func (s *state) openDeferRecord(n *Node) { opendefer.closureNode = closure.Aux.(*Node) } else { if fn.Op != ODOTINTER { - Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op) + base.Fatalf("OCALLINTER: n.Left not an ODOTINTER: %v", fn.Op) } closure, rcvr := s.getClosureAndRcvr(fn) opendefer.closure = s.openDeferSave(nil, closure.Type, closure) @@ -4382,7 +4383,7 @@ func (s *state) openDeferExit() { // Generate code to call the function call of the defer, using the // closure/receiver/args that were stored in argtmps at the point // of the defer statement. - argStart := Ctxt.FixedFrameSize() + argStart := base.Ctxt.FixedFrameSize() fn := r.n.Left stksize := fn.Type.ArgWidth() var ACArgs []ssa.Param @@ -4499,7 +4500,7 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value { nf := res.NumFields() for i := 0; i < nf; i++ { fp := res.Field(i) - ACResults = append(ACResults, ssa.Param{Type: fp.Type, Offset: int32(fp.Offset + Ctxt.FixedFrameSize())}) + ACResults = append(ACResults, ssa.Param{Type: fp.Type, Offset: int32(fp.Offset + base.Ctxt.FixedFrameSize())}) } } @@ -4604,14 +4605,14 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value { } // Call runtime.deferprocStack with pointer to _defer record. 
- ACArgs = append(ACArgs, ssa.Param{Type: types.Types[TUINTPTR], Offset: int32(Ctxt.FixedFrameSize())}) + ACArgs = append(ACArgs, ssa.Param{Type: types.Types[TUINTPTR], Offset: int32(base.Ctxt.FixedFrameSize())}) aux := ssa.StaticAuxCall(deferprocStack, ACArgs, ACResults) if testLateExpansion { callArgs = append(callArgs, addr, s.mem()) call = s.newValue0A(ssa.OpStaticLECall, aux.LateExpansionResultType(), aux) call.AddArgs(callArgs...) } else { - arg0 := s.constOffPtrSP(types.Types[TUINTPTR], Ctxt.FixedFrameSize()) + arg0 := s.constOffPtrSP(types.Types[TUINTPTR], base.Ctxt.FixedFrameSize()) s.store(types.Types[TUINTPTR], arg0, addr) call = s.newValue1A(ssa.OpStaticCall, types.TypeMem, aux, s.mem()) } @@ -4625,7 +4626,7 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value { } else { // Store arguments to stack, including defer/go arguments and receiver for method calls. // These are written in SP-offset order. - argStart := Ctxt.FixedFrameSize() + argStart := base.Ctxt.FixedFrameSize() // Defer/go args. if k != callNormal { // Write argsize and closure (args to newproc/deferproc). @@ -4766,13 +4767,13 @@ func (s *state) call(n *Node, k callKind, returnResultAddr bool) *ssa.Value { if testLateExpansion { return s.newValue1I(ssa.OpSelectNAddr, pt, 0, call) } - return s.constOffPtrSP(pt, fp.Offset+Ctxt.FixedFrameSize()) + return s.constOffPtrSP(pt, fp.Offset+base.Ctxt.FixedFrameSize()) } if testLateExpansion { return s.newValue1I(ssa.OpSelectN, fp.Type, 0, call) } - return s.load(n.Type, s.constOffPtrSP(types.NewPtr(fp.Type), fp.Offset+Ctxt.FixedFrameSize())) + return s.load(n.Type, s.constOffPtrSP(types.NewPtr(fp.Type), fp.Offset+base.Ctxt.FixedFrameSize())) } // maybeNilCheckClosure checks if a nil check of a closure is needed in some @@ -4930,7 +4931,7 @@ func (s *state) addr(n *Node) *ssa.Value { // canSSA reports whether n is SSA-able. // n must be an ONAME (or an ODOT sequence with an ONAME base). 
func (s *state) canSSA(n *Node) bool { - if Flag.N != 0 { + if base.Flag.N != 0 { return false } for n.Op == ODOT || (n.Op == OINDEX && n.Left.Type.IsArray()) { @@ -5026,7 +5027,7 @@ func (s *state) exprPtr(n *Node, bounded bool, lineno src.XPos) *ssa.Value { // Used only for automatically inserted nil checks, // not for user code like 'x != nil'. func (s *state) nilCheck(ptr *ssa.Value) { - if Debug.DisableNil != 0 || s.curfn.Func.NilCheckDisabled() { + if base.Debug.DisableNil != 0 || s.curfn.Func.NilCheckDisabled() { return } s.newValue2(ssa.OpNilCheck, types.TypeVoid, ptr, s.mem()) @@ -5041,7 +5042,7 @@ func (s *state) nilCheck(ptr *ssa.Value) { func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bool) *ssa.Value { idx = s.extendIndex(idx, len, kind, bounded) - if bounded || Flag.B != 0 { + if bounded || base.Flag.B != 0 { // If bounded or bounds checking is flag-disabled, then no check necessary, // just return the extended index. // @@ -5114,7 +5115,7 @@ func (s *state) boundsCheck(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bo s.startBlock(bNext) // In Spectre index mode, apply an appropriate mask to avoid speculative out-of-bounds accesses. 
- if Flag.Cfg.SpectreIndex { + if base.Flag.Cfg.SpectreIndex { op := ssa.OpSpectreIndex if kind != ssa.BoundsIndex && kind != ssa.BoundsIndexU { op = ssa.OpSpectreSliceIndex @@ -5133,7 +5134,7 @@ func (s *state) check(cmp *ssa.Value, fn *obj.LSym) { b.Likely = ssa.BranchLikely bNext := s.f.NewBlock(ssa.BlockPlain) line := s.peekPos() - pos := Ctxt.PosTable.Pos(line) + pos := base.Ctxt.PosTable.Pos(line) fl := funcLine{f: fn, base: pos.Base(), line: pos.Line()} bPanic := s.panics[fl] if bPanic == nil { @@ -5172,7 +5173,7 @@ func (s *state) intDivide(n *Node, a, b *ssa.Value) *ssa.Value { func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args ...*ssa.Value) []*ssa.Value { s.prevCall = nil // Write args to the stack - off := Ctxt.FixedFrameSize() + off := base.Ctxt.FixedFrameSize() testLateExpansion := ssa.LateCallExpansionEnabledWithin(s.f) var ACArgs []ssa.Param var ACResults []ssa.Param @@ -5219,7 +5220,7 @@ func (s *state) rtcall(fn *obj.LSym, returns bool, results []*types.Type, args . b := s.endBlock() b.Kind = ssa.BlockExit b.SetControl(call) - call.AuxInt = off - Ctxt.FixedFrameSize() + call.AuxInt = off - base.Ctxt.FixedFrameSize() if len(results) > 0 { s.Fatalf("panic call can't have results") } @@ -5837,8 +5838,8 @@ func (s *state) dottype(n *Node, commaok bool) (res, resok *ssa.Value) { if n.Type.IsEmptyInterface() { // Converting to an empty interface. // Input could be an empty or nonempty interface. - if Debug.TypeAssert > 0 { - Warnl(n.Pos, "type assertion inlined") + if base.Debug.TypeAssert > 0 { + base.WarnfAt(n.Pos, "type assertion inlined") } // Get itab/type field from input. @@ -5904,8 +5905,8 @@ func (s *state) dottype(n *Node, commaok bool) (res, resok *ssa.Value) { return } // converting to a nonempty interface needs a runtime call. 
- if Debug.TypeAssert > 0 { - Warnl(n.Pos, "type assertion not inlined") + if base.Debug.TypeAssert > 0 { + base.WarnfAt(n.Pos, "type assertion not inlined") } if n.Left.Type.IsEmptyInterface() { if commaok { @@ -5921,15 +5922,15 @@ func (s *state) dottype(n *Node, commaok bool) (res, resok *ssa.Value) { return s.rtcall(assertI2I, true, []*types.Type{n.Type}, target, iface)[0], nil } - if Debug.TypeAssert > 0 { - Warnl(n.Pos, "type assertion inlined") + if base.Debug.TypeAssert > 0 { + base.WarnfAt(n.Pos, "type assertion inlined") } // Converting to a concrete type. direct := isdirectiface(n.Type) itab := s.newValue1(ssa.OpITab, byteptr, iface) // type word of interface - if Debug.TypeAssert > 0 { - Warnl(n.Pos, "type assertion inlined") + if base.Debug.TypeAssert > 0 { + base.WarnfAt(n.Pos, "type assertion inlined") } var targetITab *ssa.Value if n.Left.Type.IsEmptyInterface() { @@ -6235,9 +6236,9 @@ func emitStackObjects(e *ssafn, pp *Progs) { p.To.Name = obj.NAME_EXTERN p.To.Sym = x - if Flag.Live != 0 { + if base.Flag.Live != 0 { for _, v := range vars { - Warnl(v.Pos, "stack object %v %s", v, v.Type.String()) + base.WarnfAt(v.Pos, "stack object %v %s", v, v.Type.String()) } } } @@ -6277,7 +6278,7 @@ func genssa(f *ssa.Func, pp *Progs) { s.ScratchFpMem = e.scratchFpMem - if Ctxt.Flag_locationlists { + if base.Ctxt.Flag_locationlists { if cap(f.Cache.ValueToProgAfter) < f.NumValues() { f.Cache.ValueToProgAfter = make([]*obj.Prog, f.NumValues()) } @@ -6373,7 +6374,7 @@ func genssa(f *ssa.Func, pp *Progs) { thearch.SSAGenValue(&s, v) } - if Ctxt.Flag_locationlists { + if base.Ctxt.Flag_locationlists { valueToProgAfter[v.ID] = s.pp.next } @@ -6397,7 +6398,7 @@ func genssa(f *ssa.Func, pp *Progs) { } // Emit control flow instructions for block var next *ssa.Block - if i < len(f.Blocks)-1 && Flag.N == 0 { + if i < len(f.Blocks)-1 && base.Flag.N == 0 { // If -N, leave next==nil so every block with successors // ends in a JMP (except call blocks - plive doesn't like // 
select{send,recv} followed by a JMP call). Helps keep @@ -6473,8 +6474,8 @@ func genssa(f *ssa.Func, pp *Progs) { } } - if Ctxt.Flag_locationlists { - e.curfn.Func.DebugInfo = ssa.BuildFuncDebug(Ctxt, f, Debug.LocationLists > 1, stackOffset) + if base.Ctxt.Flag_locationlists { + e.curfn.Func.DebugInfo = ssa.BuildFuncDebug(base.Ctxt, f, base.Debug.LocationLists > 1, stackOffset) bstart := s.bstart // Note that at this moment, Prog.Pc is a sequence number; it's // not a real PC until after assembly, so this mapping has to @@ -6705,7 +6706,7 @@ func (s *state) extendIndex(idx, len *ssa.Value, kind ssa.BoundsKind, bounded bo } else { lo = s.newValue1(ssa.OpInt64Lo, types.Types[TUINT], idx) } - if bounded || Flag.B != 0 { + if bounded || base.Flag.B != 0 { return lo } bNext := s.f.NewBlock(ssa.BlockPlain) @@ -6807,7 +6808,7 @@ func CheckLoweredPhi(v *ssa.Value) { func CheckLoweredGetClosurePtr(v *ssa.Value) { entry := v.Block.Func.Entry if entry != v.Block || entry.Values[0] != v { - Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v) + base.Fatalf("in %s, badly placed LoweredGetClosurePtr: %v %v", v.Block.Func.Name, v.Block, v) } } @@ -6869,7 +6870,7 @@ func (s *SSAGenState) Call(v *ssa.Value) *obj.Prog { case sys.ARM, sys.ARM64, sys.MIPS, sys.MIPS64: p.To.Type = obj.TYPE_MEM default: - Fatalf("unknown indirect call family") + base.Fatalf("unknown indirect call family") } p.To.Reg = v.Args[0].Reg() } @@ -6884,7 +6885,7 @@ func (s *SSAGenState) PrepareCall(v *ssa.Value) { if !idx.StackMapValid() { // See Liveness.hasStackMap. 
if sym, ok := v.Aux.(*ssa.AuxCall); !ok || !(sym.Fn == typedmemclr || sym.Fn == typedmemmove) { - Fatalf("missing stack map index for %v", v.LongString()) + base.Fatalf("missing stack map index for %v", v.LongString()) } } @@ -7085,7 +7086,7 @@ func (e *ssafn) CanSSA(t *types.Type) bool { } func (e *ssafn) Line(pos src.XPos) string { - return linestr(pos) + return base.FmtPos(pos) } // Log logs a message from the compiler. @@ -7101,23 +7102,23 @@ func (e *ssafn) Log() bool { // Fatal reports a compiler error and exits. func (e *ssafn) Fatalf(pos src.XPos, msg string, args ...interface{}) { - lineno = pos + base.Pos = pos nargs := append([]interface{}{e.curfn.funcname()}, args...) - Fatalf("'%s': "+msg, nargs...) + base.Fatalf("'%s': "+msg, nargs...) } // Warnl reports a "warning", which is usually flag-triggered // logging output for the benefit of tests. func (e *ssafn) Warnl(pos src.XPos, fmt_ string, args ...interface{}) { - Warnl(pos, fmt_, args...) + base.WarnfAt(pos, fmt_, args...) 
} func (e *ssafn) Debug_checknil() bool { - return Debug.Nil != 0 + return base.Debug.Nil != 0 } func (e *ssafn) UseWriteBarrier() bool { - return Flag.WB + return base.Flag.WB } func (e *ssafn) Syslook(name string) *obj.LSym { @@ -7142,7 +7143,7 @@ func (e *ssafn) SetWBPos(pos src.XPos) { } func (e *ssafn) MyImportPath() string { - return Ctxt.Pkgpath + return base.Ctxt.Pkgpath } func (n *Node) Typ() *types.Type { @@ -7157,7 +7158,7 @@ func (n *Node) StorageClass() ssa.StorageClass { case PAUTO: return ssa.ClassAuto default: - Fatalf("untranslatable storage class for %v: %s", n, n.Class()) + base.Fatalf("untranslatable storage class for %v: %s", n, n.Class()) return 0 } } diff --git a/src/cmd/compile/internal/gc/subr.go b/src/cmd/compile/internal/gc/subr.go index 989d10a561..00402a1bee 100644 --- a/src/cmd/compile/internal/gc/subr.go +++ b/src/cmd/compile/internal/gc/subr.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/src" "crypto/md5" @@ -49,8 +50,8 @@ func hasUniquePos(n *Node) bool { } if !n.Pos.IsKnown() { - if Flag.K != 0 { - Warn("setlineno: unknown position (line 0)") + if base.Flag.K != 0 { + base.Warn("setlineno: unknown position (line 0)") } return false } @@ -59,9 +60,9 @@ func hasUniquePos(n *Node) bool { } func setlineno(n *Node) src.XPos { - lno := lineno + lno := base.Pos if n != nil && hasUniquePos(n) { - lineno = n.Pos + base.Pos = n.Pos } return lno } @@ -87,11 +88,11 @@ func lookupN(prefix string, n int) *types.Sym { // user labels. func autolabel(prefix string) *types.Sym { if prefix[0] != '.' 
{ - Fatalf("autolabel prefix must start with '.', have %q", prefix) + base.Fatalf("autolabel prefix must start with '.', have %q", prefix) } fn := Curfn if Curfn == nil { - Fatalf("autolabel outside function") + base.Fatalf("autolabel outside function") } n := fn.Func.Label fn.Func.Label++ @@ -112,7 +113,7 @@ func importdot(opkg *types.Pkg, pack *Node) { s1 := lookup(s.Name) if s1.Def != nil { pkgerror := fmt.Sprintf("during import %q", opkg.Path) - redeclare(lineno, s1, pkgerror) + redeclare(base.Pos, s1, pkgerror) continue } @@ -120,7 +121,7 @@ func importdot(opkg *types.Pkg, pack *Node) { s1.Block = s.Block if asNode(s1.Def).Name == nil { Dump("s1def", asNode(s1.Def)) - Fatalf("missing Name") + base.Fatalf("missing Name") } asNode(s1.Def).Name.Pack = pack s1.Origpkg = opkg @@ -129,12 +130,12 @@ func importdot(opkg *types.Pkg, pack *Node) { if n == 0 { // can't possibly be used - there were no symbols - yyerrorl(pack.Pos, "imported and not used: %q", opkg.Path) + base.ErrorfAt(pack.Pos, "imported and not used: %q", opkg.Path) } } func nod(op Op, nleft, nright *Node) *Node { - return nodl(lineno, op, nleft, nright) + return nodl(base.Pos, op, nleft, nright) } func nodl(pos src.XPos, op Op, nleft, nright *Node) *Node { @@ -149,7 +150,7 @@ func nodl(pos src.XPos, op Op, nleft, nright *Node) *Node { n.Func = &x.f n.Func.Decl = n case ONAME: - Fatalf("use newname instead") + base.Fatalf("use newname instead") case OLABEL, OPACK: var x struct { n Node @@ -171,7 +172,7 @@ func nodl(pos src.XPos, op Op, nleft, nright *Node) *Node { // newname returns a new ONAME Node associated with symbol s. func newname(s *types.Sym) *Node { - n := newnamel(lineno, s) + n := newnamel(base.Pos, s) n.Name.Curfn = Curfn return n } @@ -180,7 +181,7 @@ func newname(s *types.Sym) *Node { // The caller is responsible for setting n.Name.Curfn. 
func newnamel(pos src.XPos, s *types.Sym) *Node { if s == nil { - Fatalf("newnamel nil") + base.Fatalf("newnamel nil") } var x struct { @@ -203,7 +204,7 @@ func newnamel(pos src.XPos, s *types.Sym) *Node { // nodSym makes a Node with Op op and with the Left field set to left // and the Sym field set to sym. This is for ODOT and friends. func nodSym(op Op, left *Node, sym *types.Sym) *Node { - return nodlSym(lineno, op, left, sym) + return nodlSym(base.Pos, op, left, sym) } // nodlSym makes a Node with position Pos, with Op op, and with the Left field set to left @@ -290,7 +291,7 @@ func treecopy(n *Node, pos src.XPos) *Node { } if m.Name != nil && n.Op != ODCLFIELD { Dump("treecopy", n) - Fatalf("treecopy Name") + base.Fatalf("treecopy Name") } return m @@ -625,7 +626,7 @@ func assignconvfn(n *Node, t *types.Type, context func() string) *Node { } if t.Etype == TBLANK && n.Type.Etype == TNIL { - yyerror("use of untyped nil") + base.Errorf("use of untyped nil") } n = convlit1(n, t, false, context) @@ -654,7 +655,7 @@ func assignconvfn(n *Node, t *types.Type, context func() string) *Node { op, why := assignop(n.Type, t) if op == OXXX { - yyerror("cannot use %L as type %v in %s%s", n, t, context(), why) + base.Errorf("cannot use %L as type %v in %s%s", n, t, context(), why) op = OCONV } @@ -687,7 +688,7 @@ func (n *Node) SliceBounds() (low, high, max *Node) { s := n.List.Slice() return s[0], s[1], s[2] } - Fatalf("SliceBounds op %v: %v", n.Op, n) + base.Fatalf("SliceBounds op %v: %v", n.Op, n) return nil, nil, nil } @@ -697,7 +698,7 @@ func (n *Node) SetSliceBounds(low, high, max *Node) { switch n.Op { case OSLICE, OSLICEARR, OSLICESTR: if max != nil { - Fatalf("SetSliceBounds %v given three bounds", n.Op) + base.Fatalf("SetSliceBounds %v given three bounds", n.Op) } s := n.List.Slice() if s == nil { @@ -724,7 +725,7 @@ func (n *Node) SetSliceBounds(low, high, max *Node) { s[2] = max return } - Fatalf("SetSliceBounds op %v: %v", n.Op, n) + base.Fatalf("SetSliceBounds 
op %v: %v", n.Op, n) } // IsSlice3 reports whether o is a slice3 op (OSLICE3, OSLICE3ARR). @@ -736,7 +737,7 @@ func (o Op) IsSlice3() bool { case OSLICE3, OSLICE3ARR: return true } - Fatalf("IsSlice3 op %v", o) + base.Fatalf("IsSlice3 op %v", o) return false } @@ -746,7 +747,7 @@ func (n *Node) backingArrayPtrLen() (ptr, len *Node) { var init Nodes c := cheapexpr(n, &init) if c != n || init.Len() != 0 { - Fatalf("backingArrayPtrLen not cheap: %v", n) + base.Fatalf("backingArrayPtrLen not cheap: %v", n) } ptr = nod(OSPTR, n, nil) if n.Type.IsString() { @@ -763,7 +764,7 @@ func (n *Node) backingArrayPtrLen() (ptr, len *Node) { // associated with the label n, if any. func (n *Node) labeledControl() *Node { if n.Op != OLABEL { - Fatalf("labeledControl %v", n.Op) + base.Fatalf("labeledControl %v", n.Op) } ctl := n.Name.Defn if ctl == nil { @@ -779,7 +780,7 @@ func (n *Node) labeledControl() *Node { func syslook(name string) *Node { s := Runtimepkg.Lookup(name) if s == nil || s.Def == nil { - Fatalf("syslook: can't find runtime.%s", name) + base.Fatalf("syslook: can't find runtime.%s", name) } return asNode(s.Def) } @@ -811,7 +812,7 @@ func calcHasCall(n *Node) bool { switch n.Op { case OLITERAL, ONIL, ONAME, OTYPE: if n.HasCall() { - Fatalf("OLITERAL/ONAME/OTYPE should never have calls: %+v", n) + base.Fatalf("OLITERAL/ONAME/OTYPE should never have calls: %+v", n) } return false case OCALL, OCALLFUNC, OCALLMETH, OCALLINTER: @@ -870,7 +871,7 @@ func badtype(op Op, tl, tr *types.Type) { } } - yyerror("illegal types for operand: %v%s", op, s) + base.Errorf("illegal types for operand: %v%s", op, s) } // brcom returns !(op). 
@@ -890,7 +891,7 @@ func brcom(op Op) Op { case OGE: return OLT } - Fatalf("brcom: no com for %v\n", op) + base.Fatalf("brcom: no com for %v\n", op) return op } @@ -911,7 +912,7 @@ func brrev(op Op) Op { case OGE: return OLE } - Fatalf("brrev: no rev for %v\n", op) + base.Fatalf("brrev: no rev for %v\n", op) return op } @@ -972,7 +973,7 @@ func safeexpr(n *Node, init *Nodes) *Node { // make a copy; must not be used as an lvalue if islvalue(n) { - Fatalf("missing lvalue case in safeexpr: %v", n) + base.Fatalf("missing lvalue case in safeexpr: %v", n) } return cheapexpr(n, init) } @@ -1161,7 +1162,7 @@ func adddot(n *Node) *Node { n.Left.SetImplicit(true) } case ambig: - yyerror("ambiguous selector %v", n) + base.Errorf("ambiguous selector %v", n) n.Left = nil } @@ -1334,7 +1335,7 @@ func structargs(tl *types.Type, mustname bool) []*Node { // method - M func (t T)(), a TFIELD type struct // newnam - the eventual mangled name of this function func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) { - if false && Flag.LowerR != 0 { + if false && base.Flag.LowerR != 0 { fmt.Printf("genwrapper rcvrtype=%v method=%v newnam=%v\n", rcvr, method, newnam) } @@ -1350,7 +1351,7 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) { return } - lineno = autogeneratedPos + base.Pos = autogeneratedPos dclcontext = PEXTERN tfn := nod(OTFUNC, nil, nil) @@ -1384,7 +1385,7 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) { // the TOC to the appropriate value for that module. But if it returns // directly to the wrapper's caller, nothing will reset it to the correct // value for that function. 
- if !instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !isifacemethod(method.Type) && !(thearch.LinkArch.Name == "ppc64le" && Ctxt.Flag_dynlink) { + if !instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !isifacemethod(method.Type) && !(thearch.LinkArch.Name == "ppc64le" && base.Ctxt.Flag_dynlink) { // generate tail call: adjust pointer receiver and jump to embedded method. dot = dot.Left // skip final .M // TODO(mdempsky): Remove dependency on dotlist. @@ -1407,12 +1408,12 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) { fn.Nbody.Append(call) } - if false && Flag.LowerR != 0 { + if false && base.Flag.LowerR != 0 { dumplist("genwrapper body", fn.Nbody) } funcbody() - if Debug.DclStack != 0 { + if base.Debug.DclStack != 0 { testdclstack() } @@ -1464,7 +1465,7 @@ func ifacelookdot(s *types.Sym, t *types.Type, ignorecase bool) (m *types.Field, path, ambig := dotpath(s, t, &m, ignorecase) if path == nil { if ambig { - yyerror("%v.%v is ambiguous", t, s) + base.Errorf("%v.%v is ambiguous", t, s) } return nil, false } @@ -1477,7 +1478,7 @@ func ifacelookdot(s *types.Sym, t *types.Type, ignorecase bool) (m *types.Field, } if !m.IsMethod() { - yyerror("%v.%v is a field, not a method", t, s) + base.Errorf("%v.%v is a field, not a method", t, s) return nil, followptr } @@ -1548,8 +1549,8 @@ func implements(t, iface *types.Type, m, samename **types.Field, ptr *int) bool // the method does not exist for value types. 
rcvr := tm.Type.Recv().Type if rcvr.IsPtr() && !t0.IsPtr() && !followptr && !isifacemethod(tm.Type) { - if false && Flag.LowerR != 0 { - yyerror("interface pointer mismatch") + if false && base.Flag.LowerR != 0 { + base.Errorf("interface pointer mismatch") } *m = im @@ -1624,40 +1625,40 @@ var reservedimports = []string{ func isbadimport(path string, allowSpace bool) bool { if strings.Contains(path, "\x00") { - yyerror("import path contains NUL") + base.Errorf("import path contains NUL") return true } for _, ri := range reservedimports { if path == ri { - yyerror("import path %q is reserved and cannot be used", path) + base.Errorf("import path %q is reserved and cannot be used", path) return true } } for _, r := range path { if r == utf8.RuneError { - yyerror("import path contains invalid UTF-8 sequence: %q", path) + base.Errorf("import path contains invalid UTF-8 sequence: %q", path) return true } if r < 0x20 || r == 0x7f { - yyerror("import path contains control character: %q", path) + base.Errorf("import path contains control character: %q", path) return true } if r == '\\' { - yyerror("import path contains backslash; use slash: %q", path) + base.Errorf("import path contains backslash; use slash: %q", path) return true } if !allowSpace && unicode.IsSpace(r) { - yyerror("import path contains space character: %q", path) + base.Errorf("import path contains space character: %q", path) return true } if strings.ContainsRune("!\"#$%&'()*,:;<=>?[]^`{|}", r) { - yyerror("import path contains invalid character '%c': %q", r, path) + base.Errorf("import path contains invalid character '%c': %q", r, path) return true } } @@ -1709,7 +1710,7 @@ func itabType(itab *Node) *Node { // It follows the pointer if !isdirectiface(t). 
func ifaceData(pos src.XPos, n *Node, t *types.Type) *Node { if t.IsInterface() { - Fatalf("ifaceData interface: %v", t) + base.Fatalf("ifaceData interface: %v", t) } ptr := nodlSym(pos, OIDATA, n, nil) if isdirectiface(t) { @@ -1731,7 +1732,7 @@ func ifaceData(pos src.XPos, n *Node, t *types.Type) *Node { func typePos(t *types.Type) src.XPos { n := asNode(t.Nod) if n == nil || !n.Pos.IsKnown() { - Fatalf("bad type: %v", t) + base.Fatalf("bad type: %v", t) } return n.Pos } diff --git a/src/cmd/compile/internal/gc/swt.go b/src/cmd/compile/internal/gc/swt.go index c249a85b64..7befbdf06c 100644 --- a/src/cmd/compile/internal/gc/swt.go +++ b/src/cmd/compile/internal/gc/swt.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/src" "go/constant" @@ -26,7 +27,7 @@ func typecheckTypeSwitch(n *Node) { n.Left.Right = typecheck(n.Left.Right, ctxExpr) t := n.Left.Right.Type if t != nil && !t.IsInterface() { - yyerrorl(n.Pos, "cannot type switch on non-interface value %L", n.Left.Right) + base.ErrorfAt(n.Pos, "cannot type switch on non-interface value %L", n.Left.Right) t = nil } @@ -34,7 +35,7 @@ func typecheckTypeSwitch(n *Node) { // declaration itself. So if there are no cases, we won't // notice that it went unused. 
if v := n.Left.Left; v != nil && !v.isBlank() && n.List.Len() == 0 { - yyerrorl(v.Pos, "%v declared but not used", v.Sym) + base.ErrorfAt(v.Pos, "%v declared but not used", v.Sym) } var defCase, nilCase *Node @@ -43,7 +44,7 @@ func typecheckTypeSwitch(n *Node) { ls := ncase.List.Slice() if len(ls) == 0 { // default: if defCase != nil { - yyerrorl(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line()) + base.ErrorfAt(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line()) } else { defCase = ncase } @@ -61,21 +62,21 @@ func typecheckTypeSwitch(n *Node) { switch { case n1.isNil(): // case nil: if nilCase != nil { - yyerrorl(ncase.Pos, "multiple nil cases in type switch (first at %v)", nilCase.Line()) + base.ErrorfAt(ncase.Pos, "multiple nil cases in type switch (first at %v)", nilCase.Line()) } else { nilCase = ncase } case n1.Op != OTYPE: - yyerrorl(ncase.Pos, "%L is not a type", n1) + base.ErrorfAt(ncase.Pos, "%L is not a type", n1) case !n1.Type.IsInterface() && !implements(n1.Type, t, &missing, &have, &ptr) && !missing.Broke(): if have != nil && !have.Broke() { - yyerrorl(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+ + base.ErrorfAt(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+ " (wrong type for %v method)\n\thave %v%S\n\twant %v%S", n.Left.Right, n1.Type, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type) } else if ptr != 0 { - yyerrorl(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+ + base.ErrorfAt(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+ " (%v method has pointer receiver)", n.Left.Right, n1.Type, missing.Sym) } else { - yyerrorl(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+ + base.ErrorfAt(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+ " (missing %v method)", n.Left.Right, n1.Type, missing.Sym) } } @@ -135,7 +136,7 @@ func (s *typeSet) add(pos 
src.XPos, typ *types.Type) { prevs := s.m[ls] for _, prev := range prevs { if types.Identical(typ, prev.typ) { - yyerrorl(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, linestr(prev.pos)) + base.ErrorfAt(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, base.FmtPos(prev.pos)) return } } @@ -162,9 +163,9 @@ func typecheckExprSwitch(n *Node) { case !IsComparable(t): if t.IsStruct() { - yyerrorl(n.Pos, "cannot switch on %L (struct containing %v cannot be compared)", n.Left, IncomparableField(t).Type) + base.ErrorfAt(n.Pos, "cannot switch on %L (struct containing %v cannot be compared)", n.Left, IncomparableField(t).Type) } else { - yyerrorl(n.Pos, "cannot switch on %L", n.Left) + base.ErrorfAt(n.Pos, "cannot switch on %L", n.Left) } t = nil } @@ -176,7 +177,7 @@ func typecheckExprSwitch(n *Node) { ls := ncase.List.Slice() if len(ls) == 0 { // default: if defCase != nil { - yyerrorl(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line()) + base.ErrorfAt(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line()) } else { defCase = ncase } @@ -192,17 +193,17 @@ func typecheckExprSwitch(n *Node) { } if nilonly != "" && !n1.isNil() { - yyerrorl(ncase.Pos, "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left) + base.ErrorfAt(ncase.Pos, "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left) } else if t.IsInterface() && !n1.Type.IsInterface() && !IsComparable(n1.Type) { - yyerrorl(ncase.Pos, "invalid case %L in switch (incomparable type)", n1) + base.ErrorfAt(ncase.Pos, "invalid case %L in switch (incomparable type)", n1) } else { op1, _ := assignop(n1.Type, t) op2, _ := assignop(t, n1.Type) if op1 == OXXX && op2 == OXXX { if n.Left != nil { - yyerrorl(ncase.Pos, "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left, n1.Type, t) + base.ErrorfAt(ncase.Pos, "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left, 
n1.Type, t) } else { - yyerrorl(ncase.Pos, "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type) + base.ErrorfAt(ncase.Pos, "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type) } } } @@ -267,7 +268,7 @@ func walkExprSwitch(sw *Node) { cond = copyexpr(cond, cond.Type, &sw.Nbody) } - lineno = lno + base.Pos = lno s := exprSwitch{ exprname: cond, @@ -282,7 +283,7 @@ func walkExprSwitch(sw *Node) { // Process case dispatch. if ncase.List.Len() == 0 { if defaultGoto != nil { - Fatalf("duplicate default case not detected during typechecking") + base.Fatalf("duplicate default case not detected during typechecking") } defaultGoto = jmp } @@ -464,7 +465,7 @@ func allCaseExprsAreSideEffectFree(sw *Node) bool { for _, ncase := range sw.List.Slice() { if ncase.Op != OCASE { - Fatalf("switch string(byteslice) bad op: %v", ncase.Op) + base.Fatalf("switch string(byteslice) bad op: %v", ncase.Op) } for _, v := range ncase.List.Slice() { if v.Op != OLITERAL { @@ -517,7 +518,7 @@ func walkTypeSwitch(sw *Node) { // Use a similar strategy for non-empty interfaces. ifNil := nod(OIF, nil, nil) ifNil.Left = nod(OEQ, itab, nodnil()) - lineno = lineno.WithNotStmt() // disable statement marks after the first check. + base.Pos = base.Pos.WithNotStmt() // disable statement marks after the first check. ifNil.Left = typecheck(ifNil.Left, ctxExpr) ifNil.Left = defaultlit(ifNil.Left, nil) // ifNil.Nbody assigned at end. 
@@ -558,7 +559,7 @@ func walkTypeSwitch(sw *Node) { if ncase.List.Len() == 0 { // default: if defaultGoto != nil { - Fatalf("duplicate default case not detected during typechecking") + base.Fatalf("duplicate default case not detected during typechecking") } defaultGoto = jmp } @@ -566,7 +567,7 @@ func walkTypeSwitch(sw *Node) { for _, n1 := range ncase.List.Slice() { if n1.isNil() { // case nil: if nilGoto != nil { - Fatalf("duplicate nil case not detected during typechecking") + base.Fatalf("duplicate nil case not detected during typechecking") } nilGoto = jmp continue @@ -586,7 +587,7 @@ func walkTypeSwitch(sw *Node) { if singleType != nil { // We have a single concrete type. Extract the data. if singleType.IsInterface() { - Fatalf("singleType interface should have been handled in Add") + base.Fatalf("singleType interface should have been handled in Add") } val = ifaceData(ncase.Pos, s.facename, singleType) } @@ -733,7 +734,7 @@ func binarySearch(n int, out *Nodes, less func(i int) *Node, leaf func(i int, ni for i := lo; i < hi; i++ { nif := nod(OIF, nil, nil) leaf(i, nif) - lineno = lineno.WithNotStmt() + base.Pos = base.Pos.WithNotStmt() nif.Left = typecheck(nif.Left, ctxExpr) nif.Left = defaultlit(nif.Left, nil) out.Append(nif) @@ -745,7 +746,7 @@ func binarySearch(n int, out *Nodes, less func(i int) *Node, leaf func(i int, ni half := lo + n/2 nif := nod(OIF, nil, nil) nif.Left = less(half) - lineno = lineno.WithNotStmt() + base.Pos = base.Pos.WithNotStmt() nif.Left = typecheck(nif.Left, ctxExpr) nif.Left = defaultlit(nif.Left, nil) do(lo, half, &nif.Nbody) diff --git a/src/cmd/compile/internal/gc/syntax.go b/src/cmd/compile/internal/gc/syntax.go index f771a7184e..11671fc54a 100644 --- a/src/cmd/compile/internal/gc/syntax.go +++ b/src/cmd/compile/internal/gc/syntax.go @@ -7,6 +7,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/ssa" "cmd/compile/internal/types" "cmd/internal/obj" @@ -106,7 +107,7 @@ func (n *Node) SubOp() Op { switch 
n.Op { case OASOP, ONAME: default: - Fatalf("unexpected op: %v", n.Op) + base.Fatalf("unexpected op: %v", n.Op) } return Op(n.aux) } @@ -115,21 +116,21 @@ func (n *Node) SetSubOp(op Op) { switch n.Op { case OASOP, ONAME: default: - Fatalf("unexpected op: %v", n.Op) + base.Fatalf("unexpected op: %v", n.Op) } n.aux = uint8(op) } func (n *Node) IndexMapLValue() bool { if n.Op != OINDEXMAP { - Fatalf("unexpected op: %v", n.Op) + base.Fatalf("unexpected op: %v", n.Op) } return n.aux != 0 } func (n *Node) SetIndexMapLValue(b bool) { if n.Op != OINDEXMAP { - Fatalf("unexpected op: %v", n.Op) + base.Fatalf("unexpected op: %v", n.Op) } if b { n.aux = 1 @@ -140,14 +141,14 @@ func (n *Node) SetIndexMapLValue(b bool) { func (n *Node) TChanDir() types.ChanDir { if n.Op != OTCHAN { - Fatalf("unexpected op: %v", n.Op) + base.Fatalf("unexpected op: %v", n.Op) } return types.ChanDir(n.aux) } func (n *Node) SetTChanDir(dir types.ChanDir) { if n.Op != OTCHAN { - Fatalf("unexpected op: %v", n.Op) + base.Fatalf("unexpected op: %v", n.Op) } n.aux = uint8(dir) } @@ -236,7 +237,7 @@ func (n *Node) SetEmbedded(b bool) { n.flags.set(nodeEmbedded, b) } // inserted before dereferencing. See state.exprPtr. func (n *Node) MarkNonNil() { if !n.Type.IsPtr() && !n.Type.IsUnsafePtr() { - Fatalf("MarkNonNil(%v), type %v", n, n.Type) + base.Fatalf("MarkNonNil(%v), type %v", n, n.Type) } n.flags.set(nodeNonNil, true) } @@ -255,7 +256,7 @@ func (n *Node) SetBounded(b bool) { // No length and cap checks needed // since new slice and copied over slice data have same length. default: - Fatalf("SetBounded(%v)", n) + base.Fatalf("SetBounded(%v)", n) } n.flags.set(nodeBounded, b) } @@ -263,7 +264,7 @@ func (n *Node) SetBounded(b bool) { // MarkReadonly indicates that n is an ONAME with readonly contents. 
func (n *Node) MarkReadonly() { if n.Op != ONAME { - Fatalf("Node.MarkReadonly %v", n.Op) + base.Fatalf("Node.MarkReadonly %v", n.Op) } n.Name.SetReadonly(true) // Mark the linksym as readonly immediately @@ -284,9 +285,9 @@ func (n *Node) Val() constant.Value { // which must not have been used with SetOpt. func (n *Node) SetVal(v constant.Value) { if n.HasOpt() { - Flag.LowerH = 1 + base.Flag.LowerH = 1 Dump("have Opt", n) - Fatalf("have Opt") + base.Fatalf("have Opt") } if n.Op == OLITERAL { assertRepresents(n.Type, v) @@ -314,9 +315,9 @@ func (n *Node) SetOpt(x interface{}) { return } if n.HasVal() { - Flag.LowerH = 1 + base.Flag.LowerH = 1 Dump("have Val", n) - Fatalf("have Val") + base.Fatalf("have Val") } n.SetHasOpt(true) n.E = x @@ -367,7 +368,7 @@ func (n *Node) pkgFuncName() string { } pkg := s.Pkg - p := Ctxt.Pkgpath + p := base.Ctxt.Pkgpath if pkg != nil && pkg.Path != "" { p = pkg.Path } @@ -764,8 +765,8 @@ func (f *Func) SetInstrumentBody(b bool) { f.flags.set(funcInstrumentB func (f *Func) SetOpenCodedDeferDisallowed(b bool) { f.flags.set(funcOpenCodedDeferDisallowed, b) } func (f *Func) setWBPos(pos src.XPos) { - if Debug.WB != 0 { - Warnl(pos, "write barrier") + if base.Debug.WB != 0 { + base.WarnfAt(pos, "write barrier") } if !f.WBPos.IsKnown() { f.WBPos = pos diff --git a/src/cmd/compile/internal/gc/trace.go b/src/cmd/compile/internal/gc/trace.go index ed4b5a268d..c6eb23a090 100644 --- a/src/cmd/compile/internal/gc/trace.go +++ b/src/cmd/compile/internal/gc/trace.go @@ -9,6 +9,8 @@ package gc import ( "os" tracepkg "runtime/trace" + + "cmd/compile/internal/base" ) func init() { @@ -18,10 +20,10 @@ func init() { func traceHandlerGo17(traceprofile string) { f, err := os.Create(traceprofile) if err != nil { - Fatalf("%v", err) + base.Fatalf("%v", err) } if err := tracepkg.Start(f); err != nil { - Fatalf("%v", err) + base.Fatalf("%v", err) } - atExit(tracepkg.Stop) + base.AtExit(tracepkg.Stop) } diff --git a/src/cmd/compile/internal/gc/typecheck.go 
b/src/cmd/compile/internal/gc/typecheck.go index 7b299e553b..b61b9b0525 100644 --- a/src/cmd/compile/internal/gc/typecheck.go +++ b/src/cmd/compile/internal/gc/typecheck.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "fmt" "go/constant" @@ -25,7 +26,7 @@ func tracePrint(title string, n *Node) func(np **Node) { var pos, op string var tc uint8 if n != nil { - pos = linestr(n.Pos) + pos = base.FmtPos(n.Pos) op = n.Op.String() tc = n.Typecheck() } @@ -48,7 +49,7 @@ func tracePrint(title string, n *Node) func(np **Node) { var tc uint8 var typ *types.Type if n != nil { - pos = linestr(n.Pos) + pos = base.FmtPos(n.Pos) op = n.Op.String() tc = n.Typecheck() typ = n.Type @@ -84,13 +85,13 @@ func resolve(n *Node) (res *Node) { } // only trace if there's work to do - if enableTrace && Flag.LowerT { + if enableTrace && base.Flag.LowerT { defer tracePrint("resolve", n)(&res) } if n.Sym.Pkg != localpkg { if inimport { - Fatalf("recursive inimport") + base.Fatalf("recursive inimport") } inimport = true expandDecl(n) @@ -203,7 +204,7 @@ var typecheck_tcstack []*Node func typecheck(n *Node, top int) (res *Node) { // cannot type check until all the source has been parsed if !typecheckok { - Fatalf("early typecheck") + base.Fatalf("early typecheck") } if n == nil { @@ -211,7 +212,7 @@ func typecheck(n *Node, top int) (res *Node) { } // only trace if there's work to do - if enableTrace && Flag.LowerT { + if enableTrace && base.Flag.LowerT { defer tracePrint("typecheck", n)(&res) } @@ -233,7 +234,7 @@ func typecheck(n *Node, top int) (res *Node) { break default: - lineno = lno + base.Pos = lno return n } } @@ -245,7 +246,7 @@ func typecheck(n *Node, top int) (res *Node) { // We can already diagnose variables used as types. 
case ONAME: if top&(ctxExpr|ctxType) == ctxType { - yyerror("%v is not a type", n) + base.Errorf("%v is not a type", n) } case OTYPE: @@ -263,34 +264,34 @@ func typecheck(n *Node, top int) (res *Node) { // with aliases that we can't handle properly yet. // Report an error rather than crashing later. if n.Name != nil && n.Name.Param.Alias() && n.Type == nil { - lineno = n.Pos - Fatalf("cannot handle alias type declaration (issue #25838): %v", n) + base.Pos = n.Pos + base.Fatalf("cannot handle alias type declaration (issue #25838): %v", n) } - lineno = lno + base.Pos = lno return n } } - yyerrorl(n.Pos, "invalid recursive type alias %v%s", n, cycleTrace(cycle)) + base.ErrorfAt(n.Pos, "invalid recursive type alias %v%s", n, cycleTrace(cycle)) } case OLITERAL: if top&(ctxExpr|ctxType) == ctxType { - yyerror("%v is not a type", n) + base.Errorf("%v is not a type", n) break } - yyerrorl(n.Pos, "constant definition loop%s", cycleTrace(cycleFor(n))) + base.ErrorfAt(n.Pos, "constant definition loop%s", cycleTrace(cycleFor(n))) } - if Errors() == 0 { + if base.Errors() == 0 { var trace string for i := len(typecheck_tcstack) - 1; i >= 0; i-- { x := typecheck_tcstack[i] trace += fmt.Sprintf("\n\t%v %v", x.Line(), x) } - yyerror("typechecking loop involving %v%s", n, trace) + base.Errorf("typechecking loop involving %v%s", n, trace) } - lineno = lno + base.Pos = lno return n } @@ -305,7 +306,7 @@ func typecheck(n *Node, top int) (res *Node) { typecheck_tcstack[last] = nil typecheck_tcstack = typecheck_tcstack[:last] - lineno = lno + base.Pos = lno return n } @@ -325,7 +326,7 @@ func indexlit(n *Node) *Node { // The result of typecheck1 MUST be assigned back to n, e.g. 
// n.Left = typecheck1(n.Left, top) func typecheck1(n *Node, top int) (res *Node) { - if enableTrace && Flag.LowerT { + if enableTrace && base.Flag.LowerT { defer tracePrint("typecheck1", n)(&res) } @@ -336,7 +337,7 @@ func typecheck1(n *Node, top int) (res *Node) { } if n.Op == ONAME && n.SubOp() != 0 && top&ctxCallee == 0 { - yyerror("use of builtin %v not in function call", n.Sym) + base.Errorf("use of builtin %v not in function call", n.Sym) n.Type = nil return n } @@ -354,14 +355,14 @@ func typecheck1(n *Node, top int) (res *Node) { default: Dump("typecheck", n) - Fatalf("typecheck %v", n.Op) + base.Fatalf("typecheck %v", n.Op) // names case OLITERAL: ok |= ctxExpr if n.Type == nil && n.Val().Kind() == constant.String { - Fatalf("string literal missing type") + base.Fatalf("string literal missing type") } case ONIL, ONONAME: @@ -379,7 +380,7 @@ func typecheck1(n *Node, top int) (res *Node) { if top&ctxAssign == 0 { // not a write to the variable if n.isBlank() { - yyerror("cannot use _ as value") + base.Errorf("cannot use _ as value") n.Type = nil return n } @@ -390,7 +391,7 @@ func typecheck1(n *Node, top int) (res *Node) { ok |= ctxExpr case OPACK: - yyerror("use of package %v without selector", n.Sym) + base.Errorf("use of package %v without selector", n.Sym) n.Type = nil return n @@ -419,7 +420,7 @@ func typecheck1(n *Node, top int) (res *Node) { } else if n.Left.Op == ODDD { if !n.Diag() { n.SetDiag(true) - yyerror("use of [...] array outside of array literal") + base.Errorf("use of [...] array outside of array literal") } n.Type = nil return n @@ -431,9 +432,9 @@ func typecheck1(n *Node, top int) (res *Node) { case l.Type == nil: // Error already reported elsewhere. 
case l.Type.IsInteger() && l.Op != OLITERAL: - yyerror("non-constant array bound %v", l) + base.Errorf("non-constant array bound %v", l) default: - yyerror("invalid array bound %v", l) + base.Errorf("invalid array bound %v", l) } n.Type = nil return n @@ -441,13 +442,13 @@ func typecheck1(n *Node, top int) (res *Node) { v := l.Val() if doesoverflow(v, types.Types[TINT]) { - yyerror("array bound is too large") + base.Errorf("array bound is too large") n.Type = nil return n } if constant.Sign(v) < 0 { - yyerror("array bound must be non-negative") + base.Errorf("array bound must be non-negative") n.Type = nil return n } @@ -472,10 +473,10 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if l.Type.NotInHeap() { - yyerror("incomplete (or unallocatable) map key not allowed") + base.Errorf("incomplete (or unallocatable) map key not allowed") } if r.Type.NotInHeap() { - yyerror("incomplete (or unallocatable) map value not allowed") + base.Errorf("incomplete (or unallocatable) map value not allowed") } setTypeNode(n, types.NewMap(l.Type, r.Type)) @@ -492,7 +493,7 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if l.Type.NotInHeap() { - yyerror("chan of incomplete (or unallocatable) type not allowed") + base.Errorf("chan of incomplete (or unallocatable) type not allowed") } setTypeNode(n, types.NewChan(l.Type, n.TChanDir())) @@ -535,7 +536,7 @@ func typecheck1(n *Node, top int) (res *Node) { if !t.IsPtr() { if top&(ctxExpr|ctxStmt) != 0 { - yyerror("invalid indirect of %L", n.Left) + base.Errorf("invalid indirect of %L", n.Left) n.Type = nil return n } @@ -582,7 +583,7 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if n.Implicit() && !okforarith[l.Type.Etype] { - yyerror("invalid operation: %v (non-numeric type %v)", n, l.Type) + base.Errorf("invalid operation: %v (non-numeric type %v)", n, l.Type) n.Type = nil return n } @@ -605,18 +606,18 @@ func typecheck1(n *Node, top int) (res *Node) { n.Right = r t := r.Type if !t.IsInteger() { 
- yyerror("invalid operation: %v (shift count type %v, must be integer)", n, r.Type) + base.Errorf("invalid operation: %v (shift count type %v, must be integer)", n, r.Type) n.Type = nil return n } if t.IsSigned() && !langSupported(1, 13, curpkg()) { - yyerrorv("go1.13", "invalid operation: %v (signed shift count type %v)", n, r.Type) + base.ErrorfVers("go1.13", "invalid operation: %v (signed shift count type %v)", n, r.Type) n.Type = nil return n } t = l.Type if t != nil && t.Etype != TIDEAL && !t.IsInteger() { - yyerror("invalid operation: %v (shift of type %v)", n, t) + base.Errorf("invalid operation: %v (shift of type %v)", n, t) n.Type = nil return n } @@ -636,12 +637,12 @@ func typecheck1(n *Node, top int) (res *Node) { // can't be converted to int (see issue #41500). if n.Op == OANDAND || n.Op == OOROR { if !n.Left.Type.IsBoolean() { - yyerror("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(n.Left.Type)) + base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(n.Left.Type)) n.Type = nil return n } if !n.Right.Type.IsBoolean() { - yyerror("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(n.Right.Type)) + base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(n.Right.Type)) n.Type = nil return n } @@ -678,7 +679,7 @@ func typecheck1(n *Node, top int) (res *Node) { aop, _ = assignop(l.Type, r.Type) if aop != OXXX { if r.Type.IsInterface() && !l.Type.IsInterface() && !IsComparable(l.Type) { - yyerror("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(l.Type)) + base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(l.Type)) n.Type = nil return n } @@ -700,7 +701,7 @@ func typecheck1(n *Node, top int) (res *Node) { aop, _ = assignop(r.Type, l.Type) if aop != OXXX { if l.Type.IsInterface() && !r.Type.IsInterface() && !IsComparable(r.Type) { - yyerror("invalid operation: %v (operator %v not defined 
on %s)", n, op, typekind(r.Type)) + base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(r.Type)) n.Type = nil return n } @@ -727,7 +728,7 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if l.Type.IsInterface() == r.Type.IsInterface() || aop == 0 { - yyerror("invalid operation: %v (mismatched types %v and %v)", n, l.Type, r.Type) + base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type, r.Type) n.Type = nil return n } @@ -737,7 +738,7 @@ func typecheck1(n *Node, top int) (res *Node) { t = mixUntyped(l.Type, r.Type) } if dt := defaultType(t); !okfor[op][dt.Etype] { - yyerror("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(t)) + base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(t)) n.Type = nil return n } @@ -745,32 +746,32 @@ func typecheck1(n *Node, top int) (res *Node) { // okfor allows any array == array, map == map, func == func. // restrict to slice/map/func == nil and nil == slice/map/func. 
if l.Type.IsArray() && !IsComparable(l.Type) { - yyerror("invalid operation: %v (%v cannot be compared)", n, l.Type) + base.Errorf("invalid operation: %v (%v cannot be compared)", n, l.Type) n.Type = nil return n } if l.Type.IsSlice() && !l.isNil() && !r.isNil() { - yyerror("invalid operation: %v (slice can only be compared to nil)", n) + base.Errorf("invalid operation: %v (slice can only be compared to nil)", n) n.Type = nil return n } if l.Type.IsMap() && !l.isNil() && !r.isNil() { - yyerror("invalid operation: %v (map can only be compared to nil)", n) + base.Errorf("invalid operation: %v (map can only be compared to nil)", n) n.Type = nil return n } if l.Type.Etype == TFUNC && !l.isNil() && !r.isNil() { - yyerror("invalid operation: %v (func can only be compared to nil)", n) + base.Errorf("invalid operation: %v (func can only be compared to nil)", n) n.Type = nil return n } if l.Type.IsStruct() { if f := IncomparableField(l.Type); f != nil { - yyerror("invalid operation: %v (struct containing %v cannot be compared)", n, f.Type) + base.Errorf("invalid operation: %v (struct containing %v cannot be compared)", n, f.Type) n.Type = nil return n } @@ -806,7 +807,7 @@ func typecheck1(n *Node, top int) (res *Node) { if (op == ODIV || op == OMOD) && Isconst(r, constant.Int) { if constant.Sign(r.Val()) == 0 { - yyerror("division by zero") + base.Errorf("division by zero") n.Type = nil return n } @@ -824,7 +825,7 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if !okfor[n.Op][defaultType(t).Etype] { - yyerror("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(t)) + base.Errorf("invalid operation: %v (operator %v not defined on %s)", n, n.Op, typekind(t)) n.Type = nil return n } @@ -850,7 +851,7 @@ func typecheck1(n *Node, top int) (res *Node) { r := outervalue(n.Left) if r.Op == ONAME { if r.Orig != r { - Fatalf("found non-orig name node %v", r) // TODO(mdempsky): What does this mean? 
+ base.Fatalf("found non-orig name node %v", r) // TODO(mdempsky): What does this mean? } r.Name.SetAddrtaken(true) if r.Name.IsClosureVar() && !capturevarscomplete { @@ -893,7 +894,7 @@ func typecheck1(n *Node, top int) (res *Node) { t := n.Left.Type if t == nil { - UpdateErrorDot(n.Line(), n.Left.String(), n.String()) + base.UpdateErrorDot(n.Line(), n.Left.String(), n.String()) n.Type = nil return n } @@ -920,7 +921,7 @@ func typecheck1(n *Node, top int) (res *Node) { } if n.Sym.IsBlank() { - yyerror("cannot refer to blank field or method") + base.Errorf("cannot refer to blank field or method") n.Type = nil return n } @@ -929,21 +930,21 @@ func typecheck1(n *Node, top int) (res *Node) { // Legitimate field or method lookup failed, try to explain the error switch { case t.IsEmptyInterface(): - yyerror("%v undefined (type %v is interface with no methods)", n, n.Left.Type) + base.Errorf("%v undefined (type %v is interface with no methods)", n, n.Left.Type) case t.IsPtr() && t.Elem().IsInterface(): // Pointer to interface is almost always a mistake. - yyerror("%v undefined (type %v is pointer to interface, not interface)", n, n.Left.Type) + base.Errorf("%v undefined (type %v is pointer to interface, not interface)", n, n.Left.Type) case lookdot(n, t, 1) != nil: // Field or method matches by name, but it is not exported. - yyerror("%v undefined (cannot refer to unexported field or method %v)", n, n.Sym) + base.Errorf("%v undefined (cannot refer to unexported field or method %v)", n, n.Sym) default: if mt := lookdot(n, t, 2); mt != nil && visible(mt.Sym) { // Case-insensitive lookup. 
- yyerror("%v undefined (type %v has no field or method %v, but does have %v)", n, n.Left.Type, n.Sym, mt.Sym) + base.Errorf("%v undefined (type %v has no field or method %v, but does have %v)", n, n.Left.Type, n.Sym, mt.Sym) } else { - yyerror("%v undefined (type %v has no field or method %v)", n, n.Left.Type, n.Sym) + base.Errorf("%v undefined (type %v has no field or method %v)", n, n.Left.Type, n.Sym) } } n.Type = nil @@ -974,7 +975,7 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if !t.IsInterface() { - yyerror("invalid type assertion: %v (non-interface type %v on left)", n, t) + base.Errorf("invalid type assertion: %v (non-interface type %v on left)", n, t) n.Type = nil return n } @@ -993,15 +994,15 @@ func typecheck1(n *Node, top int) (res *Node) { var ptr int if !implements(n.Type, t, &missing, &have, &ptr) { if have != nil && have.Sym == missing.Sym { - yyerror("impossible type assertion:\n\t%v does not implement %v (wrong type for %v method)\n"+ + base.Errorf("impossible type assertion:\n\t%v does not implement %v (wrong type for %v method)\n"+ "\t\thave %v%0S\n\t\twant %v%0S", n.Type, t, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type) } else if ptr != 0 { - yyerror("impossible type assertion:\n\t%v does not implement %v (%v method has pointer receiver)", n.Type, t, missing.Sym) + base.Errorf("impossible type assertion:\n\t%v does not implement %v (%v method has pointer receiver)", n.Type, t, missing.Sym) } else if have != nil { - yyerror("impossible type assertion:\n\t%v does not implement %v (missing %v method)\n"+ + base.Errorf("impossible type assertion:\n\t%v does not implement %v (missing %v method)\n"+ "\t\thave %v%0S\n\t\twant %v%0S", n.Type, t, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type) } else { - yyerror("impossible type assertion:\n\t%v does not implement %v (missing %v method)", n.Type, t, missing.Sym) + base.Errorf("impossible type assertion:\n\t%v does not implement %v (missing %v method)", 
n.Type, t, missing.Sym) } n.Type = nil return n @@ -1023,7 +1024,7 @@ func typecheck1(n *Node, top int) (res *Node) { } switch t.Etype { default: - yyerror("invalid operation: %v (type %v does not support indexing)", n, t) + base.Errorf("invalid operation: %v (type %v does not support indexing)", n, t) n.Type = nil return n @@ -1042,20 +1043,20 @@ func typecheck1(n *Node, top int) (res *Node) { } if n.Right.Type != nil && !n.Right.Type.IsInteger() { - yyerror("non-integer %s index %v", why, n.Right) + base.Errorf("non-integer %s index %v", why, n.Right) break } if !n.Bounded() && Isconst(n.Right, constant.Int) { x := n.Right.Val() if constant.Sign(x) < 0 { - yyerror("invalid %s index %v (index must be non-negative)", why, n.Right) + base.Errorf("invalid %s index %v (index must be non-negative)", why, n.Right) } else if t.IsArray() && constant.Compare(x, token.GEQ, constant.MakeInt64(t.NumElem())) { - yyerror("invalid array index %v (out of bounds for %d-element array)", n.Right, t.NumElem()) + base.Errorf("invalid array index %v (out of bounds for %d-element array)", n.Right, t.NumElem()) } else if Isconst(n.Left, constant.String) && constant.Compare(x, token.GEQ, constant.MakeInt64(int64(len(n.Left.StringVal())))) { - yyerror("invalid string index %v (out of bounds for %d-byte string)", n.Right, len(n.Left.StringVal())) + base.Errorf("invalid string index %v (out of bounds for %d-byte string)", n.Right, len(n.Left.StringVal())) } else if doesoverflow(x, types.Types[TINT]) { - yyerror("invalid %s index %v (index too large)", why, n.Right) + base.Errorf("invalid %s index %v (index too large)", why, n.Right) } } @@ -1077,13 +1078,13 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if !t.IsChan() { - yyerror("invalid operation: %v (receive from non-chan type %v)", n, t) + base.Errorf("invalid operation: %v (receive from non-chan type %v)", n, t) n.Type = nil return n } if !t.ChanDir().CanRecv() { - yyerror("invalid operation: %v (receive from send-only 
type %v)", n, t) + base.Errorf("invalid operation: %v (receive from send-only type %v)", n, t) n.Type = nil return n } @@ -1101,13 +1102,13 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if !t.IsChan() { - yyerror("invalid operation: %v (send to non-chan type %v)", n, t) + base.Errorf("invalid operation: %v (send to non-chan type %v)", n, t) n.Type = nil return n } if !t.ChanDir().CanSend() { - yyerror("invalid operation: %v (send to receive-only type %v)", n, t) + base.Errorf("invalid operation: %v (send to receive-only type %v)", n, t) n.Type = nil return n } @@ -1120,7 +1121,7 @@ func typecheck1(n *Node, top int) (res *Node) { n.Type = nil case OSLICEHEADER: - // Errors here are Fatalf instead of yyerror because only the compiler + // Errors here are Fatalf instead of Errorf because only the compiler // can construct an OSLICEHEADER node. // Components used in OSLICEHEADER that are supplied by parsed source code // have already been typechecked in e.g. OMAKESLICE earlier. 
@@ -1128,19 +1129,19 @@ func typecheck1(n *Node, top int) (res *Node) { t := n.Type if t == nil { - Fatalf("no type specified for OSLICEHEADER") + base.Fatalf("no type specified for OSLICEHEADER") } if !t.IsSlice() { - Fatalf("invalid type %v for OSLICEHEADER", n.Type) + base.Fatalf("invalid type %v for OSLICEHEADER", n.Type) } if n.Left == nil || n.Left.Type == nil || !n.Left.Type.IsUnsafePtr() { - Fatalf("need unsafe.Pointer for OSLICEHEADER") + base.Fatalf("need unsafe.Pointer for OSLICEHEADER") } if x := n.List.Len(); x != 2 { - Fatalf("expected 2 params (len, cap) for OSLICEHEADER, got %d", x) + base.Fatalf("expected 2 params (len, cap) for OSLICEHEADER, got %d", x) } n.Left = typecheck(n.Left, ctxExpr) @@ -1150,22 +1151,22 @@ func typecheck1(n *Node, top int) (res *Node) { c = defaultlit(c, types.Types[TINT]) if Isconst(l, constant.Int) && l.Int64Val() < 0 { - Fatalf("len for OSLICEHEADER must be non-negative") + base.Fatalf("len for OSLICEHEADER must be non-negative") } if Isconst(c, constant.Int) && c.Int64Val() < 0 { - Fatalf("cap for OSLICEHEADER must be non-negative") + base.Fatalf("cap for OSLICEHEADER must be non-negative") } if Isconst(l, constant.Int) && Isconst(c, constant.Int) && constant.Compare(l.Val(), token.GTR, c.Val()) { - Fatalf("len larger than cap for OSLICEHEADER") + base.Fatalf("len larger than cap for OSLICEHEADER") } n.List.SetFirst(l) n.List.SetSecond(c) case OMAKESLICECOPY: - // Errors here are Fatalf instead of yyerror because only the compiler + // Errors here are Fatalf instead of Errorf because only the compiler // can construct an OMAKESLICECOPY node. // Components used in OMAKESCLICECOPY that are supplied by parsed source code // have already been typechecked in OMAKE and OCOPY earlier. 
@@ -1174,19 +1175,19 @@ func typecheck1(n *Node, top int) (res *Node) { t := n.Type if t == nil { - Fatalf("no type specified for OMAKESLICECOPY") + base.Fatalf("no type specified for OMAKESLICECOPY") } if !t.IsSlice() { - Fatalf("invalid type %v for OMAKESLICECOPY", n.Type) + base.Fatalf("invalid type %v for OMAKESLICECOPY", n.Type) } if n.Left == nil { - Fatalf("missing len argument for OMAKESLICECOPY") + base.Fatalf("missing len argument for OMAKESLICECOPY") } if n.Right == nil { - Fatalf("missing slice argument to copy for OMAKESLICECOPY") + base.Fatalf("missing slice argument to copy for OMAKESLICECOPY") } n.Left = typecheck(n.Left, ctxExpr) @@ -1195,15 +1196,15 @@ func typecheck1(n *Node, top int) (res *Node) { n.Left = defaultlit(n.Left, types.Types[TINT]) if !n.Left.Type.IsInteger() && n.Type.Etype != TIDEAL { - yyerror("non-integer len argument in OMAKESLICECOPY") + base.Errorf("non-integer len argument in OMAKESLICECOPY") } if Isconst(n.Left, constant.Int) { if doesoverflow(n.Left.Val(), types.Types[TINT]) { - Fatalf("len for OMAKESLICECOPY too large") + base.Fatalf("len for OMAKESLICECOPY too large") } if constant.Sign(n.Left.Val()) < 0 { - Fatalf("len for OMAKESLICECOPY must be non-negative") + base.Fatalf("len for OMAKESLICECOPY must be non-negative") } } @@ -1227,7 +1228,7 @@ func typecheck1(n *Node, top int) (res *Node) { } if l.Type.IsArray() { if !islvalue(n.Left) { - yyerror("invalid operation %v (slice of unaddressable value)", n) + base.Errorf("invalid operation %v (slice of unaddressable value)", n) n.Type = nil return n } @@ -1241,7 +1242,7 @@ func typecheck1(n *Node, top int) (res *Node) { var tp *types.Type if t.IsString() { if hasmax { - yyerror("invalid operation %v (3-index slice of string)", n) + base.Errorf("invalid operation %v (3-index slice of string)", n) n.Type = nil return n } @@ -1259,7 +1260,7 @@ func typecheck1(n *Node, top int) (res *Node) { } else if t.IsSlice() { n.Type = t } else { - yyerror("cannot slice %v (type %v)", l, 
t) + base.Errorf("cannot slice %v (type %v)", l, t) n.Type = nil return n } @@ -1293,7 +1294,7 @@ func typecheck1(n *Node, top int) (res *Node) { if l.Op == ONAME && l.SubOp() != 0 { if n.IsDDD() && l.SubOp() != OAPPEND { - yyerror("invalid use of ... with builtin %v", l) + base.Errorf("invalid use of ... with builtin %v", l) } // builtin: OLEN, OCAP, etc. @@ -1309,7 +1310,7 @@ func typecheck1(n *Node, top int) (res *Node) { if l.Op == OTYPE { if n.IsDDD() { if !l.Type.Broke() { - yyerror("invalid use of ... in type conversion to %v", l.Type) + base.Errorf("invalid use of ... in type conversion to %v", l.Type) } n.SetDiag(true) } @@ -1352,7 +1353,7 @@ func typecheck1(n *Node, top int) (res *Node) { tp := t.Recv().Type if l.Left == nil || !types.Identical(l.Left.Type, tp) { - Fatalf("method receiver") + base.Fatalf("method receiver") } default: @@ -1362,10 +1363,10 @@ func typecheck1(n *Node, top int) (res *Node) { if isBuiltinFuncName(name) && l.Name.Defn != nil { // be more specific when the function // name matches a predeclared function - yyerror("cannot call non-function %s (type %v), declared at %s", - name, t, linestr(l.Name.Defn.Pos)) + base.Errorf("cannot call non-function %s (type %v), declared at %s", + name, t, base.FmtPos(l.Name.Defn.Pos)) } else { - yyerror("cannot call non-function %s (type %v)", name, t) + base.Errorf("cannot call non-function %s (type %v)", name, t) } n.Type = nil return n @@ -1396,7 +1397,7 @@ func typecheck1(n *Node, top int) (res *Node) { // multiple return if top&(ctxMultiOK|ctxStmt) == 0 { - yyerror("multiple-value %v() in single-value context", l) + base.Errorf("multiple-value %v() in single-value context", l) break } @@ -1434,7 +1435,7 @@ func typecheck1(n *Node, top int) (res *Node) { ok = okforcap[t.Etype] } if !ok { - yyerror("invalid argument %L for %v", l, n.Op) + base.Errorf("invalid argument %L for %v", l, n.Op) n.Type = nil return n } @@ -1465,7 +1466,7 @@ func typecheck1(n *Node, top int) (res *Node) { case 
TCOMPLEX128: n.Type = types.Types[TFLOAT64] default: - yyerror("invalid argument %L for %v", l, n.Op) + base.Errorf("invalid argument %L for %v", l, n.Op) n.Type = nil return n } @@ -1492,7 +1493,7 @@ func typecheck1(n *Node, top int) (res *Node) { n.Right = r if !types.Identical(l.Type, r.Type) { - yyerror("invalid operation: %v (mismatched types %v and %v)", n, l.Type, r.Type) + base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, l.Type, r.Type) n.Type = nil return n } @@ -1500,7 +1501,7 @@ func typecheck1(n *Node, top int) (res *Node) { var t *types.Type switch l.Type.Etype { default: - yyerror("invalid operation: %v (arguments have type %v, expected floating-point)", n, l.Type) + base.Errorf("invalid operation: %v (arguments have type %v, expected floating-point)", n, l.Type) n.Type = nil return n @@ -1529,13 +1530,13 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if !t.IsChan() { - yyerror("invalid operation: %v (non-chan type %v)", n, t) + base.Errorf("invalid operation: %v (non-chan type %v)", n, t) n.Type = nil return n } if !t.ChanDir().CanSend() { - yyerror("invalid operation: %v (cannot close receive-only channel)", n) + base.Errorf("invalid operation: %v (cannot close receive-only channel)", n) n.Type = nil return n } @@ -1547,19 +1548,19 @@ func typecheck1(n *Node, top int) (res *Node) { typecheckargs(n) args := n.List if args.Len() == 0 { - yyerror("missing arguments to delete") + base.Errorf("missing arguments to delete") n.Type = nil return n } if args.Len() == 1 { - yyerror("missing second (key) argument to delete") + base.Errorf("missing second (key) argument to delete") n.Type = nil return n } if args.Len() != 2 { - yyerror("too many arguments to delete") + base.Errorf("too many arguments to delete") n.Type = nil return n } @@ -1567,7 +1568,7 @@ func typecheck1(n *Node, top int) (res *Node) { l := args.First() r := args.Second() if l.Type != nil && !l.Type.IsMap() { - yyerror("first argument to delete must be map; 
have %L", l.Type) + base.Errorf("first argument to delete must be map; have %L", l.Type) n.Type = nil return n } @@ -1579,7 +1580,7 @@ func typecheck1(n *Node, top int) (res *Node) { typecheckargs(n) args := n.List if args.Len() == 0 { - yyerror("missing arguments to append") + base.Errorf("missing arguments to append") n.Type = nil return n } @@ -1593,25 +1594,25 @@ func typecheck1(n *Node, top int) (res *Node) { n.Type = t if !t.IsSlice() { if args.First().isNil() { - yyerror("first argument to append must be typed slice; have untyped nil") + base.Errorf("first argument to append must be typed slice; have untyped nil") n.Type = nil return n } - yyerror("first argument to append must be slice; have %L", t) + base.Errorf("first argument to append must be slice; have %L", t) n.Type = nil return n } if n.IsDDD() { if args.Len() == 1 { - yyerror("cannot use ... on first argument to append") + base.Errorf("cannot use ... on first argument to append") n.Type = nil return n } if args.Len() != 2 { - yyerror("too many arguments to append") + base.Errorf("too many arguments to append") n.Type = nil return n } @@ -1658,25 +1659,25 @@ func typecheck1(n *Node, top int) (res *Node) { if types.Identical(n.Left.Type.Elem(), types.Bytetype) { break } - yyerror("arguments to copy have different element types: %L and string", n.Left.Type) + base.Errorf("arguments to copy have different element types: %L and string", n.Left.Type) n.Type = nil return n } if !n.Left.Type.IsSlice() || !n.Right.Type.IsSlice() { if !n.Left.Type.IsSlice() && !n.Right.Type.IsSlice() { - yyerror("arguments to copy must be slices; have %L, %L", n.Left.Type, n.Right.Type) + base.Errorf("arguments to copy must be slices; have %L, %L", n.Left.Type, n.Right.Type) } else if !n.Left.Type.IsSlice() { - yyerror("first argument to copy should be slice; have %L", n.Left.Type) + base.Errorf("first argument to copy should be slice; have %L", n.Left.Type) } else { - yyerror("second argument to copy should be slice or 
string; have %L", n.Right.Type) + base.Errorf("second argument to copy should be slice or string; have %L", n.Right.Type) } n.Type = nil return n } if !types.Identical(n.Left.Type.Elem(), n.Right.Type.Elem()) { - yyerror("arguments to copy have different element types: %L and %L", n.Left.Type, n.Right.Type) + base.Errorf("arguments to copy have different element types: %L and %L", n.Left.Type, n.Right.Type) n.Type = nil return n } @@ -1695,7 +1696,7 @@ func typecheck1(n *Node, top int) (res *Node) { n.Op = op if n.Op == OXXX { if !n.Diag() && !n.Type.Broke() && !n.Left.Diag() { - yyerror("cannot convert %L to type %v%s", n.Left, n.Type, why) + base.Errorf("cannot convert %L to type %v%s", n.Left, n.Type, why) n.SetDiag(true) } n.Op = OCONV @@ -1729,7 +1730,7 @@ func typecheck1(n *Node, top int) (res *Node) { ok |= ctxExpr args := n.List.Slice() if len(args) == 0 { - yyerror("missing argument to make") + base.Errorf("missing argument to make") n.Type = nil return n } @@ -1746,13 +1747,13 @@ func typecheck1(n *Node, top int) (res *Node) { i := 1 switch t.Etype { default: - yyerror("cannot make type %v", t) + base.Errorf("cannot make type %v", t) n.Type = nil return n case TSLICE: if i >= len(args) { - yyerror("missing len argument to make(%v)", t) + base.Errorf("missing len argument to make(%v)", t) n.Type = nil return n } @@ -1776,7 +1777,7 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if Isconst(l, constant.Int) && r != nil && Isconst(r, constant.Int) && constant.Compare(l.Val(), token.GTR, r.Val()) { - yyerror("len larger than cap in make(%v)", t) + base.Errorf("len larger than cap in make(%v)", t) n.Type = nil return n } @@ -1828,7 +1829,7 @@ func typecheck1(n *Node, top int) (res *Node) { } if i < len(args) { - yyerror("too many arguments to make(%v)", t) + base.Errorf("too many arguments to make(%v)", t) n.Op = OMAKE n.Type = nil return n @@ -1840,7 +1841,7 @@ func typecheck1(n *Node, top int) (res *Node) { ok |= ctxExpr args := n.List if 
args.Len() == 0 { - yyerror("missing argument to new") + base.Errorf("missing argument to new") n.Type = nil return n } @@ -1853,7 +1854,7 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if args.Len() > 1 { - yyerror("too many arguments to new(%v)", t) + base.Errorf("too many arguments to new(%v)", t) n.Type = nil return n } @@ -1890,7 +1891,7 @@ func typecheck1(n *Node, top int) (res *Node) { case ORECOVER: ok |= ctxExpr | ctxStmt if n.List.Len() != 0 { - yyerror("too many arguments to recover") + base.Errorf("too many arguments to recover") n.Type = nil return n } @@ -1913,14 +1914,14 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if !t.IsInterface() { - Fatalf("OITAB of %v", t) + base.Fatalf("OITAB of %v", t) } n.Type = types.NewPtr(types.Types[TUINTPTR]) case OIDATA: // Whoever creates the OIDATA node must know a priori the concrete type at that moment, // usually by just having checked the OITAB. - Fatalf("cannot typecheck interface data %v", n) + base.Fatalf("cannot typecheck interface data %v", n) case OSPTR: ok |= ctxExpr @@ -1931,7 +1932,7 @@ func typecheck1(n *Node, top int) (res *Node) { return n } if !t.IsSlice() && !t.IsString() { - Fatalf("OSPTR of %v", t) + base.Fatalf("OSPTR of %v", t) } if t.IsString() { n.Type = types.NewPtr(types.Types[TUINT8]) @@ -2008,7 +2009,7 @@ func typecheck1(n *Node, top int) (res *Node) { if n.Left != nil { t := n.Left.Type if t != nil && !t.IsBoolean() { - yyerror("non-bool %L used as for condition", n.Left) + base.Errorf("non-bool %L used as for condition", n.Left) } } n.Right = typecheck(n.Right, ctxStmt) @@ -2026,7 +2027,7 @@ func typecheck1(n *Node, top int) (res *Node) { if n.Left != nil { t := n.Left.Type if t != nil && !t.IsBoolean() { - yyerror("non-bool %L used as if condition", n.Left) + base.Errorf("non-bool %L used as if condition", n.Left) } } typecheckslice(n.Nbody.Slice(), ctxStmt) @@ -2036,7 +2037,7 @@ func typecheck1(n *Node, top int) (res *Node) { ok |= ctxStmt 
typecheckargs(n) if Curfn == nil { - yyerror("return outside function") + base.Errorf("return outside function") n.Type = nil return n } @@ -2062,7 +2063,7 @@ func typecheck1(n *Node, top int) (res *Node) { typecheckrange(n) case OTYPESW: - yyerror("use of .(type) outside type switch") + base.Errorf("use of .(type) outside type switch") n.Type = nil return n @@ -2095,28 +2096,28 @@ func typecheck1(n *Node, top int) (res *Node) { n = evalConst(n) if n.Op == OTYPE && top&ctxType == 0 { if !n.Type.Broke() { - yyerror("type %v is not an expression", n.Type) + base.Errorf("type %v is not an expression", n.Type) } n.Type = nil return n } if top&(ctxExpr|ctxType) == ctxType && n.Op != OTYPE { - yyerror("%v is not a type", n) + base.Errorf("%v is not a type", n) n.Type = nil return n } // TODO(rsc): simplify if (top&(ctxCallee|ctxExpr|ctxType) != 0) && top&ctxStmt == 0 && ok&(ctxExpr|ctxType|ctxCallee) == 0 { - yyerror("%v used as value", n) + base.Errorf("%v used as value", n) n.Type = nil return n } if (top&ctxStmt != 0) && top&(ctxCallee|ctxExpr|ctxType) == 0 && ok&ctxStmt == 0 { if !n.Diag() { - yyerror("%v evaluated but not used", n) + base.Errorf("%v evaluated but not used", n) n.SetDiag(true) } @@ -2178,23 +2179,23 @@ func checksliceindex(l *Node, r *Node, tp *types.Type) bool { return false } if !t.IsInteger() { - yyerror("invalid slice index %v (type %v)", r, t) + base.Errorf("invalid slice index %v (type %v)", r, t) return false } if r.Op == OLITERAL { x := r.Val() if constant.Sign(x) < 0 { - yyerror("invalid slice index %v (index must be non-negative)", r) + base.Errorf("invalid slice index %v (index must be non-negative)", r) return false } else if tp != nil && tp.NumElem() >= 0 && constant.Compare(x, token.GTR, constant.MakeInt64(tp.NumElem())) { - yyerror("invalid slice index %v (out of bounds for %d-element array)", r, tp.NumElem()) + base.Errorf("invalid slice index %v (out of bounds for %d-element array)", r, tp.NumElem()) return false } else if Isconst(l, 
constant.String) && constant.Compare(x, token.GTR, constant.MakeInt64(int64(len(l.StringVal())))) { - yyerror("invalid slice index %v (out of bounds for %d-byte string)", r, len(l.StringVal())) + base.Errorf("invalid slice index %v (out of bounds for %d-byte string)", r, len(l.StringVal())) return false } else if doesoverflow(x, types.Types[TINT]) { - yyerror("invalid slice index %v (index too large)", r) + base.Errorf("invalid slice index %v (index too large)", r) return false } } @@ -2204,7 +2205,7 @@ func checksliceindex(l *Node, r *Node, tp *types.Type) bool { func checksliceconst(lo *Node, hi *Node) bool { if lo != nil && hi != nil && lo.Op == OLITERAL && hi.Op == OLITERAL && constant.Compare(lo.Val(), token.GTR, hi.Val()) { - yyerror("invalid slice index: %v > %v", lo, hi) + base.Errorf("invalid slice index: %v > %v", lo, hi) return false } @@ -2246,7 +2247,7 @@ func checkdefergo(n *Node) { if n.Left.Orig != nil && n.Left.Orig.Op == OCONV { break } - yyerrorl(n.Pos, "%s discards result of %v", what, n.Left) + base.ErrorfAt(n.Pos, "%s discards result of %v", what, n.Left) return } @@ -2260,7 +2261,7 @@ func checkdefergo(n *Node) { // The syntax made sure it was a call, so this must be // a conversion. n.SetDiag(true) - yyerrorl(n.Pos, "%s requires function call, not conversion", what) + base.ErrorfAt(n.Pos, "%s requires function call, not conversion", what) } } @@ -2291,13 +2292,13 @@ func onearg(n *Node, f string, args ...interface{}) bool { } if n.List.Len() == 0 { p := fmt.Sprintf(f, args...) - yyerror("missing argument to %s: %v", p, n) + base.Errorf("missing argument to %s: %v", p, n) return false } if n.List.Len() > 1 { p := fmt.Sprintf(f, args...) 
- yyerror("too many arguments to %s: %v", p, n) + base.Errorf("too many arguments to %s: %v", p, n) n.Left = n.List.First() n.List.Set(nil) return false @@ -2314,9 +2315,9 @@ func twoarg(n *Node) bool { } if n.List.Len() != 2 { if n.List.Len() < 2 { - yyerror("not enough arguments in call to %v", n) + base.Errorf("not enough arguments in call to %v", n) } else { - yyerror("too many arguments in call to %v", n) + base.Errorf("too many arguments in call to %v", n) } return false } @@ -2340,11 +2341,11 @@ func lookdot1(errnode *Node, s *types.Sym, t *types.Type, fs *types.Fields, dost } if r != nil { if errnode != nil { - yyerror("ambiguous selector %v", errnode) + base.Errorf("ambiguous selector %v", errnode) } else if t.IsPtr() { - yyerror("ambiguous selector (%v).%v", t, s) + base.Errorf("ambiguous selector (%v).%v", t, s) } else { - yyerror("ambiguous selector %v.%v", t, s) + base.Errorf("ambiguous selector %v.%v", t, s) } break } @@ -2358,7 +2359,7 @@ func lookdot1(errnode *Node, s *types.Sym, t *types.Type, fs *types.Fields, dost // typecheckMethodExpr checks selector expressions (ODOT) where the // base expression is a type expression (OTYPE). 
func typecheckMethodExpr(n *Node) (res *Node) { - if enableTrace && Flag.LowerT { + if enableTrace && base.Flag.LowerT { defer tracePrint("typecheckMethodExpr", n)(&res) } @@ -2371,7 +2372,7 @@ func typecheckMethodExpr(n *Node) (res *Node) { } else { mt := methtype(t) if mt == nil { - yyerror("%v undefined (type %v has no method %v)", n, t, n.Sym) + base.Errorf("%v undefined (type %v has no method %v)", n, t, n.Sym) n.Type = nil return n } @@ -2394,18 +2395,18 @@ func typecheckMethodExpr(n *Node) (res *Node) { m := lookdot1(n, s, t, ms, 0) if m == nil { if lookdot1(n, s, t, ms, 1) != nil { - yyerror("%v undefined (cannot refer to unexported method %v)", n, s) + base.Errorf("%v undefined (cannot refer to unexported method %v)", n, s) } else if _, ambig := dotpath(s, t, nil, false); ambig { - yyerror("%v undefined (ambiguous selector)", n) // method or field + base.Errorf("%v undefined (ambiguous selector)", n) // method or field } else { - yyerror("%v undefined (type %v has no method %v)", n, t, s) + base.Errorf("%v undefined (type %v has no method %v)", n, t, s) } n.Type = nil return n } if !isMethodApplicable(t, m) { - yyerror("invalid method expression %v (needs pointer receiver: (*%v).%S)", n, t, s) + base.Errorf("invalid method expression %v (needs pointer receiver: (*%v).%S)", n, t, s) n.Type = nil return n } @@ -2423,7 +2424,7 @@ func typecheckMethodExpr(n *Node) (res *Node) { // methodSym already marked n.Sym as a function. // Issue 25065. Make sure that we emit the symbol for a local method. 
- if Ctxt.Flag_dynlink && !inimport && (t.Sym == nil || t.Sym.Pkg == localpkg) { + if base.Ctxt.Flag_dynlink && !inimport && (t.Sym == nil || t.Sym.Pkg == localpkg) { makefuncsym(n.Sym) } @@ -2468,10 +2469,10 @@ func lookdot(n *Node, t *types.Type, dostrcmp int) *types.Field { return f1 } if f2 != nil { - yyerror("%v is both field and method", n.Sym) + base.Errorf("%v is both field and method", n.Sym) } if f1.Offset == BADWIDTH { - Fatalf("lookdot badwidth %v %p", f1, f1) + base.Fatalf("lookdot badwidth %v %p", f1, f1) } n.Xoffset = f1.Offset n.Type = f1.Type @@ -2509,7 +2510,7 @@ func lookdot(n *Node, t *types.Type, dostrcmp int) *types.Field { n.Left.SetImplicit(true) n.Left = typecheck(n.Left, ctxType|ctxExpr) } else if tt.IsPtr() && tt.Elem().IsPtr() && types.Identical(derefall(tt), derefall(rcvr)) { - yyerror("calling method %v with receiver %L requires explicit dereference", n.Sym, n.Left) + base.Errorf("calling method %v with receiver %L requires explicit dereference", n.Sym, n.Left) for tt.IsPtr() { // Stop one level early for method with pointer receiver. if rcvr.IsPtr() && !tt.Elem().IsPtr() { @@ -2521,7 +2522,7 @@ func lookdot(n *Node, t *types.Type, dostrcmp int) *types.Field { tt = tt.Elem() } } else { - Fatalf("method mismatch: %v for %v", rcvr, tt) + base.Fatalf("method mismatch: %v for %v", rcvr, tt) } } @@ -2574,8 +2575,8 @@ func typecheckaste(op Op, call *Node, isddd bool, tstruct *types.Type, nl Nodes, var t *types.Type var i int - lno := lineno - defer func() { lineno = lno }() + lno := base.Pos + defer func() { base.Pos = lno }() if tstruct.Broke() { return @@ -2656,9 +2657,9 @@ func typecheckaste(op Op, call *Node, isddd bool, tstruct *types.Type, nl Nodes, } if isddd { if call != nil { - yyerror("invalid use of ... in call to %v", call) + base.Errorf("invalid use of ... in call to %v", call) } else { - yyerror("invalid use of ... in %v", op) + base.Errorf("invalid use of ... 
in %v", op) } } return @@ -2671,12 +2672,12 @@ notenough: // Method expressions have the form T.M, and the compiler has // rewritten those to ONAME nodes but left T in Left. if call.Op == OMETHEXPR { - yyerror("not enough arguments in call to method expression %v%s", call, details) + base.Errorf("not enough arguments in call to method expression %v%s", call, details) } else { - yyerror("not enough arguments in call to %v%s", call, details) + base.Errorf("not enough arguments in call to %v%s", call, details) } } else { - yyerror("not enough arguments to %v%s", op, details) + base.Errorf("not enough arguments to %v%s", op, details) } if n != nil { n.SetDiag(true) @@ -2687,9 +2688,9 @@ notenough: toomany: details := errorDetails(nl, tstruct, isddd) if call != nil { - yyerror("too many arguments in call to %v%s", call, details) + base.Errorf("too many arguments in call to %v%s", call, details) } else { - yyerror("too many arguments to %v%s", op, details) + base.Errorf("too many arguments to %v%s", op, details) } } @@ -2729,7 +2730,7 @@ func sigrepr(t *types.Type, isddd bool) string { // Turn []T... argument to ...T for clearer error message. if isddd { if !t.IsSlice() { - Fatalf("bad type for ... argument: %v", t) + base.Fatalf("bad type for ... argument: %v", t) } return "..." + t.Elem().String() } @@ -2754,7 +2755,7 @@ func (nl Nodes) sigerr(isddd bool) string { // type check composite func fielddup(name string, hash map[string]bool) { if hash[name] { - yyerror("duplicate field name in struct literal: %s", name) + base.Errorf("duplicate field name in struct literal: %s", name) return } hash[name] = true @@ -2796,17 +2797,17 @@ func pushtype(n *Node, t *types.Type) *Node { // The result of typecheckcomplit MUST be assigned back to n, e.g. 
// n.Left = typecheckcomplit(n.Left) func typecheckcomplit(n *Node) (res *Node) { - if enableTrace && Flag.LowerT { + if enableTrace && base.Flag.LowerT { defer tracePrint("typecheckcomplit", n)(&res) } - lno := lineno + lno := base.Pos defer func() { - lineno = lno + base.Pos = lno }() if n.Right == nil { - yyerrorl(n.Pos, "missing type in composite literal") + base.ErrorfAt(n.Pos, "missing type in composite literal") n.Type = nil return n } @@ -2843,7 +2844,7 @@ func typecheckcomplit(n *Node) (res *Node) { switch t.Etype { default: - yyerror("invalid composite literal type %v", t) + base.Errorf("invalid composite literal type %v", t) n.Type = nil case TARRAY: @@ -2862,7 +2863,7 @@ func typecheckcomplit(n *Node) (res *Node) { setlineno(l) if l.Op != OKEY { n.List.SetIndex(i3, typecheck(l, ctxExpr)) - yyerror("missing key in map literal") + base.Errorf("missing key in map literal") continue } @@ -2870,7 +2871,7 @@ func typecheckcomplit(n *Node) (res *Node) { r = pushtype(r, t.Key()) r = typecheck(r, ctxExpr) l.Left = assignconv(r, t.Key(), "map key") - cs.add(lineno, l.Left, "key", "map literal") + cs.add(base.Pos, l.Left, "key", "map literal") r = l.Right r = pushtype(r, t.Elem()) @@ -2895,7 +2896,7 @@ func typecheckcomplit(n *Node) (res *Node) { ls[i] = n1 if i >= t.NumFields() { if !errored { - yyerror("too many values in %v", n) + base.Errorf("too many values in %v", n) errored = true } continue @@ -2904,7 +2905,7 @@ func typecheckcomplit(n *Node) (res *Node) { f := t.Field(i) s := f.Sym if s != nil && !types.IsExported(s.Name) && s.Pkg != localpkg { - yyerror("implicit assignment of unexported field '%s' in %v literal", s.Name, t) + base.Errorf("implicit assignment of unexported field '%s' in %v literal", s.Name, t) } // No pushtype allowed here. Must name fields for that. 
n1 = assignconv(n1, f.Type, "field value") @@ -2913,7 +2914,7 @@ func typecheckcomplit(n *Node) (res *Node) { ls[i] = n1 } if len(ls) < t.NumFields() { - yyerror("too few values in %v", n) + base.Errorf("too few values in %v", n) } } else { hash := make(map[string]bool) @@ -2935,7 +2936,7 @@ func typecheckcomplit(n *Node) (res *Node) { // so s will be non-nil, but an OXDOT // is never a valid struct literal key. if key.Sym == nil || key.Op == OXDOT || key.Sym.IsBlank() { - yyerror("invalid field name %v in struct initializer", key) + base.Errorf("invalid field name %v in struct initializer", key) l.Left = typecheck(l.Left, ctxExpr) continue } @@ -2955,7 +2956,7 @@ func typecheckcomplit(n *Node) (res *Node) { if l.Op != OSTRUCTKEY { if !errored { - yyerror("mixture of field:value and value initializers") + base.Errorf("mixture of field:value and value initializers") errored = true } ls[i] = typecheck(ls[i], ctxExpr) @@ -2966,18 +2967,18 @@ func typecheckcomplit(n *Node) (res *Node) { if f == nil { if ci := lookdot1(nil, l.Sym, t, t.Fields(), 2); ci != nil { // Case-insensitive lookup. if visible(ci.Sym) { - yyerror("unknown field '%v' in struct literal of type %v (but does have %v)", l.Sym, t, ci.Sym) + base.Errorf("unknown field '%v' in struct literal of type %v (but does have %v)", l.Sym, t, ci.Sym) } else if nonexported(l.Sym) && l.Sym.Name == ci.Sym.Name { // Ensure exactness before the suggestion. 
- yyerror("cannot refer to unexported field '%v' in struct literal of type %v", l.Sym, t) + base.Errorf("cannot refer to unexported field '%v' in struct literal of type %v", l.Sym, t) } else { - yyerror("unknown field '%v' in struct literal of type %v", l.Sym, t) + base.Errorf("unknown field '%v' in struct literal of type %v", l.Sym, t) } continue } var f *types.Field p, _ := dotpath(l.Sym, t, &f, true) if p == nil || f.IsMethod() { - yyerror("unknown field '%v' in struct literal of type %v", l.Sym, t) + base.Errorf("unknown field '%v' in struct literal of type %v", l.Sym, t) continue } // dotpath returns the parent embedded types in reverse order. @@ -2986,7 +2987,7 @@ func typecheckcomplit(n *Node) (res *Node) { ep = append(ep, p[ei].field.Sym.Name) } ep = append(ep, l.Sym.Name) - yyerror("cannot use promoted field %v in struct literal of type %v", strings.Join(ep, "."), t) + base.Errorf("cannot use promoted field %v in struct literal of type %v", strings.Join(ep, "."), t) continue } fielddup(f.Sym.Name, hash) @@ -3028,9 +3029,9 @@ func typecheckarraylit(elemType *types.Type, bound int64, elts []*Node, ctx stri if key < 0 { if !elt.Left.Diag() { if key == -2 { - yyerror("index too large") + base.Errorf("index too large") } else { - yyerror("index must be non-negative integer constant") + base.Errorf("index must be non-negative integer constant") } elt.Left.SetDiag(true) } @@ -3052,14 +3053,14 @@ func typecheckarraylit(elemType *types.Type, bound int64, elts []*Node, ctx stri if key >= 0 { if indices != nil { if indices[key] { - yyerror("duplicate index in %s: %d", ctx, key) + base.Errorf("duplicate index in %s: %d", ctx, key) } else { indices[key] = true } } if bound >= 0 && key >= bound { - yyerror("array index %d out of bounds [0:%d]", key, bound) + base.Errorf("array index %d out of bounds [0:%d]", key, bound) bound = -1 } } @@ -3112,7 +3113,7 @@ func islvalue(n *Node) bool { func checklvalue(n *Node, verb string) { if !islvalue(n) { - yyerror("cannot %s %v", 
verb, n) + base.Errorf("cannot %s %v", verb, n) } } @@ -3143,13 +3144,13 @@ func checkassign(stmt *Node, n *Node) { switch { case n.Op == ODOT && n.Left.Op == OINDEXMAP: - yyerror("cannot assign to struct field %v in map", n) + base.Errorf("cannot assign to struct field %v in map", n) case (n.Op == OINDEX && n.Left.Type.IsString()) || n.Op == OSLICESTR: - yyerror("cannot assign to %v (strings are immutable)", n) + base.Errorf("cannot assign to %v (strings are immutable)", n) case n.Op == OLITERAL && n.Sym != nil && n.isGoConst(): - yyerror("cannot assign to %v (declared const)", n) + base.Errorf("cannot assign to %v (declared const)", n) default: - yyerror("cannot assign to %v", n) + base.Errorf("cannot assign to %v", n) } n.Type = nil } @@ -3214,7 +3215,7 @@ func samesafeexpr(l *Node, r *Node) bool { // if this assignment is the definition of a var on the left side, // fill in the var's type. func typecheckas(n *Node) { - if enableTrace && Flag.LowerT { + if enableTrace && base.Flag.LowerT { defer tracePrint("typecheckas", n)(nil) } @@ -3237,7 +3238,7 @@ func typecheckas(n *Node) { checkassign(n, n.Left) if n.Right != nil && n.Right.Type != nil { if n.Right.Type.IsFuncArgStruct() { - yyerror("assignment mismatch: 1 variable but %v returns %d values", n.Right.Left, n.Right.Type.NumFields()) + base.Errorf("assignment mismatch: 1 variable but %v returns %d values", n.Right.Left, n.Right.Type.NumFields()) // Multi-value RHS isn't actually valid for OAS; nil out // to indicate failed typechecking. 
n.Right.Type = nil @@ -3266,13 +3267,13 @@ func typecheckas(n *Node) { func checkassignto(src *types.Type, dst *Node) { if op, why := assignop(src, dst.Type); op == OXXX { - yyerror("cannot assign %v to %L in multiple assignment%s", src, dst, why) + base.Errorf("cannot assign %v to %L in multiple assignment%s", src, dst, why) return } } func typecheckas2(n *Node) { - if enableTrace && Flag.LowerT { + if enableTrace && base.Flag.LowerT { defer tracePrint("typecheckas2", n)(nil) } @@ -3387,9 +3388,9 @@ func typecheckas2(n *Node) { mismatch: switch r.Op { default: - yyerror("assignment mismatch: %d variables but %d values", cl, cr) + base.Errorf("assignment mismatch: %d variables but %d values", cl, cr) case OCALLFUNC, OCALLMETH, OCALLINTER: - yyerror("assignment mismatch: %d variables but %v returns %d values", cl, r.Left, cr) + base.Errorf("assignment mismatch: %d variables but %v returns %d values", cl, r.Left, cr) } // second half of dance @@ -3405,7 +3406,7 @@ out: // type check function definition func typecheckfunc(n *Node) { - if enableTrace && Flag.LowerT { + if enableTrace && base.Flag.LowerT { defer tracePrint("typecheckfunc", n)(nil) } @@ -3432,7 +3433,7 @@ func typecheckfunc(n *Node) { declare(n.Func.Nname, PFUNC) } - if Ctxt.Flag_dynlink && !inimport && n.Func.Nname != nil { + if base.Ctxt.Flag_dynlink && !inimport && n.Func.Nname != nil { makefuncsym(n.Func.Nname.Sym) } } @@ -3441,7 +3442,7 @@ func typecheckfunc(n *Node) { // n.Left = stringtoruneslit(n.Left) func stringtoruneslit(n *Node) *Node { if n.Left.Op != OLITERAL || n.Left.Val().Kind() != constant.String { - Fatalf("stringtoarraylit %v", n) + base.Fatalf("stringtoarraylit %v", n) } var l []*Node @@ -3463,7 +3464,7 @@ func checkMapKeys() { for _, n := range mapqueue { k := n.Type.MapType().Key if !k.Broke() && !IsComparable(k) { - yyerrorl(n.Pos, "invalid map key type %v", k) + base.ErrorfAt(n.Pos, "invalid map key type %v", k) } } mapqueue = nil @@ -3513,13 +3514,13 @@ func setUnderlying(t, 
underlying *types.Type) { // Double-check use of type as embedded type. if ft.Embedlineno.IsKnown() { if t.IsPtr() || t.IsUnsafePtr() { - yyerrorl(ft.Embedlineno, "embedded type cannot be a pointer") + base.ErrorfAt(ft.Embedlineno, "embedded type cannot be a pointer") } } } func typecheckdeftype(n *Node) { - if enableTrace && Flag.LowerT { + if enableTrace && base.Flag.LowerT { defer tracePrint("typecheckdeftype", n)(nil) } @@ -3539,7 +3540,7 @@ func typecheckdeftype(n *Node) { } func typecheckdef(n *Node) { - if enableTrace && Flag.LowerT { + if enableTrace && base.Flag.LowerT { defer tracePrint("typecheckdef", n)(nil) } @@ -3551,27 +3552,27 @@ func typecheckdef(n *Node) { // Note: adderrorname looks for this string and // adds context about the outer expression - yyerrorl(lineno, "undefined: %v", n.Sym) + base.ErrorfAt(base.Pos, "undefined: %v", n.Sym) } - lineno = lno + base.Pos = lno return } if n.Walkdef() == 1 { - lineno = lno + base.Pos = lno return } typecheckdefstack = append(typecheckdefstack, n) if n.Walkdef() == 2 { - flusherrors() + base.FlushErrors() fmt.Printf("typecheckdef loop:") for i := len(typecheckdefstack) - 1; i >= 0; i-- { n := typecheckdefstack[i] fmt.Printf(" %v", n.Sym) } fmt.Printf("\n") - Fatalf("typecheckdef loop") + base.Fatalf("typecheckdef loop") } n.SetWalkdef(2) @@ -3582,7 +3583,7 @@ func typecheckdef(n *Node) { switch n.Op { default: - Fatalf("typecheckdef %v", n.Op) + base.Fatalf("typecheckdef %v", n.Op) case OLITERAL: if n.Name.Param.Ntype != nil { @@ -3599,7 +3600,7 @@ func typecheckdef(n *Node) { n.Name.Defn = nil if e == nil { Dump("typecheckdef nil defn", n) - yyerrorl(n.Pos, "xxx") + base.ErrorfAt(n.Pos, "xxx") } e = typecheck(e, ctxExpr) @@ -3609,9 +3610,9 @@ func typecheckdef(n *Node) { if !e.isGoConst() { if !e.Diag() { if e.Op == ONIL { - yyerrorl(n.Pos, "const initializer cannot be nil") + base.ErrorfAt(n.Pos, "const initializer cannot be nil") } else { - yyerrorl(n.Pos, "const initializer %v is not a constant", e) + 
base.ErrorfAt(n.Pos, "const initializer %v is not a constant", e) } e.SetDiag(true) } @@ -3621,12 +3622,12 @@ func typecheckdef(n *Node) { t := n.Type if t != nil { if !okforconst[t.Etype] { - yyerrorl(n.Pos, "invalid constant type %v", t) + base.ErrorfAt(n.Pos, "invalid constant type %v", t) goto ret } if !e.Type.IsUntyped() && !types.Identical(t, e.Type) { - yyerrorl(n.Pos, "cannot use %L as type %v in const initializer", e, t) + base.ErrorfAt(n.Pos, "cannot use %L as type %v in const initializer", e, t) goto ret } @@ -3655,7 +3656,7 @@ func typecheckdef(n *Node) { if n.SubOp() != 0 { // like OPRINTN break } - if Errors() > 0 { + if base.Errors() > 0 { // Can have undefined variables in x := foo // that make x have an n.name.Defn == nil. // If there are other errors anyway, don't @@ -3663,7 +3664,7 @@ func typecheckdef(n *Node) { break } - Fatalf("var without type, init: %v", n.Sym) + base.Fatalf("var without type, init: %v", n.Sym) } if n.Name.Defn.Op == ONAME { @@ -3700,9 +3701,9 @@ func typecheckdef(n *Node) { n.SetWalkdef(1) setTypeNode(n, types.New(TFORW)) n.Type.Sym = n.Sym - errorsBefore := Errors() + errorsBefore := base.Errors() typecheckdeftype(n) - if n.Type.Etype == TFORW && Errors() > errorsBefore { + if n.Type.Etype == TFORW && base.Errors() > errorsBefore { // Something went wrong during type-checking, // but it was reported. Silence future errors. 
n.Type.SetBroke(true) @@ -3712,23 +3713,23 @@ func typecheckdef(n *Node) { ret: if n.Op != OLITERAL && n.Type != nil && n.Type.IsUntyped() { - Fatalf("got %v for %v", n.Type, n) + base.Fatalf("got %v for %v", n.Type, n) } last := len(typecheckdefstack) - 1 if typecheckdefstack[last] != n { - Fatalf("typecheckdefstack mismatch") + base.Fatalf("typecheckdefstack mismatch") } typecheckdefstack[last] = nil typecheckdefstack = typecheckdefstack[:last] - lineno = lno + base.Pos = lno n.SetWalkdef(1) } func checkmake(t *types.Type, arg string, np **Node) bool { n := *np if !n.Type.IsInteger() && n.Type.Etype != TIDEAL { - yyerror("non-integer %s argument in make(%v) - %v", arg, t, n.Type) + base.Errorf("non-integer %s argument in make(%v) - %v", arg, t, n.Type) return false } @@ -3737,11 +3738,11 @@ func checkmake(t *types.Type, arg string, np **Node) bool { if n.Op == OLITERAL { v := toint(n.Val()) if constant.Sign(v) < 0 { - yyerror("negative %s argument in make(%v)", arg, t) + base.Errorf("negative %s argument in make(%v)", arg, t) return false } if doesoverflow(v, types.Types[TINT]) { - yyerror("%s argument too large in make(%v)", arg, t) + base.Errorf("%s argument too large in make(%v)", arg, t) return false } } @@ -3874,7 +3875,7 @@ func checkreturn(fn *Node) { if fn.Type.NumResults() != 0 && fn.Nbody.Len() != 0 { markbreaklist(fn.Nbody, nil) if !fn.Nbody.isterminating() { - yyerrorl(fn.Func.Endlineno, "missing return at end of function") + base.ErrorfAt(fn.Func.Endlineno, "missing return at end of function") } } } @@ -4047,6 +4048,6 @@ func (n *Node) MethodFunc() *types.Field { case OCALLPART: return callpartMethod(n) } - Fatalf("unexpected node: %v (%v)", n, n.Op) + base.Fatalf("unexpected node: %v (%v)", n, n.Op) panic("unreachable") } diff --git a/src/cmd/compile/internal/gc/universe.go b/src/cmd/compile/internal/gc/universe.go index 8c32f2f6d2..aa0ee4075d 100644 --- a/src/cmd/compile/internal/gc/universe.go +++ b/src/cmd/compile/internal/gc/universe.go @@ -7,6 
+7,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/src" ) @@ -98,7 +99,7 @@ func lexinit() { for _, s := range &basicTypes { etype := s.etype if int(etype) >= len(types.Types) { - Fatalf("lexinit: %s bad etype", s.name) + base.Fatalf("lexinit: %s bad etype", s.name) } s2 := builtinpkg.Lookup(s.name) t := types.Types[etype] @@ -169,7 +170,7 @@ func lexinit() { func typeinit() { if Widthptr == 0 { - Fatalf("typeinit before betypeinit") + base.Fatalf("typeinit before betypeinit") } for et := types.EType(0); et < NTYPE; et++ { diff --git a/src/cmd/compile/internal/gc/unsafe.go b/src/cmd/compile/internal/gc/unsafe.go index a3151e83bf..a1c1c1bf6e 100644 --- a/src/cmd/compile/internal/gc/unsafe.go +++ b/src/cmd/compile/internal/gc/unsafe.go @@ -4,6 +4,8 @@ package gc +import "cmd/compile/internal/base" + // evalunsafe evaluates a package unsafe operation and returns the result. func evalunsafe(n *Node) int64 { switch n.Op { @@ -23,7 +25,7 @@ func evalunsafe(n *Node) int64 { case OOFFSETOF: // must be a selector. if n.Left.Op != OXDOT { - yyerror("invalid expression %v", n) + base.Errorf("invalid expression %v", n) return 0 } @@ -41,10 +43,10 @@ func evalunsafe(n *Node) int64 { case ODOT, ODOTPTR: break case OCALLPART: - yyerror("invalid expression %v: argument is a method value", n) + base.Errorf("invalid expression %v: argument is a method value", n) return 0 default: - yyerror("invalid expression %v", n) + base.Errorf("invalid expression %v", n) return 0 } @@ -57,7 +59,7 @@ func evalunsafe(n *Node) int64 { // but accessing f must not otherwise involve // indirection via embedded pointer types. 
if r.Left != sbase { - yyerror("invalid expression %v: selector implies indirection of embedded %v", n, r.Left) + base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.Left) return 0 } fallthrough @@ -65,12 +67,12 @@ func evalunsafe(n *Node) int64 { v += r.Xoffset default: Dump("unsafenmagic", n.Left) - Fatalf("impossible %#v node after dot insertion", r.Op) + base.Fatalf("impossible %#v node after dot insertion", r.Op) } } return v } - Fatalf("unexpected op %v", n.Op) + base.Fatalf("unexpected op %v", n.Op) return 0 } diff --git a/src/cmd/compile/internal/gc/util.go b/src/cmd/compile/internal/gc/util.go index d1a5993daf..597a29a940 100644 --- a/src/cmd/compile/internal/gc/util.go +++ b/src/cmd/compile/internal/gc/util.go @@ -8,27 +8,14 @@ import ( "os" "runtime" "runtime/pprof" + + "cmd/compile/internal/base" ) // Line returns n's position as a string. If n has been inlined, // it uses the outermost position where n has been inlined. func (n *Node) Line() string { - return linestr(n.Pos) -} - -var atExitFuncs []func() - -func atExit(f func()) { - atExitFuncs = append(atExitFuncs, f) -} - -func Exit(code int) { - for i := len(atExitFuncs) - 1; i >= 0; i-- { - f := atExitFuncs[i] - atExitFuncs = atExitFuncs[:i] - f() - } - os.Exit(code) + return base.FmtPos(n.Pos) } var ( @@ -37,25 +24,25 @@ var ( ) func startProfile() { - if Flag.CPUProfile != "" { - f, err := os.Create(Flag.CPUProfile) + if base.Flag.CPUProfile != "" { + f, err := os.Create(base.Flag.CPUProfile) if err != nil { - Fatalf("%v", err) + base.Fatalf("%v", err) } if err := pprof.StartCPUProfile(f); err != nil { - Fatalf("%v", err) + base.Fatalf("%v", err) } - atExit(pprof.StopCPUProfile) + base.AtExit(pprof.StopCPUProfile) } - if Flag.MemProfile != "" { + if base.Flag.MemProfile != "" { if memprofilerate != 0 { runtime.MemProfileRate = int(memprofilerate) } - f, err := os.Create(Flag.MemProfile) + f, err := os.Create(base.Flag.MemProfile) if err != nil { - Fatalf("%v", 
err) + base.Fatalf("%v", err) } - atExit(func() { + base.AtExit(func() { // Profile all outstanding allocations. runtime.GC() // compilebench parses the memory profile to extract memstats, @@ -63,36 +50,36 @@ func startProfile() { // See golang.org/issue/18641 and runtime/pprof/pprof.go:writeHeap. const writeLegacyFormat = 1 if err := pprof.Lookup("heap").WriteTo(f, writeLegacyFormat); err != nil { - Fatalf("%v", err) + base.Fatalf("%v", err) } }) } else { // Not doing memory profiling; disable it entirely. runtime.MemProfileRate = 0 } - if Flag.BlockProfile != "" { - f, err := os.Create(Flag.BlockProfile) + if base.Flag.BlockProfile != "" { + f, err := os.Create(base.Flag.BlockProfile) if err != nil { - Fatalf("%v", err) + base.Fatalf("%v", err) } runtime.SetBlockProfileRate(1) - atExit(func() { + base.AtExit(func() { pprof.Lookup("block").WriteTo(f, 0) f.Close() }) } - if Flag.MutexProfile != "" { - f, err := os.Create(Flag.MutexProfile) + if base.Flag.MutexProfile != "" { + f, err := os.Create(base.Flag.MutexProfile) if err != nil { - Fatalf("%v", err) + base.Fatalf("%v", err) } startMutexProfiling() - atExit(func() { + base.AtExit(func() { pprof.Lookup("mutex").WriteTo(f, 0) f.Close() }) } - if Flag.TraceProfile != "" && traceHandler != nil { - traceHandler(Flag.TraceProfile) + if base.Flag.TraceProfile != "" && traceHandler != nil { + traceHandler(base.Flag.TraceProfile) } } diff --git a/src/cmd/compile/internal/gc/walk.go b/src/cmd/compile/internal/gc/walk.go index de2733909e..d7cd7ddf27 100644 --- a/src/cmd/compile/internal/gc/walk.go +++ b/src/cmd/compile/internal/gc/walk.go @@ -5,6 +5,7 @@ package gc import ( + "cmd/compile/internal/base" "cmd/compile/internal/types" "cmd/internal/obj" "cmd/internal/objabi" @@ -22,14 +23,14 @@ const zeroValSize = 1024 // must match value of runtime/map.go:maxZero func walk(fn *Node) { Curfn = fn - errorsBefore := Errors() + errorsBefore := base.Errors() - if Flag.W != 0 { + if base.Flag.W != 0 { s := fmt.Sprintf("\nbefore 
walk %v", Curfn.Func.Nname.Sym) dumplist(s, Curfn.Nbody) } - lno := lineno + lno := base.Pos // Final typecheck for any unused variables. for i, ln := range fn.Func.Dcl { @@ -54,26 +55,26 @@ func walk(fn *Node) { if defn.Left.Name.Used() { continue } - yyerrorl(defn.Left.Pos, "%v declared but not used", ln.Sym) + base.ErrorfAt(defn.Left.Pos, "%v declared but not used", ln.Sym) defn.Left.Name.SetUsed(true) // suppress repeats } else { - yyerrorl(ln.Pos, "%v declared but not used", ln.Sym) + base.ErrorfAt(ln.Pos, "%v declared but not used", ln.Sym) } } - lineno = lno - if Errors() > errorsBefore { + base.Pos = lno + if base.Errors() > errorsBefore { return } walkstmtlist(Curfn.Nbody.Slice()) - if Flag.W != 0 { + if base.Flag.W != 0 { s := fmt.Sprintf("after walk %v", Curfn.Func.Nname.Sym) dumplist(s, Curfn.Nbody) } zeroResults() heapmoves() - if Flag.W != 0 && Curfn.Func.Enter.Len() > 0 { + if base.Flag.W != 0 && Curfn.Func.Enter.Len() > 0 { s := fmt.Sprintf("enter %v", Curfn.Func.Nname.Sym) dumplist(s, Curfn.Func.Enter) } @@ -116,9 +117,9 @@ func walkstmt(n *Node) *Node { switch n.Op { default: if n.Op == ONAME { - yyerror("%v is not a top level statement", n.Sym) + base.Errorf("%v is not a top level statement", n.Sym) } else { - yyerror("%v is not a top level statement", n.Op) + base.Errorf("%v is not a top level statement", n.Op) } Dump("nottop", n) @@ -144,7 +145,7 @@ func walkstmt(n *Node) *Node { ORECOVER, OGETG: if n.Typecheck() == 0 { - Fatalf("missing typecheck: %+v", n) + base.Fatalf("missing typecheck: %+v", n) } wascopy := n.Op == OCOPY init := n.Ninit @@ -159,7 +160,7 @@ func walkstmt(n *Node) *Node { // the value received. 
case ORECV: if n.Typecheck() == 0 { - Fatalf("missing typecheck: %+v", n) + base.Fatalf("missing typecheck: %+v", n) } init := n.Ninit n.Ninit.Set(nil) @@ -186,8 +187,8 @@ func walkstmt(n *Node) *Node { case ODCL: v := n.Left if v.Class() == PAUTOHEAP { - if Flag.CompilingRuntime { - yyerror("%v escapes to heap, not allowed in runtime", v) + if base.Flag.CompilingRuntime { + base.Errorf("%v escapes to heap, not allowed in runtime", v) } if prealloc[v] == nil { prealloc[v] = callnew(v.Type) @@ -202,7 +203,7 @@ func walkstmt(n *Node) *Node { walkstmtlist(n.List.Slice()) case OCASE: - yyerror("case statement out of place") + base.Errorf("case statement out of place") case ODEFER: Curfn.Func.SetHasDefer(true) @@ -291,7 +292,7 @@ func walkstmt(n *Node) *Node { if got, want := n.List.Len(), len(rl); got != want { // order should have rewritten multi-value function calls // with explicit OAS2FUNC nodes. - Fatalf("expected %v return arguments, have %v", want, got) + base.Fatalf("expected %v return arguments, have %v", want, got) } // move function calls out, to make reorder3's job easier. @@ -334,7 +335,7 @@ func walkstmt(n *Node) *Node { } if n.Op == ONAME { - Fatalf("walkstmt ended up with name: %+v", n) + base.Fatalf("walkstmt ended up with name: %+v", n) } return n } @@ -405,7 +406,7 @@ func convFuncName(from, to *types.Type) (fnname string, needsaddr bool) { return "convT2I", true } } - Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie()) + base.Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie()) panic("unreachable") } @@ -429,7 +430,7 @@ func walkexpr(n *Node, init *Nodes) *Node { // not okay to use n->ninit when walking n, // because we might replace n with some other node // and would lose the init list. 
- Fatalf("walkexpr init == &n->ninit") + base.Fatalf("walkexpr init == &n->ninit") } if n.Ninit.Len() != 0 { @@ -439,16 +440,16 @@ func walkexpr(n *Node, init *Nodes) *Node { lno := setlineno(n) - if Flag.LowerW > 1 { + if base.Flag.LowerW > 1 { Dump("before walk expr", n) } if n.Typecheck() != 1 { - Fatalf("missed typecheck: %+v", n) + base.Fatalf("missed typecheck: %+v", n) } if n.Type.IsUntyped() { - Fatalf("expression has untyped type: %+v", n) + base.Fatalf("expression has untyped type: %+v", n) } if n.Op == ONAME && n.Class() == PAUTOHEAP { @@ -463,7 +464,7 @@ opswitch: switch n.Op { default: Dump("walk", n) - Fatalf("walkexpr: switch 1 unknown op %+S", n) + base.Fatalf("walkexpr: switch 1 unknown op %+S", n) case ONONAME, OEMPTY, OGETG, ONEWOBJ, OMETHEXPR: @@ -587,7 +588,7 @@ opswitch: // the mapassign call. mapAppend := n.Left.Op == OINDEXMAP && n.Right.Op == OAPPEND if mapAppend && !samesafeexpr(n.Left, n.Right.List.First()) { - Fatalf("not same expressions: %v != %v", n.Left, n.Right.List.First()) + base.Fatalf("not same expressions: %v != %v", n.Left, n.Right.List.First()) } n.Left = walkexpr(n.Left, init) @@ -638,7 +639,7 @@ opswitch: // x = append(...) 
r := n.Right if r.Type.Elem().NotInHeap() { - yyerror("%v can't be allocated in Go; it is incomplete (or unallocatable)", r.Type.Elem()) + base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", r.Type.Elem()) } switch { case isAppendOfMake(r): @@ -1046,25 +1047,25 @@ opswitch: } if t.IsArray() { n.SetBounded(bounded(r, t.NumElem())) - if Flag.LowerM != 0 && n.Bounded() && !Isconst(n.Right, constant.Int) { - Warn("index bounds check elided") + if base.Flag.LowerM != 0 && n.Bounded() && !Isconst(n.Right, constant.Int) { + base.Warn("index bounds check elided") } if smallintconst(n.Right) && !n.Bounded() { - yyerror("index out of bounds") + base.Errorf("index out of bounds") } } else if Isconst(n.Left, constant.String) { n.SetBounded(bounded(r, int64(len(n.Left.StringVal())))) - if Flag.LowerM != 0 && n.Bounded() && !Isconst(n.Right, constant.Int) { - Warn("index bounds check elided") + if base.Flag.LowerM != 0 && n.Bounded() && !Isconst(n.Right, constant.Int) { + base.Warn("index bounds check elided") } if smallintconst(n.Right) && !n.Bounded() { - yyerror("index out of bounds") + base.Errorf("index out of bounds") } } if Isconst(n.Right, constant.Int) { if v := n.Right.Val(); constant.Sign(v) < 0 || doesoverflow(v, types.Types[TINT]) { - yyerror("index out of bounds") + base.Errorf("index out of bounds") } } @@ -1107,7 +1108,7 @@ opswitch: n.SetTypecheck(1) case ORECV: - Fatalf("walkexpr ORECV") // should see inside OAS only + base.Fatalf("walkexpr ORECV") // should see inside OAS only case OSLICEHEADER: n.Left = walkexpr(n.Left, init) @@ -1149,11 +1150,11 @@ opswitch: case ONEW: if n.Type.Elem().NotInHeap() { - yyerror("%v can't be allocated in Go; it is incomplete (or unallocatable)", n.Type.Elem()) + base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", n.Type.Elem()) } if n.Esc == EscNone { if n.Type.Elem().Width >= maxImplicitStackVarSize { - Fatalf("large ONEW with EscNone: %v", n) + base.Fatalf("large ONEW 
with EscNone: %v", n) } r := temp(n.Type.Elem()) r = nod(OAS, r, nil) // zero temp @@ -1171,10 +1172,10 @@ opswitch: case OAPPEND: // order should make sure we only see OAS(node, OAPPEND), which we handle above. - Fatalf("append outside assignment") + base.Fatalf("append outside assignment") case OCOPY: - n = copyany(n, init, instrumenting && !Flag.CompilingRuntime) + n = copyany(n, init, instrumenting && !base.Flag.CompilingRuntime) // cannot use chanfn - closechan takes any, not chan any case OCLOSE: @@ -1320,17 +1321,17 @@ opswitch: } t := n.Type if t.Elem().NotInHeap() { - yyerror("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem()) + base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem()) } if n.Esc == EscNone { if why := heapAllocReason(n); why != "" { - Fatalf("%v has EscNone, but %v", n, why) + base.Fatalf("%v has EscNone, but %v", n, why) } // var arr [r]T // n = arr[:l] i := indexconst(r) if i < 0 { - Fatalf("walkexpr: invalid index %v", r) + base.Fatalf("walkexpr: invalid index %v", r) } // cap is constrained to [0,2^31) or [0,2^63) depending on whether @@ -1392,12 +1393,12 @@ opswitch: case OMAKESLICECOPY: if n.Esc == EscNone { - Fatalf("OMAKESLICECOPY with EscNone: %v", n) + base.Fatalf("OMAKESLICECOPY with EscNone: %v", n) } t := n.Type if t.Elem().NotInHeap() { - yyerror("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem()) + base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", t.Elem()) } length := conv(n.Left, types.Types[TINT]) @@ -1583,7 +1584,7 @@ opswitch: t := n.Type n = evalConst(n) if n.Type != t { - Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type) + base.Fatalf("evconst changed Type: %v had type %v, now %v", n, t, n.Type) } if n.Op == OLITERAL { n = typecheck(n, ctxExpr) @@ -1596,11 +1597,11 @@ opswitch: updateHasCall(n) - if Flag.LowerW != 0 && n != nil { + if base.Flag.LowerW != 0 && n != nil { Dump("after walk 
expr", n) } - lineno = lno + base.Pos = lno return n } @@ -1685,8 +1686,8 @@ func reduceSlice(n *Node) *Node { n.SetSliceBounds(low, high, max) if (n.Op == OSLICE || n.Op == OSLICESTR) && low == nil && high == nil { // Reduce x[:] to x. - if Debug.Slice > 0 { - Warn("slice: omit slice operation") + if base.Debug.Slice > 0 { + base.Warn("slice: omit slice operation") } return n.Left } @@ -1736,7 +1737,7 @@ func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node { var nln, nrn Nodes nln.Set(nl) nrn.Set(nr) - Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname()) + base.Fatalf("error in shape across %+v %v %+v / %d %d [%s]", nln, op, nrn, len(nl), len(nr), Curfn.funcname()) } return nn } @@ -1758,7 +1759,7 @@ func fncall(l *Node, rt *types.Type) bool { // expr-list = func() func ascompatet(nl Nodes, nr *types.Type) []*Node { if nl.Len() != nr.NumFields() { - Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields()) + base.Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields()) } var nn, mm Nodes @@ -1780,7 +1781,7 @@ func ascompatet(nl Nodes, nr *types.Type) []*Node { } res := nod(ORESULT, nil, nil) - res.Xoffset = Ctxt.FixedFrameSize() + r.Offset + res.Xoffset = base.Ctxt.FixedFrameSize() + r.Offset res.Type = r.Type res.SetTypecheck(1) @@ -1789,7 +1790,7 @@ func ascompatet(nl Nodes, nr *types.Type) []*Node { updateHasCall(a) if a.HasCall() { Dump("ascompatet ucount", a) - Fatalf("ascompatet: too many function calls evaluating parameters") + base.Fatalf("ascompatet: too many function calls evaluating parameters") } nn.Append(a) @@ -1811,7 +1812,7 @@ func mkdotargslice(typ *types.Type, args []*Node) *Node { n = typecheck(n, ctxExpr) if n.Type == nil { - Fatalf("mkdotargslice: typecheck failed") + base.Fatalf("mkdotargslice: typecheck failed") } return n } @@ -2069,7 +2070,7 @@ func isReflectHeaderDataField(l *Node) bool { func convas(n *Node, init *Nodes) *Node { 
if n.Op != OAS { - Fatalf("convas: not OAS %v", n.Op) + base.Fatalf("convas: not OAS %v", n.Op) } defer updateHasCall(n) @@ -2134,7 +2135,7 @@ func reorder3(all []*Node) []*Node { switch l.Op { default: - Fatalf("reorder3 unexpected lvalue %#v", l.Op) + base.Fatalf("reorder3 unexpected lvalue %#v", l.Op) case ONAME: break @@ -2182,7 +2183,7 @@ func outervalue(n *Node) *Node { for { switch n.Op { case OXDOT: - Fatalf("OXDOT in walk") + base.Fatalf("OXDOT in walk") case ODOT, OPAREN, OCONVNOP: n = n.Left continue @@ -2230,7 +2231,7 @@ func aliased(r *Node, all []*Node) bool { switch l.Class() { default: - Fatalf("unexpected class: %v, %v", l, l.Class()) + base.Fatalf("unexpected class: %v, %v", l, l.Class()) case PAUTOHEAP, PEXTERN: memwrite = true @@ -2317,7 +2318,7 @@ func varexpr(n *Node) bool { case ODOT: // but not ODOTPTR // Should have been handled in aliased. - Fatalf("varexpr unexpected ODOT") + base.Fatalf("varexpr unexpected ODOT") } // Be conservative. @@ -2468,25 +2469,25 @@ func returnsfromheap(params *types.Type) []*Node { // between the stack and the heap. The generated code is added to Curfn's // Enter and Exit lists. func heapmoves() { - lno := lineno - lineno = Curfn.Pos + lno := base.Pos + base.Pos = Curfn.Pos nn := paramstoheap(Curfn.Type.Recvs()) nn = append(nn, paramstoheap(Curfn.Type.Params())...) nn = append(nn, paramstoheap(Curfn.Type.Results())...) Curfn.Func.Enter.Append(nn...) - lineno = Curfn.Func.Endlineno + base.Pos = Curfn.Func.Endlineno Curfn.Func.Exit.Append(returnsfromheap(Curfn.Type.Results())...) 
- lineno = lno + base.Pos = lno } func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node { if fn.Type == nil || fn.Type.Etype != TFUNC { - Fatalf("mkcall %v %v", fn, fn.Type) + base.Fatalf("mkcall %v %v", fn, fn.Type) } n := fn.Type.NumParams() if n != len(va) { - Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va)) + base.Fatalf("vmkcall %v needs %v args got %v", fn, n, len(va)) } r := nod(OCALL, fn, nil) @@ -2552,12 +2553,12 @@ func byteindex(n *Node) *Node { func chanfn(name string, n int, t *types.Type) *Node { if !t.IsChan() { - Fatalf("chanfn %v", t) + base.Fatalf("chanfn %v", t) } fn := syslook(name) switch n { default: - Fatalf("chanfn %d", n) + base.Fatalf("chanfn %d", n) case 1: fn = substArgTypes(fn, t.Elem()) case 2: @@ -2568,7 +2569,7 @@ func chanfn(name string, n int, t *types.Type) *Node { func mapfn(name string, t *types.Type) *Node { if !t.IsMap() { - Fatalf("mapfn %v", t) + base.Fatalf("mapfn %v", t) } fn := syslook(name) fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key(), t.Elem()) @@ -2577,7 +2578,7 @@ func mapfn(name string, t *types.Type) *Node { func mapfndel(name string, t *types.Type) *Node { if !t.IsMap() { - Fatalf("mapfn %v", t) + base.Fatalf("mapfn %v", t) } fn := syslook(name) fn = substArgTypes(fn, t.Key(), t.Elem(), t.Key()) @@ -2618,7 +2619,7 @@ func mapfast(t *types.Type) int { if Widthptr == 4 { return mapfast32ptr } - Fatalf("small pointer %v", t.Key()) + base.Fatalf("small pointer %v", t.Key()) case AMEM64: if !t.Key().HasPointers() { return mapfast64 @@ -2645,7 +2646,7 @@ func addstr(n *Node, init *Nodes) *Node { c := n.List.Len() if c < 2 { - Fatalf("addstr count %d too small", c) + base.Fatalf("addstr count %d too small", c) } buf := nodnil() @@ -2784,7 +2785,7 @@ func appendslice(n *Node, init *Nodes) *Node { ptr1, len1 := nptr1.backingArrayPtrLen() ptr2, len2 := nptr2.backingArrayPtrLen() ncopy = mkcall1(fn, types.Types[TINT], &nodes, typename(elemtype), ptr1, len1, ptr2, len2) - } else if instrumenting && 
!Flag.CompilingRuntime { + } else if instrumenting && !base.Flag.CompilingRuntime { // rely on runtime to instrument: // copy(s[len(l1):], l2) // l2 can be a slice or string. @@ -2827,12 +2828,12 @@ func appendslice(n *Node, init *Nodes) *Node { // isAppendOfMake reports whether n is of the form append(x , make([]T, y)...). // isAppendOfMake assumes n has already been typechecked. func isAppendOfMake(n *Node) bool { - if Flag.N != 0 || instrumenting { + if base.Flag.N != 0 || instrumenting { return false } if n.Typecheck() == 0 { - Fatalf("missing typecheck: %+v", n) + base.Fatalf("missing typecheck: %+v", n) } if n.Op != OAPPEND || !n.IsDDD() || n.List.Len() != 2 { @@ -3036,7 +3037,7 @@ func walkappend(n *Node, init *Nodes, dst *Node) *Node { // General case, with no function calls left as arguments. // Leave for gen, except that instrumentation requires old form. - if !instrumenting || Flag.CompilingRuntime { + if !instrumenting || base.Flag.CompilingRuntime { return n } @@ -3185,7 +3186,7 @@ func eqfor(t *types.Type) (n *Node, needsize bool) { }) return n, false } - Fatalf("eqfor %v", t) + base.Fatalf("eqfor %v", t) return nil, false } @@ -3262,7 +3263,7 @@ func walkcompare(n *Node, init *Nodes) *Node { switch t.Etype { default: - if Debug.Libfuzzer != 0 && t.IsInteger() { + if base.Debug.Libfuzzer != 0 && t.IsInteger() { n.Left = cheapexpr(n.Left, init) n.Right = cheapexpr(n.Right, init) @@ -3304,7 +3305,7 @@ func walkcompare(n *Node, init *Nodes) *Node { } paramType = types.Types[TUINT64] default: - Fatalf("unexpected integer size %d for %v", t.Size(), t) + base.Fatalf("unexpected integer size %d for %v", t.Size(), t) } init.Append(mkcall(fn, nil, init, tracecmpArg(l, paramType, init), tracecmpArg(r, paramType, init))) } @@ -3329,7 +3330,7 @@ func walkcompare(n *Node, init *Nodes) *Node { if !inline { // eq algs take pointers; cmpl and cmpr must be addressable if !islvalue(cmpl) || !islvalue(cmpr) { - Fatalf("arguments of comparison must be lvalues - %v %v", 
cmpl, cmpr) + base.Fatalf("arguments of comparison must be lvalues - %v %v", cmpl, cmpr) } fn, needsize := eqfor(t) @@ -3722,7 +3723,7 @@ func usefield(n *Node) { switch n.Op { default: - Fatalf("usefield %v", n.Op) + base.Fatalf("usefield %v", n.Op) case ODOT, ODOTPTR: break @@ -3739,10 +3740,10 @@ func usefield(n *Node) { } field := n.Opt().(*types.Field) if field == nil { - Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym) + base.Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym) } if field.Sym != n.Sym || field.Offset != n.Xoffset { - Fatalf("field inconsistency: %v,%v != %v,%v", field.Sym, field.Offset, n.Sym, n.Xoffset) + base.Fatalf("field inconsistency: %v,%v != %v,%v", field.Sym, field.Offset, n.Sym, n.Xoffset) } if !strings.Contains(field.Note, "go:\"track\"") { return @@ -3753,10 +3754,10 @@ func usefield(n *Node) { outer = outer.Elem() } if outer.Sym == nil { - yyerror("tracked field must be in named struct type") + base.Errorf("tracked field must be in named struct type") } if !types.IsExported(field.Sym.Name) { - yyerror("tracked field must be exported (upper case)") + base.Errorf("tracked field must be exported (upper case)") } sym := tracksym(outer, field) @@ -3968,7 +3969,7 @@ func substArgTypes(old *Node, types_ ...*types.Type) *Node { } n.Type = types.SubstAny(n.Type, &types_) if len(types_) > 0 { - Fatalf("substArgTypes: too many argument types") + base.Fatalf("substArgTypes: too many argument types") } return n } @@ -3991,17 +3992,17 @@ func canMergeLoads() bool { // isRuneCount reports whether n is of the form len([]rune(string)). // These are optimized into a call to runtime.countrunes. 
func isRuneCount(n *Node) bool { - return Flag.N == 0 && !instrumenting && n.Op == OLEN && n.Left.Op == OSTR2RUNES + return base.Flag.N == 0 && !instrumenting && n.Op == OLEN && n.Left.Op == OSTR2RUNES } func walkCheckPtrAlignment(n *Node, init *Nodes, count *Node) *Node { if !n.Type.IsPtr() { - Fatalf("expected pointer type: %v", n.Type) + base.Fatalf("expected pointer type: %v", n.Type) } elem := n.Type.Elem() if count != nil { if !elem.IsArray() { - Fatalf("expected array type: %v", elem) + base.Fatalf("expected array type: %v", elem) } elem = elem.Elem() } @@ -4031,7 +4032,7 @@ func walkCheckPtrArithmetic(n *Node, init *Nodes) *Node { } else if opt != nil { // We use n.Opt() here because today it's not used for OCONVNOP. If that changes, // there's no guarantee that temporarily replacing it is safe, so just hard fail here. - Fatalf("unexpected Opt: %v", opt) + base.Fatalf("unexpected Opt: %v", opt) } n.SetOpt(&walkCheckPtrArithmeticMarker) defer n.SetOpt(nil) @@ -4087,5 +4088,5 @@ func walkCheckPtrArithmetic(n *Node, init *Nodes) *Node { // function fn at a given level. See debugHelpFooter for defined // levels. 
func checkPtr(fn *Node, level int) bool { - return Debug.Checkptr >= level && fn.Func.Pragma&NoCheckPtr == 0 + return base.Debug.Checkptr >= level && fn.Func.Pragma&NoCheckPtr == 0 } diff --git a/src/cmd/compile/internal/mips/ggen.go b/src/cmd/compile/internal/mips/ggen.go index 5e867721c3..2356267df7 100644 --- a/src/cmd/compile/internal/mips/ggen.go +++ b/src/cmd/compile/internal/mips/ggen.go @@ -5,6 +5,7 @@ package mips import ( + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/internal/obj" "cmd/internal/obj/mips" @@ -18,7 +19,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog { } if cnt < int64(4*gc.Widthptr) { for i := int64(0); i < cnt; i += int64(gc.Widthptr) { - p = pp.Appendpp(p, mips.AMOVW, obj.TYPE_REG, mips.REGZERO, 0, obj.TYPE_MEM, mips.REGSP, gc.Ctxt.FixedFrameSize()+off+i) + p = pp.Appendpp(p, mips.AMOVW, obj.TYPE_REG, mips.REGZERO, 0, obj.TYPE_MEM, mips.REGSP, base.Ctxt.FixedFrameSize()+off+i) } } else { //fmt.Printf("zerorange frame:%v, lo: %v, hi:%v \n", frame ,lo, hi) @@ -28,7 +29,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog { // MOVW R0, (Widthptr)r1 // ADD $Widthptr, r1 // BNE r1, r2, loop - p = pp.Appendpp(p, mips.AADD, obj.TYPE_CONST, 0, gc.Ctxt.FixedFrameSize()+off-4, obj.TYPE_REG, mips.REGRT1, 0) + p = pp.Appendpp(p, mips.AADD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-4, obj.TYPE_REG, mips.REGRT1, 0) p.Reg = mips.REGSP p = pp.Appendpp(p, mips.AADD, obj.TYPE_CONST, 0, cnt, obj.TYPE_REG, mips.REGRT2, 0) p.Reg = mips.REGRT1 diff --git a/src/cmd/compile/internal/mips/ssa.go b/src/cmd/compile/internal/mips/ssa.go index 1d2e2c79e6..c37a2e0714 100644 --- a/src/cmd/compile/internal/mips/ssa.go +++ b/src/cmd/compile/internal/mips/ssa.go @@ -7,6 +7,7 @@ package mips import ( "math" + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/compile/internal/logopt" "cmd/compile/internal/ssa" @@ -766,8 +767,8 @@ func ssaGenValue(s *gc.SSAGenState, v 
*ssa.Value) { if logopt.Enabled() { logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name) } - if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers - gc.Warnl(v.Pos, "generated nil check") + if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers + base.WarnfAt(v.Pos, "generated nil check") } case ssa.OpMIPSFPFlagTrue, ssa.OpMIPSFPFlagFalse: @@ -796,7 +797,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { // caller's SP is FixedFrameSize below the address of the first arg p := s.Prog(mips.AMOVW) p.From.Type = obj.TYPE_ADDR - p.From.Offset = -gc.Ctxt.FixedFrameSize() + p.From.Offset = -base.Ctxt.FixedFrameSize() p.From.Name = obj.NAME_PARAM p.To.Type = obj.TYPE_REG p.To.Reg = v.Reg() diff --git a/src/cmd/compile/internal/mips64/ssa.go b/src/cmd/compile/internal/mips64/ssa.go index 067b8158c9..a7c10d8869 100644 --- a/src/cmd/compile/internal/mips64/ssa.go +++ b/src/cmd/compile/internal/mips64/ssa.go @@ -7,6 +7,7 @@ package mips64 import ( "math" + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/compile/internal/logopt" "cmd/compile/internal/ssa" @@ -724,8 +725,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { if logopt.Enabled() { logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name) } - if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers - gc.Warnl(v.Pos, "generated nil check") + if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers + base.WarnfAt(v.Pos, "generated nil check") } case ssa.OpMIPS64FPFlagTrue, ssa.OpMIPS64FPFlagFalse: @@ -757,7 +758,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { // caller's SP is FixedFrameSize below the address of the first arg p := s.Prog(mips.AMOVV) p.From.Type = obj.TYPE_ADDR - p.From.Offset = -gc.Ctxt.FixedFrameSize() + p.From.Offset = -base.Ctxt.FixedFrameSize() p.From.Name = obj.NAME_PARAM p.To.Type = obj.TYPE_REG p.To.Reg = v.Reg() diff --git 
a/src/cmd/compile/internal/ppc64/ggen.go b/src/cmd/compile/internal/ppc64/ggen.go index a5a772b491..8f5caf5f99 100644 --- a/src/cmd/compile/internal/ppc64/ggen.go +++ b/src/cmd/compile/internal/ppc64/ggen.go @@ -5,6 +5,7 @@ package ppc64 import ( + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/internal/obj" "cmd/internal/obj/ppc64" @@ -16,17 +17,17 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog { } if cnt < int64(4*gc.Widthptr) { for i := int64(0); i < cnt; i += int64(gc.Widthptr) { - p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_REG, ppc64.REGZERO, 0, obj.TYPE_MEM, ppc64.REGSP, gc.Ctxt.FixedFrameSize()+off+i) + p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_REG, ppc64.REGZERO, 0, obj.TYPE_MEM, ppc64.REGSP, base.Ctxt.FixedFrameSize()+off+i) } } else if cnt <= int64(128*gc.Widthptr) { - p = pp.Appendpp(p, ppc64.AADD, obj.TYPE_CONST, 0, gc.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGRT1, 0) + p = pp.Appendpp(p, ppc64.AADD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGRT1, 0) p.Reg = ppc64.REGSP p = pp.Appendpp(p, obj.ADUFFZERO, obj.TYPE_NONE, 0, 0, obj.TYPE_MEM, 0, 0) p.To.Name = obj.NAME_EXTERN p.To.Sym = gc.Duffzero p.To.Offset = 4 * (128 - cnt/int64(gc.Widthptr)) } else { - p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, gc.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGTMP, 0) + p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, base.Ctxt.FixedFrameSize()+off-8, obj.TYPE_REG, ppc64.REGTMP, 0) p = pp.Appendpp(p, ppc64.AADD, obj.TYPE_REG, ppc64.REGTMP, 0, obj.TYPE_REG, ppc64.REGRT1, 0) p.Reg = ppc64.REGSP p = pp.Appendpp(p, ppc64.AMOVD, obj.TYPE_CONST, 0, cnt, obj.TYPE_REG, ppc64.REGTMP, 0) @@ -66,7 +67,7 @@ func ginsnopdefer(pp *gc.Progs) *obj.Prog { // on ppc64 in both shared and non-shared modes. 
ginsnop(pp) - if gc.Ctxt.Flag_shared { + if base.Ctxt.Flag_shared { p := pp.Prog(ppc64.AMOVD) p.From.Type = obj.TYPE_MEM p.From.Offset = 24 diff --git a/src/cmd/compile/internal/ppc64/ssa.go b/src/cmd/compile/internal/ppc64/ssa.go index f0e7c41923..e3f0ee1a93 100644 --- a/src/cmd/compile/internal/ppc64/ssa.go +++ b/src/cmd/compile/internal/ppc64/ssa.go @@ -5,6 +5,7 @@ package ppc64 import ( + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/compile/internal/logopt" "cmd/compile/internal/ssa" @@ -473,7 +474,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { // caller's SP is FixedFrameSize below the address of the first arg p := s.Prog(ppc64.AMOVD) p.From.Type = obj.TYPE_ADDR - p.From.Offset = -gc.Ctxt.FixedFrameSize() + p.From.Offset = -base.Ctxt.FixedFrameSize() p.From.Name = obj.NAME_PARAM p.To.Type = obj.TYPE_REG p.To.Reg = v.Reg() @@ -1784,7 +1785,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { // Insert a hint this is not a subroutine return. pp.SetFrom3(obj.Addr{Type: obj.TYPE_CONST, Offset: 1}) - if gc.Ctxt.Flag_shared { + if base.Ctxt.Flag_shared { // When compiling Go into PIC, the function we just // called via pointer might have been implemented in // a separate module and so overwritten the TOC @@ -1852,8 +1853,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { if logopt.Enabled() { logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name) } - if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers - gc.Warnl(v.Pos, "generated nil check") + if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers + base.WarnfAt(v.Pos, "generated nil check") } // These should be resolved by rules and not make it here. 
diff --git a/src/cmd/compile/internal/riscv64/ggen.go b/src/cmd/compile/internal/riscv64/ggen.go index f7c03fe7c2..18905a4aea 100644 --- a/src/cmd/compile/internal/riscv64/ggen.go +++ b/src/cmd/compile/internal/riscv64/ggen.go @@ -5,6 +5,7 @@ package riscv64 import ( + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/internal/obj" "cmd/internal/obj/riscv" @@ -16,7 +17,7 @@ func zeroRange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog { } // Adjust the frame to account for LR. - off += gc.Ctxt.FixedFrameSize() + off += base.Ctxt.FixedFrameSize() if cnt < int64(4*gc.Widthptr) { for i := int64(0); i < cnt; i += int64(gc.Widthptr) { diff --git a/src/cmd/compile/internal/riscv64/ssa.go b/src/cmd/compile/internal/riscv64/ssa.go index d49927ee04..5a71b33c00 100644 --- a/src/cmd/compile/internal/riscv64/ssa.go +++ b/src/cmd/compile/internal/riscv64/ssa.go @@ -5,6 +5,7 @@ package riscv64 import ( + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/compile/internal/ssa" "cmd/compile/internal/types" @@ -91,7 +92,7 @@ func loadByType(t *types.Type) obj.As { case 8: return riscv.AMOVD default: - gc.Fatalf("unknown float width for load %d in type %v", width, t) + base.Fatalf("unknown float width for load %d in type %v", width, t) return 0 } } @@ -118,7 +119,7 @@ func loadByType(t *types.Type) obj.As { case 8: return riscv.AMOV default: - gc.Fatalf("unknown width for load %d in type %v", width, t) + base.Fatalf("unknown width for load %d in type %v", width, t) return 0 } } @@ -134,7 +135,7 @@ func storeByType(t *types.Type) obj.As { case 8: return riscv.AMOVD default: - gc.Fatalf("unknown float width for store %d in type %v", width, t) + base.Fatalf("unknown float width for store %d in type %v", width, t) return 0 } } @@ -149,7 +150,7 @@ func storeByType(t *types.Type) obj.As { case 8: return riscv.AMOV default: - gc.Fatalf("unknown width for store %d in type %v", width, t) + base.Fatalf("unknown width for store %d in type %v", width, t) return 
0 } } @@ -586,8 +587,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { gc.AddAux(&p.From, v) p.To.Type = obj.TYPE_REG p.To.Reg = riscv.REG_ZERO - if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos == 1 in generated wrappers - gc.Warnl(v.Pos, "generated nil check") + if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos == 1 in generated wrappers + base.WarnfAt(v.Pos, "generated nil check") } case ssa.OpRISCV64LoweredGetClosurePtr: @@ -598,7 +599,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { // caller's SP is FixedFrameSize below the address of the first arg p := s.Prog(riscv.AMOV) p.From.Type = obj.TYPE_ADDR - p.From.Offset = -gc.Ctxt.FixedFrameSize() + p.From.Offset = -base.Ctxt.FixedFrameSize() p.From.Name = obj.NAME_PARAM p.To.Type = obj.TYPE_REG p.To.Reg = v.Reg() diff --git a/src/cmd/compile/internal/s390x/ggen.go b/src/cmd/compile/internal/s390x/ggen.go index 5a837d8574..0e2f48bf4c 100644 --- a/src/cmd/compile/internal/s390x/ggen.go +++ b/src/cmd/compile/internal/s390x/ggen.go @@ -5,6 +5,7 @@ package s390x import ( + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/internal/obj" "cmd/internal/obj/s390x" @@ -23,7 +24,7 @@ func zerorange(pp *gc.Progs, p *obj.Prog, off, cnt int64, _ *uint32) *obj.Prog { } // Adjust the frame to account for LR. 
- off += gc.Ctxt.FixedFrameSize() + off += base.Ctxt.FixedFrameSize() reg := int16(s390x.REGSP) // If the off cannot fit in a 12-bit unsigned displacement then we diff --git a/src/cmd/compile/internal/s390x/ssa.go b/src/cmd/compile/internal/s390x/ssa.go index cb13f8d3c0..366adffd98 100644 --- a/src/cmd/compile/internal/s390x/ssa.go +++ b/src/cmd/compile/internal/s390x/ssa.go @@ -7,6 +7,7 @@ package s390x import ( "math" + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/compile/internal/logopt" "cmd/compile/internal/ssa" @@ -573,7 +574,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { // caller's SP is FixedFrameSize below the address of the first arg p := s.Prog(s390x.AMOVD) p.From.Type = obj.TYPE_ADDR - p.From.Offset = -gc.Ctxt.FixedFrameSize() + p.From.Offset = -base.Ctxt.FixedFrameSize() p.From.Name = obj.NAME_PARAM p.To.Type = obj.TYPE_REG p.To.Reg = v.Reg() @@ -642,8 +643,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { if logopt.Enabled() { logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name) } - if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers - gc.Warnl(v.Pos, "generated nil check") + if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers + base.WarnfAt(v.Pos, "generated nil check") } case ssa.OpS390XMVC: vo := v.AuxValAndOff() diff --git a/src/cmd/compile/internal/wasm/ssa.go b/src/cmd/compile/internal/wasm/ssa.go index 3f05515b9a..373dc431e5 100644 --- a/src/cmd/compile/internal/wasm/ssa.go +++ b/src/cmd/compile/internal/wasm/ssa.go @@ -5,6 +5,7 @@ package wasm import ( + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/compile/internal/logopt" "cmd/compile/internal/ssa" @@ -33,7 +34,7 @@ func zeroRange(pp *gc.Progs, p *obj.Prog, off, cnt int64, state *uint32) *obj.Pr return p } if cnt%8 != 0 { - gc.Fatalf("zerorange count not a multiple of widthptr %d", cnt) + base.Fatalf("zerorange count not a multiple of widthptr %d", cnt) } for i := int64(0); 
i < cnt; i += 8 { @@ -165,8 +166,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { if logopt.Enabled() { logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name) } - if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers - gc.Warnl(v.Pos, "generated nil check") + if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers + base.WarnfAt(v.Pos, "generated nil check") } case ssa.OpWasmLoweredWB: diff --git a/src/cmd/compile/internal/x86/galign.go b/src/cmd/compile/internal/x86/galign.go index e137daa3fc..7d628f9b7c 100644 --- a/src/cmd/compile/internal/x86/galign.go +++ b/src/cmd/compile/internal/x86/galign.go @@ -5,6 +5,7 @@ package x86 import ( + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/internal/obj/x86" "cmd/internal/objabi" @@ -24,10 +25,10 @@ func Init(arch *gc.Arch) { arch.SoftFloat = true case "387": fmt.Fprintf(os.Stderr, "unsupported setting GO386=387. Consider using GO386=softfloat instead.\n") - gc.Exit(1) + base.Exit(1) default: fmt.Fprintf(os.Stderr, "unsupported setting GO386=%s\n", v) - gc.Exit(1) + base.Exit(1) } diff --git a/src/cmd/compile/internal/x86/ssa.go b/src/cmd/compile/internal/x86/ssa.go index 65d7e75a53..a3aaf03c95 100644 --- a/src/cmd/compile/internal/x86/ssa.go +++ b/src/cmd/compile/internal/x86/ssa.go @@ -8,6 +8,7 @@ import ( "fmt" "math" + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/compile/internal/logopt" "cmd/compile/internal/ssa" @@ -480,9 +481,9 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { p.From.Name = obj.NAME_EXTERN f := math.Float64frombits(uint64(v.AuxInt)) if v.Op == ssa.Op386MOVSDconst1 { - p.From.Sym = gc.Ctxt.Float64Sym(f) + p.From.Sym = base.Ctxt.Float64Sym(f) } else { - p.From.Sym = gc.Ctxt.Float32Sym(float32(f)) + p.From.Sym = base.Ctxt.Float32Sym(float32(f)) } p.To.Type = obj.TYPE_REG p.To.Reg = v.Reg() @@ -713,7 +714,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { r := v.Reg() // See the comments in 
cmd/internal/obj/x86/obj6.go // near CanUse1InsnTLS for a detailed explanation of these instructions. - if x86.CanUse1InsnTLS(gc.Ctxt) { + if x86.CanUse1InsnTLS(base.Ctxt) { // MOVL (TLS), r p := s.Prog(x86.AMOVL) p.From.Type = obj.TYPE_MEM @@ -749,7 +750,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { // caller's SP is the address of the first arg p := s.Prog(x86.AMOVL) p.From.Type = obj.TYPE_ADDR - p.From.Offset = -gc.Ctxt.FixedFrameSize() // 0 on 386, just to be consistent with other architectures + p.From.Offset = -base.Ctxt.FixedFrameSize() // 0 on 386, just to be consistent with other architectures p.From.Name = obj.NAME_PARAM p.To.Type = obj.TYPE_REG p.To.Reg = v.Reg() @@ -850,8 +851,8 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) { if logopt.Enabled() { logopt.LogOpt(v.Pos, "nilcheck", "genssa", v.Block.Func.Name) } - if gc.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers - gc.Warnl(v.Pos, "generated nil check") + if base.Debug.Nil != 0 && v.Pos.Line() > 1 { // v.Pos.Line()==1 in generated wrappers + base.WarnfAt(v.Pos, "generated nil check") } case ssa.OpClobber: p := s.Prog(x86.AMOVL) diff --git a/src/cmd/compile/main.go b/src/cmd/compile/main.go index 3aa64a5ce2..5a33719d87 100644 --- a/src/cmd/compile/main.go +++ b/src/cmd/compile/main.go @@ -8,6 +8,7 @@ import ( "cmd/compile/internal/amd64" "cmd/compile/internal/arm" "cmd/compile/internal/arm64" + "cmd/compile/internal/base" "cmd/compile/internal/gc" "cmd/compile/internal/mips" "cmd/compile/internal/mips64" @@ -50,5 +51,5 @@ func main() { } gc.Main(archInit) - gc.Exit(0) + base.Exit(0) } diff --git a/src/cmd/dist/buildtool.go b/src/cmd/dist/buildtool.go index e39f284db5..f8e1f2f951 100644 --- a/src/cmd/dist/buildtool.go +++ b/src/cmd/dist/buildtool.go @@ -38,6 +38,7 @@ var bootstrapDirs = []string{ "cmd/cgo", "cmd/compile", "cmd/compile/internal/amd64", + "cmd/compile/internal/base", "cmd/compile/internal/arm", "cmd/compile/internal/arm64", 
"cmd/compile/internal/gc",