We ran 'go mod vendor' to pull in the newly used packages.
Also, add a cmd/go script test that minimally
exercises each analyzer, analogous to the cmd/vet test.
For #75266
For #75267
For #71859
Change-Id: I334daea048e3d2f614a1788292a3175acf173932
Reviewed-on: https://go-review.googlesource.com/c/go/+/710995
Reviewed-by: Michael Matloob <matloob@golang.org>
Auto-Submit: Alan Donovan <adonovan@google.com>
TryBot-Bypass: Alan Donovan <adonovan@google.com>
Reviewed-by: Michael Matloob <matloob@google.com>
import (
"cmd/internal/objabi"
"cmd/internal/telemetry/counter"
+ "slices"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildtag"
"golang.org/x/tools/go/analysis/passes/hostport"
+ "golang.org/x/tools/go/analysis/passes/inline"
+ "golang.org/x/tools/go/analysis/passes/modernize"
"golang.org/x/tools/go/analysis/unitchecker"
)
unitchecker.Main(suite...) // (never returns)
}
-// The fix suite analyzers produce fixes that are safe to apply.
-// (Diagnostics may not describe actual problems,
-// but their fixes must be unambiguously safe to apply.)
-var suite = []*analysis.Analyzer{
- buildtag.Analyzer,
- hostport.Analyzer,
- // TODO(adonovan): now the modernize (proposal #75266) and
- // inline (proposal #75267) analyzers are published, revendor
- // x/tools and add them here.
- //
- // TODO(adonovan):add any other vet analyzers whose fixes are always safe.
+// The fix suite analyzers produce fixes that are unambiguously safe to apply,
+// even if the diagnostics might not describe actual problems.
+var suite = slices.Concat(
+ []*analysis.Analyzer{
+ buildtag.Analyzer,
+ hostport.Analyzer,
+ inline.Analyzer,
+ },
+ modernize.Suite,
+ // TODO(adonovan): add any other vet analyzers whose fixes are always safe.
// Candidates to audit: sigchanyzer, printf, assign, unreachable.
+ // Many of staticcheck's analyzers would make good candidates
+ // (e.g. rewriting WriteString(fmt.Sprintf()) to Fprintf.)
// Rejected:
// - composites: some types (e.g. PointXY{1,2}) don't want field names.
// - timeformat: flipping MM/DD is a behavior change, but the code
// could potentially be a workaround for another bug.
// - stringintconv: offers two fixes, user input required to choose.
// - fieldalignment: poor signal/noise; fix could be a regression.
-}
+)
--- /dev/null
+# Elementary test of each analyzer in the "go fix" suite.
+# This is simply to prove that they are running at all;
+# detailed behavior is tested in x/tools.
+#
+# Each assertion matches the expected diff.
+#
+# Tip: to see the actual stdout,
+# temporarily prefix the go command with "! ".
+
+go fix -diff example.com/x
+
+# buildtag
+stdout '-// \+build go1.26'
+
+# hostport
+stdout 'net.Dial.*net.JoinHostPort'
+
+# inline
+stdout 'var three = 1 \+ 2'
+
+# newexpr (proxy for whole modernize suite)
+stdout 'var _ = new\(123\)'
+
+-- go.mod --
+module example.com/x
+go 1.26
+
+-- x.go --
+//go:build go1.26
+// +build go1.26
+
+// ↑ buildtag
+
+package x
+
+import (
+ "fmt"
+ "net"
+)
+
+// hostport
+var s string
+var _, _ = net.Dial("tcp", fmt.Sprintf("%s:%d", s, 80))
+
+//go:fix inline
+func add(x, y int) int { return x + y }
+
+// inline
+var three = add(1, 2)
+
+// newexpr
+func varOf(x int) *int { return &x }
+var _ = varOf(123)
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package inline defines an analyzer that inlines calls to functions
+and uses of constants marked with a "//go:fix inline" directive.
+
+# Analyzer inline
+
+inline: apply fixes based on 'go:fix inline' comment directives
+
+The inline analyzer inlines functions and constants that are marked for inlining.
+
+## Functions
+
+Given a function that is marked for inlining, like this one:
+
+ //go:fix inline
+ func Square(x int) int { return Pow(x, 2) }
+
+this analyzer will recommend that calls to the function elsewhere, in the same
+or other packages, should be inlined.
+
+Inlining can be used to move off of a deprecated function:
+
+ // Deprecated: prefer Pow(x, 2).
+ //go:fix inline
+ func Square(x int) int { return Pow(x, 2) }
+
+It can also be used to move off of an obsolete package,
+as when the import path has changed or a higher major version is available:
+
+ package pkg
+
+ import pkg2 "pkg/v2"
+
+ //go:fix inline
+ func F() { pkg2.F(nil) }
+
+Replacing a call pkg.F() by pkg2.F(nil) can have no effect on the program,
+so this mechanism provides a low-risk way to update large numbers of calls.
+We recommend, where possible, expressing the old API in terms of the new one
+to enable automatic migration.
+
+The inliner takes care to avoid behavior changes, even subtle ones,
+such as changes to the order in which argument expressions are
+evaluated. When it cannot safely eliminate all parameter variables,
+it may introduce a "binding declaration" of the form
+
+ var params = args
+
+to evaluate argument expressions in the correct order and bind them to
+parameter variables. Since the resulting code transformation may be
+stylistically suboptimal, such inlinings may be disabled by specifying
+the -inline.allow_binding_decl=false flag to the analyzer driver.
+
+(In cases where it is not safe to "reduce" a call—that is, to replace
+a call f(x) by the body of function f, suitably substituted—the
+inliner machinery is capable of replacing f by a function literal,
+func(){...}(). However, the inline analyzer discards all such
+"literalizations" unconditionally, again on grounds of style.)
+
+## Constants
+
+Given a constant that is marked for inlining, like this one:
+
+ //go:fix inline
+ const Ptr = Pointer
+
+this analyzer will recommend that uses of Ptr should be replaced with Pointer.
+
+As with functions, inlining can be used to replace deprecated constants and
+constants in obsolete packages.
+
+A constant definition can be marked for inlining only if it refers to another
+named constant.
+
+The "//go:fix inline" comment must appear before a single const declaration on its own,
+as above; before a const declaration that is part of a group, as in this case:
+
+ const (
+ C = 1
+ //go:fix inline
+ Ptr = Pointer
+ )
+
+or before a group, applying to every constant in the group:
+
+ //go:fix inline
+ const (
+ Ptr = Pointer
+ Val = Value
+ )
+
+The proposal https://go.dev/issue/32816 introduces the "//go:fix inline" directives.
+
+You can use this command to apply inline fixes en masse:
+
+ $ go run golang.org/x/tools/go/analysis/passes/inline/cmd/inline@latest -fix ./...
+
+# Analyzer gofixdirective
+
+gofixdirective: validate uses of //go:fix comment directives
+
+The gofixdirective analyzer checks "//go:fix inline" directives for correctness.
+See the documentation for the gofix analyzer for more about "//go:fix inline".
+*/
+package inline
--- /dev/null
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "slices"
+ "strings"
+
+ _ "embed"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/analysis/passes/internal/gofixdirective"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/diff"
+ "golang.org/x/tools/internal/packagepath"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/refactor/inline"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+//go:embed doc.go
+var doc string
+
+// Analyzer is the inline analyzer. It suggests fixes that replace uses
+// of functions, constants, and type aliases marked "//go:fix inline"
+// with their definitions, within this package and (via facts) others.
+var Analyzer = &analysis.Analyzer{
+ Name: "inline",
+ Doc: analysisinternal.MustExtractDoc(doc, "inline"),
+ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/inline",
+ Run: run,
+ FactTypes: []analysis.Fact{
+ (*goFixInlineFuncFact)(nil),
+ (*goFixInlineConstFact)(nil),
+ (*goFixInlineAliasFact)(nil),
+ },
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+}
+
+// allowBindingDecl, set by the -inline.allow_binding_decl flag, permits
+// inlinings whose result needs a "var params = args" declaration.
+var allowBindingDecl bool
+
+// init registers the analyzer's command-line flag.
+func init() {
+ Analyzer.Flags.BoolVar(&allowBindingDecl, "allow_binding_decl", false,
+ "permit inlinings that require a 'var params = args' declaration")
+}
+
+// analyzer holds the state for this analysis.
+type analyzer struct {
+ pass *analysis.Pass
+ root inspector.Cursor
+ // memoization of repeated calls for same file.
+ fileContent map[string][]byte
+ // memoization of fact imports (nil => no fact)
+ inlinableFuncs map[*types.Func]*inline.Callee
+ inlinableConsts map[*types.Const]*goFixInlineConstFact
+ inlinableAliases map[*types.TypeName]*goFixInlineAliasFact
+}
+
+// run is the analysis entry point: it gathers the //go:fix declarations
+// of this package (via gofixdirective.Find, which calls the Handle*
+// methods below), then suggests a fix for each inlinable use.
+func run(pass *analysis.Pass) (any, error) {
+ a := &analyzer{
+ pass: pass,
+ root: pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Root(),
+ fileContent: make(map[string][]byte),
+ inlinableFuncs: make(map[*types.Func]*inline.Callee),
+ inlinableConsts: make(map[*types.Const]*goFixInlineConstFact),
+ inlinableAliases: make(map[*types.TypeName]*goFixInlineAliasFact),
+ }
+ gofixdirective.Find(pass, a.root, a)
+ a.inline()
+ return nil, nil
+}
+
+// HandleFunc exports a fact for functions marked with go:fix.
+// It analyzes the callee from the file's source and records the result
+// both in a.inlinableFuncs (for same-package uses) and as an exported
+// object fact (for uses in other packages).
+func (a *analyzer) HandleFunc(decl *ast.FuncDecl) {
+ content, err := a.readFile(decl)
+ if err != nil {
+ a.pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: cannot read source file: %v", err)
+ return
+ }
+ callee, err := inline.AnalyzeCallee(discard, a.pass.Fset, a.pass.Pkg, a.pass.TypesInfo, decl, content)
+ if err != nil {
+ a.pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: %v", err)
+ return
+ }
+ fn := a.pass.TypesInfo.Defs[decl.Name].(*types.Func)
+ a.pass.ExportObjectFact(fn, &goFixInlineFuncFact{callee})
+ a.inlinableFuncs[fn] = callee
+}
+
+// HandleAlias exports a fact for aliases marked with go:fix.
+// (The alias fact carries no data; its existence marks the alias as inlinable.)
+func (a *analyzer) HandleAlias(spec *ast.TypeSpec) {
+ // Remember that this is an inlinable alias.
+ typ := &goFixInlineAliasFact{}
+ lhs := a.pass.TypesInfo.Defs[spec.Name].(*types.TypeName)
+ a.inlinableAliases[lhs] = typ
+ // Create a fact only if the LHS is exported and defined at top level.
+ // We create a fact even if the RHS is non-exported,
+ // so we can warn about uses in other packages.
+ if lhs.Exported() && typesinternal.IsPackageLevel(lhs) {
+ a.pass.ExportObjectFact(lhs, typ)
+ }
+}
+
+// HandleConst exports a fact for constants marked with go:fix.
+// The fact records the RHS constant by name and package so that other
+// packages can substitute it; rhsObj is retained only for same-package
+// uses, where it supports the shadowing check in inlineConst.
+func (a *analyzer) HandleConst(nameIdent, rhsIdent *ast.Ident) {
+ lhs := a.pass.TypesInfo.Defs[nameIdent].(*types.Const)
+ rhs := a.pass.TypesInfo.Uses[rhsIdent].(*types.Const) // must be so in a well-typed program
+ con := &goFixInlineConstFact{
+ RHSName: rhs.Name(),
+ RHSPkgName: rhs.Pkg().Name(),
+ RHSPkgPath: rhs.Pkg().Path(),
+ }
+ if rhs.Pkg() == a.pass.Pkg {
+ con.rhsObj = rhs
+ }
+ a.inlinableConsts[lhs] = con
+ // Create a fact only if the LHS is exported and defined at top level.
+ // We create a fact even if the RHS is non-exported,
+ // so we can warn about uses in other packages.
+ if lhs.Exported() && typesinternal.IsPackageLevel(lhs) {
+ a.pass.ExportObjectFact(lhs, con)
+ }
+}
+
+// inline inlines each static call to an inlinable function
+// and each reference to an inlinable constant or type alias.
+//
+// TODO(adonovan): handle multiple diffs that each add the same import.
+func (a *analyzer) inline() {
+ for cur := range a.root.Preorder((*ast.CallExpr)(nil), (*ast.Ident)(nil)) {
+ switch n := cur.Node().(type) {
+ case *ast.CallExpr:
+ a.inlineCall(n, cur)
+
+ case *ast.Ident:
+ // An identifier use may refer to an inlinable alias or constant.
+ switch t := a.pass.TypesInfo.Uses[n].(type) {
+ case *types.TypeName:
+ a.inlineAlias(t, cur)
+ case *types.Const:
+ a.inlineConst(t, cur)
+ }
+ }
+ }
+}
+
+// If call is a call to an inlinable func, suggest inlining its use at cur.
+// Inlinings that would literalize the call (func(){...}()) are always
+// discarded; those requiring a binding declaration are discarded unless
+// the -allow_binding_decl flag is set.
+func (a *analyzer) inlineCall(call *ast.CallExpr, cur inspector.Cursor) {
+ if fn := typeutil.StaticCallee(a.pass.TypesInfo, call); fn != nil {
+ // Inlinable? Check the memo table, then imported facts.
+ callee, ok := a.inlinableFuncs[fn]
+ if !ok {
+ var fact goFixInlineFuncFact
+ if a.pass.ImportObjectFact(fn, &fact) {
+ callee = fact.Callee
+ a.inlinableFuncs[fn] = callee
+ }
+ }
+ if callee == nil {
+ return // nope
+ }
+
+ // Inline the call.
+ content, err := a.readFile(call)
+ if err != nil {
+ a.pass.Reportf(call.Lparen, "invalid inlining candidate: cannot read source file: %v", err)
+ return
+ }
+ curFile := astutil.EnclosingFile(cur)
+ caller := &inline.Caller{
+ Fset: a.pass.Fset,
+ Types: a.pass.Pkg,
+ Info: a.pass.TypesInfo,
+ File: curFile,
+ Call: call,
+ Content: content,
+ }
+ res, err := inline.Inline(caller, callee, &inline.Options{Logf: discard})
+ if err != nil {
+ a.pass.Reportf(call.Lparen, "%v", err)
+ return
+ }
+
+ if res.Literalized {
+ // Users are not fond of inlinings that literalize
+ // f(x) to func() { ... }(), so avoid them.
+ //
+ // (Unfortunately the inliner is very timid,
+ // and often literalizes when it cannot prove that
+ // reducing the call is safe; the user of this tool
+ // has no indication of what the problem is.)
+ return
+ }
+ if res.BindingDecl && !allowBindingDecl {
+ // When applying fix en masse, users are similarly
+ // unenthusiastic about inlinings that cannot
+ // entirely eliminate the parameters and
+ // insert a 'var params = args' declaration.
+ // The flag allows them to decline such fixes.
+ return
+ }
+ got := res.Content
+
+ // Suggest the "fix": convert the whole-file diff into TextEdits.
+ var textEdits []analysis.TextEdit
+ for _, edit := range diff.Bytes(content, got) {
+ textEdits = append(textEdits, analysis.TextEdit{
+ Pos: curFile.FileStart + token.Pos(edit.Start),
+ End: curFile.FileStart + token.Pos(edit.End),
+ NewText: []byte(edit.New),
+ })
+ }
+ a.pass.Report(analysis.Diagnostic{
+ Pos: call.Pos(),
+ End: call.End(),
+ Message: fmt.Sprintf("Call of %v should be inlined", callee),
+ SuggestedFixes: []analysis.SuggestedFix{{
+ Message: fmt.Sprintf("Inline call of %v", callee),
+ TextEdits: textEdits,
+ }},
+ })
+ }
+}
+
+// If tn is the TypeName of an inlinable alias, suggest inlining its use at cur.
+// The replacement is the alias's RHS type, rendered with whatever package
+// qualifiers (and added imports) the current file requires.
+func (a *analyzer) inlineAlias(tn *types.TypeName, curId inspector.Cursor) {
+ inalias, ok := a.inlinableAliases[tn]
+ if !ok {
+ var fact goFixInlineAliasFact
+ if a.pass.ImportObjectFact(tn, &fact) {
+ inalias = &fact
+ a.inlinableAliases[tn] = inalias
+ }
+ }
+ if inalias == nil {
+ return // nope
+ }
+
+ alias := tn.Type().(*types.Alias)
+ // Remember the names of the alias's type params. When we check for shadowing
+ // later, we'll ignore these because they won't appear in the replacement text.
+ typeParamNames := map[*types.TypeName]bool{}
+ for tp := range alias.TypeParams().TypeParams() {
+ typeParamNames[tp.Obj()] = true
+ }
+ rhs := alias.Rhs()
+ curPath := a.pass.Pkg.Path()
+ curFile := astutil.EnclosingFile(curId)
+ id := curId.Node().(*ast.Ident)
+ // We have an identifier A here (n), possibly qualified by a package
+ // identifier (sel.n), and an inlinable "type A = rhs" elsewhere.
+ //
+ // We can replace A with rhs if no name in rhs is shadowed at n's position,
+ // and every package in rhs is importable by the current package.
+
+ var (
+ importPrefixes = map[string]string{curPath: ""} // from pkg path to prefix
+ edits []analysis.TextEdit
+ )
+ for _, tn := range typenames(rhs) {
+ // Ignore the type parameters of the alias: they won't appear in the result.
+ if typeParamNames[tn] {
+ continue
+ }
+ var pkgPath, pkgName string
+ if pkg := tn.Pkg(); pkg != nil {
+ pkgPath = pkg.Path()
+ pkgName = pkg.Name()
+ }
+ if pkgPath == "" || pkgPath == curPath {
+ // The name is in the current package or the universe scope, so no import
+ // is required. Check that it is not shadowed (that is, that the type
+ // it refers to in rhs is the same one it refers to at n).
+ scope := a.pass.TypesInfo.Scopes[curFile].Innermost(id.Pos()) // n's scope
+ _, obj := scope.LookupParent(tn.Name(), id.Pos()) // what qn.name means in n's scope
+ if obj != tn {
+ return
+ }
+ } else if !packagepath.CanImport(a.pass.Pkg.Path(), pkgPath) {
+ // If this package can't see the package of this part of rhs, we can't inline.
+ return
+ } else if _, ok := importPrefixes[pkgPath]; !ok {
+ // Use AddImport to add pkgPath if it's not there already. Associate the prefix it assigns
+ // with the package path for use by the TypeString qualifier below.
+ prefix, eds := refactor.AddImport(
+ a.pass.TypesInfo, curFile, pkgName, pkgPath, tn.Name(), id.Pos())
+ importPrefixes[pkgPath] = strings.TrimSuffix(prefix, ".")
+ edits = append(edits, eds...)
+ }
+ }
+ // Find the complete identifier, which may take any of these forms:
+ // Id
+ // Id[T]
+ // Id[K, V]
+ // pkg.Id
+ // pkg.Id[T]
+ // pkg.Id[K, V]
+ var expr ast.Expr = id
+ if astutil.IsChildOf(curId, edge.SelectorExpr_Sel) {
+ curId = curId.Parent()
+ expr = curId.Node().(ast.Expr)
+ }
+ // If expr is part of an IndexExpr or IndexListExpr, we'll need that node.
+ // Given C[int], TypeOf(C) is generic but TypeOf(C[int]) is instantiated.
+ switch ek, _ := curId.ParentEdge(); ek {
+ case edge.IndexExpr_X:
+ expr = curId.Parent().Node().(*ast.IndexExpr)
+ case edge.IndexListExpr_X:
+ expr = curId.Parent().Node().(*ast.IndexListExpr)
+ }
+ t := a.pass.TypesInfo.TypeOf(expr).(*types.Alias) // type of entire identifier
+ if targs := t.TypeArgs(); targs.Len() > 0 {
+ // Instantiate the alias with the type args from this use.
+ // For example, given type A = M[K, V], compute the type of the use
+ // A[int, Foo] as M[int, Foo].
+ // Don't validate instantiation: it can't panic unless we have a bug,
+ // in which case seeing the stack trace via telemetry would be helpful.
+ instAlias, _ := types.Instantiate(nil, alias, slices.Collect(targs.Types()), false)
+ rhs = instAlias.(*types.Alias).Rhs()
+ }
+ // To get the replacement text, render the alias RHS using the package prefixes
+ // we assigned above.
+ newText := types.TypeString(rhs, func(p *types.Package) string {
+ if p == a.pass.Pkg {
+ return ""
+ }
+ if prefix, ok := importPrefixes[p.Path()]; ok {
+ return prefix
+ }
+ panic(fmt.Sprintf("in %q, package path %q has no import prefix", rhs, p.Path()))
+ })
+ a.reportInline("type alias", "Type alias", expr, edits, newText)
+}
+
+// typenames returns the TypeNames for types within t (including t itself) that have
+// them: basic types, named types and alias types.
+// The same name may appear more than once.
+func typenames(t types.Type) []*types.TypeName {
+ var tns []*types.TypeName
+
+ // visit recursively appends the TypeNames of t and its constituent types.
+ var visit func(types.Type)
+ visit = func(t types.Type) {
+ if hasName, ok := t.(interface{ Obj() *types.TypeName }); ok {
+ tns = append(tns, hasName.Obj())
+ }
+ switch t := t.(type) {
+ case *types.Basic:
+ // Basic types resolve to their universe-scope TypeName.
+ tns = append(tns, types.Universe.Lookup(t.Name()).(*types.TypeName))
+ case *types.Named:
+ for t := range t.TypeArgs().Types() {
+ visit(t)
+ }
+ case *types.Alias:
+ for t := range t.TypeArgs().Types() {
+ visit(t)
+ }
+ case *types.TypeParam:
+ tns = append(tns, t.Obj())
+ case *types.Pointer:
+ visit(t.Elem())
+ case *types.Slice:
+ visit(t.Elem())
+ case *types.Array:
+ visit(t.Elem())
+ case *types.Chan:
+ visit(t.Elem())
+ case *types.Map:
+ visit(t.Key())
+ visit(t.Elem())
+ case *types.Struct:
+ for i := range t.NumFields() {
+ visit(t.Field(i).Type())
+ }
+ case *types.Signature:
+ // Ignore the receiver: although it may be present, it has no meaning
+ // in a type expression.
+ // Ditto for receiver type params.
+ // Also, function type params cannot appear in a type expression.
+ if t.TypeParams() != nil {
+ panic("Signature.TypeParams in type expression")
+ }
+ visit(t.Params())
+ visit(t.Results())
+ case *types.Interface:
+ for i := range t.NumEmbeddeds() {
+ visit(t.EmbeddedType(i))
+ }
+ for i := range t.NumExplicitMethods() {
+ visit(t.ExplicitMethod(i).Type())
+ }
+ case *types.Tuple:
+ for v := range t.Variables() {
+ visit(v.Type())
+ }
+ case *types.Union:
+ panic("Union in type expression")
+ default:
+ panic(fmt.Sprintf("unknown type %T", t))
+ }
+ }
+
+ visit(t)
+
+ return tns
+}
+
+// If con is an inlinable constant, suggest inlining its use at cur.
+func (a *analyzer) inlineConst(con *types.Const, cur inspector.Cursor) {
+ incon, ok := a.inlinableConsts[con]
+ if !ok {
+ var fact goFixInlineConstFact
+ if a.pass.ImportObjectFact(con, &fact) {
+ incon = &fact
+ a.inlinableConsts[con] = incon
+ }
+ }
+ if incon == nil {
+ return // nope
+ }
+
+ // The file and identifier for this use.
+ curFile := astutil.EnclosingFile(cur)
+ n := cur.Node().(*ast.Ident)
+
+ // We have an identifier A here (n), possibly qualified by a package identifier (sel.X,
+ // where sel is the parent of n), and an inlinable "const A = B" elsewhere (incon).
+ // Consider replacing A with B.
+
+ // Check that the expression we are inlining (B) means the same thing
+ // (refers to the same object) in n's scope as it does in A's scope.
+ // If the RHS is not in the current package, AddImport will handle
+ // shadowing, so we only need to worry about when both expressions
+ // are in the current package.
+ if a.pass.Pkg.Path() == incon.RHSPkgPath {
+ // incon.rhsObj is the object referred to by B in the definition of A.
+ scope := a.pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope
+ _, obj := scope.LookupParent(incon.RHSName, n.Pos()) // what "B" means in n's scope
+ if obj == nil {
+ // Should be impossible: if code at n can refer to the LHS,
+ // it can refer to the RHS.
+ panic(fmt.Sprintf("no object for inlinable const %s RHS %s", n.Name, incon.RHSName))
+ }
+ if obj != incon.rhsObj {
+ // "B" means something different here than at the inlinable const's scope.
+ return
+ }
+ } else if !packagepath.CanImport(a.pass.Pkg.Path(), incon.RHSPkgPath) {
+ // If this package can't see the RHS's package, we can't inline.
+ return
+ }
+ var (
+ importPrefix string
+ edits []analysis.TextEdit
+ )
+ if incon.RHSPkgPath != a.pass.Pkg.Path() {
+ importPrefix, edits = refactor.AddImport(
+ a.pass.TypesInfo, curFile, incon.RHSPkgName, incon.RHSPkgPath, incon.RHSName, n.Pos())
+ }
+ // If n is qualified by a package identifier, we'll need the full selector expression.
+ var expr ast.Expr = n
+ if astutil.IsChildOf(cur, edge.SelectorExpr_Sel) {
+ expr = cur.Parent().Node().(ast.Expr)
+ }
+ a.reportInline("constant", "Constant", expr, edits, importPrefix+incon.RHSName)
+}
+
+// reportInline reports a diagnostic for fixing an inlinable name.
+// kind/capKind are the lower- and upper-case forms used in the messages
+// (e.g. "constant"/"Constant"); edits are any import edits to apply in
+// addition to replacing ident's text with newText.
+func (a *analyzer) reportInline(kind, capKind string, ident ast.Expr, edits []analysis.TextEdit, newText string) {
+ edits = append(edits, analysis.TextEdit{
+ Pos: ident.Pos(),
+ End: ident.End(),
+ NewText: []byte(newText),
+ })
+ name := astutil.Format(a.pass.Fset, ident)
+ a.pass.Report(analysis.Diagnostic{
+ Pos: ident.Pos(),
+ End: ident.End(),
+ Message: fmt.Sprintf("%s %s should be inlined", capKind, name),
+ SuggestedFixes: []analysis.SuggestedFix{{
+ Message: fmt.Sprintf("Inline %s %s", kind, name),
+ TextEdits: edits,
+ }},
+ })
+}
+
+// readFile returns the content of the file enclosing node,
+// memoizing the result per filename in a.fileContent.
+func (a *analyzer) readFile(node ast.Node) ([]byte, error) {
+ filename := a.pass.Fset.File(node.Pos()).Name()
+ content, ok := a.fileContent[filename]
+ if !ok {
+ var err error
+ content, err = a.pass.ReadFile(filename)
+ if err != nil {
+ return nil, err
+ }
+ a.fileContent[filename] = content
+ }
+ return content, nil
+}
+
+// A goFixInlineFuncFact is exported for each function marked "//go:fix inline".
+// It holds information about the callee to support inlining.
+type goFixInlineFuncFact struct{ Callee *inline.Callee }
+
+func (f *goFixInlineFuncFact) String() string { return "goFixInline " + f.Callee.String() }
+func (*goFixInlineFuncFact) AFact() {}
+
+// A goFixInlineConstFact is exported for each constant marked "//go:fix inline".
+// It holds information about an inlinable constant. Gob-serializable.
+type goFixInlineConstFact struct {
+ // Information about "const LHSName = RHSName".
+ RHSName string
+ RHSPkgPath string
+ RHSPkgName string
+ rhsObj types.Object // for current package
+}
+
+func (c *goFixInlineConstFact) String() string {
+ return fmt.Sprintf("goFixInline const %q.%s", c.RHSPkgPath, c.RHSName)
+}
+
+func (*goFixInlineConstFact) AFact() {}
+
+// A goFixInlineAliasFact is exported for each type alias marked "//go:fix inline".
+// It holds no information; its mere existence demonstrates that an alias is inlinable.
+type goFixInlineAliasFact struct{}
+
+func (c *goFixInlineAliasFact) String() string { return "goFixInline alias" }
+func (*goFixInlineAliasFact) AFact() {}
+
+// discard is a no-op logger passed to the inliner machinery.
+func discard(string, ...any) {}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gofixdirective searches for and validates go:fix directives. The
+// go/analysis/passes/inline package uses gofixdirective to perform inlining.
+// The go/analysis/passes/gofix package uses gofixdirective to check for problems
+// with go:fix directives.
+//
+// gofixdirective is separate from gofix to avoid depending on refactor/inline,
+// which is large.
+package gofixdirective
+
+// This package is tested by go/analysis/passes/inline.
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/ast/inspector"
+ internalastutil "golang.org/x/tools/internal/astutil"
+)
+
+// A Handler handles language entities with go:fix directives.
+type Handler interface {
+ // HandleFunc is called for each function marked "//go:fix inline".
+ HandleFunc(*ast.FuncDecl)
+ // HandleAlias is called for each type alias marked "//go:fix inline".
+ HandleAlias(*ast.TypeSpec)
+ // HandleConst is called for each "const name = rhs" marked "//go:fix inline".
+ HandleConst(name, rhs *ast.Ident)
+}
+
+// Find finds functions and constants annotated with an appropriate "//go:fix"
+// comment (the syntax proposed by #32816), and calls handler methods for each one.
+// h may be nil.
+func Find(pass *analysis.Pass, root inspector.Cursor, h Handler) {
+ for cur := range root.Preorder((*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)) {
+ switch decl := cur.Node().(type) {
+ case *ast.FuncDecl:
+ findFunc(decl, h)
+
+ case *ast.GenDecl:
+ // Only const and type declarations can carry the directive.
+ if decl.Tok != token.CONST && decl.Tok != token.TYPE {
+ continue
+ }
+ declInline := hasFixInline(decl.Doc)
+ // Accept inline directives on the entire decl as well as individual specs.
+ for _, spec := range decl.Specs {
+ switch spec := spec.(type) {
+ case *ast.TypeSpec: // Tok == TYPE
+ findAlias(pass, spec, declInline, h)
+
+ case *ast.ValueSpec: // Tok == CONST
+ findConst(pass, spec, declInline, h)
+ }
+ }
+ }
+ }
+}
+
+// findFunc calls h.HandleFunc if decl's doc comment carries "//go:fix inline".
+func findFunc(decl *ast.FuncDecl, h Handler) {
+ if !hasFixInline(decl.Doc) {
+ return
+ }
+ if h != nil {
+ h.HandleFunc(decl)
+ }
+}
+
+// findAlias validates a "//go:fix inline" type spec (declInline indicates a
+// directive on the enclosing decl) and calls h.HandleAlias if it is a valid
+// candidate. Non-alias types and most array types are rejected with diagnostics.
+func findAlias(pass *analysis.Pass, spec *ast.TypeSpec, declInline bool, h Handler) {
+ if !declInline && !hasFixInline(spec.Doc) {
+ return
+ }
+ if !spec.Assign.IsValid() {
+ pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: not a type alias")
+ return
+ }
+
+ // Disallow inlines of type expressions containing array types.
+ // Given an array type like [N]int where N is a named constant, go/types provides
+ // only the value of the constant as an int64. So inlining A in this code:
+ //
+ // const N = 5
+ // type A = [N]int
+ //
+ // would result in [5]int, breaking the connection with N.
+ for n := range ast.Preorder(spec.Type) {
+ if ar, ok := n.(*ast.ArrayType); ok && ar.Len != nil {
+ // Make an exception when the array length is a literal int.
+ if lit, ok := ast.Unparen(ar.Len).(*ast.BasicLit); ok && lit.Kind == token.INT {
+ continue
+ }
+ pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: array types not supported")
+ return
+ }
+ }
+ if h != nil {
+ h.HandleAlias(spec)
+ }
+}
+
+// findConst validates a "//go:fix inline" const spec (declInline indicates a
+// directive on the enclosing decl) and calls h.HandleConst for each name whose
+// value is another named constant. iota and non-name values are rejected.
+func findConst(pass *analysis.Pass, spec *ast.ValueSpec, declInline bool, h Handler) {
+ specInline := hasFixInline(spec.Doc)
+ if declInline || specInline {
+ for i, nameIdent := range spec.Names {
+ if i >= len(spec.Values) {
+ // Possible following an iota.
+ break
+ }
+ var rhsIdent *ast.Ident
+ switch val := spec.Values[i].(type) {
+ case *ast.Ident:
+ // Constants defined with the predeclared iota cannot be inlined.
+ if pass.TypesInfo.Uses[val] == builtinIota {
+ pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is iota")
+ return
+ }
+ rhsIdent = val
+ case *ast.SelectorExpr:
+ rhsIdent = val.Sel
+ default:
+ pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is not the name of another constant")
+ return
+ }
+ if h != nil {
+ h.HandleConst(nameIdent, rhsIdent)
+ }
+ }
+ }
+}
+
+// hasFixInline reports the presence of a "//go:fix inline" directive
+// in the comments.
+func hasFixInline(cg *ast.CommentGroup) bool {
+ for _, d := range internalastutil.Directives(cg) {
+ if d.Tool == "go" && d.Name == "fix" && d.Args == "inline" {
+ return true
+ }
+ }
+ return false
+}
+
+// builtinIota is the predeclared iota object, used to reject "const X = iota".
+var builtinIota = types.Universe.Lookup("iota")
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+)
+
+// AnyAnalyzer replaces interface{} with go1.18's 'any'; see runAny.
+var AnyAnalyzer = &analysis.Analyzer{
+ Name: "any",
+ Doc: analysisinternal.MustExtractDoc(doc, "any"),
+ Requires: []*analysis.Analyzer{
+ generated.Analyzer,
+ inspect.Analyzer,
+ },
+ Run: runAny,
+ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#any",
+}
+
+// The any pass replaces interface{} with go1.18's 'any',
+// in files whose effective Go version is at least 1.18,
+// skipping generated files and empty interfaces that shadow 'any'.
+func runAny(pass *analysis.Pass) (any, error) {
+ skipGenerated(pass)
+
+ inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+ for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.18") {
+ for curIface := range curFile.Preorder((*ast.InterfaceType)(nil)) {
+ iface := curIface.Node().(*ast.InterfaceType)
+
+ // Only the empty interface can be replaced by 'any'.
+ if iface.Methods.NumFields() == 0 {
+ // Check that 'any' is not shadowed.
+ if lookup(pass.TypesInfo, curIface, "any") == builtinAny {
+ pass.Report(analysis.Diagnostic{
+ Pos: iface.Pos(),
+ End: iface.End(),
+ Message: "interface{} can be replaced by any",
+ SuggestedFixes: []analysis.SuggestedFix{{
+ Message: "Replace interface{} by any",
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: iface.Pos(),
+ End: iface.End(),
+ NewText: []byte("any"),
+ },
+ },
+ }},
+ })
+ }
+ }
+ }
+ }
+ return nil, nil
+}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/moreiters"
+ "golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// BLoopAnalyzer replaces benchmark loops over b.N with go1.24's b.Loop(); see bloop.
+var BLoopAnalyzer = &analysis.Analyzer{
+ Name: "bloop",
+ Doc: analysisinternal.MustExtractDoc(doc, "bloop"),
+ Requires: []*analysis.Analyzer{
+ generated.Analyzer,
+ inspect.Analyzer,
+ typeindexanalyzer.Analyzer,
+ },
+ Run: bloop,
+ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#bloop",
+}
+
+// bloop updates benchmarks that use "for range b.N", replacing it
+// with go1.24's b.Loop() and eliminating any preceding
+// b.{Start,Stop,Reset}Timer calls.
+//
+// Variants:
+//
+//	for i := 0; i < b.N; i++ {} => for b.Loop() {}
+//	for range b.N {}            => for b.Loop() {}
+func bloop(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	// Fast path: only packages that import "testing" can contain benchmarks.
+	if !typesinternal.Imports(pass.Pkg, "testing") {
+		return nil, nil
+	}
+
+	var (
+		inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+		index   = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+		info    = pass.TypesInfo
+	)
+
+	// edits computes the text edits for a matched for/range loop
+	// at the specified cursor. b is the *testing.B value, and
+	// (start, end) is the portion using b.N to delete.
+	edits := func(curLoop inspector.Cursor, b ast.Expr, start, end token.Pos) (edits []analysis.TextEdit) {
+		curFn, _ := enclosingFunc(curLoop)
+		// Within the same function, delete all calls to
+		// b.{Start,Stop,Reset}Timer that precede the loop:
+		// b.Loop() subsumes manual timer control.
+		filter := []ast.Node{(*ast.ExprStmt)(nil), (*ast.FuncLit)(nil)}
+		curFn.Inspect(filter, func(cur inspector.Cursor) (descend bool) {
+			node := cur.Node()
+			if is[*ast.FuncLit](node) {
+				return false // don't descend into FuncLits (e.g. sub-benchmarks)
+			}
+			stmt := node.(*ast.ExprStmt)
+			if stmt.Pos() > start {
+				return false // not preceding: stop
+			}
+			if call, ok := stmt.X.(*ast.CallExpr); ok {
+				obj := typeutil.Callee(info, call)
+				if typesinternal.IsMethodNamed(obj, "testing", "B", "StopTimer", "StartTimer", "ResetTimer") {
+					// Delete call statement.
+					// TODO(adonovan): delete following newline, or
+					// up to start of next stmt? (May delete a comment.)
+					edits = append(edits, analysis.TextEdit{
+						Pos: stmt.Pos(),
+						End: stmt.End(),
+					})
+				}
+			}
+			return true
+		})
+
+		// Replace ...b.N... with b.Loop().
+		return append(edits, analysis.TextEdit{
+			Pos:     start,
+			End:     end,
+			NewText: fmt.Appendf(nil, "%s.Loop()", astutil.Format(pass.Fset, b)),
+		})
+	}
+
+	// Find all for/range statements.
+	loops := []ast.Node{
+		(*ast.ForStmt)(nil),
+		(*ast.RangeStmt)(nil),
+	}
+	// b.Loop was added in go1.24, so offer the fix only in files using that version.
+	for curFile := range filesUsing(inspect, info, "go1.24") {
+		for curLoop := range curFile.Preorder(loops...) {
+			switch n := curLoop.Node().(type) {
+			case *ast.ForStmt:
+				// for _; i < b.N; _ {}
+				if cmp, ok := n.Cond.(*ast.BinaryExpr); ok && cmp.Op == token.LSS {
+					if sel, ok := cmp.Y.(*ast.SelectorExpr); ok &&
+						sel.Sel.Name == "N" &&
+						typesinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") && usesBenchmarkNOnce(curLoop, info) {
+
+						delStart, delEnd := n.Cond.Pos(), n.Cond.End()
+
+						// Eliminate variable i if no longer needed:
+						//  for i := 0; i < b.N; i++ {
+						//    ...no references to i...
+						//  }
+						body, _ := curLoop.LastChild() // the loop's Body block
+						if v := isIncrementLoop(info, n); v != nil &&
+							!uses(index, body, v) {
+							delStart, delEnd = n.Init.Pos(), n.Post.End()
+						}
+
+						pass.Report(analysis.Diagnostic{
+							// Highlight "i < b.N".
+							Pos:     n.Cond.Pos(),
+							End:     n.Cond.End(),
+							Message: "b.N can be modernized using b.Loop()",
+							SuggestedFixes: []analysis.SuggestedFix{{
+								Message:   "Replace b.N with b.Loop()",
+								TextEdits: edits(curLoop, sel.X, delStart, delEnd),
+							}},
+						})
+					}
+				}
+
+			case *ast.RangeStmt:
+				// for range b.N {} -> for b.Loop() {}
+				//
+				// TODO(adonovan): handle "for i := range b.N".
+				if sel, ok := n.X.(*ast.SelectorExpr); ok &&
+					n.Key == nil &&
+					n.Value == nil &&
+					sel.Sel.Name == "N" &&
+					typesinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") && usesBenchmarkNOnce(curLoop, info) {
+
+					pass.Report(analysis.Diagnostic{
+						// Highlight "range b.N".
+						Pos:     n.Range,
+						End:     n.X.End(),
+						Message: "b.N can be modernized using b.Loop()",
+						SuggestedFixes: []analysis.SuggestedFix{{
+							Message:   "Replace b.N with b.Loop()",
+							TextEdits: edits(curLoop, sel.X, n.Range, n.X.End()),
+						}},
+					})
+				}
+			}
+		}
+	}
+	return nil, nil
+}
+
+// uses reports whether the subtree cur contains a use of obj.
+func uses(index *typeindex.Index, cur inspector.Cursor, obj types.Object) bool {
+	// index.Uses enumerates every use of obj in the package;
+	// accept only those that fall within cur's subtree.
+	for use := range index.Uses(obj) {
+		if cur.Contains(use) {
+			return true
+		}
+	}
+	return false
+}
+
+// enclosingFunc returns the cursor for the innermost Func{Decl,Lit}
+// that encloses c, if any.
+//
+// The second result is false when c is not within any function
+// (e.g. a cursor at file scope).
+func enclosingFunc(c inspector.Cursor) (inspector.Cursor, bool) {
+	return moreiters.First(c.Enclosing((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)))
+}
+
+// usesBenchmarkNOnce reports whether a b.N loop should be modernized to b.Loop().
+// Only modernize loops that are:
+// 1. Directly in a benchmark function (not in nested functions)
+// - b.Loop() must be called in the same goroutine as the benchmark function
+// - Function literals are often used with goroutines (go func(){...})
+//
+// 2. The only b.N loop in that benchmark function
+// - b.Loop() can only be called once per benchmark execution
+// - Multiple calls result in "B.Loop called with timer stopped" error
+func usesBenchmarkNOnce(c inspector.Cursor, info *types.Info) bool {
+	// Find the enclosing benchmark function
+	curFunc, ok := enclosingFunc(c)
+	if !ok {
+		return false
+	}
+
+	// Check if this is actually a benchmark function
+	fdecl, ok := curFunc.Node().(*ast.FuncDecl)
+	if !ok {
+		return false // not in a function; or, inside a FuncLit
+	}
+	if !isBenchmarkFunc(fdecl) {
+		return false
+	}
+
+	// Count b.N references in this benchmark function.
+	// SelectorExprs are candidate b.N references; FuncLits are pruned.
+	bnRefCount := 0
+	filter := []ast.Node{(*ast.SelectorExpr)(nil), (*ast.FuncLit)(nil)}
+	curFunc.Inspect(filter, func(cur inspector.Cursor) bool {
+		switch n := cur.Node().(type) {
+		case *ast.FuncLit:
+			return false // don't descend into nested function literals
+		case *ast.SelectorExpr:
+			if n.Sel.Name == "N" && typesinternal.IsPointerToNamed(info.TypeOf(n.X), "testing", "B") {
+				bnRefCount++ // found a b.N reference, where b is a *testing.B
+			}
+		}
+		return true
+	})
+
+	// Only modernize if there's exactly one b.N reference
+	return bnRefCount == 1
+}
+
+// isBenchmarkFunc reports whether f is a benchmark function.
+//
+// NOTE(review): this is a purely syntactic heuristic: it checks the
+// receiver, name, and arity, but not that the sole parameter has type
+// *testing.B — presumably acceptable since callers gate on packages
+// that import "testing"; confirm.
+func isBenchmarkFunc(f *ast.FuncDecl) bool {
+	return f.Recv == nil && // a function, not a method
+		f.Name != nil &&
+		f.Name.IsExported() &&
+		strings.HasPrefix(f.Name.Name, "Benchmark") &&
+		f.Type.Params != nil &&
+		len(f.Type.Params.List) == 1 // exactly one parameter
+}
+
+// isIncrementLoop reports whether loop has the form "for i := 0; ...; i++ { ... }",
+// and if so, it returns the symbol for the index variable.
+func isIncrementLoop(info *types.Info, loop *ast.ForStmt) *types.Var {
+	if assign, ok := loop.Init.(*ast.AssignStmt); ok &&
+		assign.Tok == token.DEFINE && // "i := ...", not "i = ..."
+		len(assign.Rhs) == 1 &&
+		isZeroIntLiteral(info, assign.Rhs[0]) && // "... := 0"
+		is[*ast.IncDecStmt](loop.Post) &&
+		loop.Post.(*ast.IncDecStmt).Tok == token.INC && // "i++", not "i--"
+		astutil.EqualSyntax(loop.Post.(*ast.IncDecStmt).X, assign.Lhs[0]) { // same variable in init and post
+		return info.Defs[assign.Lhs[0].(*ast.Ident)].(*types.Var)
+	}
+	return nil
+}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package modernize provides a suite of analyzers that suggest
+simplifications to Go code, using modern language and library
+features.
+
+Each diagnostic provides a fix. Our intent is that these fixes may
+be safely applied en masse without changing the behavior of your
+program. In some cases the suggested fixes are imperfect and may
+lead to (for example) unused imports or unused local variables,
+causing build breakage. However, these problems are generally
+trivial to fix. We regard any modernizer whose fix changes program
+behavior to have a serious bug and will endeavor to fix it.
+
+To apply all modernization fixes en masse, you can use the
+following command:
+
+ $ go run golang.org/x/tools/go/analysis/passes/modernize/cmd/modernize@latest -fix ./...
+
+(Do not use "go get -tool" to add gopls as a dependency of your
+module; gopls commands must be built from their release branch.)
+
+If the tool warns of conflicting fixes, you may need to run it more
+than once until it has applied all fixes cleanly. This command is
+not an officially supported interface and may change in the future.
+
+Changes produced by this tool should be reviewed as usual before
+being merged. In some cases, a loop may be replaced by a simple
+function call, causing comments within the loop to be discarded.
+Human judgment may be required to avoid losing comments of value.
+
+The modernize suite contains many analyzers. Diagnostics from some,
+such as "any" (which replaces "interface{}" with "any" where it
+is safe to do so), are particularly numerous. It may ease the burden of
+code review to apply fixes in two steps, the first consisting only of
+fixes from the "any" analyzer, the second consisting of all
+other analyzers. This can be achieved using flags, as in this example:
+
+ $ modernize -any=true -fix ./...
+ $ modernize -any=false -fix ./...
+
+# Analyzer appendclipped
+
+appendclipped: simplify append chains using slices.Concat
+
+The appendclipped analyzer suggests replacing chains of append calls with a
+single call to slices.Concat, which was added in Go 1.21. For example,
+append(append(s, s1...), s2...) would be simplified to slices.Concat(s, s1, s2).
+
+In the simple case of appending to a newly allocated slice, such as
+append([]T(nil), s...), the analyzer suggests the more concise slices.Clone(s).
+For byte slices, it will prefer bytes.Clone if the "bytes" package is
+already imported.
+
+This fix is only applied when the base of the append tower is a
+"clipped" slice, meaning its length and capacity are equal (e.g.
+x[:0:0] or []T{}). This is to avoid changing program behavior by
+eliminating intended side effects on the base slice's underlying
+array.
+
+This analyzer is currently disabled by default as the
+transformation does not preserve the nilness of the base slice in
+all cases; see https://go.dev/issue/73557.
+
+# Analyzer bloop
+
+bloop: replace for-range over b.N with b.Loop
+
+The bloop analyzer suggests replacing benchmark loops of the form
+`for i := 0; i < b.N; i++` or `for range b.N` with the more modern
+`for b.Loop()`, which was added in Go 1.24.
+
+This change makes benchmark code more readable and also removes the need for
+manual timer control, so any preceding calls to b.StartTimer, b.StopTimer,
+or b.ResetTimer within the same function will also be removed.
+
+Caveats: The b.Loop() method is designed to prevent the compiler from
+optimizing away the benchmark loop, which can occasionally result in
+slower execution due to increased allocations in some specific cases.
+
+# Analyzer any
+
+any: replace interface{} with any
+
+The any analyzer suggests replacing uses of the empty interface type,
+`interface{}`, with the `any` alias, which was introduced in Go 1.18.
+This is a purely stylistic change that makes code more readable.
+
+# Analyzer errorsastype
+
+errorsastype: replace errors.As with errors.AsType[T]
+
+This analyzer suggests fixes to simplify uses of [errors.As] of
+this form:
+
+ var myerr *MyErr
+ if errors.As(err, &myerr) {
+ handle(myerr)
+ }
+
+by using the less error-prone generic [errors.AsType] function,
+introduced in Go 1.26:
+
+ if myerr, ok := errors.AsType[*MyErr](err); ok {
+ handle(myerr)
+ }
+
+The fix is only offered if the var declaration has the form shown and
+there are no uses of myerr outside the if statement.
+
+# Analyzer fmtappendf
+
+fmtappendf: replace []byte(fmt.Sprintf) with fmt.Appendf
+
+The fmtappendf analyzer suggests replacing `[]byte(fmt.Sprintf(...))` with
+`fmt.Appendf(nil, ...)`. This avoids the intermediate allocation of a string
+by Sprintf, making the code more efficient. The suggestion also applies to
+fmt.Sprint and fmt.Sprintln.
+
+# Analyzer forvar
+
+forvar: remove redundant re-declaration of loop variables
+
+The forvar analyzer removes unnecessary shadowing of loop variables.
+Before Go 1.22, it was common to write `for _, x := range s { x := x ... }`
+to create a fresh variable for each iteration. Go 1.22 changed the semantics
+of `for` loops, making this pattern redundant. This analyzer removes the
+unnecessary `x := x` statement.
+
+This fix only applies to `range` loops.
+
+# Analyzer mapsloop
+
+mapsloop: replace explicit loops over maps with calls to maps package
+
+The mapsloop analyzer replaces loops of the form
+
+ for k, v := range x { m[k] = v }
+
+with a single call to a function from the `maps` package, added in Go 1.23.
+Depending on the context, this could be `maps.Copy`, `maps.Insert`,
+`maps.Clone`, or `maps.Collect`.
+
+The transformation to `maps.Clone` is applied conservatively, as it
+preserves the nilness of the source map, which may be a subtle change in
+behavior if the original code did not handle a nil map in the same way.
+
+# Analyzer minmax
+
+minmax: replace if/else statements with calls to min or max
+
+The minmax analyzer simplifies conditional assignments by suggesting the use
+of the built-in `min` and `max` functions, introduced in Go 1.21. For example,
+
+ if a < b { x = a } else { x = b }
+
+is replaced by
+
+ x = min(a, b).
+
+This analyzer avoids making suggestions for floating-point types,
+as the behavior of `min` and `max` with NaN values can differ from
+the original if/else statement.
+
+# Analyzer newexpr
+
+newexpr: simplify code by using go1.26's new(expr)
+
+This analyzer finds declarations of functions of this form:
+
+ func varOf(x int) *int { return &x }
+
+and suggests a fix to turn them into inlinable wrappers around
+go1.26's built-in new(expr) function:
+
+ func varOf(x int) *int { return new(x) }
+
+In addition, this analyzer suggests a fix for each call
+to one of the functions before it is transformed, so that
+
+ use(varOf(123))
+
+is replaced by:
+
+ use(new(123))
+
+(Wrapper functions such as varOf are common when working with Go
+serialization packages such as for JSON or protobuf, where pointers
+are often used to express optionality.)
+
+# Analyzer omitzero
+
+omitzero: suggest replacing omitempty with omitzero for struct fields
+
+The omitzero analyzer identifies uses of the `omitempty` JSON struct tag on
+fields that are themselves structs. The `omitempty` tag has no effect on
+struct-typed fields. The analyzer offers two suggestions: either remove the
+tag, or replace it with `omitzero` (added in Go 1.24), which correctly
+omits the field if the struct value is zero.
+
+Replacing `omitempty` with `omitzero` is a change in behavior. The
+original code would always encode the struct field, whereas the
+modified code will omit it if it is a zero-value.
+
+# Analyzer plusbuild
+
+plusbuild: remove obsolete //+build comments
+
+The plusbuild analyzer suggests a fix to remove obsolete build tags
+of the form:
+
+ //+build linux,amd64
+
+in files that also contain a Go 1.18-style tag such as:
+
+ //go:build linux && amd64
+
+(It does not check that the old and new tags are consistent;
+that is the job of the 'buildtag' analyzer in the vet suite.)
+
+# Analyzer rangeint
+
+rangeint: replace 3-clause for loops with for-range over integers
+
+The rangeint analyzer suggests replacing traditional for loops such
+as
+
+ for i := 0; i < n; i++ { ... }
+
+with the more idiomatic Go 1.22 style:
+
+ for i := range n { ... }
+
+This transformation is applied only if (a) the loop variable is not
+modified within the loop body and (b) the loop's limit expression
+is not modified within the loop, as `for range` evaluates its
+operand only once.
+
+# Analyzer reflecttypefor
+
+reflecttypefor: replace reflect.TypeOf(x) with TypeFor[T]()
+
+This analyzer suggests fixes to replace uses of reflect.TypeOf(x) with
+reflect.TypeFor, introduced in go1.22, when the desired runtime type
+is known at compile time, for example:
+
+ reflect.TypeOf(uint32(0)) -> reflect.TypeFor[uint32]()
+ reflect.TypeOf((*ast.File)(nil)) -> reflect.TypeFor[*ast.File]()
+
+It also offers a fix to simplify the construction below, which uses
+reflect.TypeOf to return the runtime type for an interface type,
+
+ reflect.TypeOf((*io.Reader)(nil)).Elem()
+
+to:
+
+ reflect.TypeFor[io.Reader]()
+
+No fix is offered in cases when the runtime type is dynamic, such as:
+
+ var r io.Reader = ...
+ reflect.TypeOf(r)
+
+or when the operand has potential side effects.
+
+# Analyzer slicescontains
+
+slicescontains: replace loops with slices.Contains or slices.ContainsFunc
+
+The slicescontains analyzer simplifies loops that check for the existence of
+an element in a slice. It replaces them with calls to `slices.Contains` or
+`slices.ContainsFunc`, which were added in Go 1.21.
+
+If the expression for the target element has side effects, this
+transformation will cause those effects to occur only once, not
+once per tested slice element.
+
+# Analyzer slicesdelete
+
+slicesdelete: replace append-based slice deletion with slices.Delete
+
+The slicesdelete analyzer suggests replacing the idiom
+
+ s = append(s[:i], s[j:]...)
+
+with the more explicit
+
+ s = slices.Delete(s, i, j)
+
+introduced in Go 1.21.
+
+This analyzer is disabled by default. The `slices.Delete` function
+zeros the elements between the new length and the old length of the
+slice to prevent memory leaks, which is a subtle difference in
+behavior compared to the append-based idiom; see https://go.dev/issue/73686.
+
+# Analyzer slicessort
+
+slicessort: replace sort.Slice with slices.Sort for basic types
+
+The slicessort analyzer simplifies sorting slices of basic ordered
+types. It replaces
+
+ sort.Slice(s, func(i, j int) bool { return s[i] < s[j] })
+
+with the simpler `slices.Sort(s)`, which was added in Go 1.21.
+
+# Analyzer stditerators
+
+stditerators: use iterators instead of Len/At-style APIs
+
+This analyzer suggests a fix to replace each loop of the form:
+
+ for i := 0; i < x.Len(); i++ {
+ use(x.At(i))
+ }
+
+or its "for elem := range x.Len()" equivalent by a range loop over an
+iterator offered by the same data type:
+
+	for elem := range x.All() {
+		use(elem)
+	}
+
+where x is one of various well-known types in the standard library.
+
+# Analyzer stringscutprefix
+
+stringscutprefix: replace HasPrefix/TrimPrefix with CutPrefix
+
+The stringscutprefix analyzer simplifies a common pattern where code first
+checks for a prefix with `strings.HasPrefix` and then removes it with
+`strings.TrimPrefix`. It replaces this two-step process with a single call
+to `strings.CutPrefix`, introduced in Go 1.20. The analyzer also handles
+the equivalent functions in the `bytes` package.
+
+For example, this input:
+
+ if strings.HasPrefix(s, prefix) {
+ use(strings.TrimPrefix(s, prefix))
+ }
+
+is fixed to:
+
+ if after, ok := strings.CutPrefix(s, prefix); ok {
+ use(after)
+ }
+
+The analyzer also offers fixes to use CutSuffix in a similar way.
+This input:
+
+ if strings.HasSuffix(s, suffix) {
+ use(strings.TrimSuffix(s, suffix))
+ }
+
+is fixed to:
+
+ if before, ok := strings.CutSuffix(s, suffix); ok {
+ use(before)
+ }
+
+# Analyzer stringsseq
+
+stringsseq: replace ranging over Split/Fields with SplitSeq/FieldsSeq
+
+The stringsseq analyzer improves the efficiency of iterating over substrings.
+It replaces
+
+ for range strings.Split(...)
+
+with the more efficient
+
+ for range strings.SplitSeq(...)
+
+which was added in Go 1.24 and avoids allocating a slice for the
+substrings. The analyzer also handles strings.Fields and the
+equivalent functions in the bytes package.
+
+# Analyzer stringsbuilder
+
+stringsbuilder: replace += with strings.Builder
+
+This analyzer replaces repeated string += string concatenation
+operations with calls to Go 1.10's strings.Builder.
+
+For example:
+
+ var s = "["
+ for x := range seq {
+ s += x
+ s += "."
+ }
+ s += "]"
+ use(s)
+
+is replaced by:
+
+ var s strings.Builder
+ s.WriteString("[")
+ for x := range seq {
+ s.WriteString(x)
+ s.WriteString(".")
+ }
+ s.WriteString("]")
+ use(s.String())
+
+This avoids quadratic memory allocation and improves performance.
+
+The analyzer requires that all references to s except the final one
+are += operations. To avoid warning about trivial cases, at least one
+must appear within a loop. The variable s must be a local
+variable, not a global or parameter.
+
+The sole use of the finished string must be the last reference to the
+variable s. (It may appear within an intervening loop or function literal,
+since even if s.String() is called repeatedly, it does not allocate memory.)
+
+# Analyzer testingcontext
+
+testingcontext: replace context.WithCancel with t.Context in tests
+
+The testingcontext analyzer simplifies context management in tests. It
+replaces the manual creation of a cancellable context,
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+with a single call to t.Context(), which was added in Go 1.24.
+
+This change is only suggested if the `cancel` function is not used
+for any other purpose.
+
+# Analyzer waitgroup
+
+waitgroup: replace wg.Add(1)/go/wg.Done() with wg.Go
+
+The waitgroup analyzer simplifies goroutine management with `sync.WaitGroup`.
+It replaces the common pattern
+
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ ...
+ }()
+
+with a single call to
+
+ wg.Go(func(){ ... })
+
+which was added in Go 1.25.
+*/
+package modernize
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "fmt"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/goplsexport"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// errorsastypeAnalyzer replaces suitable errors.As calls with go1.26's
+// generic errors.AsType[T]; see the errorsastype function for details.
+// It is unexported because it is not yet a published modernizer (see init).
+var errorsastypeAnalyzer = &analysis.Analyzer{
+	Name:     "errorsastype",
+	Doc:      analysisinternal.MustExtractDoc(doc, "errorsastype"),
+	URL:      "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#errorsastype",
+	Requires: []*analysis.Analyzer{generated.Analyzer, typeindexanalyzer.Analyzer},
+	Run:      errorsastype,
+}
+
+func init() {
+	// Export to gopls until this is a published modernizer.
+	goplsexport.ErrorsAsTypeModernizer = errorsastypeAnalyzer
+}
+
+// errorsastype offers a fix to replace errors.As with the newer
+// errors.AsType[T] following this pattern:
+//
+//	var myerr *MyErr
+//	if errors.As(err, &myerr) { ... }
+//
+// =>
+//
+//	if myerr, ok := errors.AsType[*MyErr](err); ok { ... }
+//
+// (In principle several of these can then be chained using if/else,
+// but we don't attempt that.)
+//
+// We offer the fix only within an if statement, but not within a
+// switch case such as:
+//
+//	var myerr *MyErr
+//	switch {
+//	case errors.As(err, &myerr):
+//	}
+//
+// because the transformation in that case would be ungainly.
+//
+// Note that the cmd/vet suite includes the "errorsas" analyzer, which
+// detects actual mistakes in the use of errors.As. This logic does
+// not belong in errorsas because the problems it fixes are merely
+// stylistic.
+//
+// TODO(adonovan): support more cases:
+//
+//   - Negative cases
+//     var myerr E
+//     if !errors.As(err, &myerr) { ... }
+//     =>
+//     myerr, ok := errors.AsType[E](err)
+//     if !ok { ... }
+//
+//   - if myerr := new(E); errors.As(err, myerr); { ... }
+//
+//   - if errors.As(err, myerr) && othercond { ... }
+func errorsastype(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	var (
+		index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+		info  = pass.TypesInfo
+	)
+
+	for curCall := range index.Calls(index.Object("errors", "As")) {
+		call := curCall.Node().(*ast.CallExpr)
+		if len(call.Args) < 2 {
+			continue // spread call: errors.As(pair())
+		}
+
+		// Match the pattern and locate the "var myerr *MyErr" DeclStmt.
+		v, curDeclStmt := canUseErrorsAsType(info, index, curCall)
+		if v == nil {
+			continue
+		}
+
+		file := astutil.EnclosingFile(curDeclStmt)
+		if !fileUses(info, file, "go1.26") {
+			continue // errors.AsType is too new
+		}
+
+		// Locate identifier "As" in errors.As.
+		var asIdent *ast.Ident
+		switch n := ast.Unparen(call.Fun).(type) {
+		case *ast.Ident:
+			asIdent = n // "errors" was dot-imported
+		case *ast.SelectorExpr:
+			asIdent = n.Sel
+		default:
+			// index.Calls guarantees the callee resolves to errors.As,
+			// so one of the cases above must apply.
+			panic("no Ident for errors.As")
+		}
+
+		// Format the type as valid Go syntax.
+		// TODO(adonovan): fix: FileQualifier needs to respect
+		// visibility at the current point, and either fail
+		// or edit the imports as needed.
+		// TODO(adonovan): fix: TypeString is not a sound way
+		// to print types as Go syntax as it does not respect
+		// symbol visibility, etc. We need something loosely
+		// integrated with FileQualifier that accumulates
+		// import edits, and may fail (e.g. for unexported
+		// type or field names from other packages).
+		// See https://go.dev/issues/75604.
+		qual := typesinternal.FileQualifier(file, pass.Pkg)
+		errtype := types.TypeString(v.Type(), qual)
+
+		// Choose a name for the "ok" variable.
+		okName := "ok"
+		if okVar := lookup(info, curCall, "ok"); okVar != nil {
+			// The name 'ok' is already declared, but
+			// don't choose a fresh name unless okVar
+			// is also used within the if-statement.
+			curIf := curCall.Parent()
+			for curUse := range index.Uses(okVar) {
+				if curIf.Contains(curUse) {
+					scope := info.Scopes[curIf.Node().(*ast.IfStmt)]
+					okName = refactor.FreshName(scope, v.Pos(), "ok")
+					break
+				}
+			}
+		}
+
+		pass.Report(analysis.Diagnostic{
+			Pos:     call.Fun.Pos(),
+			End:     call.Fun.End(),
+			Message: fmt.Sprintf("errors.As can be simplified using AsType[%s]", errtype),
+			SuggestedFixes: []analysis.SuggestedFix{{
+				Message: fmt.Sprintf("Replace errors.As with AsType[%s]", errtype),
+				TextEdits: append(
+					// delete "var myerr *MyErr"
+					refactor.DeleteStmt(pass.Fset.File(call.Fun.Pos()), curDeclStmt),
+					// if errors.As (err, &myerr) { ... }
+					// ------------- -------------- -------- ----
+					// if myerr, ok := errors.AsType[*MyErr](err ); ok { ... }
+					analysis.TextEdit{
+						// insert "myerr, ok := "
+						Pos:     call.Pos(),
+						End:     call.Pos(),
+						NewText: fmt.Appendf(nil, "%s, %s := ", v.Name(), okName),
+					},
+					analysis.TextEdit{
+						// replace As with AsType[T]
+						Pos:     asIdent.Pos(),
+						End:     asIdent.End(),
+						NewText: fmt.Appendf(nil, "AsType[%s]", errtype),
+					},
+					analysis.TextEdit{
+						// delete ", &myerr"
+						Pos: call.Args[0].End(),
+						End: call.Args[1].End(),
+					},
+					analysis.TextEdit{
+						// insert "; ok"
+						Pos:     call.End(),
+						End:     call.End(),
+						NewText: fmt.Appendf(nil, "; %s", okName),
+					},
+				),
+			}},
+		})
+	}
+	return nil, nil
+}
+
+// canUseErrorsAsType reports whether curCall is a call to
+// errors.As beneath an if statement, preceded by a
+// declaration of the typed error var. The var must not be
+// used outside the if statement.
+//
+// On success it returns the variable and the cursor of its DeclStmt;
+// on failure the first result is nil.
+func canUseErrorsAsType(info *types.Info, index *typeindex.Index, curCall inspector.Cursor) (_ *types.Var, _ inspector.Cursor) {
+	if !astutil.IsChildOf(curCall, edge.IfStmt_Cond) {
+		return // not beneath if statement
+	}
+	var (
+		curIfStmt = curCall.Parent()
+		ifStmt    = curIfStmt.Node().(*ast.IfStmt)
+	)
+	if ifStmt.Init != nil {
+		return // if statement already has an init part
+	}
+	unary, ok := curCall.Node().(*ast.CallExpr).Args[1].(*ast.UnaryExpr)
+	if !ok || unary.Op != token.AND {
+		return // 2nd arg is not &var
+	}
+	id, ok := unary.X.(*ast.Ident)
+	if !ok {
+		return // not a simple ident (local var)
+	}
+	// Since its address is taken, id must denote a variable.
+	v := info.Uses[id].(*types.Var)
+	curDef, ok := index.Def(v)
+	if !ok {
+		return // var is not local (e.g. dot-imported)
+	}
+	// Have: if errors.As(err, &v) { ... }
+
+	// Reject if v is used outside (before or after) the
+	// IfStmt, since that will become its new scope.
+	for curUse := range index.Uses(v) {
+		if !curIfStmt.Contains(curUse) {
+			return // v used before/after if statement
+		}
+	}
+	if !astutil.IsChildOf(curDef, edge.ValueSpec_Names) {
+		return // v not declared by "var v T"
+	}
+	var (
+		curSpec = curDef.Parent()  // ValueSpec
+		curDecl = curSpec.Parent() // GenDecl
+		spec    = curSpec.Node().(*ast.ValueSpec)
+	)
+	if len(spec.Names) != 1 || len(spec.Values) != 0 ||
+		len(curDecl.Node().(*ast.GenDecl).Specs) != 1 {
+		return // not a simple "var v T" decl
+	}
+
+	// Have:
+	//    var v *MyErr
+	//    ...
+	//    if errors.As(err, &v) { ... }
+	// with no uses of v outside the IfStmt.
+	return v, curDecl.Parent() // DeclStmt
+}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// FmtAppendfAnalyzer replaces []byte(fmt.SprintX(...)) with
+// fmt.AppendX(nil, ...); see the fmtappendf function for details.
+var FmtAppendfAnalyzer = &analysis.Analyzer{
+	Name: "fmtappendf",
+	Doc:  analysisinternal.MustExtractDoc(doc, "fmtappendf"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+		typeindexanalyzer.Analyzer,
+	},
+	Run: fmtappendf,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#fmtappendf",
+}
+
+// The fmtappend function replaces []byte(fmt.Sprintf(...)) by
+// fmt.Appendf(nil, ...), and similarly for Sprint, Sprintln.
+// (fmt.AppendX were added in go1.19.)
+func fmtappendf(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	index := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+	for _, fn := range []types.Object{
+		index.Object("fmt", "Sprintf"),
+		index.Object("fmt", "Sprintln"),
+		index.Object("fmt", "Sprint"),
+	} {
+		for curCall := range index.Calls(fn) {
+			call := curCall.Node().(*ast.CallExpr)
+			if ek, idx := curCall.ParentEdge(); ek == edge.CallExpr_Args && idx == 0 {
+				// Is parent a T(fmt.SprintX(...)) conversion?
+				conv := curCall.Parent().Node().(*ast.CallExpr)
+				tv := pass.TypesInfo.Types[conv.Fun]
+				if tv.IsType() && types.Identical(tv.Type, byteSliceType) &&
+					fileUses(pass.TypesInfo, astutil.EnclosingFile(curCall), "go1.19") {
+					// Have: []byte(fmt.SprintX(...))
+
+					// Find "Sprint" identifier.
+					var id *ast.Ident
+					switch e := ast.Unparen(call.Fun).(type) {
+					case *ast.SelectorExpr:
+						id = e.Sel // "fmt.Sprint"
+					case *ast.Ident:
+						id = e // "Sprint" after `import . "fmt"`
+					}
+
+					// e.g. "Sprintf" -> "Appendf"
+					old, new := fn.Name(), strings.Replace(fn.Name(), "Sprint", "Append", 1)
+					edits := []analysis.TextEdit{
+						{
+							// delete "[]byte("
+							Pos: conv.Pos(),
+							End: conv.Lparen + 1,
+						},
+						{
+							// remove ")"
+							Pos: conv.Rparen,
+							End: conv.Rparen + 1,
+						},
+						{
+							// rename SprintX -> AppendX
+							Pos:     id.Pos(),
+							End:     id.End(),
+							NewText: []byte(new),
+						},
+						{
+							// insert "nil, " after "(".
+							// (End is unset, which TextEdit treats as
+							// equal to Pos: a pure insertion.)
+							Pos:     call.Lparen + 1,
+							NewText: []byte("nil, "),
+						},
+					}
+					if len(conv.Args) == 1 {
+						arg := conv.Args[0]
+						// Determine if we have T(fmt.SprintX(...)<non-args,
+						// like a space or a comma>). If so, delete the non-args
+						// that come before the right parenthesis. Leaving an
+						// extra comma here produces invalid code. (See
+						// golang/go#74709)
+						if arg.End() < conv.Rparen {
+							edits = append(edits, analysis.TextEdit{
+								Pos: arg.End(),
+								End: conv.Rparen,
+							})
+						}
+					}
+					pass.Report(analysis.Diagnostic{
+						Pos:     conv.Pos(),
+						End:     conv.End(),
+						Message: fmt.Sprintf("Replace []byte(fmt.%s...) with fmt.%s", old, new),
+						SuggestedFixes: []analysis.SuggestedFix{{
+							Message:   fmt.Sprintf("Replace []byte(fmt.%s...) with fmt.%s", old, new),
+							TextEdits: edits,
+						}},
+					})
+				}
+			}
+		}
+	}
+	return nil, nil
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "go/ast"
+ "go/token"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/refactor"
+)
+
+// ForVarAnalyzer removes redundant "x := x" re-declarations of range-loop
+// variables, unnecessary since go1.22; see the forvar function for details.
+var ForVarAnalyzer = &analysis.Analyzer{
+	Name: "forvar",
+	Doc:  analysisinternal.MustExtractDoc(doc, "forvar"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+	},
+	Run: forvar,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#forvar",
+}
+
// forvar offers to fix unnecessary copying of a for variable
//
//	for _, x := range foo {
//		x := x // offer to remove this superfluous assignment
//	}
//
// Prerequisites:
// First statement in a range loop has to be <ident> := <ident>
// where the two idents are the same,
// and the ident is defined (:=) as a variable in the for statement.
// (Note that this 'fix' does not work for three clause loops
// because the Go specification says "The variable used by each subsequent iteration
// is declared implicitly before executing the post statement and initialized to the
// value of the previous iteration's variable at that moment.")
func forvar(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)

	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	// Only files at go1.22 or later have per-iteration loop variables,
	// which is what makes the "x := x" copy redundant.
	for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.22") {
		for curLoop := range curFile.Preorder((*ast.RangeStmt)(nil)) {
			loop := curLoop.Node().(*ast.RangeStmt)
			if loop.Tok != token.DEFINE {
				continue // "for k, v = range x": loop does not declare its variables
			}
			// isLoopVarRedecl reports whether assign has a form such as
			// "k := k", "v := v", "k, v := k, v", or "v, k := v, k":
			// every LHS must be syntactically identical to the
			// corresponding RHS and must name a loop variable.
			isLoopVarRedecl := func(assign *ast.AssignStmt) bool {
				for i, lhs := range assign.Lhs {
					if !(astutil.EqualSyntax(lhs, assign.Rhs[i]) &&
						(astutil.EqualSyntax(lhs, loop.Key) || astutil.EqualSyntax(lhs, loop.Value))) {
						return false
					}
				}
				return true
			}
			// Have: for k, v := range x { stmts }
			//
			// Delete the prefix of stmts that are
			// of the form k := k; v := v; k, v := k, v; v, k := v, k.
			for _, stmt := range loop.Body.List {
				if assign, ok := stmt.(*ast.AssignStmt); ok &&
					assign.Tok == token.DEFINE &&
					len(assign.Lhs) == len(assign.Rhs) &&
					isLoopVarRedecl(assign) {

					curStmt, _ := curLoop.FindNode(stmt)
					// DeleteStmt may return no edits (e.g. if deletion
					// is not possible); report only when it succeeds.
					edits := refactor.DeleteStmt(pass.Fset.File(stmt.Pos()), curStmt)
					if len(edits) > 0 {
						pass.Report(analysis.Diagnostic{
							Pos:     stmt.Pos(),
							End:     stmt.End(),
							Message: "copying variable is unneeded",
							SuggestedFixes: []analysis.SuggestedFix{{
								Message:   "Remove unneeded redeclaration",
								TextEdits: edits,
							}},
						})
					}
				} else {
					break // stop at first other statement
				}
			}
		}
	}
	return nil, nil
}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+// This file defines modernizers that use the "maps" package.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/typeparams"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
// MapsLoopAnalyzer offers to replace a loop that inserts map entries
// ("for k, v := range x { m[k] = v }") with a call to one of the
// go1.23 maps package functions: Copy, Insert, Clone, or Collect.
var MapsLoopAnalyzer = &analysis.Analyzer{
	Name: "mapsloop",
	Doc:  analysisinternal.MustExtractDoc(doc, "mapsloop"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer, // used by skipGenerated to suppress diagnostics in generated files
		inspect.Analyzer,
	},
	Run: mapsloop,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#mapsloop",
}
+
// The mapsloop pass offers to simplify a loop of map insertions:
//
//	for k, v := range x {
//		m[k] = v
//	}
//
// by a call to go1.23's maps package. There are four variants, the
// product of two axes: whether the source x is a map or an iter.Seq2,
// and whether the destination m is a newly created map:
//
//	maps.Copy(m, x)     (x is map)
//	maps.Insert(m, x)   (x is iter.Seq2)
//	m = maps.Clone(x)   (x is a non-nil map, m is a new map)
//	m = maps.Collect(x) (x is iter.Seq2, m is a new map)
//
// A map is newly created if the preceding statement has one of these
// forms, where M is a map type:
//
//	m = make(M)
//	m = M{}
func mapsloop(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)

	// Skip the analyzer in packages where its
	// fixes would create an import cycle.
	if within(pass, "maps", "bytes", "runtime") {
		return nil, nil
	}

	info := pass.TypesInfo

	// check is called for each statement of this form:
	//	for k, v := range x { m[k] = v }
	check := func(file *ast.File, curRange inspector.Cursor, assign *ast.AssignStmt, m, x ast.Expr) {

		// Is x a map or iter.Seq2?
		tx := types.Unalias(info.TypeOf(x))
		var xmap bool
		switch typeparams.CoreType(tx).(type) {
		case *types.Map:
			xmap = true

		case *types.Signature:
			k, v, ok := assignableToIterSeq2(tx)
			if !ok {
				return // a named isomer of Seq2
			}
			xmap = false

			// Record in tx the unnamed map[K]V type
			// derived from the yield function.
			// This is the type of maps.Collect(x).
			tx = types.NewMap(k, v)

		default:
			return // e.g. slice, channel (or no core type!)
		}

		// Is the preceding statement of the form
		//	m = make(M) or M{}
		// and can we replace its RHS with maps.{Clone,Collect}?
		//
		// Beware: if x may be nil, we cannot use Clone as it preserves nilness.
		var mrhs ast.Expr // make(M) or M{}, or nil
		if curPrev, ok := curRange.PrevSibling(); ok {
			if assign, ok := curPrev.Node().(*ast.AssignStmt); ok &&
				len(assign.Lhs) == 1 &&
				len(assign.Rhs) == 1 &&
				astutil.EqualSyntax(assign.Lhs[0], m) {

				// Have: m = rhs; for k, v := range x { m[k] = v }
				var newMap bool
				rhs := assign.Rhs[0]
				switch rhs := ast.Unparen(rhs).(type) {
				case *ast.CallExpr:
					if id, ok := ast.Unparen(rhs.Fun).(*ast.Ident); ok &&
						info.Uses[id] == builtinMake {
						// Have: m = make(...)
						newMap = true
					}
				case *ast.CompositeLit:
					if len(rhs.Elts) == 0 {
						// Have m = M{}
						newMap = true
					}
				}

				// Take care not to change type of m's RHS expression.
				if newMap {
					trhs := info.TypeOf(rhs)

					// Inv: tx is the type of maps.F(x)
					// - maps.Clone(x) has the same type as x.
					// - maps.Collect(x) returns an unnamed map type.

					if assign.Tok == token.DEFINE {
						// DEFINE (:=): we must not
						// change the type of RHS.
						if types.Identical(tx, trhs) {
							mrhs = rhs
						}
					} else {
						// ASSIGN (=): the types of LHS
						// and RHS may differ in namedness.
						if types.AssignableTo(tx, trhs) {
							mrhs = rhs
						}
					}

					// Temporarily disable the transformation to the
					// (nil-preserving) maps.Clone until we can prove
					// that x is non-nil. This is rarely possible,
					// and may require control flow analysis
					// (e.g. a dominating "if len(x)" check).
					// See #71844.
					if xmap {
						mrhs = nil
					}
				}
			}
		}

		// Choose function: Clone/Collect replace the make/M{} RHS;
		// Copy/Insert mutate an existing map in place.
		var funcName string
		if mrhs != nil {
			funcName = cond(xmap, "Clone", "Collect")
		} else {
			funcName = cond(xmap, "Copy", "Insert")
		}

		// Report diagnostic, and suggest fix.
		rng := curRange.Node()
		prefix, importEdits := refactor.AddImport(info, file, "maps", "maps", funcName, rng.Pos())
		var (
			newText    []byte
			start, end token.Pos
		)
		if mrhs != nil {
			// Replace assignment and loop with expression.
			//
			//	m = make(...)
			//	for k, v := range x { /* comments */ m[k] = v }
			//
			// ->
			//
			//	/* comments */
			//	m = maps.Copy(x)
			curPrev, _ := curRange.PrevSibling()
			start, end = curPrev.Node().Pos(), rng.End()
			newText = fmt.Appendf(nil, "%s%s = %s%s(%s)",
				allComments(file, start, end),
				astutil.Format(pass.Fset, m),
				prefix,
				funcName,
				astutil.Format(pass.Fset, x))
		} else {
			// Replace loop with call statement.
			//
			//	for k, v := range x { /* comments */ m[k] = v }
			//
			// ->
			//
			//	/* comments */
			//	maps.Copy(m, x)
			start, end = rng.Pos(), rng.End()
			newText = fmt.Appendf(nil, "%s%s%s(%s, %s)",
				allComments(file, start, end),
				prefix,
				funcName,
				astutil.Format(pass.Fset, m),
				astutil.Format(pass.Fset, x))
		}
		pass.Report(analysis.Diagnostic{
			Pos:     assign.Lhs[0].Pos(),
			End:     assign.Lhs[0].End(),
			Message: "Replace m[k]=v loop with maps." + funcName,
			SuggestedFixes: []analysis.SuggestedFix{{
				Message: "Replace m[k]=v loop with maps." + funcName,
				// The fix comprises the new import (if needed)
				// plus the replacement of the loop itself.
				TextEdits: append(importEdits, []analysis.TextEdit{{
					Pos:     start,
					End:     end,
					NewText: newText,
				}}...),
			}},
		})
	}

	// Find all range loops around m[k] = v.
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.23") {
		file := curFile.Node().(*ast.File)

		for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) {
			rng := curRange.Node().(*ast.RangeStmt)

			if rng.Tok == token.DEFINE &&
				rng.Key != nil &&
				rng.Value != nil &&
				isAssignBlock(rng.Body) {
				// Have: for k, v := range x { lhs = rhs }

				assign := rng.Body.List[0].(*ast.AssignStmt)
				if index, ok := assign.Lhs[0].(*ast.IndexExpr); ok &&
					astutil.EqualSyntax(rng.Key, index.Index) &&
					astutil.EqualSyntax(rng.Value, assign.Rhs[0]) &&
					is[*types.Map](typeparams.CoreType(info.TypeOf(index.X))) &&
					types.Identical(info.TypeOf(index), info.TypeOf(rng.Value)) { // m[k], v

					// Have: for k, v := range x { m[k] = v }
					// where there is no implicit conversion.
					check(file, curRange, assign, index.X, rng.X)
				}
			}
		}
	}
	return nil, nil
}
+
// assignableToIterSeq2 reports whether t is assignable to
// iter.Seq2[K, V] and returns K and V if so.
func assignableToIterSeq2(t types.Type) (k, v types.Type, ok bool) {
	// The only named type assignable to iter.Seq2 is iter.Seq2 itself.
	if is[*types.Named](t) {
		if !typesinternal.IsTypeNamed(t, "iter", "Seq2") {
			return
		}
		t = t.Underlying()
	}

	if t, ok := t.(*types.Signature); ok {
		// Does t have the form func(yield func(K, V) bool)?
		if t.Params().Len() == 1 && t.Results().Len() == 0 {
			if yield, ok := t.Params().At(0).Type().(*types.Signature); ok { // sic, no Underlying/CoreType
				if yield.Params().Len() == 2 &&
					yield.Results().Len() == 1 &&
					types.Identical(yield.Results().At(0).Type(), builtinBool.Type()) {
					return yield.Params().At(0).Type(), yield.Params().At(1).Type(), true
				}
			}
		}
	}
	return
}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/typeparams"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
// MinMaxAnalyzer offers to replace if/else conditional assignments by
// calls to go1.21's built-in min and max functions, and to remove
// user-defined min/max functions that are equivalent to the built-ins.
var MinMaxAnalyzer = &analysis.Analyzer{
	Name: "minmax",
	Doc:  analysisinternal.MustExtractDoc(doc, "minmax"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer, // used by skipGenerated to suppress diagnostics in generated files
		inspect.Analyzer,
		typeindexanalyzer.Analyzer, // used to find user-defined min/max declarations
	},
	Run: minmax,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#minmax",
}
+
// The minmax pass replaces if/else statements with calls to min or max,
// and removes user-defined min/max functions that are equivalent to built-ins.
//
// If/else replacement patterns:
//
//  1. if a < b { x = a } else { x = b }  =>  x = min(a, b)
//  2. x = a; if a < b { x = b }          =>  x = max(a, b)
//
// Pattern 1 requires that a is not NaN, and pattern 2 requires that b
// is not NaN. Since this is hard to prove, we reject floating-point
// numbers.
//
// Function removal:
// User-defined min/max functions are suggested for removal if they may
// be safely replaced by their built-in namesake.
//
// Variants:
//   - all four ordered comparisons
//   - "x := a" or "x = a" or "var x = a" in pattern 2
//   - "x < b" or "a < b" in pattern 2
func minmax(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)

	// Check for user-defined min/max functions that can be removed
	checkUserDefinedMinMax(pass)

	// check is called for all statements of this form:
	//	if a < b { lhs = rhs }
	check := func(file *ast.File, curIfStmt inspector.Cursor, compare *ast.BinaryExpr) {
		var (
			ifStmt  = curIfStmt.Node().(*ast.IfStmt)
			tassign = ifStmt.Body.List[0].(*ast.AssignStmt)
			a       = compare.X
			b       = compare.Y
			lhs     = tassign.Lhs[0]
			rhs     = tassign.Rhs[0]
			sign    = isInequality(compare.Op)

			// callArg formats a call argument, preserving comments from [start-end).
			callArg = func(arg ast.Expr, start, end token.Pos) string {
				comments := allComments(file, start, end)
				return cond(arg == b, ", ", "") + // second argument needs a comma
					cond(comments != "", "\n", "") + // comments need their own line
					comments +
					astutil.Format(pass.Fset, arg)
			}
		)

		if fblock, ok := ifStmt.Else.(*ast.BlockStmt); ok && isAssignBlock(fblock) {
			fassign := fblock.List[0].(*ast.AssignStmt)

			// Have: if a < b { lhs = rhs } else { lhs2 = rhs2 }
			lhs2 := fassign.Lhs[0]
			rhs2 := fassign.Rhs[0]

			// For pattern 1, check that:
			// - lhs = lhs2
			// - {rhs,rhs2} = {a,b}
			if astutil.EqualSyntax(lhs, lhs2) {
				if astutil.EqualSyntax(rhs, a) && astutil.EqualSyntax(rhs2, b) {
					sign = +sign
				} else if astutil.EqualSyntax(rhs2, a) && astutil.EqualSyntax(rhs, b) {
					sign = -sign
				} else {
					return
				}

				sym := cond(sign < 0, "min", "max")

				if !is[*types.Builtin](lookup(pass.TypesInfo, curIfStmt, sym)) {
					return // min/max function is shadowed
				}

				// pattern 1
				//
				// TODO(adonovan): if lhs is declared "var lhs T" on preceding line,
				// simplify the whole thing to "lhs := min(a, b)".
				//
				// NOTE(review): this fix message says "if statement" while
				// pattern 2's says "if/else"; the labels appear swapped
				// relative to the diagnostics — confirm intent.
				pass.Report(analysis.Diagnostic{
					// Highlight the condition a < b.
					Pos:     compare.Pos(),
					End:     compare.End(),
					Message: fmt.Sprintf("if/else statement can be modernized using %s", sym),
					SuggestedFixes: []analysis.SuggestedFix{{
						Message: fmt.Sprintf("Replace if statement with %s", sym),
						TextEdits: []analysis.TextEdit{{
							// Replace IfStmt with lhs = min(a, b).
							Pos: ifStmt.Pos(),
							End: ifStmt.End(),
							NewText: fmt.Appendf(nil, "%s = %s(%s%s)",
								astutil.Format(pass.Fset, lhs),
								sym,
								callArg(a, ifStmt.Pos(), ifStmt.Else.Pos()),
								callArg(b, ifStmt.Else.Pos(), ifStmt.End()),
							),
						}},
					}},
				})
			}

		} else if prev, ok := curIfStmt.PrevSibling(); ok && isSimpleAssign(prev.Node()) && ifStmt.Else == nil {
			fassign := prev.Node().(*ast.AssignStmt)

			// Have: lhs0 = rhs0; if a < b { lhs = rhs }
			//
			// For pattern 2, check that
			// - lhs = lhs0
			// - {a,b} = {rhs,rhs0} or {rhs,lhs0}
			// The replacement must use rhs0 not lhs0 though.
			// For example, we accept this variant:
			//	lhs = x; if lhs < y { lhs = y } => lhs = min(x, y), not min(lhs, y)
			//
			// TODO(adonovan): accept "var lhs0 = rhs0" form too.
			lhs0 := fassign.Lhs[0]
			rhs0 := fassign.Rhs[0]

			if astutil.EqualSyntax(lhs, lhs0) {
				if astutil.EqualSyntax(rhs, a) && (astutil.EqualSyntax(rhs0, b) || astutil.EqualSyntax(lhs0, b)) {
					sign = +sign
				} else if (astutil.EqualSyntax(rhs0, a) || astutil.EqualSyntax(lhs0, a)) && astutil.EqualSyntax(rhs, b) {
					sign = -sign
				} else {
					return
				}
				sym := cond(sign < 0, "min", "max")

				if !is[*types.Builtin](lookup(pass.TypesInfo, curIfStmt, sym)) {
					return // min/max function is shadowed
				}

				// Permit lhs0 to stand for rhs0 in the matching,
				// but don't actually reduce to lhs0 = min(lhs0, rhs)
				// since the "=" could be a ":=". Use min(rhs0, rhs).
				if astutil.EqualSyntax(lhs0, a) {
					a = rhs0
				} else if astutil.EqualSyntax(lhs0, b) {
					b = rhs0
				}

				// pattern 2
				pass.Report(analysis.Diagnostic{
					// Highlight the condition a < b.
					Pos:     compare.Pos(),
					End:     compare.End(),
					Message: fmt.Sprintf("if statement can be modernized using %s", sym),
					SuggestedFixes: []analysis.SuggestedFix{{
						Message: fmt.Sprintf("Replace if/else with %s", sym),
						TextEdits: []analysis.TextEdit{{
							Pos: fassign.Pos(),
							End: ifStmt.End(),
							// Replace "x := a; if ... {}" with "x = min(...)", preserving comments.
							NewText: fmt.Appendf(nil, "%s %s %s(%s%s)",
								astutil.Format(pass.Fset, lhs),
								fassign.Tok.String(),
								sym,
								callArg(a, fassign.Pos(), ifStmt.Pos()),
								callArg(b, ifStmt.Pos(), ifStmt.End()),
							),
						}},
					}},
				})
			}
		}
	}

	// Find all "if a < b { lhs = rhs }" statements.
	info := pass.TypesInfo
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	for curFile := range filesUsing(inspect, info, "go1.21") {
		astFile := curFile.Node().(*ast.File)
		for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) {
			ifStmt := curIfStmt.Node().(*ast.IfStmt)

			// Don't bother handling "if a < b { lhs = rhs }" when it appears
			// as the "else" branch of another if-statement.
			//	if cond { ... } else if a < b { lhs = rhs }
			// (This case would require introducing another block
			//	if cond { ... } else { if a < b { lhs = rhs } }
			// and checking that there is no following "else".)
			if astutil.IsChildOf(curIfStmt, edge.IfStmt_Else) {
				continue
			}

			if compare, ok := ifStmt.Cond.(*ast.BinaryExpr); ok &&
				ifStmt.Init == nil &&
				isInequality(compare.Op) != 0 &&
				isAssignBlock(ifStmt.Body) {
				// a blank var has no type.
				if tLHS := info.TypeOf(ifStmt.Body.List[0].(*ast.AssignStmt).Lhs[0]); tLHS != nil && !maybeNaN(tLHS) {
					// Have: if a < b { lhs = rhs }
					check(astFile, curIfStmt, compare)
				}
			}
		}
	}
	return nil, nil
}
+
+// allComments collects all the comments from start to end.
+func allComments(file *ast.File, start, end token.Pos) string {
+ var buf strings.Builder
+ for co := range astutil.Comments(file, start, end) {
+ _, _ = fmt.Fprintf(&buf, "%s\n", co.Text)
+ }
+ return buf.String()
+}
+
+// isInequality reports non-zero if tok is one of < <= => >:
+// +1 for > and -1 for <.
+func isInequality(tok token.Token) int {
+ switch tok {
+ case token.LEQ, token.LSS:
+ return -1
+ case token.GEQ, token.GTR:
+ return +1
+ }
+ return 0
+}
+
+// isAssignBlock reports whether b is a block of the form { lhs = rhs }.
+func isAssignBlock(b *ast.BlockStmt) bool {
+ if len(b.List) != 1 {
+ return false
+ }
+ // Inv: the sole statement cannot be { lhs := rhs }.
+ return isSimpleAssign(b.List[0])
+}
+
+// isSimpleAssign reports whether n has the form "lhs = rhs" or "lhs := rhs".
+func isSimpleAssign(n ast.Node) bool {
+ assign, ok := n.(*ast.AssignStmt)
+ return ok &&
+ (assign.Tok == token.ASSIGN || assign.Tok == token.DEFINE) &&
+ len(assign.Lhs) == 1 &&
+ len(assign.Rhs) == 1
+}
+
+// maybeNaN reports whether t is (or may be) a floating-point type.
+func maybeNaN(t types.Type) bool {
+ // For now, we rely on core types.
+ // TODO(adonovan): In the post-core-types future,
+ // follow the approach of types.Checker.applyTypeFunc.
+ t = typeparams.CoreType(t)
+ if t == nil {
+ return true // fail safe
+ }
+ if basic, ok := t.(*types.Basic); ok && basic.Info()&types.IsFloat != 0 {
+ return true
+ }
+ return false
+}
+
+// checkUserDefinedMinMax looks for user-defined min/max functions that are
+// equivalent to the built-in functions and suggests removing them.
+func checkUserDefinedMinMax(pass *analysis.Pass) {
+ index := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+
+ // Look up min and max functions by name in package scope
+ for _, funcName := range []string{"min", "max"} {
+ if fn, ok := pass.Pkg.Scope().Lookup(funcName).(*types.Func); ok {
+ // Use typeindex to get the FuncDecl directly
+ if def, ok := index.Def(fn); ok {
+ decl := def.Parent().Node().(*ast.FuncDecl)
+ // Check if this function matches the built-in min/max signature and behavior
+ if canUseBuiltinMinMax(fn, decl.Body) {
+ // Expand to include leading doc comment
+ pos := decl.Pos()
+ if docs := astutil.DocComment(decl); docs != nil {
+ pos = docs.Pos()
+ }
+
+ pass.Report(analysis.Diagnostic{
+ Pos: decl.Pos(),
+ End: decl.End(),
+ Message: fmt.Sprintf("user-defined %s function is equivalent to built-in %s and can be removed", funcName, funcName),
+ SuggestedFixes: []analysis.SuggestedFix{{
+ Message: fmt.Sprintf("Remove user-defined %s function", funcName),
+ TextEdits: []analysis.TextEdit{{
+ Pos: pos,
+ End: decl.End(),
+ }},
+ }},
+ })
+ }
+ }
+ }
+ }
+}
+
+// canUseBuiltinMinMax reports whether it is safe to replace a call
+// to this min or max function by its built-in namesake.
+func canUseBuiltinMinMax(fn *types.Func, body *ast.BlockStmt) bool {
+ sig := fn.Type().(*types.Signature)
+
+ // Only consider the most common case: exactly 2 parameters
+ if sig.Params().Len() != 2 {
+ return false
+ }
+
+ // Check if any parameter might be floating-point
+ for param := range sig.Params().Variables() {
+ if maybeNaN(param.Type()) {
+ return false // Don't suggest removal for float types due to NaN handling
+ }
+ }
+
+ // Must have exactly one return value
+ if sig.Results().Len() != 1 {
+ return false
+ }
+
+ // Check that the function body implements the expected min/max logic
+ if body == nil {
+ return false
+ }
+
+ return hasMinMaxLogic(body, fn.Name())
+}
+
+// hasMinMaxLogic checks if the function body implements simple min/max logic.
+func hasMinMaxLogic(body *ast.BlockStmt, funcName string) bool {
+ // Pattern 1: Single if/else statement
+ if len(body.List) == 1 {
+ if ifStmt, ok := body.List[0].(*ast.IfStmt); ok {
+ // Get the "false" result from the else block
+ if elseBlock, ok := ifStmt.Else.(*ast.BlockStmt); ok && len(elseBlock.List) == 1 {
+ if elseRet, ok := elseBlock.List[0].(*ast.ReturnStmt); ok && len(elseRet.Results) == 1 {
+ return checkMinMaxPattern(ifStmt, elseRet.Results[0], funcName)
+ }
+ }
+ }
+ }
+
+ // Pattern 2: if statement followed by return
+ if len(body.List) == 2 {
+ if ifStmt, ok := body.List[0].(*ast.IfStmt); ok && ifStmt.Else == nil {
+ if retStmt, ok := body.List[1].(*ast.ReturnStmt); ok && len(retStmt.Results) == 1 {
+ return checkMinMaxPattern(ifStmt, retStmt.Results[0], funcName)
+ }
+ }
+ }
+
+ return false
+}
+
+// checkMinMaxPattern checks if an if statement implements min/max logic.
+// ifStmt: the if statement to check
+// falseResult: the expression returned when the condition is false
+// funcName: "min" or "max"
+func checkMinMaxPattern(ifStmt *ast.IfStmt, falseResult ast.Expr, funcName string) bool {
+ // Must have condition with comparison
+ cmp, ok := ifStmt.Cond.(*ast.BinaryExpr)
+ if !ok {
+ return false
+ }
+
+ // Check if then branch returns one of the compared values
+ if len(ifStmt.Body.List) != 1 {
+ return false
+ }
+
+ thenRet, ok := ifStmt.Body.List[0].(*ast.ReturnStmt)
+ if !ok || len(thenRet.Results) != 1 {
+ return false
+ }
+
+ // Use the same logic as the existing minmax analyzer
+ sign := isInequality(cmp.Op)
+ if sign == 0 {
+ return false // Not a comparison operator
+ }
+
+ t := thenRet.Results[0] // "true" result
+ f := falseResult // "false" result
+ x := cmp.X // left operand
+ y := cmp.Y // right operand
+
+ // Check operand order and adjust sign accordingly
+ if astutil.EqualSyntax(t, x) && astutil.EqualSyntax(f, y) {
+ sign = +sign
+ } else if astutil.EqualSyntax(t, y) && astutil.EqualSyntax(f, x) {
+ sign = -sign
+ } else {
+ return false
+ }
+
+ // Check if the sign matches the function name
+ return cond(sign < 0, "min", "max") == funcName
+}
+
+// -- utils --
+
+func is[T any](x any) bool {
+ _, ok := x.(T)
+ return ok
+}
+
// cond is a generic ternary operator: it returns t when cond holds
// and f otherwise.
func cond[T any](cond bool, t, f T) T {
	if !cond {
		return f
	}
	return t
}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ _ "embed"
+ "go/ast"
+ "go/constant"
+ "go/format"
+ "go/token"
+ "go/types"
+ "iter"
+ "regexp"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/moreiters"
+ "golang.org/x/tools/internal/packagepath"
+ "golang.org/x/tools/internal/stdlib"
+ "golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/versions"
+)
+
// doc holds the text of doc.go, from which each analyzer's Doc string
// is extracted by analysisinternal.MustExtractDoc.
//
//go:embed doc.go
var doc string
+
// Suite lists all modernize analyzers.
//
// Analyzers whose fixes are not nil-preserving are deliberately
// excluded (see the commented-out entries below).
var Suite = []*analysis.Analyzer{
	AnyAnalyzer,
	// AppendClippedAnalyzer, // not nil-preserving!
	BLoopAnalyzer,
	FmtAppendfAnalyzer,
	ForVarAnalyzer,
	MapsLoopAnalyzer,
	MinMaxAnalyzer,
	NewExprAnalyzer,
	OmitZeroAnalyzer,
	plusBuildAnalyzer,
	RangeIntAnalyzer,
	ReflectTypeForAnalyzer,
	SlicesContainsAnalyzer,
	// SlicesDeleteAnalyzer, // not nil-preserving!
	SlicesSortAnalyzer,
	stditeratorsAnalyzer,
	StringsCutPrefixAnalyzer,
	StringsSeqAnalyzer,
	StringsBuilderAnalyzer,
	TestingContextAnalyzer,
	WaitGroupAnalyzer,
}
+
+// -- helpers --
+
+// skipGenerated decorates pass.Report to suppress diagnostics in generated files.
+func skipGenerated(pass *analysis.Pass) {
+ report := pass.Report
+ pass.Report = func(diag analysis.Diagnostic) {
+ generated := pass.ResultOf[generated.Analyzer].(*generated.Result)
+ if generated.IsGenerated(diag.Pos) {
+ return // skip
+ }
+ report(diag)
+ }
+}
+
+// formatExprs formats a comma-separated list of expressions.
+func formatExprs(fset *token.FileSet, exprs []ast.Expr) string {
+ var buf strings.Builder
+ for i, e := range exprs {
+ if i > 0 {
+ buf.WriteString(", ")
+ }
+ format.Node(&buf, fset, e) // ignore errors
+ }
+ return buf.String()
+}
+
// isZeroIntLiteral reports whether e is an integer whose value is 0.
// (Convenience wrapper around isIntLiteral.)
func isZeroIntLiteral(info *types.Info, e ast.Expr) bool {
	return isIntLiteral(info, e, 0)
}
+
+// isIntLiteral reports whether e is an integer with given value.
+func isIntLiteral(info *types.Info, e ast.Expr, n int64) bool {
+ return info.Types[e].Value == constant.MakeInt64(n)
+}
+
+// filesUsing returns a cursor for each *ast.File in the inspector
+// that uses at least the specified version of Go (e.g. "go1.24").
+//
+// TODO(adonovan): opt: eliminate this function, instead following the
+// approach of [fmtappendf], which uses typeindex and [fileUses].
+// See "Tip" at [fileUses] for motivation.
+func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) iter.Seq[inspector.Cursor] {
+ return func(yield func(inspector.Cursor) bool) {
+ for curFile := range inspect.Root().Children() {
+ file := curFile.Node().(*ast.File)
+ if !versions.Before(info.FileVersions[file], version) && !yield(curFile) {
+ break
+ }
+ }
+ }
+}
+
+// fileUses reports whether the specified file uses at least the
+// specified version of Go (e.g. "go1.24").
+//
+// Tip: we recommend using this check "late", just before calling
+// pass.Report, rather than "early" (when entering each ast.File, or
+// each candidate node of interest, during the traversal), because the
+// operation is not free, yet is not a highly selective filter: the
+// fraction of files that pass most version checks is high and
+// increases over time.
+func fileUses(info *types.Info, file *ast.File, version string) bool {
+ return !versions.Before(info.FileVersions[file], version)
+}
+
+// within reports whether the current pass is analyzing one of the
+// specified standard packages or their dependencies.
+func within(pass *analysis.Pass, pkgs ...string) bool {
+ path := pass.Pkg.Path()
+ return packagepath.IsStdPackage(path) &&
+ moreiters.Contains(stdlib.Dependencies(pkgs...), path)
+}
+
// unparenEnclosing removes enclosing parens from cur in
// preparation for a call to [Cursor.ParentEdge].
func unparenEnclosing(cur inspector.Cursor) inspector.Cursor {
	// Climb from the operand of a ParenExpr to the outermost ParenExpr.
	for astutil.IsChildOf(cur, edge.ParenExpr_X) {
		cur = cur.Parent()
	}
	return cur
}
+
// Cached references to universe-scope (built-in) objects and a few
// frequently used types and regexps, computed once at package init.
var (
	builtinAny     = types.Universe.Lookup("any")
	builtinAppend  = types.Universe.Lookup("append")
	builtinBool    = types.Universe.Lookup("bool")
	builtinInt     = types.Universe.Lookup("int")
	builtinFalse   = types.Universe.Lookup("false")
	builtinLen     = types.Universe.Lookup("len")
	builtinMake    = types.Universe.Lookup("make")
	builtinNew     = types.Universe.Lookup("new")
	builtinNil     = types.Universe.Lookup("nil")
	builtinString  = types.Universe.Lookup("string")
	builtinTrue    = types.Universe.Lookup("true")
	byteSliceType  = types.NewSlice(types.Typ[types.Byte])
	omitemptyRegex = regexp.MustCompile(`(?:^json| json):"[^"]*(,omitempty)(?:"|,[^"]*")\s?`)
)
+
+// lookup returns the symbol denoted by name at the position of the cursor.
+func lookup(info *types.Info, cur inspector.Cursor, name string) types.Object {
+ scope := typesinternal.EnclosingScope(info, cur)
+ _, obj := scope.LookupParent(name, cur.Node().Pos())
+ return obj
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ _ "embed"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "strings"
+
+ "fmt"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/astutil"
+)
+
+var NewExprAnalyzer = &analysis.Analyzer{
+ Name: "newexpr",
+ Doc: analysisinternal.MustExtractDoc(doc, "newexpr"),
+ URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize#newexpr",
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: run,
+ FactTypes: []analysis.Fact{&newLike{}},
+}
+
+func run(pass *analysis.Pass) (any, error) {
+ var (
+ inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+ info = pass.TypesInfo
+ )
+
+ // Detect functions that are new-like, i.e. have the form:
+ //
+ // func f(x T) *T { return &x }
+ //
+ // meaning that it is equivalent to new(x), if x has type T.
+ for curFuncDecl := range inspect.Root().Preorder((*ast.FuncDecl)(nil)) {
+ decl := curFuncDecl.Node().(*ast.FuncDecl)
+ fn := info.Defs[decl.Name].(*types.Func)
+ if decl.Body != nil && len(decl.Body.List) == 1 {
+ if ret, ok := decl.Body.List[0].(*ast.ReturnStmt); ok && len(ret.Results) == 1 {
+ if unary, ok := ret.Results[0].(*ast.UnaryExpr); ok && unary.Op == token.AND {
+ if id, ok := unary.X.(*ast.Ident); ok {
+ if v, ok := info.Uses[id].(*types.Var); ok {
+ sig := fn.Signature()
+ if sig.Results().Len() == 1 &&
+ is[*types.Pointer](sig.Results().At(0).Type()) && // => no iface conversion
+ sig.Params().Len() == 1 &&
+ sig.Params().At(0) == v {
+
+ // Export a fact for each one.
+ pass.ExportObjectFact(fn, &newLike{})
+
+ // Check file version.
+ file := astutil.EnclosingFile(curFuncDecl)
+ if !fileUses(info, file, "go1.26") {
+ continue // new(expr) not available in this file
+ }
+
+ var edits []analysis.TextEdit
+
+ // If 'new' is not shadowed, replace func body: &x -> new(x).
+ // This makes it safely and cleanly inlinable.
+ curRet, _ := curFuncDecl.FindNode(ret)
+ if lookup(info, curRet, "new") == builtinNew {
+ edits = []analysis.TextEdit{
+ // return &x
+ // ---- -
+ // return new(x)
+ {
+ Pos: unary.OpPos,
+ End: unary.OpPos + token.Pos(len("&")),
+ NewText: []byte("new("),
+ },
+ {
+ Pos: unary.X.End(),
+ End: unary.X.End(),
+ NewText: []byte(")"),
+ },
+ }
+ }
+
+ // Disabled until we resolve https://go.dev/issue/75726
+ // (Go version skew between caller and callee in inliner.)
+ // TODO(adonovan): fix and reenable.
+ //
+ // Also, restore these lines to our section of doc.go:
+ // //go:fix inline
+ // ...
+ // (The directive comment causes the inline analyzer to suggest
+ // that calls to such functions are inlined.)
+ if false {
+ // Add a //go:fix inline annotation, if not already present.
+ // TODO(adonovan): use ast.ParseDirective when go1.26 is assured.
+ if !strings.Contains(decl.Doc.Text(), "go:fix inline") {
+ edits = append(edits, analysis.TextEdit{
+ Pos: decl.Pos(),
+ End: decl.Pos(),
+ NewText: []byte("//go:fix inline\n"),
+ })
+ }
+ }
+
+ if len(edits) > 0 {
+ pass.Report(analysis.Diagnostic{
+ Pos: decl.Name.Pos(),
+ End: decl.Name.End(),
+ Message: fmt.Sprintf("%s can be an inlinable wrapper around new(expr)", decl.Name),
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ Message: "Make %s an inlinable wrapper around new(expr)",
+ TextEdits: edits,
+ },
+ },
+ })
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Report and transform calls, when safe.
+ // In effect, this is inlining the new-like function
+ // even before we have marked the callee with //go:fix inline.
+ for curCall := range inspect.Root().Preorder((*ast.CallExpr)(nil)) {
+ call := curCall.Node().(*ast.CallExpr)
+ var fact newLike
+ if fn, ok := typeutil.Callee(info, call).(*types.Func); ok &&
+ pass.ImportObjectFact(fn, &fact) {
+
+ // Check file version.
+ file := astutil.EnclosingFile(curCall)
+ if !fileUses(info, file, "go1.26") {
+ continue // new(expr) not available in this file
+ }
+
+ // Check new is not shadowed.
+ if lookup(info, curCall, "new") != builtinNew {
+ continue
+ }
+
+ // The return type *T must exactly match the argument type T.
+ // (We formulate it this way--not in terms of the parameter
+ // type--to support generics.)
+ var targ types.Type
+ {
+ arg := call.Args[0]
+ tvarg := info.Types[arg]
+
+ // Constants: we must work around the type checker
+ // bug that causes info.Types to wrongly report the
+ // "typed" type for an untyped constant.
+ // (See "historical reasons" in issue go.dev/issue/70638.)
+ //
+ // We don't have a reliable way to do this but we can attempt
+ // to re-typecheck the constant expression on its own, in
+ // the original lexical environment but not as a part of some
+ // larger expression that implies a conversion to some "typed" type.
+ // (For the genesis of this idea see (*state).arguments
+ // in ../../../../internal/refactor/inline/inline.go.)
+ if tvarg.Value != nil {
+ info2 := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)}
+ if err := types.CheckExpr(token.NewFileSet(), pass.Pkg, token.NoPos, arg, info2); err != nil {
+ continue // unexpected error
+ }
+ tvarg = info2.Types[arg]
+ }
+
+ targ = types.Default(tvarg.Type)
+ }
+ if !types.Identical(types.NewPointer(targ), info.TypeOf(call)) {
+ continue
+ }
+
+ pass.Report(analysis.Diagnostic{
+ Pos: call.Pos(),
+ End: call.End(),
+ Message: fmt.Sprintf("call of %s(x) can be simplified to new(x)", fn.Name()),
+ SuggestedFixes: []analysis.SuggestedFix{{
+ Message: fmt.Sprintf("Simplify %s(x) to new(x)", fn.Name()),
+ TextEdits: []analysis.TextEdit{{
+ Pos: call.Fun.Pos(),
+ End: call.Fun.End(),
+ NewText: []byte("new"),
+ }},
+ }},
+ })
+ }
+ }
+
+ return nil, nil
+}
+
+// A newLike fact records that its associated function is "new-like",
+// i.e. a candidate whose calls may be simplified to new(expr)
+// (see the diagnostic logic earlier in this file). Exporting the fact
+// makes the information visible across package boundaries.
+type newLike struct{}
+
+func (*newLike) AFact() {}                      // marks newLike as an analysis.Fact
+func (*newLike) String() string { return "newlike" } // short name used when printing facts
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "go/ast"
+ "go/types"
+ "reflect"
+ "strconv"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/astutil"
+)
+
+// OmitZeroAnalyzer reports uses of the json "omitempty" tag option on
+// fields of struct type, where it has no effect, and suggests either
+// removing it or replacing it with "omitzero" (go1.24); see omitzero
+// below for details.
+var OmitZeroAnalyzer = &analysis.Analyzer{
+	Name: "omitzero",
+	Doc:  analysisinternal.MustExtractDoc(doc, "omitzero"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+	},
+	Run: omitzero,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#omitzero",
+}
+
+// checkOmitEmptyField reports a diagnostic if curField is a field of
+// struct type whose json tag contains the "omitempty" option, which
+// has no effect on struct fields. It offers two fixes: delete the
+// option (removing the whole tag, or the whole json key, when nothing
+// else would remain), or replace it with "omitzero" (a behavior change).
+func checkOmitEmptyField(pass *analysis.Pass, info *types.Info, curField *ast.Field) {
+	typ := info.TypeOf(curField.Type)
+	_, ok := typ.Underlying().(*types.Struct)
+	if !ok {
+		// Not a struct
+		return
+	}
+	tag := curField.Tag
+	if tag == nil {
+		// No tag to check
+		return
+	}
+	// The omitempty tag may be used by other packages besides json, but we should only modify its use with json
+	tagconv, _ := strconv.Unquote(tag.Value)
+	match := omitemptyRegex.FindStringSubmatchIndex(tagconv)
+	if match == nil {
+		// No omitempty in json tag
+		return
+	}
+	// match[2]:match[3] bounds the ",omitempty" submatch within the
+	// unquoted tag; translate those offsets back into file positions.
+	omitEmptyPos, omitEmptyEnd, err := astutil.RangeInStringLiteral(curField.Tag, match[2], match[3])
+	if err != nil {
+		return
+	}
+	removePos, removeEnd := omitEmptyPos, omitEmptyEnd
+
+	jsonTag := reflect.StructTag(tagconv).Get("json")
+	if jsonTag == ",omitempty" {
+		// Remove the entire struct tag if json is the only package used
+		if match[1]-match[0] == len(tagconv) {
+			removePos = curField.Tag.Pos()
+			removeEnd = curField.Tag.End()
+		} else {
+			// Remove the json tag if omitempty is the only field
+			removePos, err = astutil.PosInStringLiteral(curField.Tag, match[0])
+			if err != nil {
+				return
+			}
+			removeEnd, err = astutil.PosInStringLiteral(curField.Tag, match[1])
+			if err != nil {
+				return
+			}
+		}
+	}
+	pass.Report(analysis.Diagnostic{
+		Pos:     curField.Tag.Pos(),
+		End:     curField.Tag.End(),
+		Message: "Omitempty has no effect on nested struct fields",
+		SuggestedFixes: []analysis.SuggestedFix{
+			{
+				Message: "Remove redundant omitempty tag",
+				TextEdits: []analysis.TextEdit{
+					{
+						Pos: removePos,
+						End: removeEnd,
+					},
+				},
+			},
+			{
+				Message: "Replace omitempty with omitzero (behavior change)",
+				TextEdits: []analysis.TextEdit{
+					{
+						Pos:     omitEmptyPos,
+						End:     omitEmptyEnd,
+						NewText: []byte(",omitzero"),
+					},
+				},
+			},
+		}})
+}
+
+// The omitzero pass searches for instances of "omitempty" in a json field tag on a
+// struct. Since "omitempty" does not have any effect when applied to a struct field,
+// it suggests either deleting "omitempty" or replacing it with "omitzero", which
+// correctly excludes structs from a json encoding.
+func omitzero(pass *analysis.Pass) (any, error) {
+ skipGenerated(pass)
+
+ inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+ info := pass.TypesInfo
+ for curFile := range filesUsing(inspect, info, "go1.24") {
+ for curStruct := range curFile.Preorder((*ast.StructType)(nil)) {
+ for _, curField := range curStruct.Node().(*ast.StructType).Fields.List {
+ checkOmitEmptyField(pass, info, curField)
+ }
+ }
+ }
+ return nil, nil
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "go/ast"
+ "go/parser"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/goplsexport"
+)
+
+// plusBuildAnalyzer reports obsolete "// +build" comment lines that
+// accompany an equivalent //go:build directive (see plusbuild below).
+// It is unexported pending publication; the init func below exports it
+// to gopls.
+var plusBuildAnalyzer = &analysis.Analyzer{
+	Name: "plusbuild",
+	Doc:  analysisinternal.MustExtractDoc(doc, "plusbuild"),
+	URL:  "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#plusbuild",
+	Run:  plusbuild,
+}
+
+// init exposes plusBuildAnalyzer to gopls through the internal
+// goplsexport shim, since the analyzer is not yet published.
+func init() {
+	// Export to gopls until this is a published modernizer.
+	goplsexport.PlusBuildModernizer = plusBuildAnalyzer
+}
+
+// plusbuild reports "// +build" lines that are redundant because a
+// //go:build directive (mandatory since go1.18) precedes them in the
+// same comment group, and offers a fix to delete them. It also scans
+// files excluded by the current build configuration
+// (pass.IgnoredFiles), parsing them on the fly, since
+// build-constrained files are the ones most likely to carry such lines.
+func plusbuild(pass *analysis.Pass) (any, error) {
+	check := func(f *ast.File) {
+		if !fileUses(pass.TypesInfo, f, "go1.18") {
+			return
+		}
+
+		// When gofmt sees a +build comment, it adds a
+		// preceding equivalent //go:build directive, so in
+		// formatted files we can assume that a +build line is
+		// part of a comment group that starts with a
+		// //go:build line and is followed by a blank line.
+		//
+		// While we cannot delete comments from an AST and
+		// expect consistent output in general, this specific
+		// case--deleting only some lines from a comment
+		// block--does format correctly.
+		for _, group := range f.Comments {
+			afterGoBuild := false
+			for _, comment := range group.List {
+				if afterGoBuild && strings.HasPrefix(comment.Text, "// +build ") {
+					pass.Report(analysis.Diagnostic{
+						Pos:     comment.Pos(),
+						End:     comment.End(),
+						Message: "+build line is no longer needed",
+						SuggestedFixes: []analysis.SuggestedFix{{
+							Message: "Remove obsolete +build line",
+							TextEdits: []analysis.TextEdit{{
+								Pos: comment.Pos(),
+								End: comment.End(),
+							}},
+						}},
+					})
+					break // at most one diagnostic per comment group
+				}
+				if strings.HasPrefix(comment.Text, "//go:build ") {
+					afterGoBuild = true
+				}
+			}
+		}
+	}
+
+	for _, f := range pass.Files {
+		check(f)
+	}
+	for _, name := range pass.IgnoredFiles {
+		if strings.HasSuffix(name, ".go") {
+			f, err := parser.ParseFile(pass.Fset, name, nil, parser.ParseComments|parser.SkipObjectResolution)
+			if err != nil {
+				continue // parse error: ignore
+			}
+			check(f)
+		}
+	}
+	return nil, nil
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// RangeIntAnalyzer offers fixes to replace 3-clause
+// "for i := 0; i < n; i++" loops by go1.22 "for i := range n" loops;
+// see the doc comment on rangeint below for the precise conditions.
+var RangeIntAnalyzer = &analysis.Analyzer{
+	Name: "rangeint",
+	Doc:  analysisinternal.MustExtractDoc(doc, "rangeint"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+		typeindexanalyzer.Analyzer,
+	},
+	Run: rangeint,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#rangeint",
+}
+
+// rangeint offers a fix to replace a 3-clause 'for' loop:
+//
+//	for i := 0; i < limit; i++ {}
+//
+// by a range loop with an integer operand:
+//
+//	for i := range limit {}
+//
+// Variants:
+//   - The ':=' may be replaced by '='.
+//   - The fix may remove "i :=" if it would become unused.
+//
+// Restrictions:
+//   - The variable i must not be assigned or address-taken within the
+//     loop, because a "for range int" loop does not respect assignments
+//     to the loop index.
+//   - The limit must not be b.N, to avoid redundancy with bloop's fixes.
+//
+// Caveats:
+//
+// The fix causes the limit expression to be evaluated exactly once,
+// instead of once per iteration. So, to avoid changing the
+// cardinality of side effects, the limit expression must not involve
+// function calls (e.g. seq.Len()) or channel receives. Moreover, the
+// value of the limit expression must be loop invariant, which in
+// practice means it must take one of the following forms:
+//
+//   - a local variable that is assigned only once and not address-taken;
+//   - a constant; or
+//   - len(s), where s has the above properties.
+func rangeint(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	info := pass.TypesInfo
+
+	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+	typeindex := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+
+	for curFile := range filesUsing(inspect, info, "go1.22") {
+	nextLoop:
+		for curLoop := range curFile.Preorder((*ast.ForStmt)(nil)) {
+			loop := curLoop.Node().(*ast.ForStmt)
+			if init, ok := loop.Init.(*ast.AssignStmt); ok &&
+				isSimpleAssign(init) &&
+				is[*ast.Ident](init.Lhs[0]) &&
+				isZeroIntLiteral(info, init.Rhs[0]) {
+				// Have: for i = 0; ... (or i := 0)
+				index := init.Lhs[0].(*ast.Ident)
+
+				if compare, ok := loop.Cond.(*ast.BinaryExpr); ok &&
+					compare.Op == token.LSS &&
+					astutil.EqualSyntax(compare.X, init.Lhs[0]) {
+					// Have: for i = 0; i < limit; ... {}
+
+					limit := compare.Y
+
+					// If limit is "len(slice)", simplify it to "slice".
+					//
+					// (Don't replace "for i := 0; i < len(map); i++"
+					// with "for range m" because it's too hard to prove
+					// that len(m) is loop-invariant).
+					if call, ok := limit.(*ast.CallExpr); ok &&
+						typeutil.Callee(info, call) == builtinLen &&
+						is[*types.Slice](info.TypeOf(call.Args[0]).Underlying()) {
+						limit = call.Args[0]
+					}
+
+					// Check the form of limit: must be a constant,
+					// or a local var that is not assigned or address-taken.
+					limitOK := false
+					if info.Types[limit].Value != nil {
+						limitOK = true // constant
+					} else if id, ok := limit.(*ast.Ident); ok {
+						if v, ok := info.Uses[id].(*types.Var); ok &&
+							!(v.Exported() && typesinternal.IsPackageLevel(v)) {
+							// limit is a local or unexported global var.
+							// (An exported global may have uses we can't see.)
+							for cur := range typeindex.Uses(v) {
+								if isScalarLvalue(info, cur) {
+									// Limit var is assigned or address-taken.
+									continue nextLoop
+								}
+							}
+							limitOK = true
+						}
+					}
+					if !limitOK {
+						continue nextLoop
+					}
+
+					if inc, ok := loop.Post.(*ast.IncDecStmt); ok &&
+						inc.Tok == token.INC &&
+						astutil.EqualSyntax(compare.X, inc.X) {
+						// Have: for i = 0; i < limit; i++ {}
+
+						// Find references to i within the loop body.
+						v := info.ObjectOf(index).(*types.Var)
+						// TODO(adonovan): use go1.25 v.Kind() == types.PackageVar
+						if typesinternal.IsPackageLevel(v) {
+							continue nextLoop
+						}
+						used := false
+						for curId := range curLoop.Child(loop.Body).Preorder((*ast.Ident)(nil)) {
+							id := curId.Node().(*ast.Ident)
+							if info.Uses[id] == v {
+								used = true
+
+								// Reject if any is an l-value (assigned or address-taken):
+								// a "for range int" loop does not respect assignments to
+								// the loop variable.
+								if isScalarLvalue(info, curId) {
+									continue nextLoop
+								}
+							}
+						}
+
+						// If i is no longer used, delete "i := ".
+						var edits []analysis.TextEdit
+						if !used && init.Tok == token.DEFINE {
+							edits = append(edits, analysis.TextEdit{
+								Pos: index.Pos(),
+								End: init.Rhs[0].Pos(),
+							})
+						}
+
+						// If i is used after the loop,
+						// don't offer a fix, as a range loop
+						// leaves i with a different final value (limit-1).
+						if init.Tok == token.ASSIGN {
+							for curId := range curLoop.Parent().Preorder((*ast.Ident)(nil)) {
+								id := curId.Node().(*ast.Ident)
+								if info.Uses[id] == v {
+									// Is i used after loop?
+									if id.Pos() > loop.End() {
+										continue nextLoop
+									}
+									// Is i used within a defer statement
+									// that is within the scope of i?
+									//   var i int
+									//   defer func() { print(i)}
+									//   for i = ... { ... }
+									for curDefer := range curId.Enclosing((*ast.DeferStmt)(nil)) {
+										if curDefer.Node().Pos() > v.Pos() {
+											continue nextLoop
+										}
+									}
+								}
+							}
+						}
+
+						// If limit is len(slice),
+						// simplify "range len(slice)" to "range slice".
+						// NOTE(review): limit was already simplified this
+						// way before the limitOK check above, so this
+						// second simplification appears to be a no-op;
+						// confirm and consider removing upstream.
+						if call, ok := limit.(*ast.CallExpr); ok &&
+							typeutil.Callee(info, call) == builtinLen &&
+							is[*types.Slice](info.TypeOf(call.Args[0]).Underlying()) {
+							limit = call.Args[0]
+						}
+
+						// If the limit is a untyped constant of non-integer type,
+						// such as "const limit = 1e3", its effective type may
+						// differ between the two forms.
+						// In a for loop, it must be comparable with int i,
+						//    for i := 0; i < limit; i++
+						// but in a range loop it would become a float,
+						//    for i := range limit {}
+						// which is a type error. We need to convert it to int
+						// in this case.
+						//
+						// Unfortunately go/types discards the untyped type
+						// (but see Untyped in golang/go#70638) so we must
+						// re-type check the expression to detect this case.
+						var beforeLimit, afterLimit string
+						if v := info.Types[limit].Value; v != nil {
+							tVar := info.TypeOf(init.Rhs[0])
+							file := curFile.Node().(*ast.File)
+							// TODO(mkalil): use a types.Qualifier that respects the existing
+							// imports of this file that are visible (not shadowed) at the current position.
+							qual := typesinternal.FileQualifier(file, pass.Pkg)
+							beforeLimit, afterLimit = fmt.Sprintf("%s(", types.TypeString(tVar, qual)), ")"
+							info2 := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)}
+							if types.CheckExpr(pass.Fset, pass.Pkg, limit.Pos(), limit, info2) == nil {
+								tLimit := types.Default(info2.TypeOf(limit))
+								if types.AssignableTo(tLimit, tVar) {
+									beforeLimit, afterLimit = "", ""
+								}
+							}
+						}
+
+						pass.Report(analysis.Diagnostic{
+							Pos:     init.Pos(),
+							End:     inc.End(),
+							Message: "for loop can be modernized using range over int",
+							SuggestedFixes: []analysis.SuggestedFix{{
+								Message: fmt.Sprintf("Replace for loop with range %s",
+									astutil.Format(pass.Fset, limit)),
+								TextEdits: append(edits, []analysis.TextEdit{
+									// for i := 0; i < limit; i++ {}
+									//     -----          ---
+									//          -------
+									// for i := range limit {}
+
+									// Delete init.
+									{
+										Pos:     init.Rhs[0].Pos(),
+										End:     limit.Pos(),
+										NewText: []byte("range "),
+									},
+									// Add "int(" before limit, if needed.
+									{
+										Pos:     limit.Pos(),
+										End:     limit.Pos(),
+										NewText: []byte(beforeLimit),
+									},
+									// Delete inc.
+									{
+										Pos: limit.End(),
+										End: inc.End(),
+									},
+									// Add ")" after limit, if needed.
+									{
+										Pos:     limit.End(),
+										End:     limit.End(),
+										NewText: []byte(afterLimit),
+									},
+								}...),
+							}},
+						})
+					}
+				}
+			}
+		}
+	}
+	return nil, nil
+}
+
+// isScalarLvalue reports whether the identifier at curId is used as an
+// l-value: assigned (including compound assignment and i++/i--) or
+// address-taken.
+//
+// It is valid only for scalars (x = ...),
+// not for aggregates (x.a[i] = ...).
+func isScalarLvalue(info *types.Info, curId inspector.Cursor) bool {
+	// info.Types[e].Assignable() is of no help here: it holds for a
+	// variable even when the occurrence in question is only an r-value,
+	// so we must examine the enclosing syntax instead.
+
+	// Walk up past any enclosing parentheses.
+	cur := curId
+	kind, _ := cur.ParentEdge()
+	for kind == edge.ParenExpr_X {
+		cur = cur.Parent()
+		kind, _ = cur.ParentEdge()
+	}
+
+	switch kind {
+	case edge.IncDecStmt_X:
+		return true // i++, i--
+
+	case edge.UnaryExpr_X:
+		// Address-taken? (&i, but not -i, !i, etc.)
+		return cur.Parent().Node().(*ast.UnaryExpr).Op == token.AND
+
+	case edge.AssignStmt_Lhs:
+		if stmt := cur.Parent().Node().(*ast.AssignStmt); stmt.Tok != token.DEFINE {
+			return true // i = j or i += j
+		}
+		// Within :=, the identifier is an l-value only if it
+		// redeclares an existing variable, as in "i, j := 1, 2".
+		id := curId.Node().(*ast.Ident)
+		if obj, ok := info.Defs[id]; ok && obj.Pos() != id.Pos() {
+			return true
+		}
+	}
+	return false
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+// This file defines modernizers that use the "reflect" package.
+
+import (
+ "go/ast"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
+)
+
+var ReflectTypeForAnalyzer = &analysis.Analyzer{
+ Name: "reflecttypefor",
+ Doc: analysisinternal.MustExtractDoc(doc, "reflecttypefor"),
+ Requires: []*analysis.Analyzer{
+ generated.Analyzer,
+ inspect.Analyzer,
+ typeindexanalyzer.Analyzer,
+ },
+ Run: reflecttypefor,
+ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#reflecttypefor",
+}
+
+func reflecttypefor(pass *analysis.Pass) (any, error) {
+ skipGenerated(pass)
+
+ var (
+ index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+ info = pass.TypesInfo
+
+ reflectTypeOf = index.Object("reflect", "TypeOf")
+ )
+
+ for curCall := range index.Calls(reflectTypeOf) {
+ call := curCall.Node().(*ast.CallExpr)
+ // Have: reflect.TypeOf(expr)
+
+ expr := call.Args[0]
+ if !typesinternal.NoEffects(info, expr) {
+ continue // don't eliminate operand: may have effects
+ }
+
+ t := info.TypeOf(expr)
+ var edits []analysis.TextEdit
+
+ // Special case for TypeOf((*T)(nil)).Elem(),
+ // needed when T is an interface type.
+ if astutil.IsChildOf(curCall, edge.SelectorExpr_X) {
+ curSel := unparenEnclosing(curCall).Parent()
+ if astutil.IsChildOf(curSel, edge.CallExpr_Fun) {
+ call2 := unparenEnclosing(curSel).Parent().Node().(*ast.CallExpr)
+ obj := typeutil.Callee(info, call2)
+ if typesinternal.IsMethodNamed(obj, "reflect", "Type", "Elem") {
+ if ptr, ok := t.(*types.Pointer); ok {
+ // Have: TypeOf(expr).Elem() where expr : *T
+ t = ptr.Elem()
+ // reflect.TypeOf(expr).Elem()
+ // -------
+ // reflect.TypeOf(expr)
+ edits = []analysis.TextEdit{{
+ Pos: call.End(),
+ End: call2.End(),
+ }}
+ }
+ }
+ }
+ }
+
+ // TypeOf(x) where x has an interface type is a
+ // dynamic operation; don't transform it to TypeFor.
+ // (edits == nil means "not the Elem() special case".)
+ if types.IsInterface(t) && edits == nil {
+ continue
+ }
+
+ file := astutil.EnclosingFile(curCall)
+ if versions.Before(info.FileVersions[file], "go1.22") {
+ continue // TypeFor requires go1.22
+ }
+ tokFile := pass.Fset.File(file.Pos())
+
+ // Format the type as valid Go syntax.
+ // TODO(adonovan): FileQualifier needs to respect
+ // visibility at the current point, and either fail
+ // or edit the imports as needed.
+ qual := typesinternal.FileQualifier(file, pass.Pkg)
+ tstr := types.TypeString(t, qual)
+
+ sel, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ continue // e.g. reflect was dot-imported
+ }
+
+ // If the call argument contains the last use
+ // of a variable, as in:
+ // var zero T
+ // reflect.TypeOf(zero)
+ // remove the declaration of that variable.
+ curArg0 := curCall.ChildAt(edge.CallExpr_Args, 0)
+ edits = append(edits, refactor.DeleteUnusedVars(index, info, tokFile, curArg0)...)
+
+ pass.Report(analysis.Diagnostic{
+ Pos: call.Fun.Pos(),
+ End: call.Fun.End(),
+ Message: "reflect.TypeOf call can be simplified using TypeFor",
+ SuggestedFixes: []analysis.SuggestedFix{{
+ // reflect.TypeOf (...T value...)
+ // ------ -------------
+ // reflect.TypeFor[T]( )
+ Message: "Replace TypeOf by TypeFor",
+ TextEdits: append([]analysis.TextEdit{
+ {
+ Pos: sel.Sel.Pos(),
+ End: sel.Sel.End(),
+ NewText: []byte("TypeFor[" + tstr + "]"),
+ },
+ // delete (pure) argument
+ {
+ Pos: call.Lparen + 1,
+ End: call.Rparen,
+ },
+ }, edits...),
+ }},
+ })
+ }
+
+ return nil, nil
+}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+ "slices"
+ "strconv"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+// Warning: this analyzer is not safe to enable by default.
+var AppendClippedAnalyzer = &analysis.Analyzer{
+ Name: "appendclipped",
+ Doc: analysisinternal.MustExtractDoc(doc, "appendclipped"),
+ Requires: []*analysis.Analyzer{
+ generated.Analyzer,
+ inspect.Analyzer,
+ },
+ Run: appendclipped,
+ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#appendclipped",
+}
+
+// The appendclipped pass offers to simplify a tower of append calls:
+//
+//	append(append(append(base, a...), b..., c...)
+//
+// with a call to go1.21's slices.Concat(base, a, b, c), or simpler
+// replacements such as slices.Clone(a) in degenerate cases.
+//
+// We offer bytes.Clone in preference to slices.Clone where
+// appropriate, if the package already imports "bytes";
+// their behaviors are identical.
+//
+// The base expression must denote a clipped slice (see [isClipped]
+// for definition), otherwise the replacement might eliminate intended
+// side effects to the base slice's array.
+//
+// Examples:
+//
+//	append(append(append(x[:0:0], a...), b...), c...) -> slices.Concat(a, b, c)
+//	append(append(slices.Clip(a), b...)               -> slices.Concat(a, b)
+//	append([]T{}, a...)                               -> slices.Clone(a)
+//	append([]string(nil), os.Environ()...)            -> os.Environ()
+//
+// The fix does not always preserve nilness the of base slice when the
+// addends (a, b, c) are all empty (see #73557).
+func appendclipped(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	// Skip the analyzer in packages where its
+	// fixes would create an import cycle.
+	if within(pass, "slices", "bytes", "runtime") {
+		return nil, nil
+	}
+
+	info := pass.TypesInfo
+
+	// sliceArgs is a non-empty (reversed) list of slices to be concatenated.
+	simplifyAppendEllipsis := func(file *ast.File, call *ast.CallExpr, base ast.Expr, sliceArgs []ast.Expr) {
+		// Only appends whose base is a clipped slice can be simplified:
+		// We must conservatively assume an append to an unclipped slice
+		// such as append(y[:0], x...) is intended to have effects on y.
+		clipped, empty := clippedSlice(info, base)
+		if clipped == nil {
+			return
+		}
+
+		// If any slice arg has a different type from the base
+		// (and thus the result) don't offer a fix, to avoid
+		// changing the return type, e.g:
+		//
+		//   type S []int
+		// - x := append([]int(nil), S{}...) // x : []int
+		// + x := slices.Clone(S{})          // x : S
+		//
+		// We could do better by inserting an explicit generic
+		// instantiation:
+		//
+		//   x := slices.Clone[[]int](S{})
+		//
+		// but this is often unnecessary and unwanted, such as
+		// when the value is used an in assignment context that
+		// provides an explicit type:
+		//
+		//   var x []int = slices.Clone(S{})
+		baseType := info.TypeOf(base)
+		for _, arg := range sliceArgs {
+			if !types.Identical(info.TypeOf(arg), baseType) {
+				return
+			}
+		}
+
+		// If the (clipped) base is empty, it may be safely ignored.
+		// Otherwise treat it (or its unclipped subexpression, if possible)
+		// as just another arg (the first) to Concat.
+		//
+		// TODO(adonovan): not so fast! If all the operands
+		// are empty, then the nilness of base matters, because
+		// append preserves nilness whereas Concat does not (#73557).
+		if !empty {
+			sliceArgs = append(sliceArgs, clipped)
+		}
+		slices.Reverse(sliceArgs)
+
+		// TODO(adonovan): simplify sliceArgs[0] further: slices.Clone(s) -> s
+
+		// Concat of a single (non-trivial) slice degenerates to Clone.
+		if len(sliceArgs) == 1 {
+			s := sliceArgs[0]
+
+			// Special case for common but redundant clone of os.Environ().
+			// append(zerocap, os.Environ()...) -> os.Environ()
+			if scall, ok := s.(*ast.CallExpr); ok {
+				obj := typeutil.Callee(info, scall)
+				if typesinternal.IsFunctionNamed(obj, "os", "Environ") {
+					pass.Report(analysis.Diagnostic{
+						Pos:     call.Pos(),
+						End:     call.End(),
+						Message: "Redundant clone of os.Environ()",
+						SuggestedFixes: []analysis.SuggestedFix{{
+							Message: "Eliminate redundant clone",
+							TextEdits: []analysis.TextEdit{{
+								Pos:     call.Pos(),
+								End:     call.End(),
+								NewText: []byte(astutil.Format(pass.Fset, s)),
+							}},
+						}},
+					})
+					return
+				}
+			}
+
+			// If the slice type is []byte, and the file imports
+			// "bytes" but not "slices", prefer the (behaviorally
+			// identical) bytes.Clone for local consistency.
+			// https://go.dev/issue/70815#issuecomment-2671572984
+			fileImports := func(path string) bool {
+				return slices.ContainsFunc(file.Imports, func(spec *ast.ImportSpec) bool {
+					value, _ := strconv.Unquote(spec.Path.Value)
+					return value == path
+				})
+			}
+			clonepkg := cond(
+				types.Identical(info.TypeOf(call), byteSliceType) &&
+					!fileImports("slices") && fileImports("bytes"),
+				"bytes",
+				"slices")
+
+			// append(zerocap, s...) -> slices.Clone(s) or bytes.Clone(s)
+			//
+			// This is unsound if s is empty and its nilness
+			// differs from zerocap (#73557).
+			prefix, importEdits := refactor.AddImport(info, file, clonepkg, clonepkg, "Clone", call.Pos())
+			message := fmt.Sprintf("Replace append with %s.Clone", clonepkg)
+			pass.Report(analysis.Diagnostic{
+				Pos:     call.Pos(),
+				End:     call.End(),
+				Message: message,
+				SuggestedFixes: []analysis.SuggestedFix{{
+					Message: message,
+					TextEdits: append(importEdits, []analysis.TextEdit{{
+						Pos:     call.Pos(),
+						End:     call.End(),
+						NewText: fmt.Appendf(nil, "%sClone(%s)", prefix, astutil.Format(pass.Fset, s)),
+					}}...),
+				}},
+			})
+			return
+		}
+
+		// append(append(append(base, a...), b..., c...) -> slices.Concat(base, a, b, c)
+		//
+		// This is unsound if all slices are empty and base is non-nil (#73557).
+		prefix, importEdits := refactor.AddImport(info, file, "slices", "slices", "Concat", call.Pos())
+		pass.Report(analysis.Diagnostic{
+			Pos:     call.Pos(),
+			End:     call.End(),
+			Message: "Replace append with slices.Concat",
+			SuggestedFixes: []analysis.SuggestedFix{{
+				Message: "Replace append with slices.Concat",
+				TextEdits: append(importEdits, []analysis.TextEdit{{
+					Pos:     call.Pos(),
+					End:     call.End(),
+					NewText: fmt.Appendf(nil, "%sConcat(%s)", prefix, formatExprs(pass.Fset, sliceArgs)),
+				}}...),
+			}},
+		})
+	}
+
+	// Mark nested calls to append so that we don't emit diagnostics for them.
+	skip := make(map[*ast.CallExpr]bool)
+
+	// Visit calls of form append(x, y...).
+	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+	for curFile := range filesUsing(inspect, info, "go1.21") {
+		file := curFile.Node().(*ast.File)
+
+		for curCall := range curFile.Preorder((*ast.CallExpr)(nil)) {
+			call := curCall.Node().(*ast.CallExpr)
+			if skip[call] {
+				continue
+			}
+
+			// Recursively unwrap ellipsis calls to append, so
+			//   append(append(append(base, a...), b..., c...)
+			// yields (base, [c b a]).
+			// (The local "slices" deliberately shadows the
+			// package of the same name within this loop.)
+			base, slices := ast.Expr(call), []ast.Expr(nil) // base case: (call, nil)
+		again:
+			if call, ok := base.(*ast.CallExpr); ok {
+				if id, ok := call.Fun.(*ast.Ident); ok &&
+					call.Ellipsis.IsValid() &&
+					len(call.Args) == 2 &&
+					info.Uses[id] == builtinAppend {
+
+					// Have: append(base, s...)
+					base, slices = call.Args[0], append(slices, call.Args[1])
+					skip[call] = true
+					goto again
+				}
+			}
+
+			if len(slices) > 0 {
+				simplifyAppendEllipsis(file, call, base, slices)
+			}
+		}
+	}
+	return nil, nil
+}
+
+// clippedSlice returns res != nil if e denotes a slice that is
+// definitely clipped, that is, its len(s)==cap(s).
+//
+// The value of res is either the same as e or is a subexpression of e
+// that denotes the same slice but without the clipping operation.
+//
+// In addition, it reports whether the slice is definitely empty.
+//
+// Examples of clipped slices:
+//
+//	x[:0:0]				(empty)
+//	[]T(nil)			(empty)
+//	Slice{}				(empty)
+//	x[:len(x):len(x)]		(nonempty)  res=x
+//	x[:k:k]				(nonempty)
+//	slices.Clip(x)			(nonempty)  res=x
+//
+// TODO(adonovan): Add a check that the expression x has no side effects in
+// case x[:len(x):len(x)] -> x. Now the program behavior may change.
+func clippedSlice(info *types.Info, e ast.Expr) (res ast.Expr, empty bool) {
+	switch e := e.(type) {
+	case *ast.SliceExpr:
+		// x[:0:0], x[:len(x):len(x)], x[:k:k]
+		if e.Slice3 && e.High != nil && e.Max != nil && astutil.EqualSyntax(e.High, e.Max) { // x[:k:k]
+			res = e
+			empty = isZeroIntLiteral(info, e.High) // x[:0:0]
+			if call, ok := e.High.(*ast.CallExpr); ok &&
+				typeutil.Callee(info, call) == builtinLen &&
+				astutil.EqualSyntax(call.Args[0], e.X) {
+				res = e.X // x[:len(x):len(x)] -> x
+			}
+			return
+		}
+		return
+
+	case *ast.CallExpr:
+		// []T(nil)?
+		// (A conversion that type-checked has exactly one argument,
+		// so e.Args[0] is safe here.)
+		if info.Types[e.Fun].IsType() &&
+			is[*ast.Ident](e.Args[0]) &&
+			info.Uses[e.Args[0].(*ast.Ident)] == builtinNil {
+			return e, true
+		}
+
+		// slices.Clip(x)?
+		obj := typeutil.Callee(info, e)
+		if typesinternal.IsFunctionNamed(obj, "slices", "Clip") {
+			return e.Args[0], false // slices.Clip(x) -> x
+		}
+
+	case *ast.CompositeLit:
+		// Slice{}?
+		if len(e.Elts) == 0 {
+			return e, true
+		}
+	}
+	return nil, false
+}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/typeparams"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// SlicesContainsAnalyzer identifies "for ... range" loops that can be
+// replaced by a call to slices.Contains or slices.ContainsFunc; see
+// the doc comment on slicescontains below.
+var SlicesContainsAnalyzer = &analysis.Analyzer{
+	Name: "slicescontains",
+	Doc:  analysisinternal.MustExtractDoc(doc, "slicescontains"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+		typeindexanalyzer.Analyzer,
+	},
+	Run: slicescontains,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicescontains",
+}
+
+// The slicescontains pass identifies loops that can be replaced by a
+// call to slices.Contains{,Func}. For example:
+//
+//	for i, elem := range s {
+//		if elem == needle {
+//			...
+//			break
+//		}
+//	}
+//
+// =>
+//
+//	if slices.Contains(s, needle) { ... }
+//
+// Variants:
+//   - if the if-condition is f(elem), the replacement
+//     uses slices.ContainsFunc(s, f).
+//   - if the if-body is "return true" and the fallthrough
+//     statement is "return false" (or vice versa), the
+//     loop becomes "return [!]slices.Contains(...)".
+//   - if the if-body is "found = true" and the previous
+//     statement is "found = false" (or vice versa), the
+//     loop becomes "found = [!]slices.Contains(...)".
+//
+// It may change cardinality of effects of the "needle" expression.
+// (Mostly this appears to be a desirable optimization, avoiding
+// redundantly repeated evaluation.)
+//
+// TODO(adonovan): Add a check that the needle/predicate expression from
+// the if-statement has no side effects; otherwise the fix may change
+// program behavior.
+func slicescontains(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	// Skip the analyzer in packages where its
+	// fixes would create an import cycle.
+	if within(pass, "slices", "runtime") {
+		return nil, nil
+	}
+
+	var (
+		inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+		index   = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+		info    = pass.TypesInfo
+	)
+
+	// check is called for each RangeStmt of this form:
+	// for i, elem := range s { if cond { ... } }
+	check := func(file *ast.File, curRange inspector.Cursor) {
+		rng := curRange.Node().(*ast.RangeStmt)
+		ifStmt := rng.Body.List[0].(*ast.IfStmt)
+
+		// isSliceElem reports whether e denotes the
+		// current slice element (elem or s[i]).
+		isSliceElem := func(e ast.Expr) bool {
+			if rng.Value != nil && astutil.EqualSyntax(e, rng.Value) {
+				return true // "elem"
+			}
+			if x, ok := e.(*ast.IndexExpr); ok &&
+				astutil.EqualSyntax(x.X, rng.X) &&
+				astutil.EqualSyntax(x.Index, rng.Key) {
+				return true // "s[i]"
+			}
+			return false
+		}
+
+		// Examine the condition for one of these forms:
+		//
+		// - if elem or s[i] == needle { ... }    => Contains
+		// - if predicate(s[i] or elem) { ... }   => ContainsFunc
+		var (
+			funcName string   // "Contains" or "ContainsFunc"
+			arg2     ast.Expr // second argument to func (needle or predicate)
+		)
+		switch cond := ifStmt.Cond.(type) {
+		case *ast.BinaryExpr:
+			if cond.Op == token.EQL {
+				var elem ast.Expr
+				if isSliceElem(cond.X) {
+					funcName = "Contains"
+					elem = cond.X
+					arg2 = cond.Y // "if elem == needle"
+				} else if isSliceElem(cond.Y) {
+					funcName = "Contains"
+					elem = cond.Y
+					arg2 = cond.X // "if needle == elem"
+				}
+
+				// Reject if elem and needle have different types.
+				if elem != nil {
+					tElem := info.TypeOf(elem)
+					tNeedle := info.TypeOf(arg2)
+					if !types.Identical(tElem, tNeedle) {
+						// Avoid ill-typed slices.Contains([]error, any).
+						if !types.AssignableTo(tNeedle, tElem) {
+							return
+						}
+						// TODO(adonovan): relax this check to allow
+						// slices.Contains([]error, error(any)),
+						// inserting an explicit widening conversion
+						// around the needle.
+						return
+					}
+				}
+			}
+
+		case *ast.CallExpr:
+			if len(cond.Args) == 1 &&
+				isSliceElem(cond.Args[0]) &&
+				typeutil.Callee(info, cond) != nil { // not a conversion
+
+				// Attempt to get the predicate's signature.
+				sig, isSignature := info.TypeOf(cond.Fun).(*types.Signature)
+				if isSignature {
+					// skip variadic functions
+					if sig.Variadic() {
+						return
+					}
+
+					// Slice element type must match function parameter type.
+					var (
+						tElem  = typeparams.CoreType(info.TypeOf(rng.X)).(*types.Slice).Elem()
+						tParam = sig.Params().At(0).Type()
+					)
+					if !types.Identical(tElem, tParam) {
+						return
+					}
+				}
+
+				funcName = "ContainsFunc"
+				arg2 = cond.Fun // "if predicate(elem)"
+			}
+		}
+		if funcName == "" {
+			return // not a candidate for Contains{,Func}
+		}
+
+		// body is the "true" body.
+		body := ifStmt.Body
+		if len(body.List) == 0 {
+			// (We could perhaps delete the loop entirely.)
+			return
+		}
+
+		// Reject if the body, needle or predicate references either range variable.
+		usesRangeVar := func(n ast.Node) bool {
+			cur, ok := curRange.FindNode(n)
+			if !ok {
+				panic(fmt.Sprintf("FindNode(%T) failed", n))
+			}
+			return uses(index, cur, info.Defs[rng.Key.(*ast.Ident)]) ||
+				rng.Value != nil && uses(index, cur, info.Defs[rng.Value.(*ast.Ident)])
+		}
+		if usesRangeVar(body) {
+			// Body uses range var "i" or "elem".
+			//
+			// (The check for "i" could be relaxed when we
+			// generalize this to support slices.Index;
+			// and the check for "elem" could be relaxed
+			// if "elem" can safely be replaced in the
+			// body by "needle".)
+			return
+		}
+		if usesRangeVar(arg2) {
+			return
+		}
+
+		// Prepare slices.Contains{,Func} call.
+		prefix, importEdits := refactor.AddImport(info, file, "slices", "slices", funcName, rng.Pos())
+		contains := fmt.Sprintf("%s%s(%s, %s)",
+			prefix,
+			funcName,
+			astutil.Format(pass.Fset, rng.X),
+			astutil.Format(pass.Fset, arg2))
+
+		report := func(edits []analysis.TextEdit) {
+			pass.Report(analysis.Diagnostic{
+				Pos:     rng.Pos(),
+				End:     rng.End(),
+				Message: fmt.Sprintf("Loop can be simplified using slices.%s", funcName),
+				SuggestedFixes: []analysis.SuggestedFix{{
+					Message:   "Replace loop by call to slices." + funcName,
+					TextEdits: append(edits, importEdits...),
+				}},
+			})
+		}
+
+		// Last statement of body must return/break out of the loop.
+		//
+		// TODO(adonovan): opt: consider avoiding FindNode with new API of form:
+		// curRange.Get(edge.RangeStmt_Body, -1).
+		// Get(edge.BodyStmt_List, 0).
+		// Get(edge.IfStmt_Body)
+		curBody, _ := curRange.FindNode(body)
+		curLastStmt, _ := curBody.LastChild()
+
+		// Reject if any statement in the body except the
+		// last has a free continuation (continue or break)
+		// that might be affected by melting down the loop.
+		//
+		// TODO(adonovan): relax check by analyzing branch target.
+		for curBodyStmt := range curBody.Children() {
+			if curBodyStmt != curLastStmt {
+				for range curBodyStmt.Preorder((*ast.BranchStmt)(nil), (*ast.ReturnStmt)(nil)) {
+					return
+				}
+			}
+		}
+
+		switch lastStmt := curLastStmt.Node().(type) {
+		case *ast.ReturnStmt:
+			// Have: for ... range seq { if ... { stmts; return x } }
+
+			// Special case:
+			// body={ return true } next="return false" (or negation)
+			// => return [!]slices.Contains(...)
+			if curNext, ok := curRange.NextSibling(); ok {
+				nextStmt := curNext.Node().(ast.Stmt)
+				tval := isReturnTrueOrFalse(info, lastStmt)
+				fval := isReturnTrueOrFalse(info, nextStmt)
+				if len(body.List) == 1 && tval*fval < 0 {
+					// for ... { if ... { return true/false } }
+					// => return [!]slices.Contains(...)
+					report([]analysis.TextEdit{
+						// Delete the range statement and following space.
+						{
+							Pos: rng.Pos(),
+							End: nextStmt.Pos(),
+						},
+						// Change return to [!]slices.Contains(...).
+						{
+							Pos: nextStmt.Pos(),
+							End: nextStmt.End(),
+							NewText: fmt.Appendf(nil, "return %s%s",
+								cond(tval > 0, "", "!"),
+								contains),
+						},
+					})
+					return
+				}
+			}
+
+			// General case:
+			// => if slices.Contains(...) { stmts; return x }
+			report([]analysis.TextEdit{
+				// Replace "for ... { if ... " with "if slices.Contains(...)".
+				{
+					Pos:     rng.Pos(),
+					End:     ifStmt.Body.Pos(),
+					NewText: fmt.Appendf(nil, "if %s ", contains),
+				},
+				// Delete '}' of range statement and preceding space.
+				{
+					Pos: ifStmt.Body.End(),
+					End: rng.End(),
+				},
+			})
+			return
+
+		case *ast.BranchStmt:
+			if lastStmt.Tok == token.BREAK && lastStmt.Label == nil { // unlabeled break
+				// Have: for ... { if ... { stmts; break } }
+
+				var prevStmt ast.Stmt // previous statement to range (if any)
+				if curPrev, ok := curRange.PrevSibling(); ok {
+					// If the RangeStmt's previous sibling is a Stmt,
+					// the RangeStmt must be among the Body list of
+					// a BlockStmt, CaseClause, or CommClause.
+					// In all cases, the prevStmt is the immediate
+					// predecessor of the RangeStmt during execution.
+					//
+					// (This is not true for Stmts in general;
+					// see [Cursor.Children] and #71074.)
+					prevStmt, _ = curPrev.Node().(ast.Stmt)
+				}
+
+				// Special case:
+				// prev="lhs = false" body={ lhs = true; break }
+				// => lhs = slices.Contains(...) (or negation)
+				if assign, ok := body.List[0].(*ast.AssignStmt); ok &&
+					len(body.List) == 2 &&
+					assign.Tok == token.ASSIGN &&
+					len(assign.Lhs) == 1 &&
+					len(assign.Rhs) == 1 {
+
+					// Have: body={ lhs = rhs; break }
+
+					if prevAssign, ok := prevStmt.(*ast.AssignStmt); ok &&
+						len(prevAssign.Lhs) == 1 &&
+						len(prevAssign.Rhs) == 1 &&
+						astutil.EqualSyntax(prevAssign.Lhs[0], assign.Lhs[0]) &&
+						is[*ast.Ident](assign.Rhs[0]) &&
+						info.Uses[assign.Rhs[0].(*ast.Ident)] == builtinTrue {
+
+						// Have:
+						// lhs = false
+						// for ... { if ... { lhs = true; break } }
+						// =>
+						// lhs = slices.Contains(...)
+						//
+						// TODO(adonovan):
+						// - support "var lhs bool = false" and variants.
+						// - support negation.
+						// Both these variants seem quite significant.
+						// - allow the break to be omitted.
+						report([]analysis.TextEdit{
+							// Replace "rhs" of previous assignment by slices.Contains(...)
+							{
+								Pos:     prevAssign.Rhs[0].Pos(),
+								End:     prevAssign.Rhs[0].End(),
+								NewText: []byte(contains),
+							},
+							// Delete the loop and preceding space.
+							{
+								Pos: prevAssign.Rhs[0].End(),
+								End: rng.End(),
+							},
+						})
+						return
+					}
+				}
+
+				// General case:
+				// for ... { if ... { stmts; break } }
+				// => if slices.Contains(...) { stmts }
+				report([]analysis.TextEdit{
+					// Replace "for ... { if ... " with "if slices.Contains(...)".
+					{
+						Pos:     rng.Pos(),
+						End:     ifStmt.Body.Pos(),
+						NewText: fmt.Appendf(nil, "if %s ", contains),
+					},
+					// Delete break statement and preceding space.
+					{
+						Pos: func() token.Pos {
+							if len(body.List) > 1 {
+								beforeBreak, _ := curLastStmt.PrevSibling()
+								return beforeBreak.Node().End()
+							}
+							return lastStmt.Pos()
+						}(),
+						End: lastStmt.End(),
+					},
+					// Delete '}' of range statement and preceding space.
+					{
+						Pos: ifStmt.Body.End(),
+						End: rng.End(),
+					},
+				})
+				return
+			}
+		}
+	}
+
+	for curFile := range filesUsing(inspect, info, "go1.21") {
+		file := curFile.Node().(*ast.File)
+
+		for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) {
+			rng := curRange.Node().(*ast.RangeStmt)
+
+			if is[*ast.Ident](rng.Key) &&
+				rng.Tok == token.DEFINE &&
+				len(rng.Body.List) == 1 &&
+				is[*types.Slice](typeparams.CoreType(info.TypeOf(rng.X))) {
+
+				// Have:
+				// - for _, elem := range s { S }
+				// - for i := range s { S }
+
+				if ifStmt, ok := rng.Body.List[0].(*ast.IfStmt); ok &&
+					ifStmt.Init == nil && ifStmt.Else == nil {
+
+					// Have: for i, elem := range s { if cond { ... } }
+					check(file, curRange)
+				}
+			}
+		}
+	}
+	return nil, nil
+}
+
+// -- helpers --
+
+// isReturnTrueOrFalse returns nonzero if stmt returns the built-in
+// constant true (+1) or false (-1). It returns 0 in all other cases,
+// including returns of any other boolean expression.
+func isReturnTrueOrFalse(info *types.Info, stmt ast.Stmt) int {
+	if ret, ok := stmt.(*ast.ReturnStmt); ok && len(ret.Results) == 1 {
+		if id, ok := ret.Results[0].(*ast.Ident); ok {
+			switch info.Uses[id] {
+			case builtinTrue:
+				return +1
+			case builtinFalse:
+				return -1
+			}
+		}
+	}
+	return 0
+}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+// SlicesDeleteAnalyzer defines the modernizer that replaces
+// append(s[:i], s[j:]...) by slices.Delete(s, i, j); see
+// [slicesdelete] for details.
+//
+// Warning: this analyzer is not safe to enable by default (not nil-preserving).
+var SlicesDeleteAnalyzer = &analysis.Analyzer{
+	Name: "slicesdelete",
+	Doc:  analysisinternal.MustExtractDoc(doc, "slicesdelete"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+	},
+	Run: slicesdelete,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicesdelete",
+}
+
+// The slicesdelete pass attempts to replace instances of append(s[:i], s[i+k:]...)
+// with slices.Delete(s, i, i+k) where k is some positive constant.
+// Other variations that will also have suggested replacements include:
+// append(s[:i-1], s[i:]...) and append(s[:i+k1], s[i+k2:]...) where k2 > k1.
+func slicesdelete(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	// Skip the analyzer in packages where its
+	// fixes would create an import cycle.
+	if within(pass, "slices", "runtime") {
+		return nil, nil
+	}
+
+	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+	info := pass.TypesInfo
+	// report offers the slices.Delete fix for a verified candidate call,
+	// append(s[:a], s[b:]...), described by call and its two slice operands.
+	report := func(file *ast.File, call *ast.CallExpr, slice1, slice2 *ast.SliceExpr) {
+		insert := func(pos token.Pos, text string) analysis.TextEdit {
+			return analysis.TextEdit{Pos: pos, End: pos, NewText: []byte(text)}
+		}
+		isIntExpr := func(e ast.Expr) bool {
+			return types.Identical(types.Default(info.TypeOf(e)), builtinInt.Type())
+		}
+		isIntShadowed := func() bool {
+			scope := pass.TypesInfo.Scopes[file].Innermost(call.Lparen)
+			if _, obj := scope.LookupParent("int", call.Lparen); obj != builtinInt {
+				return true // int type is shadowed
+			}
+			return false
+		}
+
+		prefix, edits := refactor.AddImport(info, file, "slices", "slices", "Delete", call.Pos())
+		// append's indices may be any integer type; slices.Delete requires int.
+		// Insert int conversions as needed (and if possible).
+		if isIntShadowed() && (!isIntExpr(slice1.High) || !isIntExpr(slice2.Low)) {
+			return
+		}
+		if !isIntExpr(slice1.High) {
+			edits = append(edits,
+				insert(slice1.High.Pos(), "int("),
+				insert(slice1.High.End(), ")"),
+			)
+		}
+		if !isIntExpr(slice2.Low) {
+			edits = append(edits,
+				insert(slice2.Low.Pos(), "int("),
+				insert(slice2.Low.End(), ")"),
+			)
+		}
+
+		pass.Report(analysis.Diagnostic{
+			Pos:     call.Pos(),
+			End:     call.End(),
+			Message: "Replace append with slices.Delete",
+			SuggestedFixes: []analysis.SuggestedFix{{
+				Message: "Replace append with slices.Delete",
+				TextEdits: append(edits, []analysis.TextEdit{
+					// Change name of called function.
+					{
+						Pos:     call.Fun.Pos(),
+						End:     call.Fun.End(),
+						NewText: []byte(prefix + "Delete"),
+					},
+					// Delete ellipsis.
+					{
+						Pos: call.Ellipsis,
+						End: call.Ellipsis + token.Pos(len("...")), // delete ellipsis
+					},
+					// Remove second slice variable name.
+					{
+						Pos: slice2.X.Pos(),
+						End: slice2.X.End(),
+					},
+					// Insert after first slice variable name.
+					{
+						Pos:     slice1.X.End(),
+						NewText: []byte(", "),
+					},
+					// Remove brackets and colons.
+					{
+						Pos: slice1.Lbrack,
+						End: slice1.High.Pos(),
+					},
+					{
+						Pos: slice1.Rbrack,
+						End: slice1.Rbrack + 1,
+					},
+					{
+						Pos: slice2.Lbrack,
+						End: slice2.Lbrack + 1,
+					},
+					{
+						Pos: slice2.Low.End(),
+						End: slice2.Rbrack + 1,
+					},
+				}...),
+			}},
+		})
+	}
+	for curFile := range filesUsing(inspect, info, "go1.21") {
+		file := curFile.Node().(*ast.File)
+		for curCall := range curFile.Preorder((*ast.CallExpr)(nil)) {
+			call := curCall.Node().(*ast.CallExpr)
+			if id, ok := call.Fun.(*ast.Ident); ok && len(call.Args) == 2 {
+				// Verify we have append with two slices and ... operator,
+				// the first slice has no low index and second slice has no
+				// high index, and not a three-index slice.
+				if call.Ellipsis.IsValid() && info.Uses[id] == builtinAppend {
+					slice1, ok1 := call.Args[0].(*ast.SliceExpr)
+					slice2, ok2 := call.Args[1].(*ast.SliceExpr)
+					if ok1 && slice1.Low == nil && !slice1.Slice3 &&
+						ok2 && slice2.High == nil && !slice2.Slice3 &&
+						astutil.EqualSyntax(slice1.X, slice2.X) &&
+						typesinternal.NoEffects(info, slice1.X) &&
+						increasingSliceIndices(info, slice1.High, slice2.Low) {
+						// Have append(s[:a], s[b:]...) where we can verify a < b.
+						report(file, call, slice1, slice2)
+					}
+				}
+			}
+		}
+	}
+	return nil, nil
+}
+
+// increasingSliceIndices reports whether, given two slice indices a
+// and b, we can statically verify that a < b.
+// It recognizes certain forms such as i+k1 < i+k2 where k1 < k2.
+func increasingSliceIndices(info *types.Info, a, b ast.Expr) bool {
+	// Given an expression of the form i±k, returns (i, k)
+	// where k is a signed constant. Otherwise it returns (e, 0).
+	split := func(e ast.Expr) (ast.Expr, constant.Value) {
+		if binary, ok := e.(*ast.BinaryExpr); ok && (binary.Op == token.SUB || binary.Op == token.ADD) {
+			// Negate constants if operation is subtract instead of add
+			if k := info.Types[binary.Y].Value; k != nil {
+				return binary.X, constant.UnaryOp(binary.Op, k, 0) // i ± k
+			}
+		}
+		return e, constant.MakeInt64(0)
+	}
+
+	// Handle case where either a or b is a constant
+	ak := info.Types[a].Value
+	bk := info.Types[b].Value
+	if ak != nil || bk != nil {
+		return ak != nil && bk != nil && constant.Compare(ak, token.LSS, bk)
+	}
+
+	ai, ak := split(a)
+	bi, bk := split(b)
+	return astutil.EqualSyntax(ai, bi) && constant.Compare(ak, token.LSS, bk)
+}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// SlicesSortAnalyzer defines the modernizer that replaces
+// sort.Slice(s, less) by slices.Sort(s); see [slicessort] for details.
+//
+// (Not to be confused with go/analysis/passes/sortslice.)
+var SlicesSortAnalyzer = &analysis.Analyzer{
+	Name: "slicessort",
+	Doc:  analysisinternal.MustExtractDoc(doc, "slicessort"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+		typeindexanalyzer.Analyzer,
+	},
+	Run: slicessort,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicessort",
+}
+
+// The slicessort pass replaces sort.Slice(slice, less) with
+// slices.Sort(slice) when slice is a []T and less is a FuncLit
+// equivalent to the natural (<) ordering of T.
+//
+//	sort.Slice(s, func(i, j int) bool { return s[i] < s[j] })
+//	=> slices.Sort(s)
+//
+// There is no slices.SortStable.
+//
+// TODO(adonovan): support
+//
+//   - sort.Slice(s, func(i, j int) bool { return s[i] ... s[j] })
+//     -> slices.SortFunc(s, func(x, y T) int { return x ... y })
+//     iff all uses of i, j can be replaced by s[i], s[j] and "<" can be replaced with cmp.Compare.
+//
+//   - As above for sort.SliceStable -> slices.SortStableFunc.
+//
+//   - sort.Sort(x) where x has a named slice type whose Less method is the natural order.
+//     -> sort.Slice(x)
+func slicessort(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	// Skip the analyzer in packages where its
+	// fixes would create an import cycle.
+	if within(pass, "slices", "sort", "runtime") {
+		return nil, nil
+	}
+
+	var (
+		info      = pass.TypesInfo
+		index     = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+		sortSlice = index.Object("sort", "Slice")
+	)
+	for curCall := range index.Calls(sortSlice) {
+		call := curCall.Node().(*ast.CallExpr)
+		if lit, ok := call.Args[1].(*ast.FuncLit); ok && len(lit.Body.List) == 1 {
+			sig := info.Types[lit.Type].Type.(*types.Signature)
+
+			// Have: sort.Slice(s, func(i, j int) bool { return ... })
+			s := call.Args[0]
+			i := sig.Params().At(0)
+			j := sig.Params().At(1)
+
+			// The len check rejects a bare "return" from a FuncLit with a
+			// named bool result, which would otherwise panic on Results[0].
+			if ret, ok := lit.Body.List[0].(*ast.ReturnStmt); ok && len(ret.Results) == 1 {
+				if compare, ok := ret.Results[0].(*ast.BinaryExpr); ok && compare.Op == token.LSS {
+					// isIndex reports whether e is s[v].
+					isIndex := func(e ast.Expr, v *types.Var) bool {
+						index, ok := e.(*ast.IndexExpr)
+						return ok &&
+							astutil.EqualSyntax(index.X, s) &&
+							is[*ast.Ident](index.Index) &&
+							info.Uses[index.Index.(*ast.Ident)] == v
+					}
+					file := astutil.EnclosingFile(curCall)
+					if isIndex(compare.X, i) && isIndex(compare.Y, j) &&
+						fileUses(info, file, "go1.21") {
+						// Have: sort.Slice(s, func(i, j int) bool { return s[i] < s[j] })
+
+						prefix, importEdits := refactor.AddImport(
+							info, file, "slices", "slices", "Sort", call.Pos())
+
+						pass.Report(analysis.Diagnostic{
+							// Highlight "sort.Slice".
+							Pos:     call.Fun.Pos(),
+							End:     call.Fun.End(),
+							Message: "sort.Slice can be modernized using slices.Sort",
+							SuggestedFixes: []analysis.SuggestedFix{{
+								Message: "Replace sort.Slice call by slices.Sort",
+								TextEdits: append(importEdits, []analysis.TextEdit{
+									{
+										// Replace sort.Slice with slices.Sort.
+										Pos:     call.Fun.Pos(),
+										End:     call.Fun.End(),
+										NewText: []byte(prefix + "Sort"),
+									},
+									{
+										// Eliminate FuncLit.
+										Pos: call.Args[0].End(),
+										End: call.Rparen,
+									},
+								}...),
+							}},
+						})
+					}
+				}
+			}
+		}
+	}
+	return nil, nil
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/goplsexport"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/stdlib"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// stditeratorsAnalyzer defines the (currently unpublished) modernizer
+// that replaces Len/At-style loops over standard-library types by
+// range loops over their iterator methods; see [stditerators].
+var stditeratorsAnalyzer = &analysis.Analyzer{
+	Name: "stditerators",
+	Doc:  analysisinternal.MustExtractDoc(doc, "stditerators"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		typeindexanalyzer.Analyzer,
+	},
+	Run: stditerators,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stditerators",
+}
+
+func init() {
+	// Export the analyzer to gopls (via the goplsexport package)
+	// until this is a published modernizer.
+	goplsexport.StdIteratorsModernizer = stditeratorsAnalyzer
+}
+
+// stditeratorsTable records std types that have legacy T.{Len,At}
+// iteration methods as well as a newer T.All method that returns an
+// iter.Seq.
+//
+// elemname is the preferred base name for the loop variable of the
+// replacement range loop (a fresh variant is chosen if it is shadowed).
+var stditeratorsTable = [...]struct {
+	pkgpath, typename, lenmethod, atmethod, itermethod, elemname string
+}{
+	// Example: in go/types, (*Tuple).Variables returns an
+	// iterator that replaces a loop over (*Tuple).{Len,At}.
+	// The loop variable is named "v".
+	{"go/types", "Interface", "NumEmbeddeds", "EmbeddedType", "EmbeddedTypes", "etyp"},
+	{"go/types", "Interface", "NumExplicitMethods", "ExplicitMethod", "ExplicitMethods", "method"},
+	{"go/types", "Interface", "NumMethods", "Method", "Methods", "method"},
+	{"go/types", "MethodSet", "Len", "At", "Methods", "method"},
+	{"go/types", "Named", "NumMethods", "Method", "Methods", "method"},
+	{"go/types", "Scope", "NumChildren", "Child", "Children", "child"},
+	{"go/types", "Struct", "NumFields", "Field", "Fields", "field"},
+	{"go/types", "Tuple", "Len", "At", "Variables", "v"},
+	{"go/types", "TypeList", "Len", "At", "Types", "t"},
+	{"go/types", "TypeParamList", "Len", "At", "TypeParams", "tparam"},
+	{"go/types", "Union", "Len", "Term", "Terms", "term"},
+	// TODO(adonovan): support Seq2. Bonus: transform uses of both key and value.
+	// {"reflect", "Value", "NumFields", "Field", "Fields", "field"},
+}
+
+// stditerators suggests fixes to replace loops using Len/At-style
+// iterator APIs by a range loop over an iterator. The set of
+// participating types and methods is defined by [stditeratorsTable].
+//
+// Pattern:
+//
+//	for i := 0; i < x.Len(); i++ {
+//		use(x.At(i))
+//	}
+//
+// =>
+//
+//	for elem := range x.All() {
+//		use(elem)
+//	}
+//
+// Variant:
+//
+//	for i := range x.Len() { ... }
+//
+// Note: Iterators have a dynamic cost. How do we know that
+// the user hasn't intentionally chosen not to use an
+// iterator for that reason? We don't want to go fix to
+// undo optimizations. Do we need a suppression mechanism?
+func stditerators(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	var (
+		index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+		info  = pass.TypesInfo
+	)
+
+	for _, row := range stditeratorsTable {
+		// Don't offer fixes within the package
+		// that defines the iterator in question.
+		if within(pass, row.pkgpath) {
+			continue
+		}
+
+		var (
+			lenMethod = index.Selection(row.pkgpath, row.typename, row.lenmethod)
+			atMethod  = index.Selection(row.pkgpath, row.typename, row.atmethod)
+		)
+
+		// chooseName returns an appropriate fresh name
+		// for the index variable of the iterator loop
+		// whose body is specified.
+		//
+		// If the loop body starts with
+		//
+		//	for ... { e := x.At(i); use(e) }
+		//
+		// then chooseName prefers the name e and additionally
+		// returns the var's symbol. We'll transform this to:
+		//
+		//	for e := range x.Len() { e := e; use(e) }
+		//
+		// which leaves a redundant assignment that a
+		// subsequent 'forvar' pass will eliminate.
+		chooseName := func(curBody inspector.Cursor, x ast.Expr, i *types.Var) (string, *types.Var) {
+			// Is body { elem := x.At(i); ... } ?
+			body := curBody.Node().(*ast.BlockStmt)
+			if len(body.List) > 0 {
+				if assign, ok := body.List[0].(*ast.AssignStmt); ok &&
+					assign.Tok == token.DEFINE &&
+					len(assign.Lhs) == 1 &&
+					len(assign.Rhs) == 1 &&
+					is[*ast.Ident](assign.Lhs[0]) {
+					// call to x.At(i)?
+					if call, ok := assign.Rhs[0].(*ast.CallExpr); ok &&
+						typeutil.Callee(info, call) == atMethod &&
+						astutil.EqualSyntax(ast.Unparen(call.Fun).(*ast.SelectorExpr).X, x) &&
+						is[*ast.Ident](call.Args[0]) &&
+						info.Uses[call.Args[0].(*ast.Ident)] == i {
+						// Have: { elem := x.At(i); ... }
+						id := assign.Lhs[0].(*ast.Ident)
+						return id.Name, info.Defs[id].(*types.Var)
+					}
+				}
+			}
+
+			loop := curBody.Parent().Node()
+			return refactor.FreshName(info.Scopes[loop], loop.Pos(), row.elemname), nil
+		}
+
+		// Process each call of x.Len().
+	nextCall:
+		for curLenCall := range index.Calls(lenMethod) {
+			lenSel, ok := ast.Unparen(curLenCall.Node().(*ast.CallExpr).Fun).(*ast.SelectorExpr)
+			if !ok {
+				continue
+			}
+			// lenSel is "x.Len"
+
+			var (
+				rng      analysis.Range   // where to report diagnostic
+				curBody  inspector.Cursor // loop body
+				indexVar *types.Var       // old loop index var
+				elemVar  *types.Var       // existing "elem := x.At(i)" var, if present
+				elem     string           // name for new loop var
+				edits    []analysis.TextEdit
+			)
+
+			// Analyze enclosing loop.
+			switch ek, _ := curLenCall.ParentEdge(); ek {
+			case edge.BinaryExpr_Y:
+				// pattern 1: for i := 0; i < x.Len(); i++ { ... }
+				var (
+					curCmp = curLenCall.Parent()
+					cmp    = curCmp.Node().(*ast.BinaryExpr)
+				)
+				if cmp.Op != token.LSS ||
+					!astutil.IsChildOf(curCmp, edge.ForStmt_Cond) {
+					continue
+				}
+				if id, ok := cmp.X.(*ast.Ident); ok {
+					// Have: for _; i < x.Len(); _ { ... }
+					var (
+						v      = info.Uses[id].(*types.Var)
+						curFor = curCmp.Parent()
+						loop   = curFor.Node().(*ast.ForStmt)
+					)
+					if v != isIncrementLoop(info, loop) {
+						continue
+					}
+					// Have: for i := 0; i < x.Len(); i++ { ... }.
+					//       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+					rng = analysisinternal.Range(loop.For, loop.Post.End())
+					indexVar = v
+					curBody = curFor.ChildAt(edge.ForStmt_Body, -1)
+					elem, elemVar = chooseName(curBody, lenSel.X, indexVar)
+
+					// for i := 0; i < x.Len(); i++ {
+					//     ----    ------- ---  -----
+					// for elem := range   x.All()   {
+					edits = []analysis.TextEdit{
+						{
+							Pos:     v.Pos(),
+							End:     v.Pos() + token.Pos(len(v.Name())),
+							NewText: []byte(elem),
+						},
+						{
+							Pos:     loop.Init.(*ast.AssignStmt).Rhs[0].Pos(),
+							End:     cmp.Y.Pos(),
+							NewText: []byte("range "),
+						},
+						{
+							Pos:     lenSel.Sel.Pos(),
+							End:     lenSel.Sel.End(),
+							NewText: []byte(row.itermethod),
+						},
+						{
+							Pos: curLenCall.Node().End(),
+							End: loop.Post.End(),
+						},
+					}
+				}
+
+			case edge.RangeStmt_X:
+				// pattern 2: for i := range x.Len() { ... }
+				var (
+					curRange = curLenCall.Parent()
+					loop     = curRange.Node().(*ast.RangeStmt)
+				)
+				if id, ok := loop.Key.(*ast.Ident); ok &&
+					loop.Value == nil &&
+					loop.Tok == token.DEFINE {
+					// Have: for i := range x.Len() { ... }
+					//       ~~~~~~~~~~~~~
+
+					rng = analysisinternal.Range(loop.Range, loop.X.End())
+					indexVar = info.Defs[id].(*types.Var)
+					curBody = curRange.ChildAt(edge.RangeStmt_Body, -1)
+					elem, elemVar = chooseName(curBody, lenSel.X, indexVar)
+
+					// for i := range x.Len() {
+					//     ----         ---
+					// for elem := range x.All() {
+					edits = []analysis.TextEdit{
+						{
+							Pos:     loop.Key.Pos(),
+							End:     loop.Key.End(),
+							NewText: []byte(elem),
+						},
+						{
+							Pos:     lenSel.Sel.Pos(),
+							End:     lenSel.Sel.End(),
+							NewText: []byte(row.itermethod),
+						},
+					}
+				}
+			}
+
+			if indexVar == nil {
+				continue // no loop of the required form
+			}
+
+			// TODO(adonovan): what about possible
+			// modifications of x within the loop?
+			// Aliasing seems to make a conservative
+			// treatment impossible.
+
+			// Check that all uses of var i within loop body are x.At(i).
+			for curUse := range index.Uses(indexVar) {
+				if !curBody.Contains(curUse) {
+					continue
+				}
+				if ek, argidx := curUse.ParentEdge(); ek != edge.CallExpr_Args || argidx != 0 {
+					continue nextCall // use is not arg of call
+				}
+				curAtCall := curUse.Parent()
+				atCall := curAtCall.Node().(*ast.CallExpr)
+				if typeutil.Callee(info, atCall) != atMethod {
+					continue nextCall // use is not arg of call to T.At
+				}
+				atSel := ast.Unparen(atCall.Fun).(*ast.SelectorExpr)
+
+				// Check receivers of Len, At calls match (syntactically).
+				if !astutil.EqualSyntax(lenSel.X, atSel.X) {
+					continue nextCall
+				}
+
+				// At each point of use, check that
+				// the fresh variable is not shadowed
+				// by an intervening local declaration
+				// (or by the idiomatic elemVar optionally
+				// found by chooseName).
+				if obj := lookup(info, curAtCall, elem); obj != nil && obj != elemVar && obj.Pos() > indexVar.Pos() {
+					// (Ideally, instead of giving up, we would
+					// embellish the name and try again.)
+					continue nextCall
+				}
+
+				// use(x.At(i))
+				//     -------
+				// use(elem   )
+				edits = append(edits, analysis.TextEdit{
+					Pos:     atCall.Pos(),
+					End:     atCall.End(),
+					NewText: []byte(elem),
+				})
+			}
+
+			// Check file Go version is new enough for the iterator method.
+			// (In the long run, version filters are not highly selective,
+			// so there's no need to do them first, especially as this check
+			// may be somewhat expensive.)
+			if v, ok := methodGoVersion(row.pkgpath, row.typename, row.itermethod); !ok {
+				panic("no version found")
+			} else if file := astutil.EnclosingFile(curLenCall); !fileUses(info, file, v.String()) {
+				continue nextCall
+			}
+
+			pass.Report(analysis.Diagnostic{
+				Pos: rng.Pos(),
+				End: rng.End(),
+				Message: fmt.Sprintf("%s/%s loop can be simplified using %s.%s iteration",
+					row.lenmethod, row.atmethod, row.typename, row.itermethod),
+				SuggestedFixes: []analysis.SuggestedFix{{
+					Message: fmt.Sprintf(
+						"Replace %s/%s loop with %s.%s iteration",
+						row.lenmethod, row.atmethod, row.typename, row.itermethod),
+					TextEdits: edits,
+				}},
+			})
+		}
+	}
+	return nil, nil
+}
+
+// -- helpers --
+
+// methodGoVersion reports the version at which the method
+// (pkgpath.recvtype).method appeared in the standard library,
+// and whether the method was found in the stdlib manifest at all.
+func methodGoVersion(pkgpath, recvtype, method string) (stdlib.Version, bool) {
+	// TODO(adonovan): opt: this might be inefficient for large packages
+	// like go/types. If so, memoize using a map (and kill two birds with
+	// one stone by also memoizing the 'within' check above).
+	for _, sym := range stdlib.PackageSymbols[pkgpath] {
+		if sym.Kind == stdlib.Method {
+			_, recv, name := sym.SplitMethod()
+			if recv == recvtype && name == method {
+				return sym.Version, true
+			}
+		}
+	}
+	return 0, false
+}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// StringsBuilderAnalyzer reports string += string concatenation in a
+// loop and offers a fix to use strings.Builder instead.
+var StringsBuilderAnalyzer = &analysis.Analyzer{
+	Name: "stringsbuilder",
+	Doc:  analysisinternal.MustExtractDoc(doc, "stringsbuilder"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+		typeindexanalyzer.Analyzer,
+	},
+	Run: stringsbuilder,
+	// Fixed URL fragment: it must match the analyzer name
+	// "stringsbuilder" (was "#stringbuilder", missing an 's').
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringsbuilder",
+}
+
+// stringsbuilder replaces string += string in a loop by strings.Builder.
+func stringsbuilder(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	// Skip the analyzer in packages where its
+	// fixes would create an import cycle.
+	if within(pass, "strings", "runtime") {
+		return nil, nil
+	}
+
+	var (
+		inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+		index   = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+	)
+
+	// Gather all local string variables that appear on the
+	// LHS of some string += string assignment.
+	candidates := make(map[*types.Var]bool)
+	for curAssign := range inspect.Root().Preorder((*ast.AssignStmt)(nil)) {
+		assign := curAssign.Node().(*ast.AssignStmt)
+		if assign.Tok == token.ADD_ASSIGN && is[*ast.Ident](assign.Lhs[0]) {
+			if v, ok := pass.TypesInfo.Uses[assign.Lhs[0].(*ast.Ident)].(*types.Var); ok &&
+				!typesinternal.IsPackageLevel(v) && // TODO(adonovan): in go1.25, use v.Kind() == types.LocalVar &&
+				types.Identical(v.Type(), builtinString.Type()) {
+				candidates[v] = true
+			}
+		}
+	}
+
+	// Now check each candidate variable's decl and uses.
+nextcand:
+	for v := range candidates {
+		var edits []analysis.TextEdit
+
+		// Check declaration of s:
+		//
+		// s := expr
+		// var s [string] [= expr]
+		//
+		// and transform to:
+		//
+		// var s strings.Builder; s.WriteString(expr)
+		//
+		def, ok := index.Def(v)
+		if !ok {
+			continue
+		}
+		ek, _ := def.ParentEdge()
+		if ek == edge.AssignStmt_Lhs &&
+			len(def.Parent().Node().(*ast.AssignStmt).Lhs) == 1 {
+			// Have: s := expr
+			// => var s strings.Builder; s.WriteString(expr)
+
+			assign := def.Parent().Node().(*ast.AssignStmt)
+
+			// Reject "if s := f(); ..." since in that context
+			// we can't replace the assign with two statements.
+			switch def.Parent().Parent().Node().(type) {
+			case *ast.BlockStmt, *ast.CaseClause, *ast.CommClause:
+				// OK: these are the parts of syntax that
+				// allow unrestricted statement lists.
+			default:
+				continue
+			}
+
+			// Add strings import.
+			prefix, importEdits := refactor.AddImport(
+				pass.TypesInfo, astutil.EnclosingFile(def), "strings", "strings", "Builder", v.Pos())
+			edits = append(edits, importEdits...)
+
+			if isEmptyString(pass.TypesInfo, assign.Rhs[0]) {
+				// s := ""
+				// ---------------------
+				// var s strings.Builder
+				edits = append(edits, analysis.TextEdit{
+					Pos:     assign.Pos(),
+					End:     assign.End(),
+					NewText: fmt.Appendf(nil, "var %[1]s %[2]sBuilder", v.Name(), prefix),
+				})
+
+			} else {
+				// s := expr
+				// ------------------------------------- -
+				// var s strings.Builder; s.WriteString(expr)
+				edits = append(edits, []analysis.TextEdit{
+					{
+						Pos:     assign.Pos(),
+						End:     assign.Rhs[0].Pos(),
+						NewText: fmt.Appendf(nil, "var %[1]s %[2]sBuilder; %[1]s.WriteString(", v.Name(), prefix),
+					},
+					{
+						Pos:     assign.End(),
+						End:     assign.End(),
+						NewText: []byte(")"),
+					},
+				}...)
+
+			}
+
+		} else if ek == edge.ValueSpec_Names &&
+			len(def.Parent().Node().(*ast.ValueSpec).Names) == 1 {
+			// Have: var s [string] [= expr]
+			// => var s strings.Builder; s.WriteString(expr)
+
+			// Add strings import.
+			prefix, importEdits := refactor.AddImport(
+				pass.TypesInfo, astutil.EnclosingFile(def), "strings", "strings", "Builder", v.Pos())
+			edits = append(edits, importEdits...)
+
+			spec := def.Parent().Node().(*ast.ValueSpec)
+			decl := def.Parent().Parent().Node().(*ast.GenDecl)
+
+			init := spec.Names[0].End() // start of " = expr"
+			if spec.Type != nil {
+				init = spec.Type.End()
+			}
+
+			// var s [string]
+			// ----------------
+			// var s strings.Builder
+			edits = append(edits, analysis.TextEdit{
+				Pos:     spec.Names[0].End(),
+				End:     init,
+				NewText: fmt.Appendf(nil, " %sBuilder", prefix),
+			})
+
+			if len(spec.Values) > 0 && !isEmptyString(pass.TypesInfo, spec.Values[0]) {
+				// = expr
+				// ---------------- -
+				// ; s.WriteString(expr)
+				edits = append(edits, []analysis.TextEdit{
+					{
+						Pos:     init,
+						End:     spec.Values[0].Pos(),
+						NewText: fmt.Appendf(nil, "; %s.WriteString(", v.Name()),
+					},
+					{
+						Pos:     decl.End(),
+						End:     decl.End(),
+						NewText: []byte(")"),
+					},
+				}...)
+			} else {
+				// delete "= expr"
+				edits = append(edits, analysis.TextEdit{
+					Pos: init,
+					End: spec.End(),
+				})
+			}
+
+		} else {
+			continue
+		}
+
+		// Check uses of s.
+		//
+		// - All uses of s except the final one must be of the form
+		//
+		// s += expr
+		//
+		// Each of these will become s.WriteString(expr).
+		// At least one of them must be in an intervening loop
+		// w.r.t. the declaration of s:
+		//
+		// var s string
+		// for ... { s += expr }
+		//
+		// - The final use of s must be as an rvalue (e.g. use(s), not &s).
+		// This will become s.String().
+		//
+		// Perhaps surprisingly, it is fine for there to be an
+		// intervening loop or lambda w.r.t. the declaration of s:
+		//
+		// var s strings.Builder
+		// for range kSmall { s.WriteString(expr) }
+		// for range kLarge { use(s.String()) } // called repeatedly
+		//
+		// Even though that might cause the s.String() operation to be
+		// executed repeatedly, this is not a deoptimization because,
+		// by design, (*strings.Builder).String does not allocate.
+		var (
+			numLoopAssigns int             // number of += assignments within a loop
+			loopAssign     *ast.AssignStmt // first += assignment within a loop
+			seenRvalueUse  bool            // => we've seen the sole final use of s as an rvalue
+		)
+		for curUse := range index.Uses(v) {
+			// Strip enclosing parens around Ident.
+			ek, _ := curUse.ParentEdge()
+			for ek == edge.ParenExpr_X {
+				curUse = curUse.Parent()
+				ek, _ = curUse.ParentEdge()
+			}
+
+			// The rvalueUse must be the lexically last use.
+			if seenRvalueUse {
+				continue nextcand
+			}
+
+			// intervening reports whether cur has an ancestor of
+			// one of the given types that is within the scope of v.
+			intervening := func(types ...ast.Node) bool {
+				for cur := range curUse.Enclosing(types...) {
+					if v.Pos() <= cur.Node().Pos() { // in scope of v
+						return true
+					}
+				}
+				return false
+			}
+
+			if ek == edge.AssignStmt_Lhs {
+				assign := curUse.Parent().Node().(*ast.AssignStmt)
+				if assign.Tok != token.ADD_ASSIGN {
+					continue nextcand
+				}
+				// Have: s += expr
+
+				// At least one of the += operations
+				// must appear within a loop,
+				// relative to the declaration of s.
+				if intervening((*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)) {
+					numLoopAssigns++
+					if loopAssign == nil {
+						loopAssign = assign
+					}
+				}
+
+				// s += expr
+				// ------------- -
+				// s.WriteString(expr)
+				edits = append(edits, []analysis.TextEdit{
+					// replace += with .WriteString()
+					{
+						Pos:     assign.TokPos,
+						End:     assign.Rhs[0].Pos(),
+						NewText: []byte(".WriteString("),
+					},
+					// insert ")"
+					{
+						Pos:     assign.End(),
+						End:     assign.End(),
+						NewText: []byte(")"),
+					},
+				}...)
+
+			} else if ek == edge.UnaryExpr_X &&
+				curUse.Parent().Node().(*ast.UnaryExpr).Op == token.AND {
+				// Have: use(&s)
+				continue nextcand // s is used as an lvalue; reject
+
+			} else {
+				// The only possible l-value uses of a string variable
+				// are assignments (s=expr, s+=expr, etc) and &s.
+				// (For strings, we can ignore method calls s.m().)
+				// All other uses are r-values.
+				seenRvalueUse = true
+
+				edits = append(edits, analysis.TextEdit{
+					// insert ".String()"
+					Pos:     curUse.Node().End(),
+					End:     curUse.Node().End(),
+					NewText: []byte(".String()"),
+				})
+			}
+		}
+		if !seenRvalueUse {
+			continue nextcand // no rvalue use; reject
+		}
+		if numLoopAssigns == 0 {
+			continue nextcand // no += in a loop; reject
+		}
+
+		pass.Report(analysis.Diagnostic{
+			Pos:     loopAssign.Pos(),
+			End:     loopAssign.End(),
+			Message: "using string += string in a loop is inefficient",
+			SuggestedFixes: []analysis.SuggestedFix{{
+				Message:   "Replace string += string with strings.Builder",
+				TextEdits: edits,
+			}},
+		})
+	}
+
+	return nil, nil
+}
+
+// isEmptyString reports whether e (a string-typed expression) has constant value "".
+// Non-constant expressions (tv.Value == nil) report false.
+func isEmptyString(info *types.Info, e ast.Expr) bool {
+	tv, ok := info.Types[e]
+	return ok && tv.Value != nil && constant.StringVal(tv.Value) == ""
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// StringsCutPrefixAnalyzer reports HasPrefix+TrimPrefix (and
+// HasSuffix+TrimSuffix) pairs that can be simplified to
+// CutPrefix/CutSuffix; see stringscutprefix for details.
+var StringsCutPrefixAnalyzer = &analysis.Analyzer{
+	Name: "stringscutprefix",
+	Doc:  analysisinternal.MustExtractDoc(doc, "stringscutprefix"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+		typeindexanalyzer.Analyzer,
+	},
+	Run: stringscutprefix,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringscutprefix",
+}
+
+// stringscutprefix offers a fix to replace an if statement that
+// matches one of the two patterns below with strings.CutPrefix
+// or strings.CutSuffix.
+//
+// Patterns:
+//
+//  1. if strings.HasPrefix(s, pre) { use(strings.TrimPrefix(s, pre) }
+//     =>
+//     if after, ok := strings.CutPrefix(s, pre); ok { use(after) }
+//
+//  2. if after := strings.TrimPrefix(s, pre); after != s { use(after) }
+//     =>
+//     if after, ok := strings.CutPrefix(s, pre); ok { use(after) }
+//
+// Similar patterns apply for CutSuffix.
+//
+// The use must occur within the first statement of the block, and the offered fix
+// only replaces the first occurrence of strings.TrimPrefix/TrimSuffix.
+//
+// Variants:
+// - bytes.HasPrefix/HasSuffix usage as pattern 1.
+func stringscutprefix(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	var (
+		inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+		index   = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+		info    = pass.TypesInfo
+
+		stringsTrimPrefix = index.Object("strings", "TrimPrefix")
+		bytesTrimPrefix   = index.Object("bytes", "TrimPrefix")
+		stringsTrimSuffix = index.Object("strings", "TrimSuffix")
+		bytesTrimSuffix   = index.Object("bytes", "TrimSuffix")
+	)
+	// Fast path: bail out if none of the Trim functions is used.
+	if !index.Used(stringsTrimPrefix, bytesTrimPrefix, stringsTrimSuffix, bytesTrimSuffix) {
+		return nil, nil
+	}
+
+	// CutPrefix/CutSuffix were added in go1.20.
+	for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.20") {
+		for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) {
+			ifStmt := curIfStmt.Node().(*ast.IfStmt)
+
+			// pattern1
+			if call, ok := ifStmt.Cond.(*ast.CallExpr); ok && ifStmt.Init == nil && len(ifStmt.Body.List) > 0 {
+
+				obj := typeutil.Callee(info, call)
+				if !typesinternal.IsFunctionNamed(obj, "strings", "HasPrefix", "HasSuffix") &&
+					!typesinternal.IsFunctionNamed(obj, "bytes", "HasPrefix", "HasSuffix") {
+					continue
+				}
+				isPrefix := strings.HasSuffix(obj.Name(), "Prefix")
+
+				// Replace the first occurrence of strings.TrimPrefix(s, pre) in the first statement only,
+				// but not later statements in case s or pre are modified by intervening logic (ditto Suffix).
+				firstStmt := curIfStmt.Child(ifStmt.Body).Child(ifStmt.Body.List[0])
+				for curCall := range firstStmt.Preorder((*ast.CallExpr)(nil)) {
+					call1 := curCall.Node().(*ast.CallExpr)
+					obj1 := typeutil.Callee(info, call1)
+					// bytesTrimPrefix or stringsTrimPrefix might be nil if the file doesn't import it,
+					// so we must check that obj1 is non-nil: a nil obj1 means call1 is not a
+					// Trim call, and using it below would panic (ditto Suffix).
+					if obj1 == nil ||
+						obj1 != stringsTrimPrefix && obj1 != bytesTrimPrefix &&
+							obj1 != stringsTrimSuffix && obj1 != bytesTrimSuffix {
+						continue
+					}
+
+					isPrefix1 := strings.HasSuffix(obj1.Name(), "Prefix")
+					var cutFuncName, varName, message, fixMessage string
+					if isPrefix && isPrefix1 {
+						cutFuncName = "CutPrefix"
+						varName = "after"
+						message = "HasPrefix + TrimPrefix can be simplified to CutPrefix"
+						fixMessage = "Replace HasPrefix/TrimPrefix with CutPrefix"
+					} else if !isPrefix && !isPrefix1 {
+						cutFuncName = "CutSuffix"
+						varName = "before"
+						message = "HasSuffix + TrimSuffix can be simplified to CutSuffix"
+						fixMessage = "Replace HasSuffix/TrimSuffix with CutSuffix"
+					} else {
+						// Mixed Prefix/Suffix pair; not a candidate.
+						continue
+					}
+
+					// Have: if strings.HasPrefix(s0, pre0) { ...strings.TrimPrefix(s, pre)... } (ditto Suffix)
+					var (
+						s0   = call.Args[0]
+						pre0 = call.Args[1]
+						s    = call1.Args[0]
+						pre  = call1.Args[1]
+					)
+
+					// Check whether the Trim call uses exactly the same arguments as
+					// the Has call. Shadowed variables are not a concern because we
+					// only access the first statement of the block (ditto Suffix).
+					if astutil.EqualSyntax(s0, s) && astutil.EqualSyntax(pre0, pre) {
+						after := refactor.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), varName)
+						prefix, importEdits := refactor.AddImport(
+							info,
+							curFile.Node().(*ast.File),
+							obj1.Pkg().Name(),
+							obj1.Pkg().Path(),
+							cutFuncName,
+							call.Pos(),
+						)
+						okVarName := refactor.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok")
+						pass.Report(analysis.Diagnostic{
+							// highlight at HasPrefix call (ditto Suffix).
+							Pos:     call.Pos(),
+							End:     call.End(),
+							Message: message,
+							SuggestedFixes: []analysis.SuggestedFix{{
+								Message: fixMessage,
+								// if strings.HasPrefix(s, pre) { use(strings.TrimPrefix(s, pre)) }
+								// ------------ ----------------- ----- --------------------------
+								// if after, ok := strings.CutPrefix(s, pre); ok { use(after) }
+								// (ditto Suffix)
+								TextEdits: append(importEdits, []analysis.TextEdit{
+									{
+										Pos:     call.Fun.Pos(),
+										End:     call.Fun.Pos(),
+										NewText: fmt.Appendf(nil, "%s, %s :=", after, okVarName),
+									},
+									{
+										Pos:     call.Fun.Pos(),
+										End:     call.Fun.End(),
+										NewText: fmt.Appendf(nil, "%s%s", prefix, cutFuncName),
+									},
+									{
+										Pos:     call.End(),
+										End:     call.End(),
+										NewText: fmt.Appendf(nil, "; %s ", okVarName),
+									},
+									{
+										Pos:     call1.Pos(),
+										End:     call1.End(),
+										NewText: []byte(after),
+									},
+								}...),
+							}}},
+						)
+						// Only the first matching Trim call is replaced.
+						break
+					}
+				}
+			}
+
+			// pattern2
+			if bin, ok := ifStmt.Cond.(*ast.BinaryExpr); ok &&
+				bin.Op == token.NEQ &&
+				ifStmt.Init != nil &&
+				isSimpleAssign(ifStmt.Init) {
+				assign := ifStmt.Init.(*ast.AssignStmt)
+				if call, ok := assign.Rhs[0].(*ast.CallExpr); ok && assign.Tok == token.DEFINE {
+					lhs := assign.Lhs[0]
+					obj := typeutil.Callee(info, call)
+
+					if obj == nil ||
+						obj != stringsTrimPrefix && obj != bytesTrimPrefix && obj != stringsTrimSuffix && obj != bytesTrimSuffix {
+						continue
+					}
+
+					isPrefix1 := strings.HasSuffix(obj.Name(), "Prefix")
+					var cutFuncName, message, fixMessage string
+					if isPrefix1 {
+						cutFuncName = "CutPrefix"
+						message = "TrimPrefix can be simplified to CutPrefix"
+						fixMessage = "Replace TrimPrefix with CutPrefix"
+					} else {
+						cutFuncName = "CutSuffix"
+						message = "TrimSuffix can be simplified to CutSuffix"
+						fixMessage = "Replace TrimSuffix with CutSuffix"
+					}
+
+					if astutil.EqualSyntax(lhs, bin.X) && astutil.EqualSyntax(call.Args[0], bin.Y) ||
+						(astutil.EqualSyntax(lhs, bin.Y) && astutil.EqualSyntax(call.Args[0], bin.X)) {
+						okVarName := refactor.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok")
+						// Have one of:
+						// if rest := TrimPrefix(s, prefix); rest != s { (ditto Suffix)
+						// if rest := TrimPrefix(s, prefix); s != rest { (ditto Suffix)
+
+						// We use AddImport not to add an import (since it exists already)
+						// but to compute the correct prefix in the dot-import case.
+						prefix, importEdits := refactor.AddImport(
+							info,
+							curFile.Node().(*ast.File),
+							obj.Pkg().Name(),
+							obj.Pkg().Path(),
+							cutFuncName,
+							call.Pos(),
+						)
+
+						pass.Report(analysis.Diagnostic{
+							// highlight from the init and the condition end.
+							Pos:     ifStmt.Init.Pos(),
+							End:     ifStmt.Cond.End(),
+							Message: message,
+							SuggestedFixes: []analysis.SuggestedFix{{
+								Message: fixMessage,
+								// if x := strings.TrimPrefix(s, pre); x != s ...
+								// ---- ---------- ------
+								// if x, ok := strings.CutPrefix (s, pre); ok ...
+								// (ditto Suffix)
+								TextEdits: append(importEdits, []analysis.TextEdit{
+									{
+										Pos:     assign.Lhs[0].End(),
+										End:     assign.Lhs[0].End(),
+										NewText: fmt.Appendf(nil, ", %s", okVarName),
+									},
+									{
+										Pos:     call.Fun.Pos(),
+										End:     call.Fun.End(),
+										NewText: fmt.Appendf(nil, "%s%s", prefix, cutFuncName),
+									},
+									{
+										Pos:     ifStmt.Cond.Pos(),
+										End:     ifStmt.Cond.End(),
+										NewText: []byte(okVarName),
+									},
+								}...),
+							}},
+						})
+					}
+				}
+			}
+		}
+	}
+	return nil, nil
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// StringsSeqAnalyzer reports range loops over strings.Split/Fields
+// (and bytes.Split/Fields) that could use the allocation-free
+// SplitSeq/FieldsSeq iterators; see stringsseq for details.
+var StringsSeqAnalyzer = &analysis.Analyzer{
+	Name: "stringsseq",
+	Doc:  analysisinternal.MustExtractDoc(doc, "stringsseq"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+		typeindexanalyzer.Analyzer,
+	},
+	Run: stringsseq,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringsseq",
+}
+
+// stringsseq offers a fix to replace a call to strings.Split with
+// SplitSeq or strings.Fields with FieldsSeq
+// when it is the operand of a range loop, either directly:
+//
+// for _, line := range strings.Split() {...}
+//
+// or indirectly, if the variable's sole use is the range statement:
+//
+// lines := strings.Split()
+// for _, line := range lines {...}
+//
+// Variants:
+// - bytes.SplitSeq
+// - bytes.FieldsSeq
+func stringsseq(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	var (
+		inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+		index   = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+		info    = pass.TypesInfo
+
+		stringsSplit  = index.Object("strings", "Split")
+		stringsFields = index.Object("strings", "Fields")
+		bytesSplit    = index.Object("bytes", "Split")
+		bytesFields   = index.Object("bytes", "Fields")
+	)
+	// Fast path: bail out if none of the Split/Fields functions is used.
+	if !index.Used(stringsSplit, stringsFields, bytesSplit, bytesFields) {
+		return nil, nil
+	}
+
+	// SplitSeq/FieldsSeq were added in go1.24.
+	for curFile := range filesUsing(inspect, info, "go1.24") {
+		for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) {
+			rng := curRange.Node().(*ast.RangeStmt)
+
+			// Reject "for i, line := ..." since SplitSeq is not an iter.Seq2.
+			// (We require that i is blank.)
+			if id, ok := rng.Key.(*ast.Ident); ok && id.Name != "_" {
+				continue
+			}
+
+			// Find the call operand of the range statement,
+			// whether direct or indirect.
+			call, ok := rng.X.(*ast.CallExpr)
+			if !ok {
+				if id, ok := rng.X.(*ast.Ident); ok {
+					if v, ok := info.Uses[id].(*types.Var); ok {
+						if ek, idx := curRange.ParentEdge(); ek == edge.BlockStmt_List && idx > 0 {
+							curPrev, _ := curRange.PrevSibling()
+							if assign, ok := curPrev.Node().(*ast.AssignStmt); ok &&
+								assign.Tok == token.DEFINE &&
+								len(assign.Lhs) == 1 &&
+								len(assign.Rhs) == 1 &&
+								info.Defs[assign.Lhs[0].(*ast.Ident)] == v &&
+								soleUseIs(index, v, id) {
+								// Have:
+								// lines := ...
+								// for _, line := range lines {...}
+								// and no other uses of lines.
+								call, _ = assign.Rhs[0].(*ast.CallExpr)
+							}
+						}
+					}
+				}
+			}
+
+			if call != nil {
+				var edits []analysis.TextEdit
+				if rng.Key != nil {
+					// Delete (blank) RangeStmt.Key:
+					// for _, line := -> for line :=
+					// for _, _ := -> for
+					// for _ := -> for
+					end := rng.Range
+					if rng.Value != nil {
+						end = rng.Value.Pos()
+					}
+					edits = append(edits, analysis.TextEdit{
+						Pos: rng.Key.Pos(),
+						End: end,
+					})
+				}
+
+				sel, ok := call.Fun.(*ast.SelectorExpr)
+				if !ok {
+					continue
+				}
+
+				switch obj := typeutil.Callee(info, call); obj {
+				case stringsSplit, stringsFields, bytesSplit, bytesFields:
+					oldFnName := obj.Name()
+					seqFnName := fmt.Sprintf("%sSeq", oldFnName)
+					pass.Report(analysis.Diagnostic{
+						Pos:     sel.Pos(),
+						End:     sel.End(),
+						Message: fmt.Sprintf("Ranging over %s is more efficient", seqFnName),
+						SuggestedFixes: []analysis.SuggestedFix{{
+							Message: fmt.Sprintf("Replace %s with %s", oldFnName, seqFnName),
+							TextEdits: append(edits, analysis.TextEdit{
+								Pos:     sel.Sel.Pos(),
+								End:     sel.Sel.End(),
+								NewText: []byte(seqFnName)}),
+						}},
+					})
+				}
+			}
+		}
+	}
+	return nil, nil
+}
--- /dev/null
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// TestingContextAnalyzer reports uses of context.WithCancel within
+// tests that can be replaced by testing.{T,B,F}.Context (go1.24);
+// see testingContext for details.
+var TestingContextAnalyzer = &analysis.Analyzer{
+	Name: "testingcontext",
+	Doc:  analysisinternal.MustExtractDoc(doc, "testingcontext"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+		typeindexanalyzer.Analyzer,
+	},
+	Run: testingContext,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#testingcontext",
+}
+
+// The testingContext pass replaces calls to context.WithCancel from within
+// tests to a use of testing.{T,B,F}.Context(), added in Go 1.24.
+//
+// Specifically, the testingContext pass suggests to replace:
+//
+// ctx, cancel := context.WithCancel(context.Background()) // or context.TODO
+// defer cancel()
+//
+// with:
+//
+// ctx := t.Context()
+//
+// provided:
+//
+//   - ctx and cancel are declared by the assignment
+//   - the deferred call is the only use of cancel
+//   - the call is within a test or subtest function
+//   - the relevant testing.{T,B,F} is named and not shadowed at the call
+func testingContext(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	var (
+		index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+		info  = pass.TypesInfo
+
+		contextWithCancel = index.Object("context", "WithCancel")
+	)
+
+calls:
+	for cur := range index.Calls(contextWithCancel) {
+		call := cur.Node().(*ast.CallExpr)
+		// Have: context.WithCancel(...)
+
+		arg, ok := call.Args[0].(*ast.CallExpr)
+		if !ok {
+			continue
+		}
+		if !typesinternal.IsFunctionNamed(typeutil.Callee(info, arg), "context", "Background", "TODO") {
+			continue
+		}
+		// Have: context.WithCancel(context.{Background,TODO}())
+
+		parent := cur.Parent()
+		assign, ok := parent.Node().(*ast.AssignStmt)
+		if !ok || assign.Tok != token.DEFINE {
+			continue
+		}
+		// Have: a, b := context.WithCancel(context.{Background,TODO}())
+
+		// Check that both a and b are declared, not redeclarations.
+		// (lhs[0] is ctx, lhs[1] is the cancel func; the type checker
+		// guarantees exactly two LHS operands for this RHS.)
+		var lhs []types.Object
+		for _, expr := range assign.Lhs {
+			id, ok := expr.(*ast.Ident)
+			if !ok {
+				continue calls
+			}
+			obj, ok := info.Defs[id]
+			if !ok {
+				continue calls
+			}
+			lhs = append(lhs, obj)
+		}
+
+		next, ok := parent.NextSibling()
+		if !ok {
+			continue
+		}
+		defr, ok := next.Node().(*ast.DeferStmt)
+		if !ok {
+			continue
+		}
+		deferId, ok := defr.Call.Fun.(*ast.Ident)
+		if !ok || !soleUseIs(index, lhs[1], deferId) {
+			continue // b is used elsewhere
+		}
+		// Have:
+		// a, b := context.WithCancel(context.{Background,TODO}())
+		// defer b()
+
+		// Check that we are in a test func.
+		var testObj types.Object // relevant testing.{T,B,F}, or nil
+		if curFunc, ok := enclosingFunc(cur); ok {
+			switch n := curFunc.Node().(type) {
+			case *ast.FuncLit:
+				if ek, idx := curFunc.ParentEdge(); ek == edge.CallExpr_Args && idx == 1 {
+					// Have: call(..., func(...) { ...context.WithCancel(...)... })
+					obj := typeutil.Callee(info, curFunc.Parent().Node().(*ast.CallExpr))
+					if (typesinternal.IsMethodNamed(obj, "testing", "T", "Run") ||
+						typesinternal.IsMethodNamed(obj, "testing", "B", "Run")) &&
+						len(n.Type.Params.List[0].Names) == 1 {
+
+						// Have tb.Run(..., func(..., tb *testing.[TB]) { ...context.WithCancel(...)... }
+						testObj = info.Defs[n.Type.Params.List[0].Names[0]]
+					}
+				}
+
+			case *ast.FuncDecl:
+				testObj = isTestFn(info, n)
+			}
+		}
+		// T.Context was added in go1.24.
+		if testObj != nil && fileUses(info, astutil.EnclosingFile(cur), "go1.24") {
+			// Have a test function. Check that we can resolve the relevant
+			// testing.{T,B,F} at the current position.
+			if _, obj := lhs[0].Parent().LookupParent(testObj.Name(), lhs[0].Pos()); obj == testObj {
+				pass.Report(analysis.Diagnostic{
+					Pos:     call.Fun.Pos(),
+					End:     call.Fun.End(),
+					Message: fmt.Sprintf("context.WithCancel can be modernized using %s.Context", testObj.Name()),
+					SuggestedFixes: []analysis.SuggestedFix{{
+						Message: fmt.Sprintf("Replace context.WithCancel with %s.Context", testObj.Name()),
+						TextEdits: []analysis.TextEdit{{
+							Pos:     assign.Pos(),
+							End:     defr.End(),
+							NewText: fmt.Appendf(nil, "%s := %s.Context()", lhs[0].Name(), testObj.Name()),
+						}},
+					}},
+				})
+			}
+		}
+	}
+	return nil, nil
+}
+
+// soleUseIs reports whether id is the sole Ident that uses obj.
+// (It returns false if there were no uses of obj.)
+func soleUseIs(index *typeindex.Index, obj types.Object, id *ast.Ident) bool {
+	empty := true
+	for use := range index.Uses(obj) {
+		empty = false
+		// Any use other than id disqualifies obj.
+		if use.Node() != id {
+			return false
+		}
+	}
+	return !empty
+}
+
+// isTestFn checks whether fn is a test function (TestX, BenchmarkX, FuzzX),
+// returning the corresponding types.Object of the *testing.{T,B,F} argument.
+// Returns nil if fn is a test function, but the testing.{T,B,F} argument is
+// unnamed (or _).
+//
+// TODO(rfindley): consider handling the case of an unnamed argument, by adding
+// an edit to give the argument a name.
+//
+// Adapted from go/analysis/passes/tests.
+// TODO(rfindley): consider refactoring to share logic.
+func isTestFn(info *types.Info, fn *ast.FuncDecl) types.Object {
+	// Want functions with 0 results and 1 parameter.
+	if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 ||
+		fn.Type.Params == nil ||
+		len(fn.Type.Params.List) != 1 ||
+		len(fn.Type.Params.List[0].Names) != 1 {
+
+		return nil
+	}
+
+	prefix := testKind(fn.Name.Name)
+	if prefix == "" {
+		return nil // not named TestX/BenchmarkX/FuzzX
+	}
+
+	if tparams := fn.Type.TypeParams; tparams != nil && len(tparams.List) > 0 {
+		return nil // test functions must not be generic
+	}
+
+	obj := info.Defs[fn.Type.Params.List[0].Names[0]]
+	if obj == nil {
+		return nil // e.g. _ *testing.T
+	}
+
+	// Map the name prefix to the expected parameter type.
+	var name string
+	switch prefix {
+	case "Test":
+		name = "T"
+	case "Benchmark":
+		name = "B"
+	case "Fuzz":
+		name = "F"
+	}
+
+	if !typesinternal.IsPointerToNamed(obj.Type(), "testing", name) {
+		return nil
+	}
+	return obj
+}
+
+// testKind returns "Test", "Benchmark", or "Fuzz" if name is a valid resp.
+// test, benchmark, or fuzz function name. Otherwise, testKind returns "".
+//
+// Adapted from go/analysis/passes/tests.isTestName.
+func testKind(name string) string {
+	var prefix string
+	switch {
+	case strings.HasPrefix(name, "Test"):
+		prefix = "Test"
+	case strings.HasPrefix(name, "Benchmark"):
+		prefix = "Benchmark"
+	case strings.HasPrefix(name, "Fuzz"):
+		prefix = "Fuzz"
+	}
+	if prefix == "" {
+		return ""
+	}
+	suffix := name[len(prefix):]
+	if len(suffix) == 0 {
+		// "Test" is ok.
+		return prefix
+	}
+	// The character after the prefix must not be lowercase
+	// (e.g. "Testify" is not a test function name).
+	r, _ := utf8.DecodeRuneInString(suffix)
+	if unicode.IsLower(r) {
+		return ""
+	}
+	return prefix
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/printer"
+ "slices"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysisinternal/generated"
+ typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+)
+
+// WaitGroupAnalyzer reports wg.Add(1)/go/wg.Done() sequences that can
+// be simplified using the WaitGroup.Go method added in go1.25.
+var WaitGroupAnalyzer = &analysis.Analyzer{
+	Name: "waitgroup",
+	Doc:  analysisinternal.MustExtractDoc(doc, "waitgroup"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+		typeindexanalyzer.Analyzer,
+	},
+	Run: waitgroup,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#waitgroup",
+}
+
+// The waitgroup pass replaces an older, more complex idiom with the
+// WaitGroup.Go API added in go1.25.
+//
+// Patterns:
+//
+//  1. wg.Add(1); go func() { defer wg.Done(); ... }()
+//     =>
+//     wg.Go(func() { ... })
+//
+//  2. wg.Add(1); go func() { ...; wg.Done() }()
+//     =>
+//     wg.Go(func() { ... })
+//
+// The wg.Done must occur within the first statement of the block in a
+// defer format or last statement of the block, and the offered fix
+// only removes the first/last wg.Done call. It doesn't fix existing
+// wrong usage of sync.WaitGroup.
+//
+// The use of WaitGroup.Go in pattern 1 implicitly introduces a
+// 'defer', which may change the behavior in the case of panic from
+// the "..." logic. In this instance, the change is safe: before and
+// after the transformation, an unhandled panic inevitably results in
+// a fatal crash. The fact that the transformed code calls wg.Done()
+// before the crash doesn't materially change anything. (If Done had
+// other effects, or blocked, or if WaitGroup.Go propagated panics
+// from child to parent goroutine, the argument would be different.)
+func waitgroup(pass *analysis.Pass) (any, error) {
+	skipGenerated(pass)
+
+	var (
+		index             = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+		info              = pass.TypesInfo
+		syncWaitGroupAdd  = index.Selection("sync", "WaitGroup", "Add")
+		syncWaitGroupDone = index.Selection("sync", "WaitGroup", "Done")
+	)
+	// Fast path: if the package never calls WaitGroup.Done,
+	// there is nothing for this pass to do.
+	if !index.Used(syncWaitGroupDone) {
+		return nil, nil
+	}
+
+	for curAddCall := range index.Calls(syncWaitGroupAdd) {
+		// Extract receiver from wg.Add call.
+		addCall := curAddCall.Node().(*ast.CallExpr)
+		if !isIntLiteral(info, addCall.Args[0], 1) {
+			continue // not a call to wg.Add(1)
+		}
+		// Inv: the Args[0] check ensures addCall is not of
+		// the form sync.WaitGroup.Add(&wg, 1).
+		addCallRecv := ast.Unparen(addCall.Fun).(*ast.SelectorExpr).X
+
+		// Following statement must be go func() { ... } ().
+		curAddStmt := curAddCall.Parent()
+		if !is[*ast.ExprStmt](curAddStmt.Node()) {
+			continue // unnecessary parens?
+		}
+		curNext, ok := curAddCall.Parent().NextSibling()
+		if !ok {
+			continue // no successor
+		}
+		goStmt, ok := curNext.Node().(*ast.GoStmt)
+		if !ok {
+			continue // not a go stmt
+		}
+		lit, ok := goStmt.Call.Fun.(*ast.FuncLit)
+		if !ok || len(goStmt.Call.Args) != 0 {
+			continue // go argument is not func(){...}()
+		}
+		list := lit.Body.List
+		if len(list) == 0 {
+			continue
+		}
+
+		// Body must start with "defer wg.Done()" or end with "wg.Done()".
+		// In either case the receiver must be syntactically identical
+		// to the receiver of the wg.Add(1) call.
+		var doneStmt ast.Stmt
+		if deferStmt, ok := list[0].(*ast.DeferStmt); ok &&
+			typeutil.Callee(info, deferStmt.Call) == syncWaitGroupDone &&
+			astutil.EqualSyntax(ast.Unparen(deferStmt.Call.Fun).(*ast.SelectorExpr).X, addCallRecv) {
+			doneStmt = deferStmt // "defer wg.Done()"
+
+		} else if lastStmt, ok := list[len(list)-1].(*ast.ExprStmt); ok {
+			if doneCall, ok := lastStmt.X.(*ast.CallExpr); ok &&
+				typeutil.Callee(info, doneCall) == syncWaitGroupDone &&
+				astutil.EqualSyntax(ast.Unparen(doneCall.Fun).(*ast.SelectorExpr).X, addCallRecv) {
+				doneStmt = lastStmt // "wg.Done()"
+			}
+		}
+		if doneStmt == nil {
+			continue
+		}
+		curDoneStmt, ok := curNext.FindNode(doneStmt)
+		if !ok {
+			panic("can't find Cursor for 'done' statement")
+		}
+
+		// The fix relies on WaitGroup.Go, which requires go1.25.
+		file := astutil.EnclosingFile(curAddCall)
+		if !fileUses(info, file, "go1.25") {
+			continue
+		}
+		tokFile := pass.Fset.File(file.Pos())
+
+		// Render the receiver expression (e.g. "wg") as text for
+		// reuse in the replacement "wg.Go(" edit below.
+		var addCallRecvText bytes.Buffer
+		err := printer.Fprint(&addCallRecvText, pass.Fset, addCallRecv)
+		if err != nil {
+			continue // error getting text for the edit
+		}
+
+		pass.Report(analysis.Diagnostic{
+			Pos:     addCall.Pos(),
+			End:     goStmt.End(),
+			Message: "Goroutine creation can be simplified using WaitGroup.Go",
+			SuggestedFixes: []analysis.SuggestedFix{{
+				Message: "Simplify by using WaitGroup.Go",
+				TextEdits: slices.Concat(
+					// delete "wg.Add(1)"
+					refactor.DeleteStmt(tokFile, curAddStmt),
+					// delete "wg.Done()" or "defer wg.Done()"
+					refactor.DeleteStmt(tokFile, curDoneStmt),
+					[]analysis.TextEdit{
+						// go func()
+						// ------
+						// wg.Go(func()
+						{
+							Pos:     goStmt.Pos(),
+							End:     goStmt.Call.Pos(),
+							NewText: fmt.Appendf(nil, "%s.Go(", addCallRecvText.String()),
+						},
+						// ... }()
+						//      -
+						// ... } )
+						{
+							Pos: goStmt.Call.Lparen,
+							End: goStmt.Call.Rparen,
+						},
+					},
+				),
+			}},
+		})
+	}
+	return nil, nil
+}
--- /dev/null
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+// This file defines utilities for working with source positions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "sort"
+)
+
+// PathEnclosingInterval returns the node that encloses the source
+// interval [start, end), and all its ancestors up to the AST root.
+//
+// The definition of "enclosing" used by this function considers
+// additional whitespace abutting a node to be enclosed by it.
+// In this example:
+//
+// z := x + y // add them
+// <-A->
+// <----B----->
+//
+// the ast.BinaryExpr(+) node is considered to enclose interval B
+// even though its [Pos()..End()) is actually only interval A.
+// This behaviour makes user interfaces more tolerant of imperfect
+// input.
+//
+// This function treats tokens as nodes, though they are not included
+// in the result. e.g. PathEnclosingInterval("+") returns the
+// enclosing ast.BinaryExpr("x + y").
+//
+// If start==end, the 1-char interval following start is used instead.
+//
+// The 'exact' result is true if the interval contains only path[0]
+// and perhaps some adjacent whitespace. It is false if the interval
+// overlaps multiple children of path[0], or if it contains only
+// interior whitespace of path[0].
+// In this example:
+//
+// z := x + y // add them
+// <--C--> <---E-->
+// ^
+// D
+//
+// intervals C, D and E are inexact. C is contained by the
+// z-assignment statement, because it spans three of its children (:=,
+// x, +). So too is the 1-char interval D, because it contains only
+// interior whitespace of the assignment. E is considered interior
+// whitespace of the BlockStmt containing the assignment.
+//
+// The resulting path is never empty; it always contains at least the
+// 'root' *ast.File. Ideally PathEnclosingInterval would reject
+// intervals that lie wholly or partially outside the range of the
+// file, but unfortunately ast.File records only the token.Pos of
+// the 'package' keyword, but not of the start of the file itself.
+func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
+	// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
+
+	// visit appends node to path and recursively descends into the
+	// single child (if any) whose whitespace-augmented extent contains
+	// [start, end). It reports whether the match was exact.
+	// Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
+	var visit func(node ast.Node) bool
+	visit = func(node ast.Node) bool {
+		path = append(path, node)
+
+		nodePos := node.Pos()
+		nodeEnd := node.End()
+
+		// fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
+
+		// Intersect [start, end) with interval of node.
+		if start < nodePos {
+			start = nodePos
+		}
+		if end > nodeEnd {
+			end = nodeEnd
+		}
+
+		// Find sole child that contains [start, end).
+		children := childrenOf(node)
+		l := len(children)
+		for i, child := range children {
+			// [childPos, childEnd) is unaugmented interval of child.
+			childPos := child.Pos()
+			childEnd := child.End()
+
+			// [augPos, augEnd) is whitespace-augmented interval of child.
+			augPos := childPos
+			augEnd := childEnd
+			if i > 0 {
+				augPos = children[i-1].End() // start of preceding whitespace
+			}
+			if i < l-1 {
+				nextChildPos := children[i+1].Pos()
+				// Does [start, end) lie between child and next child?
+				if start >= augEnd && end <= nextChildPos {
+					return false // inexact match
+				}
+				augEnd = nextChildPos // end of following whitespace
+			}
+
+			// fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
+			// 	i, augPos, augEnd, start, end) // debugging
+
+			// Does augmented child strictly contain [start, end)?
+			if augPos <= start && end <= augEnd {
+				if is[tokenNode](child) {
+					return true
+				}
+
+				// childrenOf elides the FuncType node beneath FuncDecl.
+				// Add it back here for TypeParams, Params, Results,
+				// all FieldLists). But we don't add it back for the "func" token
+				// even though it is the tree at FuncDecl.Type.Func.
+				if decl, ok := node.(*ast.FuncDecl); ok {
+					if fields, ok := child.(*ast.FieldList); ok && fields != decl.Recv {
+						path = append(path, decl.Type)
+					}
+				}
+
+				return visit(child)
+			}
+
+			// Does [start, end) overlap multiple children?
+			// i.e. left-augmented child contains start
+			// but LR-augmented child does not contain end.
+			if start < childEnd && end > augEnd {
+				break
+			}
+		}
+
+		// No single child contained [start, end),
+		// so node is the result. Is it exact?
+
+		// (It's tempting to put this condition before the
+		// child loop, but it gives the wrong result in the
+		// case where a node (e.g. ExprStmt) and its sole
+		// child have equal intervals.)
+		if start == nodePos && end == nodeEnd {
+			return true // exact match
+		}
+
+		return false // inexact: overlaps multiple children
+	}
+
+	// Ensure [start,end) is nondecreasing.
+	if start > end {
+		start, end = end, start
+	}
+
+	// Does the interval overlap the file's extent at all?
+	if start < root.End() && end > root.Pos() {
+		if start == end {
+			end = start + 1 // empty interval => interval of size 1
+		}
+		exact = visit(root)
+
+		// Reverse the path: visit built it root-first,
+		// but callers expect innermost-node-first.
+		for i, l := 0, len(path); i < l/2; i++ {
+			path[i], path[l-1-i] = path[l-1-i], path[i]
+		}
+	} else {
+		// Selection lies within whitespace preceding the
+		// first (or following the last) declaration in the file.
+		// The result nonetheless always includes the ast.File.
+		path = append(path, root)
+	}
+
+	return
+}
+
+// tokenNode is a dummy implementation of ast.Node for a single token.
+// They are used transiently by PathEnclosingInterval but never escape
+// this package.
+type tokenNode struct {
+	pos token.Pos
+	end token.Pos
+}
+
+// Pos returns the position of the token's first character.
+func (n tokenNode) Pos() token.Pos {
+	return n.pos
+}
+
+// End returns the position immediately after the token.
+func (n tokenNode) End() token.Pos {
+	return n.end
+}
+
+// tok returns a fake ast.Node covering the len bytes starting at pos.
+func tok(pos token.Pos, len int) ast.Node {
+	return tokenNode{pos, pos + token.Pos(len)}
+}
+
+// childrenOf returns the direct non-nil children of ast.Node n.
+// It may include fake ast.Node implementations for bare tokens.
+// it is not safe to call (e.g.) ast.Walk on such nodes.
+func childrenOf(n ast.Node) []ast.Node {
+	var children []ast.Node
+
+	// First add nodes for all true subtrees.
+	// ast.Inspect with an immediate "return false" after the first
+	// level yields exactly the direct children of n.
+	ast.Inspect(n, func(node ast.Node) bool {
+		if node == n { // push n
+			return true // recur
+		}
+		if node != nil { // push child
+			children = append(children, node)
+		}
+		return false // no recursion
+	})
+
+	// TODO(adonovan): be more careful about missing (!Pos.Valid)
+	// tokens in trees produced from invalid input.
+
+	// Then add fake Nodes for bare tokens.
+	switch n := n.(type) {
+	case *ast.ArrayType:
+		children = append(children,
+			tok(n.Lbrack, len("[")),
+			tok(n.Elt.End(), len("]")))
+
+	case *ast.AssignStmt:
+		children = append(children,
+			tok(n.TokPos, len(n.Tok.String())))
+
+	case *ast.BasicLit:
+		children = append(children,
+			tok(n.ValuePos, len(n.Value)))
+
+	case *ast.BinaryExpr:
+		children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+	case *ast.BlockStmt:
+		if n.Lbrace.IsValid() {
+			children = append(children, tok(n.Lbrace, len("{")))
+		}
+		if n.Rbrace.IsValid() {
+			children = append(children, tok(n.Rbrace, len("}")))
+		}
+
+	case *ast.BranchStmt:
+		children = append(children,
+			tok(n.TokPos, len(n.Tok.String())))
+
+	case *ast.CallExpr:
+		children = append(children,
+			tok(n.Lparen, len("(")),
+			tok(n.Rparen, len(")")))
+		if n.Ellipsis != 0 {
+			children = append(children, tok(n.Ellipsis, len("...")))
+		}
+
+	case *ast.CaseClause:
+		if n.List == nil {
+			children = append(children,
+				tok(n.Case, len("default")))
+		} else {
+			children = append(children,
+				tok(n.Case, len("case")))
+		}
+		children = append(children, tok(n.Colon, len(":")))
+
+	case *ast.ChanType:
+		switch n.Dir {
+		case ast.RECV:
+			children = append(children, tok(n.Begin, len("<-chan")))
+		case ast.SEND:
+			children = append(children, tok(n.Begin, len("chan<-")))
+		case ast.RECV | ast.SEND:
+			children = append(children, tok(n.Begin, len("chan")))
+		}
+
+	case *ast.CommClause:
+		if n.Comm == nil {
+			children = append(children,
+				tok(n.Case, len("default")))
+		} else {
+			children = append(children,
+				tok(n.Case, len("case")))
+		}
+		children = append(children, tok(n.Colon, len(":")))
+
+	case *ast.Comment:
+		// nop
+
+	case *ast.CommentGroup:
+		// nop
+
+	case *ast.CompositeLit:
+		// NOTE(review): len("{") below is presumably a typo for
+		// len("}"); both are 1 byte, so the computed span is identical.
+		children = append(children,
+			tok(n.Lbrace, len("{")),
+			tok(n.Rbrace, len("{")))
+
+	case *ast.DeclStmt:
+		// nop
+
+	case *ast.DeferStmt:
+		children = append(children,
+			tok(n.Defer, len("defer")))
+
+	case *ast.Ellipsis:
+		children = append(children,
+			tok(n.Ellipsis, len("...")))
+
+	case *ast.EmptyStmt:
+		// nop
+
+	case *ast.ExprStmt:
+		// nop
+
+	case *ast.Field:
+		// TODO(adonovan): Field.{Doc,Comment,Tag}?
+
+	case *ast.FieldList:
+		if n.Opening.IsValid() {
+			children = append(children, tok(n.Opening, len("(")))
+		}
+		if n.Closing.IsValid() {
+			children = append(children, tok(n.Closing, len(")")))
+		}
+
+	case *ast.File:
+		// TODO test: Doc
+		children = append(children,
+			tok(n.Package, len("package")))
+
+	case *ast.ForStmt:
+		children = append(children,
+			tok(n.For, len("for")))
+
+	case *ast.FuncDecl:
+		// TODO(adonovan): FuncDecl.Comment?
+
+		// Uniquely, FuncDecl breaks the invariant that
+		// preorder traversal yields tokens in lexical order:
+		// in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
+		//
+		// As a workaround, we inline the case for FuncType
+		// here and order things correctly.
+		// We also need to insert the elided FuncType just
+		// before the 'visit' recursion.
+		//
+		children = nil // discard ast.Walk(FuncDecl) info subtrees
+		children = append(children, tok(n.Type.Func, len("func")))
+		if n.Recv != nil {
+			children = append(children, n.Recv)
+		}
+		children = append(children, n.Name)
+		if tparams := n.Type.TypeParams; tparams != nil {
+			children = append(children, tparams)
+		}
+		if n.Type.Params != nil {
+			children = append(children, n.Type.Params)
+		}
+		if n.Type.Results != nil {
+			children = append(children, n.Type.Results)
+		}
+		if n.Body != nil {
+			children = append(children, n.Body)
+		}
+
+	case *ast.FuncLit:
+		// nop
+
+	case *ast.FuncType:
+		if n.Func != 0 {
+			children = append(children,
+				tok(n.Func, len("func")))
+		}
+
+	case *ast.GenDecl:
+		children = append(children,
+			tok(n.TokPos, len(n.Tok.String())))
+		if n.Lparen != 0 {
+			children = append(children,
+				tok(n.Lparen, len("(")),
+				tok(n.Rparen, len(")")))
+		}
+
+	case *ast.GoStmt:
+		children = append(children,
+			tok(n.Go, len("go")))
+
+	case *ast.Ident:
+		children = append(children,
+			tok(n.NamePos, len(n.Name)))
+
+	case *ast.IfStmt:
+		children = append(children,
+			tok(n.If, len("if")))
+
+	case *ast.ImportSpec:
+		// TODO(adonovan): ImportSpec.{Doc,EndPos}?
+
+	case *ast.IncDecStmt:
+		children = append(children,
+			tok(n.TokPos, len(n.Tok.String())))
+
+	case *ast.IndexExpr:
+		children = append(children,
+			tok(n.Lbrack, len("[")),
+			tok(n.Rbrack, len("]")))
+
+	case *ast.IndexListExpr:
+		children = append(children,
+			tok(n.Lbrack, len("[")),
+			tok(n.Rbrack, len("]")))
+
+	case *ast.InterfaceType:
+		children = append(children,
+			tok(n.Interface, len("interface")))
+
+	case *ast.KeyValueExpr:
+		children = append(children,
+			tok(n.Colon, len(":")))
+
+	case *ast.LabeledStmt:
+		children = append(children,
+			tok(n.Colon, len(":")))
+
+	case *ast.MapType:
+		children = append(children,
+			tok(n.Map, len("map")))
+
+	case *ast.ParenExpr:
+		children = append(children,
+			tok(n.Lparen, len("(")),
+			tok(n.Rparen, len(")")))
+
+	case *ast.RangeStmt:
+		children = append(children,
+			tok(n.For, len("for")),
+			tok(n.TokPos, len(n.Tok.String())))
+
+	case *ast.ReturnStmt:
+		children = append(children,
+			tok(n.Return, len("return")))
+
+	case *ast.SelectStmt:
+		children = append(children,
+			tok(n.Select, len("select")))
+
+	case *ast.SelectorExpr:
+		// nop
+
+	case *ast.SendStmt:
+		children = append(children,
+			tok(n.Arrow, len("<-")))
+
+	case *ast.SliceExpr:
+		children = append(children,
+			tok(n.Lbrack, len("[")),
+			tok(n.Rbrack, len("]")))
+
+	case *ast.StarExpr:
+		children = append(children, tok(n.Star, len("*")))
+
+	case *ast.StructType:
+		children = append(children, tok(n.Struct, len("struct")))
+
+	case *ast.SwitchStmt:
+		children = append(children, tok(n.Switch, len("switch")))
+
+	case *ast.TypeAssertExpr:
+		// The "." token immediately precedes the "(";
+		// TypeAssertExpr records only the paren positions.
+		children = append(children,
+			tok(n.Lparen-1, len(".")),
+			tok(n.Lparen, len("(")),
+			tok(n.Rparen, len(")")))
+
+	case *ast.TypeSpec:
+		// TODO(adonovan): TypeSpec.{Doc,Comment}?
+
+	case *ast.TypeSwitchStmt:
+		children = append(children, tok(n.Switch, len("switch")))
+
+	case *ast.UnaryExpr:
+		children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+	case *ast.ValueSpec:
+		// TODO(adonovan): ValueSpec.{Doc,Comment}?
+
+	case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
+		// nop
+	}
+
+	// TODO(adonovan): opt: merge the logic of ast.Inspect() into
+	// the switch above so we can make interleaved callbacks for
+	// both Nodes and Tokens in the right order and avoid the need
+	// to sort.
+	sort.Sort(byPos(children))
+
+	return children
+}
+
+// byPos implements sort.Interface, ordering ast.Nodes by start position.
+type byPos []ast.Node
+
+func (sl byPos) Len() int {
+	return len(sl)
+}
+func (sl byPos) Less(i, j int) bool {
+	return sl[i].Pos() < sl[j].Pos()
+}
+func (sl byPos) Swap(i, j int) {
+	sl[i], sl[j] = sl[j], sl[i]
+}
+
+// NodeDescription returns a description of the concrete type of n suitable
+// for a user interface.
+//
+// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
+// StarExpr) we could be much more specific given the path to the AST
+// root.  Perhaps we should do that.
+func NodeDescription(n ast.Node) string {
+	switch n := n.(type) {
+	case *ast.ArrayType:
+		return "array type"
+	case *ast.AssignStmt:
+		return "assignment"
+	case *ast.BadDecl:
+		return "bad declaration"
+	case *ast.BadExpr:
+		return "bad expression"
+	case *ast.BadStmt:
+		return "bad statement"
+	case *ast.BasicLit:
+		return "basic literal"
+	case *ast.BinaryExpr:
+		return fmt.Sprintf("binary %s operation", n.Op)
+	case *ast.BlockStmt:
+		return "block"
+	case *ast.BranchStmt:
+		switch n.Tok {
+		case token.BREAK:
+			return "break statement"
+		case token.CONTINUE:
+			return "continue statement"
+		case token.GOTO:
+			return "goto statement"
+		case token.FALLTHROUGH:
+			return "fall-through statement"
+		}
+	case *ast.CallExpr:
+		// A single-argument call may actually be a type conversion;
+		// without type information we cannot distinguish the two.
+		if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
+			return "function call (or conversion)"
+		}
+		return "function call"
+	case *ast.CaseClause:
+		return "case clause"
+	case *ast.ChanType:
+		return "channel type"
+	case *ast.CommClause:
+		return "communication clause"
+	case *ast.Comment:
+		return "comment"
+	case *ast.CommentGroup:
+		return "comment group"
+	case *ast.CompositeLit:
+		return "composite literal"
+	case *ast.DeclStmt:
+		return NodeDescription(n.Decl) + " statement"
+	case *ast.DeferStmt:
+		return "defer statement"
+	case *ast.Ellipsis:
+		return "ellipsis"
+	case *ast.EmptyStmt:
+		return "empty statement"
+	case *ast.ExprStmt:
+		return "expression statement"
+	case *ast.Field:
+		// Can be any of these:
+		// struct {x, y int}  -- struct field(s)
+		// struct {T}         -- anon struct field
+		// interface {I}      -- interface embedding
+		// interface {f()}    -- interface method
+		// func (A) func(B) C -- receiver, param(s), result(s)
+		return "field/method/parameter"
+	case *ast.FieldList:
+		return "field/method/parameter list"
+	case *ast.File:
+		return "source file"
+	case *ast.ForStmt:
+		return "for loop"
+	case *ast.FuncDecl:
+		return "function declaration"
+	case *ast.FuncLit:
+		return "function literal"
+	case *ast.FuncType:
+		return "function type"
+	case *ast.GenDecl:
+		switch n.Tok {
+		case token.IMPORT:
+			return "import declaration"
+		case token.CONST:
+			return "constant declaration"
+		case token.TYPE:
+			return "type declaration"
+		case token.VAR:
+			return "variable declaration"
+		}
+	case *ast.GoStmt:
+		return "go statement"
+	case *ast.Ident:
+		return "identifier"
+	case *ast.IfStmt:
+		return "if statement"
+	case *ast.ImportSpec:
+		return "import specification"
+	case *ast.IncDecStmt:
+		if n.Tok == token.INC {
+			return "increment statement"
+		}
+		return "decrement statement"
+	case *ast.IndexExpr:
+		return "index expression"
+	case *ast.IndexListExpr:
+		return "index list expression"
+	case *ast.InterfaceType:
+		return "interface type"
+	case *ast.KeyValueExpr:
+		return "key/value association"
+	case *ast.LabeledStmt:
+		return "statement label"
+	case *ast.MapType:
+		return "map type"
+	case *ast.Package:
+		return "package"
+	case *ast.ParenExpr:
+		return "parenthesized " + NodeDescription(n.X)
+	case *ast.RangeStmt:
+		return "range loop"
+	case *ast.ReturnStmt:
+		return "return statement"
+	case *ast.SelectStmt:
+		return "select statement"
+	case *ast.SelectorExpr:
+		return "selector"
+	case *ast.SendStmt:
+		return "channel send"
+	case *ast.SliceExpr:
+		return "slice expression"
+	case *ast.StarExpr:
+		return "*-operation" // load/store expr or pointer type
+	case *ast.StructType:
+		return "struct type"
+	case *ast.SwitchStmt:
+		return "switch statement"
+	case *ast.TypeAssertExpr:
+		return "type assertion"
+	case *ast.TypeSpec:
+		return "type specification"
+	case *ast.TypeSwitchStmt:
+		return "type switch"
+	case *ast.UnaryExpr:
+		return fmt.Sprintf("unary %s operation", n.Op)
+	case *ast.ValueSpec:
+		return "value specification"
+
+	}
+	// Every concrete ast.Node type is handled above, so this
+	// indicates a caller bug (or a new node type in go/ast).
+	panic(fmt.Sprintf("unexpected node type: %T", n))
+}
+
+// is reports whether the dynamic type of x is T.
+func is[T any](x any) bool {
+	_, ok := x.(T)
+	return ok
+}
--- /dev/null
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package astutil contains common utilities for working with the Go AST.
+package astutil // import "golang.org/x/tools/go/ast/astutil"
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "slices"
+ "strconv"
+ "strings"
+)
+
+// AddImport adds the import path to the file f, if absent.
+// The import is added without a local name (see AddNamedImport).
+func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
+	return AddNamedImport(fset, f, "", path)
+}
+
+// AddNamedImport adds the import with the given name and path to the file f, if absent.
+// If name is not empty, it is used to rename the import.
+//
+// For example, calling
+//
+//	AddNamedImport(fset, f, "pathpkg", "path")
+//
+// adds
+//
+//	import pathpkg "path"
+func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
+	if imports(f, name, path) {
+		return false
+	}
+
+	newImport := &ast.ImportSpec{
+		Path: &ast.BasicLit{
+			Kind:  token.STRING,
+			Value: strconv.Quote(path),
+		},
+	}
+	if name != "" {
+		newImport.Name = &ast.Ident{Name: name}
+	}
+
+	// Find an import decl to add to.
+	// The goal is to find an existing import
+	// whose import path has the longest shared
+	// prefix with path.
+	var (
+		bestMatch  = -1          // length of longest shared prefix
+		lastImport = -1          // index in f.Decls of the file's final import decl
+		impDecl    *ast.GenDecl  // import decl containing the best match
+		impIndex   = -1          // spec index in impDecl containing the best match
+
+		isThirdPartyPath = isThirdParty(path)
+	)
+	for i, decl := range f.Decls {
+		gen, ok := decl.(*ast.GenDecl)
+		if ok && gen.Tok == token.IMPORT {
+			lastImport = i
+			// Do not add to import "C", to avoid disrupting the
+			// association with its doc comment, breaking cgo.
+			if declImports(gen, "C") {
+				continue
+			}
+
+			// Match an empty import decl if that's all that is available.
+			if len(gen.Specs) == 0 && bestMatch == -1 {
+				impDecl = gen
+			}
+
+			// Compute longest shared prefix with imports in this group and find best
+			// matched import spec.
+			// 1. Always prefer import spec with longest shared prefix.
+			// 2. While match length is 0,
+			// - for stdlib package: prefer first import spec.
+			// - for third party package: prefer first third party import spec.
+			// We cannot use last import spec as best match for third party package
+			// because grouped imports are usually placed last by goimports -local
+			// flag.
+			// See issue #19190.
+			seenAnyThirdParty := false
+			for j, spec := range gen.Specs {
+				impspec := spec.(*ast.ImportSpec)
+				p := importPath(impspec)
+				n := matchLen(p, path)
+				if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
+					bestMatch = n
+					impDecl = gen
+					impIndex = j
+				}
+				seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
+			}
+		}
+	}
+
+	// If no import decl found, add one after the last import.
+	if impDecl == nil {
+		impDecl = &ast.GenDecl{
+			Tok: token.IMPORT,
+		}
+		if lastImport >= 0 {
+			impDecl.TokPos = f.Decls[lastImport].End()
+		} else {
+			// There are no existing imports.
+			// Our new import, preceded by a blank line,  goes after the package declaration
+			// and after the comment, if any, that starts on the same line as the
+			// package declaration.
+			impDecl.TokPos = f.Package
+
+			file := fset.File(f.Package)
+			pkgLine := file.Line(f.Package)
+			for _, c := range f.Comments {
+				if file.Line(c.Pos()) > pkgLine {
+					break
+				}
+				// +2 for a blank line
+				impDecl.TokPos = c.End() + 2
+			}
+		}
+		// Shift Decls right by one and insert the new decl
+		// immediately after the last import (or at the front).
+		f.Decls = append(f.Decls, nil)
+		copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
+		f.Decls[lastImport+1] = impDecl
+	}
+
+	// Insert new import at insertAt.
+	insertAt := 0
+	if impIndex >= 0 {
+		// insert after the found import
+		insertAt = impIndex + 1
+	}
+	impDecl.Specs = append(impDecl.Specs, nil)
+	copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
+	impDecl.Specs[insertAt] = newImport
+	pos := impDecl.Pos()
+	if insertAt > 0 {
+		// If there is a comment after an existing import, preserve the comment
+		// position by adding the new import after the comment.
+		if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
+			pos = spec.Comment.End()
+		} else {
+			// Assign same position as the previous import,
+			// so that the sorter sees it as being in the same block.
+			pos = impDecl.Specs[insertAt-1].Pos()
+		}
+	}
+	if newImport.Name != nil {
+		newImport.Name.NamePos = pos
+	}
+	newImport.Path.ValuePos = pos
+	newImport.EndPos = pos
+
+	// Clean up parens. impDecl contains at least one spec.
+	if len(impDecl.Specs) == 1 {
+		// Remove unneeded parens.
+		impDecl.Lparen = token.NoPos
+	} else if !impDecl.Lparen.IsValid() {
+		// impDecl needs parens added.
+		impDecl.Lparen = impDecl.Specs[0].Pos()
+	}
+
+	f.Imports = append(f.Imports, newImport)
+
+	if len(f.Decls) <= 1 {
+		return true
+	}
+
+	// Merge all the import declarations into the first one.
+	var first *ast.GenDecl
+	for i := 0; i < len(f.Decls); i++ {
+		decl := f.Decls[i]
+		gen, ok := decl.(*ast.GenDecl)
+		if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
+			continue
+		}
+		if first == nil {
+			first = gen
+			continue // Don't touch the first one.
+		}
+		// We now know there is more than one package in this import
+		// declaration. Ensure that it ends up parenthesized.
+		first.Lparen = first.Pos()
+		// Move the imports of the other import declaration to the first one.
+		for _, spec := range gen.Specs {
+			spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
+			first.Specs = append(first.Specs, spec)
+		}
+		f.Decls = slices.Delete(f.Decls, i, i+1)
+		i-- // re-examine index i: Decls shifted left after deletion
+	}
+
+	return true
+}
+
+// isThirdParty reports whether importPath looks like a third-party
+// (non-stdlib) package path.
+func isThirdParty(importPath string) bool {
+	// Third party package import path usually contains "." (".com", ".org", ...)
+	// This logic is taken from golang.org/x/tools/imports package.
+	// Note: dot-free module paths (e.g. "example/foo") are
+	// misclassified as stdlib by this heuristic.
+	return strings.Contains(importPath, ".")
+}
+
+// DeleteImport deletes the import path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+// Only unnamed imports are matched (see DeleteNamedImport).
+func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
+	return DeleteNamedImport(fset, f, "", path)
+}
+
+// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
+	var (
+		delspecs    = make(map[*ast.ImportSpec]bool)   // import specs removed from f.Decls
+		delcomments = make(map[*ast.CommentGroup]bool) // comments attached to removed specs
+	)
+
+	// Find the import nodes that import path, if any.
+	for i := 0; i < len(f.Decls); i++ {
+		gen, ok := f.Decls[i].(*ast.GenDecl)
+		if !ok || gen.Tok != token.IMPORT {
+			continue
+		}
+		for j := 0; j < len(gen.Specs); j++ {
+			impspec := gen.Specs[j].(*ast.ImportSpec)
+			if importName(impspec) != name || importPath(impspec) != path {
+				continue
+			}
+
+			// We found an import spec that imports path.
+			// Delete it.
+			delspecs[impspec] = true
+			deleted = true
+			gen.Specs = slices.Delete(gen.Specs, j, j+1)
+
+			// If this was the last import spec in this decl,
+			// delete the decl, too.
+			if len(gen.Specs) == 0 {
+				f.Decls = slices.Delete(f.Decls, i, i+1)
+				i--
+				break
+			} else if len(gen.Specs) == 1 {
+				// The decl now has a single spec left; its doc
+				// and trailing comments would otherwise dangle.
+				if impspec.Doc != nil {
+					delcomments[impspec.Doc] = true
+				}
+				if impspec.Comment != nil {
+					delcomments[impspec.Comment] = true
+				}
+				for _, cg := range f.Comments {
+					// Found comment on the same line as the import spec.
+					if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
+						delcomments[cg] = true
+						break
+					}
+				}
+
+				spec := gen.Specs[0].(*ast.ImportSpec)
+
+				// Move the documentation right after the import decl.
+				if spec.Doc != nil {
+					for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
+						fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+					}
+				}
+				for _, cg := range f.Comments {
+					if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
+						for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
+							fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+						}
+						break
+					}
+				}
+			}
+			if j > 0 {
+				lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
+				lastLine := fset.PositionFor(lastImpspec.Path.ValuePos, false).Line
+				line := fset.PositionFor(impspec.Path.ValuePos, false).Line
+
+				// We deleted an entry but now there may be
+				// a blank line-sized hole where the import was.
+				if line-lastLine > 1 || !gen.Rparen.IsValid() {
+					// There was a blank line immediately preceding the deleted import,
+					// so there's no need to close the hole.
+					// The right parenthesis is invalid after AddImport to an import statement
+					// without parenthesis.
+					// Do nothing.
+				} else if line != fset.File(gen.Rparen).LineCount() {
+					// There was no blank line. Close the hole.
+					fset.File(gen.Rparen).MergeLine(line)
+				}
+			}
+			j-- // re-examine index j: Specs shifted left after deletion
+		}
+	}
+
+	// Delete imports from f.Imports.
+	before := len(f.Imports)
+	f.Imports = slices.DeleteFunc(f.Imports, func(imp *ast.ImportSpec) bool {
+		_, ok := delspecs[imp]
+		return ok
+	})
+	if len(f.Imports)+len(delspecs) != before {
+		// This can happen when the AST is invalid (i.e. imports differ between f.Decls and f.Imports).
+		panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
+	}
+
+	// Delete comments from f.Comments.
+	f.Comments = slices.DeleteFunc(f.Comments, func(cg *ast.CommentGroup) bool {
+		_, ok := delcomments[cg]
+		return ok
+	})
+
+	return
+}
+
+// RewriteImport rewrites any import of path oldPath to path newPath.
+// It reports whether any import was rewritten.
+func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
+	for _, imp := range f.Imports {
+		if importPath(imp) == oldPath {
+			rewrote = true
+			// record old End, because the default is to compute
+			// it using the length of imp.Path.Value.
+			imp.EndPos = imp.End()
+			imp.Path.Value = strconv.Quote(newPath)
+		}
+	}
+	return
+}
+
+// UsesImport reports whether a given import is used.
+// The provided File must have been parsed with syntactic object resolution
+// (not using go/parser.SkipObjectResolution).
+func UsesImport(f *ast.File, path string) (used bool) {
+	if f.Scope == nil {
+		panic("file f was not parsed with syntactic object resolution")
+	}
+	spec := importSpec(f, path)
+	if spec == nil {
+		return
+	}
+
+	name := spec.Name.String()
+	switch name {
+	case "<nil>":
+		// If the package name is not explicitly specified,
+		// make an educated guess. This is not guaranteed to be correct.
+		lastSlash := strings.LastIndex(path, "/")
+		if lastSlash == -1 {
+			name = path
+		} else {
+			name = path[lastSlash+1:]
+		}
+	case "_", ".":
+		// Not sure if this import is used - err on the side of caution.
+		return true
+	}
+
+	// Look for any selector expression rooted at a top-level
+	// (unresolved) identifier with the package's name.
+	ast.Walk(visitFn(func(n ast.Node) {
+		sel, ok := n.(*ast.SelectorExpr)
+		if ok && isTopName(sel.X, name) {
+			used = true
+		}
+	}), f)
+
+	return
+}
+
+// visitFn adapts a plain function to the ast.Visitor interface.
+type visitFn func(node ast.Node)
+
+// Visit calls fn(node) and keeps using fn for every descendant.
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+	fn(node)
+	return fn
+}
+
+// imports reports whether f has an import with the specified name and path.
+func imports(f *ast.File, name, path string) bool {
+	for _, s := range f.Imports {
+		if importName(s) == name && importPath(s) == path {
+			return true
+		}
+	}
+	return false
+}
+
+// importSpec returns the import spec if f imports path,
+// or nil otherwise. If f imports path multiple times,
+// the first matching spec is returned.
+func importSpec(f *ast.File, path string) *ast.ImportSpec {
+	for _, s := range f.Imports {
+		if importPath(s) == path {
+			return s
+		}
+	}
+	return nil
+}
+
+// importName returns the name of s,
+// or "" if the import is not named.
+func importName(s *ast.ImportSpec) string {
+	if s.Name == nil {
+		return ""
+	}
+	return s.Name.Name
+}
+
+// importPath returns the unquoted import path of s,
+// or "" if the path is not properly quoted.
+func importPath(s *ast.ImportSpec) string {
+	t, err := strconv.Unquote(s.Path.Value)
+	if err != nil {
+		return ""
+	}
+	return t
+}
+
+// declImports reports whether gen contains an import of path.
+// It returns false if gen is not an import declaration at all.
+func declImports(gen *ast.GenDecl, path string) bool {
+	if gen.Tok != token.IMPORT {
+		return false
+	}
+	for _, spec := range gen.Specs {
+		impspec := spec.(*ast.ImportSpec)
+		if importPath(impspec) == path {
+			return true
+		}
+	}
+	return false
+}
+
+// matchLen returns the length of the longest path segment prefix shared by x and y.
+// The length is measured in complete '/'-separated segments, not bytes.
+func matchLen(x, y string) int {
+	n := 0
+	for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
+		if x[i] == '/' {
+			n++
+		}
+	}
+	return n
+}
+
+// isTopName returns true if n is a top-level unresolved identifier with the given name.
+// (id.Obj == nil means the parser did not resolve it to a local declaration.)
+func isTopName(n ast.Expr, name string) bool {
+	id, ok := n.(*ast.Ident)
+	return ok && id.Name == name && id.Obj == nil
+}
+
+// Imports returns the file imports grouped by paragraph.
+// A paragraph boundary is a gap of more than one source line
+// between consecutive import specs.
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
+	var groups [][]*ast.ImportSpec
+
+	for _, decl := range f.Decls {
+		genDecl, ok := decl.(*ast.GenDecl)
+		if !ok || genDecl.Tok != token.IMPORT {
+			// Imports must precede all other declarations,
+			// so stop at the first non-import decl.
+			break
+		}
+
+		group := []*ast.ImportSpec{}
+
+		var lastLine int
+		for _, spec := range genDecl.Specs {
+			importSpec := spec.(*ast.ImportSpec)
+			pos := importSpec.Path.ValuePos
+			line := fset.Position(pos).Line
+			if lastLine > 0 && pos > 0 && line-lastLine > 1 {
+				// Blank line: start a new paragraph.
+				groups = append(groups, group)
+				group = []*ast.ImportSpec{}
+			}
+			group = append(group, importSpec)
+			lastLine = line
+		}
+		groups = append(groups, group)
+	}
+
+	return groups
+}
--- /dev/null
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import (
+ "fmt"
+ "go/ast"
+ "reflect"
+ "sort"
+)
+
+// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
+// before and/or after the node's children, using a Cursor describing
+// the current node and providing operations on it.
+//
+// The return value of ApplyFunc controls the syntax tree traversal.
+// See Apply for details.
+// (See the Cursor type for the operations available on the cursor.)
+type ApplyFunc func(*Cursor) bool
+
+// Apply traverses a syntax tree recursively, starting with root,
+// and calling pre and post for each node as described below.
+// Apply returns the syntax tree, possibly modified.
+//
+// If pre is not nil, it is called for each node before the node's
+// children are traversed (pre-order). If pre returns false, no
+// children are traversed, and post is not called for that node.
+//
+// If post is not nil, and a prior call of pre didn't return false,
+// post is called for each node after its children are traversed
+// (post-order). If post returns false, traversal is terminated and
+// Apply returns immediately.
+//
+// Only fields that refer to AST nodes are considered children;
+// i.e., token.Pos, Scopes, Objects, and fields of basic types
+// (strings, etc.) are ignored.
+//
+// Children are traversed in the order in which they appear in the
+// respective node's struct definition. A package's files are
+// traversed in the filenames' alphabetical order.
+func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
+	// Wrap root in a synthetic parent so that root itself can be
+	// replaced via Cursor.Replace.
+	parent := &struct{ ast.Node }{root}
+	defer func() {
+		// Recover only from the abort sentinel (thrown when post
+		// returns false); any other panic is re-raised.
+		if r := recover(); r != nil && r != abort {
+			panic(r)
+		}
+		result = parent.Node
+	}()
+	a := &application{pre: pre, post: post}
+	a.apply(parent, "Node", nil, root)
+	return
+}
+
+// abort is compared by identity, not value; see the recover in Apply.
+var abort = new(int) // singleton, to signal termination of Apply
+
+// A Cursor describes a node encountered during Apply.
+// Information about the node and its parent is available
+// from the Node, Parent, Name, and Index methods.
+//
+// If p is a variable of type and value of the current parent node
+// c.Parent(), and f is the field identifier with name c.Name(),
+// the following invariants hold:
+//
+//	p.f == c.Node() if c.Index() < 0
+//	p.f[c.Index()] == c.Node() if c.Index() >= 0
+//
+// The methods Replace, Delete, InsertBefore, and InsertAfter
+// can be used to change the AST without disrupting Apply.
+//
+// This type is not to be confused with [inspector.Cursor] from
+// package [golang.org/x/tools/go/ast/inspector], which provides
+// stateless navigation of immutable syntax trees.
+type Cursor struct {
+	parent ast.Node  // enclosing node
+	name   string    // name of parent's field (or map key) holding node
+	iter   *iterator // valid if non-nil; shared iteration state for slice fields
+	node   ast.Node  // current node
+}
+
+// Node returns the current Node.
+func (c *Cursor) Node() ast.Node { return c.node }
+
+// Parent returns the parent of the current Node.
+func (c *Cursor) Parent() ast.Node { return c.parent }
+
+// Name returns the name of the parent Node field that contains the current Node.
+// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
+// the filename for the current Node.
+func (c *Cursor) Name() string { return c.name }
+
+// Index reports the index >= 0 of the current Node in the slice of Nodes that
+// contains it, or a value < 0 if the current Node is not part of a slice.
+// The index of the current node changes if InsertBefore is called while
+// processing the current node.
+// (c.iter is non-nil only while iterating over a slice field; see applyList.)
+func (c *Cursor) Index() int {
+	if c.iter != nil {
+		return c.iter.index
+	}
+	return -1
+}
+
+// field returns the current node's parent field value.
+// (reflect.Indirect handles the pointer-to-struct parent nodes.)
+func (c *Cursor) field() reflect.Value {
+	return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
+}
+
+// Replace replaces the current Node with n.
+// The replacement node is not walked by Apply.
+func (c *Cursor) Replace(n ast.Node) {
+	if _, ok := c.node.(*ast.File); ok {
+		// Package files live in a map keyed by filename,
+		// not in a struct field, so they need special handling.
+		file, ok := n.(*ast.File)
+		if !ok {
+			panic("attempt to replace *ast.File with non-*ast.File")
+		}
+		c.parent.(*ast.Package).Files[c.name] = file
+		return
+	}
+
+	v := c.field()
+	if i := c.Index(); i >= 0 {
+		v = v.Index(i)
+	}
+	v.Set(reflect.ValueOf(n))
+}
+
+// Delete deletes the current Node from its containing slice.
+// If the current Node is not part of a slice, Delete panics.
+// As a special case, if the current node is a package file,
+// Delete removes it from the package's Files map.
+func (c *Cursor) Delete() {
+	if _, ok := c.node.(*ast.File); ok {
+		delete(c.parent.(*ast.Package).Files, c.name)
+		return
+	}
+
+	i := c.Index()
+	if i < 0 {
+		panic("Delete node not contained in slice")
+	}
+	v := c.field()
+	l := v.Len()
+	// Shift the remaining elements left and zero the vacated tail
+	// so the slice no longer retains the deleted node.
+	reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
+	v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
+	v.SetLen(l - 1)
+	c.iter.step-- // compensate: the next element now occupies index i
+}
+
+// InsertAfter inserts n after the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertAfter panics.
+// Apply does not walk n.
+func (c *Cursor) InsertAfter(n ast.Node) {
+	i := c.Index()
+	if i < 0 {
+		panic("InsertAfter node not contained in slice")
+	}
+	v := c.field()
+	// Grow by one, then shift the tail right to open a slot at i+1.
+	v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+	l := v.Len()
+	reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
+	v.Index(i + 1).Set(reflect.ValueOf(n))
+	c.iter.step++ // skip the inserted (unwalked) node on the next advance
+}
+
+// InsertBefore inserts n before the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertBefore panics.
+// Apply will not walk n.
+func (c *Cursor) InsertBefore(n ast.Node) {
+	i := c.Index()
+	if i < 0 {
+		panic("InsertBefore node not contained in slice")
+	}
+	v := c.field()
+	// Grow by one, then shift the tail right to open a slot at i.
+	v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+	l := v.Len()
+	reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
+	v.Index(i).Set(reflect.ValueOf(n))
+	c.iter.index++ // keep the cursor on the current node, now shifted right by one
+}
+
+// application carries all the shared data so we can pass it around cheaply.
+// cursor and iter are reused across recursive calls (see apply and
+// applyList) to avoid per-node heap allocations.
+type application struct {
+	pre, post ApplyFunc
+	cursor    Cursor
+	iter      iterator
+}
+
+// apply invokes a.pre and a.post (if non-nil) for node n, and walks
+// n's children by calling itself recursively. parent, name, and iter
+// identify where n is stored within its parent, so that the Cursor
+// methods can mutate the tree in place.
+func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
+	// convert typed nil into untyped nil
+	if v := reflect.ValueOf(n); v.Kind() == reflect.Pointer && v.IsNil() {
+		n = nil
+	}
+
+	// avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
+	saved := a.cursor
+	a.cursor.parent = parent
+	a.cursor.name = name
+	a.cursor.iter = iter
+	a.cursor.node = n
+
+	if a.pre != nil && !a.pre(&a.cursor) {
+		a.cursor = saved
+		return
+	}
+
+	// walk children
+	// (the order of the cases matches the order of the corresponding node types in go/ast)
+	switch n := n.(type) {
+	case nil:
+		// nothing to do
+
+	// Comments and fields
+	case *ast.Comment:
+		// nothing to do
+
+	case *ast.CommentGroup:
+		if n != nil {
+			a.applyList(n, "List")
+		}
+
+	case *ast.Field:
+		a.apply(n, "Doc", nil, n.Doc)
+		a.applyList(n, "Names")
+		a.apply(n, "Type", nil, n.Type)
+		a.apply(n, "Tag", nil, n.Tag)
+		a.apply(n, "Comment", nil, n.Comment)
+
+	case *ast.FieldList:
+		a.applyList(n, "List")
+
+	// Expressions
+	case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
+		// nothing to do
+
+	case *ast.Ellipsis:
+		a.apply(n, "Elt", nil, n.Elt)
+
+	case *ast.FuncLit:
+		a.apply(n, "Type", nil, n.Type)
+		a.apply(n, "Body", nil, n.Body)
+
+	case *ast.CompositeLit:
+		a.apply(n, "Type", nil, n.Type)
+		a.applyList(n, "Elts")
+
+	case *ast.ParenExpr:
+		a.apply(n, "X", nil, n.X)
+
+	case *ast.SelectorExpr:
+		a.apply(n, "X", nil, n.X)
+		a.apply(n, "Sel", nil, n.Sel)
+
+	case *ast.IndexExpr:
+		a.apply(n, "X", nil, n.X)
+		a.apply(n, "Index", nil, n.Index)
+
+	case *ast.IndexListExpr:
+		a.apply(n, "X", nil, n.X)
+		a.applyList(n, "Indices")
+
+	case *ast.SliceExpr:
+		a.apply(n, "X", nil, n.X)
+		a.apply(n, "Low", nil, n.Low)
+		a.apply(n, "High", nil, n.High)
+		a.apply(n, "Max", nil, n.Max)
+
+	case *ast.TypeAssertExpr:
+		a.apply(n, "X", nil, n.X)
+		a.apply(n, "Type", nil, n.Type)
+
+	case *ast.CallExpr:
+		a.apply(n, "Fun", nil, n.Fun)
+		a.applyList(n, "Args")
+
+	case *ast.StarExpr:
+		a.apply(n, "X", nil, n.X)
+
+	case *ast.UnaryExpr:
+		a.apply(n, "X", nil, n.X)
+
+	case *ast.BinaryExpr:
+		a.apply(n, "X", nil, n.X)
+		a.apply(n, "Y", nil, n.Y)
+
+	case *ast.KeyValueExpr:
+		a.apply(n, "Key", nil, n.Key)
+		a.apply(n, "Value", nil, n.Value)
+
+	// Types
+	case *ast.ArrayType:
+		a.apply(n, "Len", nil, n.Len)
+		a.apply(n, "Elt", nil, n.Elt)
+
+	case *ast.StructType:
+		a.apply(n, "Fields", nil, n.Fields)
+
+	case *ast.FuncType:
+		if tparams := n.TypeParams; tparams != nil {
+			a.apply(n, "TypeParams", nil, tparams)
+		}
+		a.apply(n, "Params", nil, n.Params)
+		a.apply(n, "Results", nil, n.Results)
+
+	case *ast.InterfaceType:
+		a.apply(n, "Methods", nil, n.Methods)
+
+	case *ast.MapType:
+		a.apply(n, "Key", nil, n.Key)
+		a.apply(n, "Value", nil, n.Value)
+
+	case *ast.ChanType:
+		a.apply(n, "Value", nil, n.Value)
+
+	// Statements
+	case *ast.BadStmt:
+		// nothing to do
+
+	case *ast.DeclStmt:
+		a.apply(n, "Decl", nil, n.Decl)
+
+	case *ast.EmptyStmt:
+		// nothing to do
+
+	case *ast.LabeledStmt:
+		a.apply(n, "Label", nil, n.Label)
+		a.apply(n, "Stmt", nil, n.Stmt)
+
+	case *ast.ExprStmt:
+		a.apply(n, "X", nil, n.X)
+
+	case *ast.SendStmt:
+		a.apply(n, "Chan", nil, n.Chan)
+		a.apply(n, "Value", nil, n.Value)
+
+	case *ast.IncDecStmt:
+		a.apply(n, "X", nil, n.X)
+
+	case *ast.AssignStmt:
+		a.applyList(n, "Lhs")
+		a.applyList(n, "Rhs")
+
+	case *ast.GoStmt:
+		a.apply(n, "Call", nil, n.Call)
+
+	case *ast.DeferStmt:
+		a.apply(n, "Call", nil, n.Call)
+
+	case *ast.ReturnStmt:
+		a.applyList(n, "Results")
+
+	case *ast.BranchStmt:
+		a.apply(n, "Label", nil, n.Label)
+
+	case *ast.BlockStmt:
+		a.applyList(n, "List")
+
+	case *ast.IfStmt:
+		a.apply(n, "Init", nil, n.Init)
+		a.apply(n, "Cond", nil, n.Cond)
+		a.apply(n, "Body", nil, n.Body)
+		a.apply(n, "Else", nil, n.Else)
+
+	case *ast.CaseClause:
+		a.applyList(n, "List")
+		a.applyList(n, "Body")
+
+	case *ast.SwitchStmt:
+		a.apply(n, "Init", nil, n.Init)
+		a.apply(n, "Tag", nil, n.Tag)
+		a.apply(n, "Body", nil, n.Body)
+
+	case *ast.TypeSwitchStmt:
+		a.apply(n, "Init", nil, n.Init)
+		a.apply(n, "Assign", nil, n.Assign)
+		a.apply(n, "Body", nil, n.Body)
+
+	case *ast.CommClause:
+		a.apply(n, "Comm", nil, n.Comm)
+		a.applyList(n, "Body")
+
+	case *ast.SelectStmt:
+		a.apply(n, "Body", nil, n.Body)
+
+	case *ast.ForStmt:
+		a.apply(n, "Init", nil, n.Init)
+		a.apply(n, "Cond", nil, n.Cond)
+		a.apply(n, "Post", nil, n.Post)
+		a.apply(n, "Body", nil, n.Body)
+
+	case *ast.RangeStmt:
+		a.apply(n, "Key", nil, n.Key)
+		a.apply(n, "Value", nil, n.Value)
+		a.apply(n, "X", nil, n.X)
+		a.apply(n, "Body", nil, n.Body)
+
+	// Declarations
+	case *ast.ImportSpec:
+		a.apply(n, "Doc", nil, n.Doc)
+		a.apply(n, "Name", nil, n.Name)
+		a.apply(n, "Path", nil, n.Path)
+		a.apply(n, "Comment", nil, n.Comment)
+
+	case *ast.ValueSpec:
+		a.apply(n, "Doc", nil, n.Doc)
+		a.applyList(n, "Names")
+		a.apply(n, "Type", nil, n.Type)
+		a.applyList(n, "Values")
+		a.apply(n, "Comment", nil, n.Comment)
+
+	case *ast.TypeSpec:
+		a.apply(n, "Doc", nil, n.Doc)
+		a.apply(n, "Name", nil, n.Name)
+		if tparams := n.TypeParams; tparams != nil {
+			a.apply(n, "TypeParams", nil, tparams)
+		}
+		a.apply(n, "Type", nil, n.Type)
+		a.apply(n, "Comment", nil, n.Comment)
+
+	case *ast.BadDecl:
+		// nothing to do
+
+	case *ast.GenDecl:
+		a.apply(n, "Doc", nil, n.Doc)
+		a.applyList(n, "Specs")
+
+	case *ast.FuncDecl:
+		a.apply(n, "Doc", nil, n.Doc)
+		a.apply(n, "Recv", nil, n.Recv)
+		a.apply(n, "Name", nil, n.Name)
+		a.apply(n, "Type", nil, n.Type)
+		a.apply(n, "Body", nil, n.Body)
+
+	// Files and packages
+	case *ast.File:
+		a.apply(n, "Doc", nil, n.Doc)
+		a.apply(n, "Name", nil, n.Name)
+		a.applyList(n, "Decls")
+		// Don't walk n.Comments; they have either been walked already if
+		// they are Doc comments, or they can be easily walked explicitly.
+
+	case *ast.Package:
+		// collect and sort names for reproducible behavior
+		var names []string
+		for name := range n.Files {
+			names = append(names, name)
+		}
+		sort.Strings(names)
+		for _, name := range names {
+			a.apply(n, name, nil, n.Files[name])
+		}
+
+	default:
+		panic(fmt.Sprintf("Apply: unexpected node type %T", n))
+	}
+
+	if a.post != nil && !a.post(&a.cursor) {
+		panic(abort) // unwound and suppressed by Apply's deferred recover
+	}
+
+	a.cursor = saved
+}
+
+// An iterator controls iteration over a slice of nodes.
+// step is the amount by which index advances after each element
+// (normally 1; decremented by Cursor.Delete and incremented by
+// Cursor.InsertAfter to account for the mutated slice).
+type iterator struct {
+	index, step int
+}
+
+// applyList applies a.apply to each element of the slice stored in the
+// field of parent named name, tolerating Cursor mutations (Delete,
+// InsertBefore, InsertAfter) during the iteration.
+func (a *application) applyList(parent ast.Node, name string) {
+	// avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
+	saved := a.iter
+	a.iter.index = 0
+	for {
+		// must reload parent.name each time, since cursor modifications might change it
+		v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
+		if a.iter.index >= v.Len() {
+			break
+		}
+
+		// element x may be nil in a bad AST - be cautious
+		var x ast.Node
+		if e := v.Index(a.iter.index); e.IsValid() {
+			x = e.Interface().(ast.Node)
+		}
+
+		a.iter.step = 1
+		a.apply(parent, name, &a.iter, x)
+		a.iter.index += a.iter.step
+	}
+	a.iter = saved
+}
--- /dev/null
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import "go/ast"
+
+// Unparen returns e with any enclosing parentheses stripped.
+//
+// Deprecated: use [ast.Unparen].
+//
+//go:fix inline
+func Unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) }
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package generated defines an analyzer whose result makes it
+// convenient to skip diagnostics within generated files.
+package generated
+
+import (
+ "go/ast"
+ "go/token"
+ "reflect"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// Analyzer computes, for each file of the package, whether the file
+// carries a standard code-generation marker (see [ast.IsGenerated]),
+// and returns the answer as a *Result.
+var Analyzer = &analysis.Analyzer{
+	Name:       "generated",
+	Doc:        "detect which Go files are generated",
+	URL:        "https://pkg.go.dev/golang.org/x/tools/internal/analysisinternal/generated",
+	ResultType: reflect.TypeFor[*Result](),
+	Run: func(pass *analysis.Pass) (any, error) {
+		set := make(map[*token.File]bool)
+		for _, file := range pass.Files {
+			if ast.IsGenerated(file) {
+				set[pass.Fset.File(file.FileStart)] = true
+			}
+		}
+		return &Result{fset: pass.Fset, generatedFiles: set}, nil
+	},
+}
+
+// A Result answers queries about which files are generated.
+type Result struct {
+	fset           *token.FileSet        // the pass's FileSet, for mapping Pos to File
+	generatedFiles map[*token.File]bool  // set of files with a generated-code marker
+}
+
+// IsGenerated reports whether the position is within a generated file.
+// (A position outside any recorded file — including token.NoPos —
+// yields false, the map's zero value.)
+func (r *Result) IsGenerated(pos token.Pos) bool {
+	return r.generatedFiles[r.fset.File(pos)]
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package goplsexport provides various backdoors to not-yet-published
+// parts of x/tools that are needed by gopls.
+package goplsexport
+
+import "golang.org/x/tools/go/analysis"
+
+// These hooks are nil until assigned — presumably by the named
+// packages at init time, per the comments below; NOTE(review):
+// confirm against the modernize package.
+var (
+	ErrorsAsTypeModernizer *analysis.Analyzer // = modernize.errorsastypeAnalyzer
+	StdIteratorsModernizer *analysis.Analyzer // = modernize.stditeratorsAnalyzer
+	PlusBuildModernizer    *analysis.Analyzer // = modernize.plusbuildAnalyzer
+)
--- /dev/null
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline
+
+// This file defines the analysis of the callee function.
+
+import (
+ "bytes"
+ "encoding/gob"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "slices"
+ "strings"
+
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/typeparams"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+// A Callee holds information about an inlinable function. Gob-serializable.
+type Callee struct {
+	impl gobCallee // unexported: the serialized form is not part of the public API
+}
+
+// String returns the callee's user-friendly name (see gobCallee.Name).
+func (callee *Callee) String() string { return callee.impl.Name }
+
+// gobCallee is the wire representation of a Callee; all fields must be
+// serializable with encoding/gob so the callee can be recorded as an
+// analysis fact.
+type gobCallee struct {
+	Content []byte // file content, compacted to a single func decl
+
+	// results of type analysis (does not reach go/types data structures)
+	PkgPath          string                 // package path of declaring package
+	Name             string                 // user-friendly name for error messages
+	Unexported       []string               // names of free objects that are unexported
+	FreeRefs         []freeRef              // locations of references to free objects
+	FreeObjs         []object               // descriptions of free objects
+	ValidForCallStmt bool                   // function body is "return expr" where expr is f() or <-ch
+	NumResults       int                    // number of results (according to type, not ast.FieldList)
+	Params           []*paramInfo           // information about parameters (incl. receiver)
+	TypeParams       []*paramInfo           // information about type parameters
+	Results          []*paramInfo           // information about result variables
+	Effects          []int                  // order in which parameters are evaluated (see calleefx)
+	HasDefer         bool                   // uses defer
+	HasBareReturn    bool                   // uses bare return in non-void function
+	Returns          [][]returnOperandFlags // metadata about result expressions for each return
+	Labels           []string               // names of all control labels
+	Falcon           falconResult           // falcon constraint system
+}
+
+// returnOperandFlags records metadata about a single result expression in a return
+// statement. It is a bit set; see the constants below.
+type returnOperandFlags int
+
+const (
+	nonTrivialResult returnOperandFlags = 1 << iota // return operand has non-trivial conversion to result type
+	untypedNilResult                                // return operand is nil literal
+)
+
+// A freeRef records a reference to a free object. Gob-serializable.
+// (This means free relative to the FuncDecl as a whole, i.e. excluding parameters.)
+type freeRef struct {
+	Offset int // byte offset of the reference relative to the FuncDecl
+	Object int // index into Callee.freeObjs
+}
+
+// An object abstracts a free types.Object referenced by the callee. Gob-serializable.
+// (It deliberately records only strings and flags, not go/types values.)
+type object struct {
+	Name    string // Object.Name()
+	Kind    string // one of {var,func,const,type,pkgname,nil,builtin}
+	PkgPath string // path of object's package (or imported package if kind="pkgname")
+	PkgName string // name of object's package (or imported package if kind="pkgname")
+	// TODO(rfindley): should we also track LocalPkgName here? Do we want to
+	// preserve the local package name?
+	ValidPos bool      // Object.Pos().IsValid()
+	Shadow   shadowMap // shadowing info for the object's refs
+}
+
+// AnalyzeCallee analyzes a function that is a candidate for inlining
+// and returns a Callee that describes it. The Callee object, which is
+// serializable, can be passed to one or more subsequent calls to
+// Inline, each with a different Caller.
+//
+// This design allows separate analysis of callers and callees in the
+// golang.org/x/tools/go/analysis framework: the inlining information
+// about a callee can be recorded as a "fact".
+//
+// The content should be the actual input to the compiler, not the
+// apparent source file according to any //line directives that
+// may be present within it.
+func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Package, info *types.Info, decl *ast.FuncDecl, content []byte) (*Callee, error) {
+	checkInfoFields(info)
+
+	// The client is expected to have determined that the callee
+	// is a function with a declaration (not a built-in or var).
+	fn := info.Defs[decl.Name].(*types.Func)
+	sig := fn.Type().(*types.Signature)
+
+	logf("analyzeCallee %v @ %v", fn, fset.PositionFor(decl.Pos(), false))
+
+	// Create user-friendly name ("pkg.Func" or "(pkg.T).Method")
+	var name string
+	if sig.Recv() == nil {
+		name = fmt.Sprintf("%s.%s", fn.Pkg().Name(), fn.Name())
+	} else {
+		name = fmt.Sprintf("(%s).%s", types.TypeString(sig.Recv().Type(), (*types.Package).Name), fn.Name())
+	}
+
+	if decl.Body == nil {
+		return nil, fmt.Errorf("cannot inline function %s as it has no body", name)
+	}
+
+	// Record the location of all free references in the FuncDecl.
+	// (Parameters are not free by this definition.)
+	var (
+		fieldObjs    = fieldObjs(sig)
+		freeObjIndex = make(map[types.Object]int)
+		freeObjs     []object
+		freeRefs     []freeRef // free refs that may need renaming
+		unexported   []string  // free refs to unexported objects, for later error checks
+	)
+	// f and visit are mutually recursive: f handles a node and may call
+	// visit to resume the stack-tracking traversal beneath selected children.
+	var f func(n ast.Node, stack []ast.Node) bool
+	var stack []ast.Node
+	stack = append(stack, decl.Type) // for scope of function itself
+	visit := func(n ast.Node, stack []ast.Node) { astutil.PreorderStack(n, stack, f) }
+	f = func(n ast.Node, stack []ast.Node) bool {
+		switch n := n.(type) {
+		case *ast.SelectorExpr:
+			// Check selections of free fields/methods.
+			if sel, ok := info.Selections[n]; ok &&
+				!within(sel.Obj().Pos(), decl) &&
+				!n.Sel.IsExported() {
+				sym := fmt.Sprintf("(%s).%s", info.TypeOf(n.X), n.Sel.Name)
+				unexported = append(unexported, sym)
+			}
+
+			// Don't recur into SelectorExpr.Sel.
+			visit(n.X, stack)
+			return false
+
+		case *ast.CompositeLit:
+			// Check for struct literals that refer to unexported fields,
+			// whether keyed or unkeyed. (Logic assumes well-typedness.)
+			litType := typeparams.Deref(info.TypeOf(n))
+			if s, ok := typeparams.CoreType(litType).(*types.Struct); ok {
+				if n.Type != nil {
+					visit(n.Type, stack)
+				}
+				for i, elt := range n.Elts {
+					var field *types.Var
+					var value ast.Expr
+					if kv, ok := elt.(*ast.KeyValueExpr); ok {
+						field = info.Uses[kv.Key.(*ast.Ident)].(*types.Var)
+						value = kv.Value
+					} else {
+						field = s.Field(i)
+						value = elt
+					}
+					if !within(field.Pos(), decl) && !field.Exported() {
+						sym := fmt.Sprintf("(%s).%s", litType, field.Name())
+						unexported = append(unexported, sym)
+					}
+
+					// Don't recur into KeyValueExpr.Key.
+					visit(value, stack)
+				}
+				return false
+			}
+
+		case *ast.Ident:
+			if obj, ok := info.Uses[n]; ok {
+				// Methods and fields are handled by SelectorExpr and CompositeLit.
+				if isField(obj) || isMethod(obj) {
+					panic(obj)
+				}
+				// Inv: id is a lexical reference.
+
+				// A reference to an unexported package-level declaration
+				// cannot be inlined into another package.
+				if !n.IsExported() &&
+					obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope() {
+					unexported = append(unexported, n.Name)
+				}
+
+				// Record free reference (incl. self-reference).
+				if obj == fn || !within(obj.Pos(), decl) {
+					objidx, ok := freeObjIndex[obj]
+					if !ok {
+						objidx = len(freeObjIndex)
+						var pkgPath, pkgName string
+						if pn, ok := obj.(*types.PkgName); ok {
+							pkgPath = pn.Imported().Path()
+							pkgName = pn.Imported().Name()
+						} else if obj.Pkg() != nil {
+							pkgPath = obj.Pkg().Path()
+							pkgName = obj.Pkg().Name()
+						}
+						freeObjs = append(freeObjs, object{
+							Name:     obj.Name(),
+							Kind:     objectKind(obj),
+							PkgName:  pkgName,
+							PkgPath:  pkgPath,
+							ValidPos: obj.Pos().IsValid(),
+						})
+						freeObjIndex[obj] = objidx
+					}
+
+					freeObjs[objidx].Shadow = freeObjs[objidx].Shadow.add(info, fieldObjs, obj.Name(), stack)
+
+					freeRefs = append(freeRefs, freeRef{
+						Offset: int(n.Pos() - decl.Pos()),
+						Object: objidx,
+					})
+				}
+			}
+		}
+		return true
+	}
+	visit(decl, stack)
+
+	// Analyze callee body for "return expr" form,
+	// where expr is f() or <-ch. These forms are
+	// safe to inline as a standalone statement.
+	validForCallStmt := false
+	if len(decl.Body.List) != 1 {
+		// not just a return statement
+	} else if ret, ok := decl.Body.List[0].(*ast.ReturnStmt); ok && len(ret.Results) == 1 {
+		validForCallStmt = func() bool {
+			switch expr := ast.Unparen(ret.Results[0]).(type) {
+			case *ast.CallExpr: // f(x)
+				callee := typeutil.Callee(info, expr)
+				if callee == nil {
+					return false // conversion T(x)
+				}
+
+				// The only non-void built-in functions that may be
+				// called as a statement are copy and recover
+				// (though arguably a call to recover should never
+				// be inlined as that changes its behavior).
+				if builtin, ok := callee.(*types.Builtin); ok {
+					return builtin.Name() == "copy" ||
+						builtin.Name() == "recover"
+				}
+
+				return true // ordinary call f()
+
+			case *ast.UnaryExpr: // <-x
+				return expr.Op == token.ARROW // channel receive <-ch
+			}
+
+			// No other expressions are valid statements.
+			return false
+		}()
+	}
+
+	// Record information about control flow in the callee
+	// (but not any nested functions).
+	var (
+		hasDefer      = false
+		hasBareReturn = false
+		returnInfo    [][]returnOperandFlags
+		labels        []string
+	)
+	ast.Inspect(decl.Body, func(n ast.Node) bool {
+		switch n := n.(type) {
+		case *ast.FuncLit:
+			return false // prune traversal
+		case *ast.DeferStmt:
+			hasDefer = true
+		case *ast.LabeledStmt:
+			labels = append(labels, n.Label.Name)
+		case *ast.ReturnStmt:
+
+			// Are implicit assignment conversions
+			// to result variables all trivial?
+			var resultInfo []returnOperandFlags
+			if len(n.Results) > 0 {
+				argInfo := func(i int) (ast.Expr, types.Type) {
+					expr := n.Results[i]
+					return expr, info.TypeOf(expr)
+				}
+				if len(n.Results) == 1 && sig.Results().Len() > 1 {
+					// Spread return: return f() where f.Results > 1.
+					tuple := info.TypeOf(n.Results[0]).(*types.Tuple)
+					argInfo = func(i int) (ast.Expr, types.Type) {
+						return nil, tuple.At(i).Type()
+					}
+				}
+				for i := range sig.Results().Len() {
+					expr, typ := argInfo(i)
+					var flags returnOperandFlags
+					if typ == types.Typ[types.UntypedNil] { // untyped nil is preserved by go/types
+						flags |= untypedNilResult
+					}
+					if !trivialConversion(info.Types[expr].Value, typ, sig.Results().At(i).Type()) {
+						flags |= nonTrivialResult
+					}
+					resultInfo = append(resultInfo, flags)
+				}
+			} else if sig.Results().Len() > 0 {
+				hasBareReturn = true
+			}
+			returnInfo = append(returnInfo, resultInfo)
+		}
+		return true
+	})
+
+	// Reject attempts to inline cgo-generated functions.
+	for _, obj := range freeObjs {
+		// There are others (iconst fconst sconst fpvar macro)
+		// but this is probably sufficient.
+		if strings.HasPrefix(obj.Name, "_Cfunc_") ||
+			strings.HasPrefix(obj.Name, "_Ctype_") ||
+			strings.HasPrefix(obj.Name, "_Cvar_") {
+			return nil, fmt.Errorf("cannot inline cgo-generated functions")
+		}
+	}
+
+	// Compact content to just the FuncDecl.
+	//
+	// As a space optimization, we don't retain the complete
+	// callee file content; all we need is "package _; func f() { ... }".
+	// This reduces the size of analysis facts.
+	//
+	// Offsets in the callee information are "relocatable"
+	// since they are all relative to the FuncDecl.
+
+	content = append([]byte("package _\n"),
+		content[offsetOf(fset, decl.Pos()):offsetOf(fset, decl.End())]...)
+	// Sanity check: re-parse the compacted content.
+	if _, _, err := parseCompact(content); err != nil {
+		return nil, err
+	}
+
+	params, results, effects, falcon := analyzeParams(logf, fset, info, decl)
+	tparams := analyzeTypeParams(logf, fset, info, decl)
+	return &Callee{gobCallee{
+		Content:          content,
+		PkgPath:          pkg.Path(),
+		Name:             name,
+		Unexported:       unexported,
+		FreeObjs:         freeObjs,
+		FreeRefs:         freeRefs,
+		ValidForCallStmt: validForCallStmt,
+		NumResults:       sig.Results().Len(),
+		Params:           params,
+		TypeParams:       tparams,
+		Results:          results,
+		Effects:          effects,
+		HasDefer:         hasDefer,
+		HasBareReturn:    hasBareReturn,
+		Returns:          returnInfo,
+		Labels:           labels,
+		Falcon:           falcon,
+	}}, nil
+}
+
+// parseCompact parses a Go source file of the form "package _\n func f() { ... }"
+// and returns the sole function declaration.
+// (Decls[0] is necessarily the function, since the content produced by
+// AnalyzeCallee contains exactly one declaration after the package clause.)
+func parseCompact(content []byte) (*token.FileSet, *ast.FuncDecl, error) {
+	fset := token.NewFileSet()
+	const mode = parser.ParseComments | parser.SkipObjectResolution | parser.AllErrors
+	f, err := parser.ParseFile(fset, "callee.go", content, mode)
+	if err != nil {
+		return nil, nil, fmt.Errorf("internal error: cannot compact file: %v", err)
+	}
+	return fset, f.Decls[0].(*ast.FuncDecl), nil
+}
+
+// A paramInfo records information about a callee receiver, parameter, or result variable.
+// Gob-serializable (it is reachable from gobCallee.Params et al).
+type paramInfo struct {
+	Name        string    // parameter name (may be blank, or even "")
+	Index       int       // index within signature
+	IsResult    bool      // false for receiver or parameter, true for result variable
+	IsInterface bool      // parameter has a (non-type parameter) interface type
+	Assigned    bool      // parameter appears on left side of an assignment statement
+	Escapes     bool      // parameter has its address taken
+	Refs        []refInfo // information about references to parameter within body
+	Shadow      shadowMap // shadowing info for the above refs; see [shadowMap]
+	FalconType  string    // name of this parameter's type (if basic) in the falcon system
+}
+
+// refInfo records information about a single reference to a parameter
+// within the callee body. Gob-serializable (via paramInfo.Refs).
+type refInfo struct {
+	Offset           int  // FuncDecl-relative byte offset of parameter ref within body
+	Assignable       bool // ref appears in context of assignment to known type
+	IfaceAssignment  bool // ref is being assigned to an interface
+	AffectsInference bool // ref type may affect type inference
+	// IsSelectionOperand indicates whether the parameter reference is the
+	// operand of a selection (param.f). If so, and param's argument is itself
+	// a receiver parameter (a common case), we don't need to desugar (&v or *ptr)
+	// the selection: if param.Method is a valid selection, then so is param.fieldOrMethod.
+	IsSelectionOperand bool
+}
+
+// analyzeParams computes information about parameters of the function declared by decl,
+// including a simple "address taken" escape analysis.
+//
+// It returns two new arrays, one of the receiver and parameters, and
+// the other of the result variables of the function, along with the
+// order in which parameters are strictly evaluated (see calleefx) and
+// the falcon constraint system.
+//
+// The input must be well-typed.
+func analyzeParams(logf func(string, ...any), fset *token.FileSet, info *types.Info, decl *ast.FuncDecl) (params, results []*paramInfo, effects []int, _ falconResult) {
+	sig := signature(fset, info, decl)
+
+	paramInfos := make(map[*types.Var]*paramInfo)
+	{
+		newParamInfo := func(param *types.Var, isResult bool) *paramInfo {
+			// (was garbled as "¶mInfo" by an encoding error: &para -> ¶)
+			info := &paramInfo{
+				Name:        param.Name(),
+				IsResult:    isResult,
+				Index:       len(paramInfos),
+				IsInterface: isNonTypeParamInterface(param.Type()),
+			}
+			paramInfos[param] = info
+			return info
+		}
+		if sig.Recv() != nil {
+			params = append(params, newParamInfo(sig.Recv(), false))
+		}
+		for i := 0; i < sig.Params().Len(); i++ {
+			params = append(params, newParamInfo(sig.Params().At(i), false))
+		}
+		for i := 0; i < sig.Results().Len(); i++ {
+			results = append(results, newParamInfo(sig.Results().At(i), true))
+		}
+	}
+
+	// Search function body for operations &x, x.f(), and x = y
+	// where x is a parameter, and record it.
+	escape(info, decl, func(v *types.Var, escapes bool) {
+		if info := paramInfos[v]; info != nil {
+			if escapes {
+				info.Escapes = true
+			} else {
+				info.Assigned = true
+			}
+		}
+	})
+
+	// Record locations of all references to parameters.
+	// And record the set of intervening definitions for each parameter.
+	//
+	// TODO(adonovan): combine this traversal with the one that computes
+	// FreeRefs. The tricky part is that calleefx needs this one first.
+	fieldObjs := fieldObjs(sig)
+	var stack []ast.Node
+	stack = append(stack, decl.Type) // for scope of function itself
+	astutil.PreorderStack(decl.Body, stack, func(n ast.Node, stack []ast.Node) bool {
+		if id, ok := n.(*ast.Ident); ok {
+			if v, ok := info.Uses[id].(*types.Var); ok {
+				if pinfo, ok := paramInfos[v]; ok {
+					// Record ref information, and any intervening (shadowing) names.
+					//
+					// If the parameter v has an interface type, and the reference id
+					// appears in a context where assignability rules apply, there may be
+					// an implicit interface-to-interface widening. In that case it is
+					// not necessary to insert an explicit conversion from the argument
+					// to the parameter's type.
+					//
+					// Contrapositively, if param is not an interface type, then the
+					// assignment may lose type information, for example in the case that
+					// the substituted expression is an untyped constant or unnamed type.
+					stack = append(stack, n) // (the two calls below want n)
+					assignable, ifaceAssign, affectsInference := analyzeAssignment(info, stack)
+					ref := refInfo{
+						Offset:             int(n.Pos() - decl.Pos()),
+						Assignable:         assignable,
+						IfaceAssignment:    ifaceAssign,
+						AffectsInference:   affectsInference,
+						IsSelectionOperand: isSelectionOperand(stack),
+					}
+					pinfo.Refs = append(pinfo.Refs, ref)
+					pinfo.Shadow = pinfo.Shadow.add(info, fieldObjs, pinfo.Name, stack)
+				}
+			}
+		}
+		return true
+	})
+
+	// Compute subset and order of parameters that are strictly evaluated.
+	// (Depends on Refs computed above.)
+	effects = calleefx(info, decl.Body, paramInfos)
+	logf("effects list = %v", effects)
+
+	falcon := falcon(logf, fset, paramInfos, info, decl)
+
+	return params, results, effects, falcon
+}
+
+// analyzeTypeParams computes information about the type parameters of the function declared by decl.
+func analyzeTypeParams(_ logger, fset *token.FileSet, info *types.Info, decl *ast.FuncDecl) []*paramInfo {
+ sig := signature(fset, info, decl)
+ paramInfos := make(map[*types.TypeName]*paramInfo)
+ var params []*paramInfo
+ collect := func(tpl *types.TypeParamList) {
+ for i := range tpl.Len() {
+ typeName := tpl.At(i).Obj()
+ info := ¶mInfo{Name: typeName.Name()}
+ params = append(params, info)
+ paramInfos[typeName] = info
+ }
+ }
+ collect(sig.RecvTypeParams())
+ collect(sig.TypeParams())
+
+ // Find references.
+ // We don't care about most of the properties that matter for parameter references:
+ // a type is immutable, cannot have its address taken, and does not undergo conversions.
+ // TODO(jba): can we nevertheless combine this with the traversal in analyzeParams?
+ var stack []ast.Node
+ stack = append(stack, decl.Type) // for scope of function itself
+ astutil.PreorderStack(decl.Body, stack, func(n ast.Node, stack []ast.Node) bool {
+ if id, ok := n.(*ast.Ident); ok {
+ if v, ok := info.Uses[id].(*types.TypeName); ok {
+ if pinfo, ok := paramInfos[v]; ok {
+ ref := refInfo{Offset: int(n.Pos() - decl.Pos())}
+ pinfo.Refs = append(pinfo.Refs, ref)
+ pinfo.Shadow = pinfo.Shadow.add(info, nil, pinfo.Name, stack)
+ }
+ }
+ }
+ return true
+ })
+ return params
+}
+
+func signature(fset *token.FileSet, info *types.Info, decl *ast.FuncDecl) *types.Signature {
+ fnobj, ok := info.Defs[decl.Name]
+ if !ok {
+ panic(fmt.Sprintf("%s: no func object for %q",
+ fset.PositionFor(decl.Name.Pos(), false), decl.Name)) // ill-typed?
+ }
+ return fnobj.Type().(*types.Signature)
+}
+
// -- callee helpers --

// analyzeAssignment looks at the given stack, and analyzes certain
// attributes of the innermost expression.
//
// In all cases we 'fail closed' when we cannot detect (or for simplicity
// choose not to detect) the condition in question, meaning we err on the side
// of the more restrictive rule. This is noted for each result below.
//
//   - assignable reports whether the expression is used in a position where
//     assignability rules apply, such as in an actual assignment, as call
//     argument, or in a send to a channel. Defaults to 'false'. If assignable
//     is false, the other two results are irrelevant.
//   - ifaceAssign reports whether that assignment is to an interface type.
//     This is important as we want to preserve the concrete type in that
//     assignment. Defaults to 'true'. Notably, if the assigned type is a type
//     parameter, we assume that it could have interface type.
//   - affectsInference is (somewhat vaguely) defined as whether or not the
//     type of the operand may affect the type of the surrounding syntax,
//     through type inference. It is infeasible to completely reverse engineer
//     type inference, so we over approximate: if the expression is an argument
//     to a call to a generic function (but not method!) that uses type
//     parameters, assume that unification of that argument may affect the
//     inferred types.
func analyzeAssignment(info *types.Info, stack []ast.Node) (assignable, ifaceAssign, affectsInference bool) {
	// Strip enclosing parens and locate the innermost expression
	// and its parent node; bail out if the leaf is not an expression.
	remaining, parent, expr := exprContext(stack)
	if parent == nil {
		return false, false, false
	}

	// TODO(golang/go#70638): simplify when types.Info records implicit conversions.

	// Types do not need to match for assignment to a variable.
	if assign, ok := parent.(*ast.AssignStmt); ok {
		for i, v := range assign.Rhs {
			if v == expr {
				if i >= len(assign.Lhs) {
					return false, false, false // ill typed
				}
				// Check to see if the assignment is to an interface type.
				//
				// NOTE(review): after the guard above, i < len(assign.Lhs)
				// always holds, so the "Default" return below is unreachable.
				if i < len(assign.Lhs) {
					// TODO: We could handle spread calls here, but in current usage expr
					// is an ident.
					if id, _ := assign.Lhs[i].(*ast.Ident); id != nil && info.Defs[id] != nil {
						// Types must match for a defining identifier in a short variable
						// declaration.
						return false, false, false
					}
					// In all other cases, types should be known.
					typ := info.TypeOf(assign.Lhs[i])
					return true, typ == nil || types.IsInterface(typ), false
				}
				// Default:
				return assign.Tok == token.ASSIGN, true, false
			}
		}
	}

	// Types do not need to match for an initializer with known type.
	if spec, ok := parent.(*ast.ValueSpec); ok && spec.Type != nil {
		if slices.Contains(spec.Values, expr) {
			typ := info.TypeOf(spec.Type)
			return true, typ == nil || types.IsInterface(typ), false
		}
	}

	// Types do not need to match for index expressions.
	if ix, ok := parent.(*ast.IndexExpr); ok {
		if ix.Index == expr {
			typ := info.TypeOf(ix.X)
			if typ == nil {
				return true, true, false
			}
			m, _ := typeparams.CoreType(typ).(*types.Map)
			return true, m == nil || types.IsInterface(m.Key()), false
		}
	}

	// Types do not need to match for composite literal keys, values, or
	// fields.
	if kv, ok := parent.(*ast.KeyValueExpr); ok {
		var under types.Type
		if len(remaining) > 0 {
			if complit, ok := remaining[len(remaining)-1].(*ast.CompositeLit); ok {
				if typ := info.TypeOf(complit); typ != nil {
					// Unpointer to allow for pointers to slices or arrays, which are
					// permitted as the types of nested composite literals without a type
					// name.
					under = typesinternal.Unpointer(typeparams.CoreType(typ))
				}
			}
		}
		if kv.Key == expr { // M{expr: ...}: assign to map key
			m, _ := under.(*types.Map)
			return true, m == nil || types.IsInterface(m.Key()), false
		}
		if kv.Value == expr {
			switch under := under.(type) {
			case interface{ Elem() types.Type }: // T{...: expr}: assign to map/array/slice element
				return true, types.IsInterface(under.Elem()), false
			case *types.Struct: // Struct{k: expr}
				if id, _ := kv.Key.(*ast.Ident); id != nil {
					for fi := range under.NumFields() {
						field := under.Field(fi)
						if info.Uses[id] == field {
							return true, types.IsInterface(field.Type()), false
						}
					}
				}
			default:
				return true, true, false
			}
		}
	}
	if lit, ok := parent.(*ast.CompositeLit); ok {
		for i, v := range lit.Elts {
			if v == expr {
				typ := info.TypeOf(lit)
				if typ == nil {
					return true, true, false
				}
				// As in the KeyValueExpr case above, unpointer to handle pointers to
				// array/slice literals.
				under := typesinternal.Unpointer(typeparams.CoreType(typ))
				switch under := under.(type) {
				case interface{ Elem() types.Type }: // T{expr}: assign to map/array/slice element
					return true, types.IsInterface(under.Elem()), false
				case *types.Struct: // Struct{expr}: assign to unkeyed struct field
					if i < under.NumFields() {
						return true, types.IsInterface(under.Field(i).Type()), false
					}
				}
				return true, true, false
			}
		}
	}

	// Types do not need to match for values sent to a channel.
	if send, ok := parent.(*ast.SendStmt); ok {
		if send.Value == expr {
			typ := info.TypeOf(send.Chan)
			if typ == nil {
				return true, true, false
			}
			ch, _ := typeparams.CoreType(typ).(*types.Chan)
			return true, ch == nil || types.IsInterface(ch.Elem()), false
		}
	}

	// Types do not need to match for an argument to a call, unless the
	// corresponding parameter has type parameters, as in that case the
	// argument type may affect inference.
	if call, ok := parent.(*ast.CallExpr); ok {
		if _, ok := isConversion(info, call); ok {
			return false, false, false // redundant conversions are handled at the call site
		}
		// Ordinary call. Could be a call of a func, builtin, or function value.
		for i, arg := range call.Args {
			if arg == expr {
				typ := info.TypeOf(call.Fun)
				if typ == nil {
					return true, true, false
				}
				sig, _ := typeparams.CoreType(typ).(*types.Signature)
				if sig != nil {
					// Find the relevant parameter type, accounting for variadics.
					paramType := paramTypeAtIndex(sig, call, i)
					ifaceAssign := paramType == nil || types.IsInterface(paramType)
					affectsInference := false
					if fn := typeutil.StaticCallee(info, call); fn != nil {
						if sig2 := fn.Type().(*types.Signature); sig2.Recv() == nil {
							originParamType := paramTypeAtIndex(sig2, call, i)
							affectsInference = originParamType == nil || new(typeparams.Free).Has(originParamType)
						}
					}
					return true, ifaceAssign, affectsInference
				}
			}
		}
	}

	return false, false, false
}
+
+// paramTypeAtIndex returns the effective parameter type at the given argument
+// index in call, if valid.
+func paramTypeAtIndex(sig *types.Signature, call *ast.CallExpr, index int) types.Type {
+ if plen := sig.Params().Len(); sig.Variadic() && index >= plen-1 && !call.Ellipsis.IsValid() {
+ if s, ok := sig.Params().At(plen - 1).Type().(*types.Slice); ok {
+ return s.Elem()
+ }
+ } else if index < plen {
+ return sig.Params().At(index).Type()
+ }
+ return nil // ill typed
+}
+
+// exprContext returns the innermost parent->child expression nodes for the
+// given outer-to-inner stack, after stripping parentheses, along with the
+// remaining stack up to the parent node.
+//
+// If no such context exists, returns (nil, nil, nil).
+func exprContext(stack []ast.Node) (remaining []ast.Node, parent ast.Node, expr ast.Expr) {
+ expr, _ = stack[len(stack)-1].(ast.Expr)
+ if expr == nil {
+ return nil, nil, nil
+ }
+ i := len(stack) - 2
+ for ; i >= 0; i-- {
+ if pexpr, ok := stack[i].(*ast.ParenExpr); ok {
+ expr = pexpr
+ } else {
+ parent = stack[i]
+ break
+ }
+ }
+ if parent == nil {
+ return nil, nil, nil
+ }
+ // inv: i is the index of parent in the stack.
+ return stack[:i], parent, expr
+}
+
+// isSelectionOperand reports whether the innermost node of stack is operand
+// (x) of a selection x.f.
+func isSelectionOperand(stack []ast.Node) bool {
+ _, parent, expr := exprContext(stack)
+ if parent == nil {
+ return false
+ }
+ sel, ok := parent.(*ast.SelectorExpr)
+ return ok && sel.X == expr
+}
+
+// A shadowMap records information about shadowing at any of the parameter's
+// references within the callee decl.
+//
+// For each name shadowed at a reference to the parameter within the callee
+// body, shadow map records the 1-based index of the callee decl parameter
+// causing the shadowing, or -1, if the shadowing is not due to a callee decl.
+// A value of zero (or missing) indicates no shadowing. By convention,
+// self-shadowing is excluded from the map.
+//
+// For example, in the following callee
+//
+// func f(a, b int) int {
+// c := 2 + b
+// return a + c
+// }
+//
+// the shadow map of a is {b: 2, c: -1}, because b is shadowed by the 2nd
+// parameter. The shadow map of b is {a: 1}, because c is not shadowed at the
+// use of b.
+type shadowMap map[string]int
+
+// add returns the [shadowMap] augmented by the set of names
+// locally shadowed at the location of the reference in the callee
+// (identified by the stack). The name of the reference itself is
+// excluded.
+//
+// These shadowed names may not be used in a replacement expression
+// for the reference.
+func (s shadowMap) add(info *types.Info, paramIndexes map[types.Object]int, exclude string, stack []ast.Node) shadowMap {
+ for _, n := range stack {
+ if scope := scopeFor(info, n); scope != nil {
+ for _, name := range scope.Names() {
+ if name != exclude {
+ if s == nil {
+ s = make(shadowMap)
+ }
+ obj := scope.Lookup(name)
+ if idx, ok := paramIndexes[obj]; ok {
+ s[name] = idx + 1
+ } else {
+ s[name] = -1
+ }
+ }
+ }
+ }
+ }
+ return s
+}
+
+// fieldObjs returns a map of each types.Object defined by the given signature
+// to its index in the parameter list. Parameters with missing or blank name
+// are skipped.
+func fieldObjs(sig *types.Signature) map[types.Object]int {
+ m := make(map[types.Object]int)
+ for i := range sig.Params().Len() {
+ if p := sig.Params().At(i); p.Name() != "" && p.Name() != "_" {
+ m[p] = i
+ }
+ }
+ return m
+}
+
+func isField(obj types.Object) bool {
+ if v, ok := obj.(*types.Var); ok && v.IsField() {
+ return true
+ }
+ return false
+}
+
+func isMethod(obj types.Object) bool {
+ if f, ok := obj.(*types.Func); ok && f.Type().(*types.Signature).Recv() != nil {
+ return true
+ }
+ return false
+}
+
// -- serialization --

// Compile-time assertions that Callee implements custom gob encoding,
// so that it round-trips through the analysis fact/export machinery.
var (
	_ gob.GobEncoder = (*Callee)(nil)
	_ gob.GobDecoder = (*Callee)(nil)
)
+
+func (callee *Callee) GobEncode() ([]byte, error) {
+ var out bytes.Buffer
+ if err := gob.NewEncoder(&out).Encode(callee.impl); err != nil {
+ return nil, err
+ }
+ return out.Bytes(), nil
+}
+
+func (callee *Callee) GobDecode(data []byte) error {
+ return gob.NewDecoder(bytes.NewReader(data)).Decode(&callee.impl)
+}
--- /dev/null
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline
+
+// This file defines the analysis of callee effects.
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "golang.org/x/tools/internal/typesinternal"
+)
+
// Pseudo-indices used in effects lists to denote non-parameter events.
// They are negative so they can never collide with a real parameter index.
const (
	rinf = -1 // R∞: arbitrary read from memory
	winf = -2 // W∞: arbitrary write to memory (or unknown control)
)
+
// calleefx returns a list of parameter indices indicating the order
// in which parameters are first referenced during evaluation of the
// callee, relative both to each other and to other effects of the
// callee (if any), such as arbitrary reads (rinf) and arbitrary
// effects (winf), including unknown control flow. Each parameter
// that is referenced appears once in the list.
//
// For example, the effects list of this function:
//
//	func f(x, y, z int) int {
//		return y + x + g() + z
//	}
//
// is [1 0 -2 2], indicating reads of y and x, followed by the unknown
// effects of the g() call, and finally the read of parameter z. This
// information is used during inlining to ascertain when it is safe
// for parameter references to be replaced by their corresponding
// argument expressions. Such substitutions are permitted only when
// they do not cause "write" operations (those with effects) to
// commute with "read" operations (those that have no effect but are
// not pure). Impure operations may be reordered with other impure
// operations, and pure operations may be reordered arbitrarily.
//
// The analysis ignores the effects of runtime panics, on the
// assumption that well-behaved programs shouldn't encounter them.
func calleefx(info *types.Info, body *ast.BlockStmt, paramInfos map[*types.Var]*paramInfo) []int {
	// This traversal analyzes the callee's statements (in syntax
	// form, though one could do better with SSA) to compute the
	// sequence of events of the following kinds:
	//
	// 1. reads of a parameter variable.
	// 2. reads from other memory.
	// 3. writes to memory.

	var effects []int // indices of parameters, or rinf/winf (-ve)
	// seen records events already appended to effects, so that each
	// parameter index (or pseudo-index) appears at most once.
	seen := make(map[int]bool)
	effect := func(i int) {
		if !seen[i] {
			seen[i] = true
			effects = append(effects, i)
		}
	}

	// unknown is called for statements of unknown effects (or control).
	unknown := func() {
		effect(winf)

		// Ensure that all remaining parameters are "seen"
		// after we go into the unknown (unless they are
		// unreferenced by the function body). This lets us
		// not bother implementing the complete traversal into
		// control structures.
		//
		// TODO(adonovan): add them in a deterministic order.
		// (This is not a bug but determinism is good.)
		for _, pinfo := range paramInfos {
			if !pinfo.IsResult && len(pinfo.Refs) > 0 {
				effect(pinfo.Index)
			}
		}
	}

	var visitExpr func(n ast.Expr)
	var visitStmt func(n ast.Stmt) bool
	visitExpr = func(n ast.Expr) {
		switch n := n.(type) {
		case *ast.Ident:
			if v, ok := info.Uses[n].(*types.Var); ok && !v.IsField() {
				// Use of global?
				if v.Parent() == v.Pkg().Scope() {
					effect(rinf) // read global var
				}

				// Use of parameter?
				if pinfo, ok := paramInfos[v]; ok && !pinfo.IsResult {
					effect(pinfo.Index) // read parameter var
				}

				// Use of local variables is ok.
			}

		case *ast.BasicLit:
			// no effect

		case *ast.FuncLit:
			// A func literal has no read or write effect
			// until called, and (most) function calls are
			// considered to have arbitrary effects.
			// So, no effect.

		case *ast.CompositeLit:
			for _, elt := range n.Elts {
				visitExpr(elt) // note: visits KeyValueExpr
			}

		case *ast.ParenExpr:
			visitExpr(n.X)

		case *ast.SelectorExpr:
			if seln, ok := info.Selections[n]; ok {
				visitExpr(n.X)

				// See types.SelectionKind for background.
				switch seln.Kind() {
				case types.MethodExpr:
					// A method expression T.f acts like a
					// reference to a func decl,
					// so it doesn't read x until called.

				case types.MethodVal, types.FieldVal:
					// A field or method value selection x.f
					// reads x if the selection indirects a pointer.

					if indirectSelection(seln) {
						effect(rinf)
					}
				}
			} else {
				// qualified identifier: treat like unqualified
				visitExpr(n.Sel)
			}

		case *ast.IndexExpr:
			if tv := info.Types[n.Index]; tv.IsType() {
				// no effect (G[T] instantiation)
			} else {
				visitExpr(n.X)
				visitExpr(n.Index)
				switch tv.Type.Underlying().(type) {
				case *types.Slice, *types.Pointer: // []T, *[n]T (not string, [n]T)
					effect(rinf) // indirect read of slice/array element
				}
			}

		case *ast.IndexListExpr:
			// no effect (M[K,V] instantiation)

		case *ast.SliceExpr:
			visitExpr(n.X)
			visitExpr(n.Low)
			visitExpr(n.High)
			visitExpr(n.Max)

		case *ast.TypeAssertExpr:
			visitExpr(n.X)

		case *ast.CallExpr:
			if info.Types[n.Fun].IsType() {
				// conversion T(x)
				visitExpr(n.Args[0])
			} else {
				// call f(args)
				visitExpr(n.Fun)
				for i, arg := range n.Args {
					if i == 0 && info.Types[arg].IsType() {
						continue // new(T), make(T, n)
					}
					visitExpr(arg)
				}

				// The pure built-ins have no effects beyond
				// those of their operands (not even memory reads).
				// All other calls have unknown effects.
				if !typesinternal.CallsPureBuiltin(info, n) {
					unknown() // arbitrary effects
				}
			}

		case *ast.StarExpr:
			visitExpr(n.X)
			effect(rinf) // *ptr load or store depends on state of heap

		case *ast.UnaryExpr: // + - ! ^ & ~ <-
			visitExpr(n.X)
			if n.Op == token.ARROW {
				unknown() // effect: channel receive
			}

		case *ast.BinaryExpr:
			visitExpr(n.X)
			visitExpr(n.Y)

		case *ast.KeyValueExpr:
			visitExpr(n.Key) // may be a struct field
			visitExpr(n.Value)

		case *ast.BadExpr:
			// no effect

		case nil:
			// optional subtree

		default:
			// type syntax: unreachable given traversal
			panic(n)
		}
	}

	// visitStmt's result indicates the continuation:
	// false for return, true for the next statement.
	//
	// We could treat return as an unknown, but this way
	// yields definite effects for simple sequences like
	// {S1; S2; return}, so unreferenced parameters are
	// not spuriously added to the effects list, and thus
	// not spuriously disqualified from elimination.
	visitStmt = func(n ast.Stmt) bool {
		switch n := n.(type) {
		case *ast.DeclStmt:
			decl := n.Decl.(*ast.GenDecl)
			for _, spec := range decl.Specs {
				switch spec := spec.(type) {
				case *ast.ValueSpec:
					for _, v := range spec.Values {
						visitExpr(v)
					}

				case *ast.TypeSpec:
					// no effect
				}
			}

		case *ast.LabeledStmt:
			return visitStmt(n.Stmt)

		case *ast.ExprStmt:
			visitExpr(n.X)

		case *ast.SendStmt:
			visitExpr(n.Chan)
			visitExpr(n.Value)
			unknown() // effect: channel send

		case *ast.IncDecStmt:
			visitExpr(n.X)
			unknown() // effect: variable increment

		case *ast.AssignStmt:
			// RHS effects precede LHS write effects,
			// but all operands are read first.
			for _, lhs := range n.Lhs {
				visitExpr(lhs)
			}
			for _, rhs := range n.Rhs {
				visitExpr(rhs)
			}
			for _, lhs := range n.Lhs {
				id, _ := lhs.(*ast.Ident)
				if id != nil && id.Name == "_" {
					continue // blank assign has no effect
				}
				if n.Tok == token.DEFINE && id != nil && info.Defs[id] != nil {
					continue // new var declared by := has no effect
				}
				unknown() // assignment to existing var
				break
			}

		case *ast.GoStmt:
			visitExpr(n.Call.Fun)
			for _, arg := range n.Call.Args {
				visitExpr(arg)
			}
			unknown() // effect: create goroutine

		case *ast.DeferStmt:
			visitExpr(n.Call.Fun)
			for _, arg := range n.Call.Args {
				visitExpr(arg)
			}
			unknown() // effect: push defer

		case *ast.ReturnStmt:
			for _, res := range n.Results {
				visitExpr(res)
			}
			return false

		case *ast.BlockStmt:
			for _, stmt := range n.List {
				if !visitStmt(stmt) {
					return false
				}
			}

		case *ast.BranchStmt:
			unknown() // control flow

		case *ast.IfStmt:
			visitStmt(n.Init)
			visitExpr(n.Cond)
			unknown() // control flow

		case *ast.SwitchStmt:
			visitStmt(n.Init)
			visitExpr(n.Tag)
			unknown() // control flow

		case *ast.TypeSwitchStmt:
			visitStmt(n.Init)
			visitStmt(n.Assign)
			unknown() // control flow

		case *ast.SelectStmt:
			unknown() // control flow

		case *ast.ForStmt:
			visitStmt(n.Init)
			visitExpr(n.Cond)
			unknown() // control flow

		case *ast.RangeStmt:
			visitExpr(n.X)
			unknown() // control flow

		case *ast.EmptyStmt, *ast.BadStmt:
			// no effect

		case nil:
			// optional subtree

		default:
			panic(n)
		}
		return true
	}
	visitStmt(body)

	return effects
}
--- /dev/null
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package inline implements inlining of Go function calls.
+
+The client provides information about the caller and callee,
+including the source text, syntax tree, and type information, and
+the inliner returns the modified source file for the caller, or an
+error if the inlining operation is invalid (for example because the
+function body refers to names that are inaccessible to the caller).
+
+Although this interface demands more information from the client
+than might seem necessary, it enables smoother integration with
+existing batch and interactive tools that have their own ways of
+managing the processes of reading, parsing, and type-checking
+packages. In particular, this package does not assume that the
+caller and callee belong to the same token.FileSet or
+types.Importer realms.
+
+There are many aspects to a function call. It is the only construct
+that can simultaneously bind multiple variables of different
+explicit types, with implicit assignment conversions. (Neither var
+nor := declarations can do that.) It defines the scope of control
+labels, of return statements, and of defer statements. Arguments
+and results of function calls may be tuples even though tuples are
+not first-class values in Go, and a tuple-valued call expression
+may be "spread" across the argument list of a call or the operands
+of a return statement. All these unique features mean that in the
+general case, not everything that can be expressed by a function
+call can be expressed without one.
+
+So, in general, inlining consists of modifying a function or method
+call expression f(a1, ..., an) so that the name of the function f
+is replaced ("literalized") by a literal copy of the function
+declaration, with free identifiers suitably modified to use the
+locally appropriate identifiers or perhaps constant argument
+values.
+
+Inlining must not change the semantics of the call. Semantics
+preservation is crucial for clients such as codebase maintenance
+tools that automatically inline all calls to designated functions
+on a large scale. Such tools must not introduce subtle behavior
+changes. (Fully inlining a call is dynamically observable using
+reflection over the call stack, but this exception to the rule is
+explicitly allowed.)
+
+In many cases it is possible to entirely replace ("reduce") the
+call by a copy of the function's body in which parameters have been
+replaced by arguments. The inliner supports a number of reduction
+strategies, and we expect this set to grow. Nonetheless, sound
+reduction is surprisingly tricky.
+
+The inliner is in some ways like an optimizing compiler. A compiler
+is considered correct if it doesn't change the meaning of the
+program in translation from source language to target language. An
+optimizing compiler exploits the particulars of the input to
+generate better code, where "better" usually means more efficient.
+When a case is found in which it emits suboptimal code, the
+compiler is improved to recognize more cases, or more rules, and
+more exceptions to rules; this process has no end. Inlining is
+similar except that "better" code means tidier code. The baseline
+translation (literalization) is correct, but there are endless
+rules--and exceptions to rules--by which the output can be
+improved.
+
+The following section lists some of the challenges, and ways in
+which they can be addressed.
+
+ - All effects of the call argument expressions must be preserved,
+ both in their number (they must not be eliminated or repeated),
+ and in their order (both with respect to other arguments, and any
+ effects in the callee function).
+
+ This must be the case even if the corresponding parameters are
+ never referenced, are referenced multiple times, referenced in
+ a different order from the arguments, or referenced within a
+ nested function that may be executed an arbitrary number of
+ times.
+
+ Currently, parameter replacement is not applied to arguments
+ with effects, but with further analysis of the sequence of
+ strict effects within the callee we could relax this constraint.
+
+ - When not all parameters can be substituted by their arguments
+ (e.g. due to possible effects), if the call appears in a
+ statement context, the inliner may introduce a var declaration
+ that declares the parameter variables (with the correct types)
+ and assigns them to their corresponding argument values.
+ The rest of the function body may then follow.
+ For example, the call
+
+ f(1, 2)
+
+ to the function
+
+ func f(x, y int32) { stmts }
+
+ may be reduced to
+
+ { var x, y int32 = 1, 2; stmts }.
+
+ There are many reasons why this is not always possible. For
+ example, true parameters are statically resolved in the same
+ scope, and are dynamically assigned their arguments in
+ parallel; but each spec in a var declaration is statically
+ resolved in sequence and dynamically executed in sequence, so
+ earlier parameters may shadow references in later ones.
+
+ - Even an argument expression as simple as ptr.x may not be
+ referentially transparent, because another argument may have the
+ effect of changing the value of ptr.
+
+ This constraint could be relaxed by some kind of alias or
+ escape analysis that proves that ptr cannot be mutated during
+ the call.
+
+ - Although constants are referentially transparent, as a matter of
+ style we do not wish to duplicate literals that are referenced
+ multiple times in the body because this undoes proper factoring.
+ Also, string literals may be arbitrarily large.
+
+ - If the function body consists of statements other than just
+ "return expr", in some contexts it may be syntactically
+ impossible to reduce the call. Consider:
+
+ if x := f(); cond { ... }
+
+ Go has no equivalent to Lisp's progn or Rust's blocks,
+ nor ML's let expressions (let param = arg in body);
+ its closest equivalent is func(param){body}(arg).
+ Reduction strategies must therefore consider the syntactic
+ context of the call.
+
+ In such situations we could work harder to extract a statement
+ context for the call, by transforming it to:
+
+ { x := f(); if cond { ... } }
+
+ - Similarly, without the equivalent of Rust-style blocks and
+ first-class tuples, there is no general way to reduce a call
+ to a function such as
+
+ func(params)(args)(results) { stmts; return expr }
+
+ to an expression such as
+
+ { var params = args; stmts; expr }
+
+ or even a statement such as
+
+ results = { var params = args; stmts; expr }
+
+ Consequently the declaration and scope of the result variables,
+ and the assignment and control-flow implications of the return
+ statement, must be dealt with by cases.
+
+ - A standalone call statement that calls a function whose body is
+ "return expr" cannot be simply replaced by the body expression
+ if it is not itself a call or channel receive expression; it is
+ necessary to explicitly discard the result using "_ = expr".
+
+ Similarly, if the body is a call expression, only calls to some
+ built-in functions with no result (such as copy or panic) are
+ permitted as statements, whereas others (such as append) return
+ a result that must be used, even if just by discarding.
+
+ - If a parameter or result variable is updated by an assignment
+ within the function body, it cannot always be safely replaced
+ by a variable in the caller. For example, given
+
+ func f(a int) int { a++; return a }
+
+ The call y = f(x) cannot be replaced by { x++; y = x } because
+ this would change the value of the caller's variable x.
+ Only if the caller is finished with x is this safe.
+
+ A similar argument applies to parameter or result variables
+ that escape: by eliminating a variable, inlining would change
+ the identity of the variable that escapes.
+
+ - If the function body uses 'defer' and the inlined call is not a
+ tail-call, inlining may delay the deferred effects.
+
+ - Because the scope of a control label is the entire function, a
+ call cannot be reduced if the caller and callee have intersecting
+ sets of control labels. (It is possible to α-rename any
+ conflicting ones, but our colleagues building C++ refactoring
+ tools report that, when tools must choose new identifiers, they
+ generally do a poor job.)
+
+ - Given
+
+ func f() uint8 { return 0 }
+
+ var x any = f()
+
+ reducing the call to var x any = 0 is unsound because it
+ discards the implicit conversion to uint8. We may need to make
+ each argument-to-parameter conversion explicit if the types
+ differ. Assignments to variadic parameters may need to
+ explicitly construct a slice.
+
+ An analogous problem applies to the implicit assignments in
+ return statements:
+
+ func g() any { return f() }
+
+ Replacing the call f() with 0 would silently lose a
+ conversion to uint8 and change the behavior of the program.
+
+ - When inlining a call f(1, x, g()) where those parameters are
+ unreferenced, we should be able to avoid evaluating 1 and x
+ since they are pure and thus have no effect. But x may be the
+ last reference to a local variable in the caller, so removing
+ it would cause a compilation error. Parameter substitution must
+ avoid making the caller's local variables unreferenced (or must
+ be prepared to eliminate the declaration too---this is where an
+ iterative framework for simplification would really help).
+
+ - An expression such as s[i] may be valid if s and i are
+ variables but invalid if either or both of them are constants.
+ For example, a negative constant index s[-1] is always out of
+ bounds, and even a non-negative constant index may be out of
+ bounds depending on the particular string constant (e.g.
+ "abc"[4]).
+
+ So, if a parameter participates in any expression that is
+ subject to additional compile-time checks when its operands are
+ constant, it may be unsafe to substitute that parameter by a
+ constant argument value (#62664).
+
+More complex callee functions are inlinable with more elaborate and
+invasive changes to the statements surrounding the call expression.
+
+TODO(adonovan): future work:
+
+ - Handle more of the above special cases by careful analysis,
+ thoughtful factoring of the large design space, and thorough
+ test coverage.
+
+ - Compute precisely (not conservatively) when parameter
+ substitution would remove the last reference to a caller local
+ variable, and blank out the local instead of retreating from
+ the substitution.
+
+ - Afford the client more control such as a limit on the total
+ increase in line count, or a refusal to inline using the
+ general approach (replacing name by function literal). This
+ could be achieved by returning metadata alongside the result
+ and having the client conditionally discard the change.
+
+ - Support inlining of generic functions, replacing type parameters
+ by their instantiations.
+
+ - Support inlining of calls to function literals ("closures").
+ But note that the existing algorithm makes widespread assumptions
+ that the callee is a package-level function or method.
+
+ - Eliminate explicit conversions of "untyped" literals inserted
+ conservatively when they are redundant. For example, the
+ conversion int32(1) is redundant when this value is used only as a
+ slice index; but it may be crucial if it is used in x := int32(1)
+ as it changes the type of x, which may have further implications.
+ The conversions may also be important to the falcon analysis.
+
+ - Allow non-'go' build systems such as Bazel/Blaze a chance to
+ decide whether an import is accessible using logic other than
+ "/internal/" path segments. This could be achieved by returning
+ the list of added import paths instead of a text diff.
+
+ - Inlining a function from another module may change the
+ effective version of the Go language spec that governs it. We
+ should probably make the client responsible for rejecting
+ attempts to inline from newer callees to older callers, since
+ there's no way for this package to access module versions.
+
+ - Use an alternative implementation of the import-organizing
+ operation that doesn't require operating on a complete file
+ (and reformatting). Then return the results in a higher-level
+ form as a set of import additions and deletions plus a single
+ diff that encloses the call expression. This interface could
+ perhaps be implemented atop imports.Process by post-processing
+ its result to obtain the abstract import changes and discarding
+ its formatted output.
+*/
+package inline
--- /dev/null
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+)
+
+// escape implements a simple "address-taken" escape analysis. It
+// calls f for each local variable that appears on the left side of an
+// assignment (escapes=false) or has its address taken (escapes=true).
+// The initialization of a variable by its declaration does not count
+// as an assignment.
+func escape(info *types.Info, root ast.Node, f func(v *types.Var, escapes bool)) {
+
+ // lvalue is called for each address-taken expression or LHS of assignment.
+ // Supported forms are: x, (x), x[i], x.f, *x, T{}.
+ var lvalue func(e ast.Expr, escapes bool)
+ lvalue = func(e ast.Expr, escapes bool) {
+ switch e := e.(type) {
+ case *ast.Ident:
+ // Report only local variables; package-level variables are skipped.
+ // (f may be called more than once per variable, once per occurrence.)
+ if v, ok := info.Uses[e].(*types.Var); ok {
+ if !isPkgLevel(v) {
+ f(v, escapes)
+ }
+ }
+ case *ast.ParenExpr:
+ lvalue(e.X, escapes)
+ case *ast.IndexExpr:
+ // TODO(adonovan): support generics without assuming e.X has a core type.
+ // Consider:
+ //
+ // func Index[T interface{ [3]int | []int }](t T, i int) *int {
+ // return &t[i]
+ // }
+ //
+ // We must traverse the normal terms and check
+ // whether any of them is an array.
+ //
+ // We assume TypeOf returns non-nil.
+ if _, ok := info.TypeOf(e.X).Underlying().(*types.Array); ok {
+ lvalue(e.X, escapes) // &a[i] on array
+ }
+ case *ast.SelectorExpr:
+ // We assume TypeOf returns non-nil.
+ if _, ok := info.TypeOf(e.X).Underlying().(*types.Struct); ok {
+ lvalue(e.X, escapes) // &s.f on struct
+ }
+ case *ast.StarExpr:
+ // *ptr indirects an existing pointer
+ case *ast.CompositeLit:
+ // &T{...} creates a new variable
+ default:
+ panic(fmt.Sprintf("&x on %T", e)) // unreachable in well-typed code
+ }
+ }
+
+ // Search function body for operations &x, x.f(), x++, and x = y
+ // where x is a parameter. Each of these treats x as an address.
+ ast.Inspect(root, func(n ast.Node) bool {
+ switch n := n.(type) {
+ case *ast.UnaryExpr:
+ if n.Op == token.AND {
+ lvalue(n.X, true) // &x
+ }
+
+ case *ast.CallExpr:
+ // implicit &x in method call x.f(),
+ // where x has type T and method is (*T).f
+ if sel, ok := n.Fun.(*ast.SelectorExpr); ok {
+ if seln, ok := info.Selections[sel]; ok &&
+ seln.Kind() == types.MethodVal &&
+ isPointer(seln.Obj().Type().Underlying().(*types.Signature).Recv().Type()) {
+ tArg, indirect := effectiveReceiver(seln)
+ if !indirect && !isPointer(tArg) {
+ lvalue(sel.X, true) // &x.f
+ }
+ }
+ }
+
+ case *ast.AssignStmt:
+ for _, lhs := range n.Lhs {
+ if id, ok := lhs.(*ast.Ident); ok &&
+ info.Defs[id] != nil &&
+ n.Tok == token.DEFINE {
+ // declaration: doesn't count
+ } else {
+ lvalue(lhs, false)
+ }
+ }
+
+ case *ast.IncDecStmt:
+ // x++ and x-- assign to x.
+ lvalue(n.X, false)
+ }
+ return true
+ })
+}
--- /dev/null
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline
+
+// This file defines the callee side of the "fallible constant" analysis.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/format"
+ "go/token"
+ "go/types"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// falconResult is the result of the analysis of the callee.
+// Types and Constraints are expressed purely in terms of falcon type
+// names and constraint source text, so the result is independent of
+// the callee's type information.
+type falconResult struct {
+ Types []falconType // types for falcon constraint environment
+ Constraints []string // constraints (Go expressions) on values of fallible constants
+}
+
+// A falconType specifies the name and underlying type of a synthetic
+// defined type for use in falcon constraints.
+//
+// Unique types from callee code are bijectively mapped onto falcon
+// types so that constraints are independent of callee type
+// information but preserve type equivalence classes.
+//
+// Fresh names are deliberately obscure to avoid shadowing even if a
+// callee parameter has a name like "int" or "any".
+type falconType struct {
+ // Name is the fresh, collision-resistant identifier
+ // generated by falconState.typename.
+ Name string
+ Kind types.BasicKind // string/number/bool
+}
+
+// falcon identifies "fallible constant" expressions, which are
+// expressions that may fail to compile if one or more of their
+// operands is changed from non-constant to constant.
+//
+// Consider:
+//
+// func sub(s string, i, j int) string { return s[i:j] }
+//
+// If parameters are replaced by constants, the compiler is
+// required to perform these additional checks:
+//
+// - if i is constant, 0 <= i.
+// - if s and i are constant, i <= len(s).
+// - ditto for j.
+// - if i and j are constant, i <= j.
+//
+// s[i:j] is thus a "fallible constant" expression dependent on {s, i,
+// j}. Each falcon creates a set of conditional constraints across one
+// or more parameter variables.
+//
+// - When inlining a call such as sub("abc", -1, 2), the parameter i
+// cannot be eliminated by substitution as its argument value is
+// negative.
+//
+// - When inlining sub("", 2, 1), all three parameters cannot be
+// simultaneously eliminated by substitution without violating i
+// <= len(s) and j <= len(s), but the parameters i and j could be
+// safely eliminated without s.
+//
+// Parameters that cannot be eliminated must remain non-constant,
+// either in the form of a binding declaration:
+//
+// { var i int = -1; return "abc"[i:2] }
+//
+// or a parameter of a literalization:
+//
+// func (i int) string { return "abc"[i:2] }(-1)
+//
+// These example expressions are obviously doomed to fail at run
+// time, but in realistic cases such expressions are dominated by
+// appropriate conditions that make them reachable only when safe:
+//
+// if 0 <= i && i <= j && j <= len(s) { _ = s[i:j] }
+//
+// (In principle a more sophisticated inliner could entirely eliminate
+// such unreachable blocks based on the condition being always-false
+// for the given parameter substitution, but this is tricky to do safely
+// because the type-checker considers only a single configuration.
+// Consider: if runtime.GOOS == "linux" { ... }.)
+//
+// We believe this is an exhaustive list of "fallible constant" operations:
+//
+// - switch z { case x: case y } // duplicate case values
+// - s[i], s[i:j], s[i:j:k] // index out of bounds (0 <= i <= j <= k <= len(s))
+// - T{x: 0} // index out of bounds, duplicate index
+// - x/y, x%y, x/=y, x%=y // integer division by zero; minint/-1 overflow
+// - x+y, x-y, x*y // arithmetic overflow
+// - x<<y // shift out of range
+// - -x // negation of minint
+// - T(x) // value out of range
+//
+// The fundamental reason for this elaborate algorithm is that the
+// "separate analysis" of callee and caller, as required when running
+// in an environment such as unitchecker, means that there is no way
+// for us to simply invoke the type checker on the combination of
+// caller and callee code, as by the time we analyze the caller, we no
+// longer have access to type information for the callee (and, in
+// particular, any of its direct dependencies that are not direct
+// dependencies of the caller). So, in effect, we are forced to map
+// the problem in a neutral (callee-type-independent) constraint
+// system that can be verified later.
+func falcon(logf func(string, ...any), fset *token.FileSet, params map[*types.Var]*paramInfo, info *types.Info, decl *ast.FuncDecl) falconResult {
+
+ st := &falconState{
+ logf: logf,
+ fset: fset,
+ params: params,
+ info: info,
+ decl: decl,
+ }
+
+ // type mapping
+ // Pre-assign falcon type names for int and for each parameter
+ // whose type is a basic constant-able type.
+ st.int = st.typename(types.Typ[types.Int])
+ st.any = "interface{}" // don't use "any" as it may be shadowed
+ for obj, info := range st.params {
+ if isBasic(obj.Type(), types.IsConstType) {
+ info.FalconType = st.typename(obj.Type())
+ }
+ }
+
+ // Traverse the callee body; constraints are emitted as a side effect.
+ st.stmt(st.decl.Body)
+
+ return st.result
+}
+
+// falconState holds the inputs and accumulating output of a single
+// falcon analysis of one callee; see the falcon function.
+type falconState struct {
+ // inputs
+ logf func(string, ...any)
+ fset *token.FileSet
+ params map[*types.Var]*paramInfo
+ info *types.Info
+ decl *ast.FuncDecl
+
+ // working state
+ int string // falcon type name for int
+ any string // spelling used for the empty interface type
+ typenames typeutil.Map // maps types.Type to its falcon name (string)
+
+ result falconResult // accumulated output
+}
+
+// typename returns the name in the falcon constraint system
+// of a given string/number/bool type t. Falcon types are
+// specified directly in go/types data structures rather than
+// by name, avoiding potential shadowing conflicts with
+// confusing parameter names such as "int".
+//
+// Also, each distinct type (as determined by types.Identical)
+// is mapped to a fresh type in the falcon system so that we
+// can map the types in the callee code into a neutral form
+// that does not depend on imports, allowing us to detect
+// potential conflicts such as
+//
+// map[any]{T1(1): 0, T2(1): 0}
+//
+// where T1=T2.
+func (st *falconState) typename(t types.Type) string {
+ name, ok := st.typenames.At(t).(string)
+ if !ok {
+ // First occurrence of t: invent a fresh name, record the
+ // mapping, and declare the type in the falcon environment.
+ basic := t.Underlying().(*types.Basic)
+
+ // That dot ۰ is an Arabic zero numeral U+06F0.
+ // It is very unlikely to appear in a real program.
+ // TODO(adonovan): use a non-heuristic solution.
+ name = fmt.Sprintf("%s۰%d", basic, st.typenames.Len())
+ st.typenames.Set(t, name)
+ st.logf("falcon: emit type %s %s // %q", name, basic, t)
+ st.result.Types = append(st.result.Types, falconType{
+ Name: name,
+ Kind: basic.Kind(),
+ })
+ }
+ return name
+}
+
+// -- constraint emission --
+
+// emit emits a Go expression that must have a legal type.
+// In effect, we let the go/types constant folding algorithm
+// do most of the heavy lifting (though it may be hard to
+// believe from the complexity of this algorithm!).
+func (st *falconState) emit(constraint ast.Expr) {
+ // Constraints are recorded as source text so they can be
+ // type-checked later, independently of this callee's types.
+ var out strings.Builder
+ if err := format.Node(&out, st.fset, constraint); err != nil {
+ panic(err) // can't happen
+ }
+ syntax := out.String()
+ st.logf("falcon: emit constraint %s", syntax)
+ st.result.Constraints = append(st.result.Constraints, syntax)
+}
+
+// emitNonNegative emits an []T{}[index] constraint,
+// which ensures index is non-negative if constant.
+// (Indexing a slice literal by a negative constant fails to
+// compile; a non-constant index imposes no constraint.)
+func (st *falconState) emitNonNegative(index ast.Expr) {
+ st.emit(&ast.IndexExpr{
+ X: &ast.CompositeLit{
+ Type: &ast.ArrayType{
+ Elt: makeIdent(st.int),
+ },
+ },
+ Index: index,
+ })
+}
+
+// emitMonotonic emits an []T{}[i:j] constraint,
+// which ensures i <= j if both are constant.
+// (A slice expression with constant low > high fails to compile.)
+func (st *falconState) emitMonotonic(i, j ast.Expr) {
+ st.emit(&ast.SliceExpr{
+ X: &ast.CompositeLit{
+ Type: &ast.ArrayType{
+ Elt: makeIdent(st.int),
+ },
+ },
+ Low: i,
+ High: j,
+ })
+}
+
+// emitUnique emits a T{elem1: 0, ... elemN: 0} constraint,
+// which ensures that all constant elems are unique.
+// T may be a map, slice, or array depending
+// on the desired check semantics.
+func (st *falconState) emitUnique(typ ast.Expr, elems []ast.Expr) {
+ // A single element cannot conflict with anything,
+ // so no constraint is needed for len(elems) < 2.
+ if len(elems) > 1 {
+ var elts []ast.Expr
+ for _, elem := range elems {
+ elts = append(elts, &ast.KeyValueExpr{
+ Key: elem,
+ Value: makeIntLit(0),
+ })
+ }
+ st.emit(&ast.CompositeLit{
+ Type: typ,
+ Elts: elts,
+ })
+ }
+}
+
+// -- traversal --
+
+// The traversal functions scan the callee body for expressions that
+// are not constant but would become constant if the parameter vars
+// were redeclared as constants, and emits for each one a constraint
+// (a Go expression) with the property that it will not type-check
+// (using types.CheckExpr) if the particular argument values are
+// unsuitable.
+//
+// These constraints are checked by Inline with the actual
+// constant argument values. Violations cause it to reject
+// parameters as candidates for substitution.
+
+// stmt visits statement s, emitting constraints for the fallible
+// constant operations within it; see the traversal commentary above.
+func (st *falconState) stmt(s ast.Stmt) {
+ ast.Inspect(s, func(n ast.Node) bool {
+ switch n := n.(type) {
+ case ast.Expr:
+ _ = st.expr(n)
+ return false // skip usual traversal
+
+ case *ast.AssignStmt:
+ switch n.Tok {
+ case token.QUO_ASSIGN, token.REM_ASSIGN:
+ // x /= y
+ // Possible "integer division by zero"
+ // Emit constraint: 1/y.
+ _ = st.expr(n.Lhs[0])
+ kY := st.expr(n.Rhs[0])
+ if kY, ok := kY.(ast.Expr); ok {
+ op := token.QUO
+ if n.Tok == token.REM_ASSIGN {
+ op = token.REM
+ }
+ st.emit(&ast.BinaryExpr{
+ Op: op,
+ X: makeIntLit(1),
+ Y: kY,
+ })
+ }
+ return false // skip usual traversal
+ }
+
+ case *ast.SwitchStmt:
+ if n.Init != nil {
+ st.stmt(n.Init)
+ }
+ tBool := types.Type(types.Typ[types.Bool])
+ tagType := tBool // default: true
+ if n.Tag != nil {
+ st.expr(n.Tag)
+ tagType = st.info.TypeOf(n.Tag)
+ }
+
+ // Possible "duplicate case value".
+ // Emit constraint map[T]int{v1: 0, ..., vN:0}
+ // to ensure all maybe-constant case values are unique
+ // (unless switch tag is boolean, which is relaxed).
+ var unique []ast.Expr
+ for _, clause := range n.Body.List {
+ clause := clause.(*ast.CaseClause)
+ for _, caseval := range clause.List {
+ if k := st.expr(caseval); k != nil {
+ unique = append(unique, st.toExpr(k))
+ }
+ }
+ for _, stmt := range clause.Body {
+ st.stmt(stmt)
+ }
+ }
+ if unique != nil && !types.Identical(tagType.Underlying(), tBool) {
+ // typename handles only basic types, so an
+ // interface-typed tag uses interface{} as the key type.
+ tname := st.any
+ if !types.IsInterface(tagType) {
+ tname = st.typename(tagType)
+ }
+ t := &ast.MapType{
+ Key: makeIdent(tname),
+ Value: makeIdent(st.int),
+ }
+ st.emitUnique(t, unique)
+ }
+ }
+ return true
+ })
+}
+
+// fieldTypes visits the .Type of each field in the list.
+// (Results are discarded: the visit is solely for its
+// constraint-emitting side effects.)
+func (st *falconState) fieldTypes(fields *ast.FieldList) {
+ if fields != nil {
+ for _, field := range fields.List {
+ _ = st.expr(field.Type)
+ }
+ }
+}
+
+// expr visits the expression (or type) and returns a
+// non-nil result if the expression is constant or would
+// become constant if all suitable function parameters were
+// redeclared as constants.
+//
+// If the expression is constant, st.expr returns its type
+// and value (types.TypeAndValue). If the expression would
+// become constant, st.expr returns an ast.Expr tree whose
+// leaves are literals and parameter references, and whose
+// interior nodes are operations that may become constant,
+// such as -x, x+y, f(x), and T(x). We call these would-be
+// constant expressions "fallible constants", since they may
+// fail to type-check for some values of x, i, and j. (We
+// refer to the non-nil cases collectively as "maybe
+// constant", and the nil case as "definitely non-constant".)
+//
+// As a side effect, st.expr emits constraints for each
+// fallible constant expression; this is its main purpose.
+//
+// Consequently, st.expr must visit the entire subtree so
+// that all necessary constraints are emitted. It may not
+// short-circuit the traversal when it encounters a constant
+// subexpression as constants may contain arbitrary other
+// syntax that may impose constraints. Consider (as always)
+// this contrived but legal example of a type parameter (!)
+// that contains statement syntax:
+//
+// func f[T [unsafe.Sizeof(func() { stmts })]int]()
+//
+// There is no need to emit constraints for (e.g.) s[i] when s
+// and i are already constants, because we know the expression
+// is sound, but it is sometimes easier to emit these
+// redundant constraints than to avoid them.
+func (st *falconState) expr(e ast.Expr) (res any) { // = types.TypeAndValue | ast.Expr
+ tv := st.info.Types[e]
+ if tv.Value != nil {
+ // A constant value overrides any other result.
+ defer func() { res = tv }()
+ }
+
+ switch e := e.(type) {
+ case *ast.Ident:
+ if v, ok := st.info.Uses[e].(*types.Var); ok {
+ if _, ok := st.params[v]; ok && isBasic(v.Type(), types.IsConstType) {
+ return e // reference to constable parameter
+ }
+ }
+ // (References to *types.Const are handled by the defer.)
+
+ case *ast.BasicLit:
+ // constant
+
+ case *ast.ParenExpr:
+ return st.expr(e.X)
+
+ case *ast.FuncLit:
+ _ = st.expr(e.Type)
+ st.stmt(e.Body)
+ // definitely non-constant
+
+ case *ast.CompositeLit:
+ // T{k: v, ...}, where T ∈ {array,*array,slice,map},
+ // imposes a constraint that all constant k are
+ // distinct and, for arrays [n]T, within range 0-n.
+ //
+ // Types matter, not just values. For example,
+ // an interface-keyed map may contain keys
+ // that are numerically equal so long as they
+ // are of distinct types. For example:
+ //
+ // type myint int
+ // map[any]bool{1: true, 1: true} // error: duplicate key
+ // map[any]bool{1: true, int16(1): true} // ok
+ // map[any]bool{1: true, myint(1): true} // ok
+ //
+ // This can be asserted by emitting a
+ // constraint of the form T{k1: 0, ..., kN: 0}.
+ if e.Type != nil {
+ _ = st.expr(e.Type)
+ }
+ t := types.Unalias(typeparams.Deref(tv.Type))
+ ct := typeparams.CoreType(t)
+ var mapKeys []ast.Expr // map key expressions; must be distinct if constant
+ for _, elt := range e.Elts {
+ if kv, ok := elt.(*ast.KeyValueExpr); ok {
+ if is[*types.Map](ct) {
+ if k := st.expr(kv.Key); k != nil {
+ mapKeys = append(mapKeys, st.toExpr(k))
+ }
+ }
+ _ = st.expr(kv.Value)
+ } else {
+ _ = st.expr(elt)
+ }
+ }
+ if len(mapKeys) > 0 {
+ // Inlining a map literal may replace variable key expressions by constants.
+ // All such constants must have distinct values.
+ // (Array and slice literals do not permit non-constant keys.)
+ t := ct.(*types.Map)
+ var typ ast.Expr
+ if types.IsInterface(t.Key()) {
+ typ = &ast.MapType{
+ Key: makeIdent(st.any),
+ Value: makeIdent(st.int),
+ }
+ } else {
+ typ = &ast.MapType{
+ Key: makeIdent(st.typename(t.Key())),
+ Value: makeIdent(st.int),
+ }
+ }
+ st.emitUnique(typ, mapKeys)
+ }
+ // definitely non-constant
+
+ case *ast.SelectorExpr:
+ _ = st.expr(e.X)
+ _ = st.expr(e.Sel)
+ // The defer is sufficient to handle
+ // qualified identifiers (pkg.Const).
+ // All other cases are definitely non-constant.
+
+ case *ast.IndexExpr:
+ if tv.IsType() {
+ // type C[T]
+ _ = st.expr(e.X)
+ _ = st.expr(e.Index)
+ } else {
+ // term x[i]
+ //
+ // Constraints (if x is slice/string/array/*array, not map):
+ // - i >= 0
+ // if i is a fallible constant
+ // - i < len(x)
+ // if x is array/*array and
+ // i is a fallible constant;
+ // or if s is a string and both i,
+ // s are maybe-constants,
+ // but not both are constants.
+ kX := st.expr(e.X)
+ kI := st.expr(e.Index)
+ if kI != nil && !is[*types.Map](st.info.TypeOf(e.X).Underlying()) {
+ if kI, ok := kI.(ast.Expr); ok {
+ st.emitNonNegative(kI)
+ }
+ // Emit constraint to check indices against known length.
+ // TODO(adonovan): factor with SliceExpr logic.
+ var x ast.Expr
+ if kX != nil {
+ // string
+ x = st.toExpr(kX)
+ } else if arr, ok := typeparams.CoreType(typeparams.Deref(st.info.TypeOf(e.X))).(*types.Array); ok {
+ // array, *array
+ x = &ast.CompositeLit{
+ Type: &ast.ArrayType{
+ Len: makeIntLit(arr.Len()),
+ Elt: makeIdent(st.int),
+ },
+ }
+ }
+ if x != nil {
+ st.emit(&ast.IndexExpr{
+ X: x,
+ Index: st.toExpr(kI),
+ })
+ }
+ }
+ }
+ // definitely non-constant
+
+ case *ast.SliceExpr:
+ // x[low:high:max]
+ //
+ // Emit non-negative constraints for each index,
+ // plus low <= high <= max <= len(x)
+ // for each pair that are maybe-constant
+ // but not definitely constant.
+
+ kX := st.expr(e.X)
+ var kLow, kHigh, kMax any
+ if e.Low != nil {
+ kLow = st.expr(e.Low)
+ if kLow != nil {
+ if kLow, ok := kLow.(ast.Expr); ok {
+ st.emitNonNegative(kLow)
+ }
+ }
+ }
+ if e.High != nil {
+ kHigh = st.expr(e.High)
+ if kHigh != nil {
+ if kHigh, ok := kHigh.(ast.Expr); ok {
+ st.emitNonNegative(kHigh)
+ }
+ if kLow != nil {
+ st.emitMonotonic(st.toExpr(kLow), st.toExpr(kHigh))
+ }
+ }
+ }
+ if e.Max != nil {
+ kMax = st.expr(e.Max)
+ if kMax != nil {
+ if kMax, ok := kMax.(ast.Expr); ok {
+ st.emitNonNegative(kMax)
+ }
+ if kHigh != nil {
+ st.emitMonotonic(st.toExpr(kHigh), st.toExpr(kMax))
+ }
+ }
+ }
+
+ // Emit constraint to check indices against known length.
+ var x ast.Expr
+ if kX != nil {
+ // string
+ x = st.toExpr(kX)
+ } else if arr, ok := typeparams.CoreType(typeparams.Deref(st.info.TypeOf(e.X))).(*types.Array); ok {
+ // array, *array
+ x = &ast.CompositeLit{
+ Type: &ast.ArrayType{
+ Len: makeIntLit(arr.Len()),
+ Elt: makeIdent(st.int),
+ },
+ }
+ }
+ if x != nil {
+ // Avoid slice[::max] if kHigh is nonconstant (nil).
+ high, max := st.toExpr(kHigh), st.toExpr(kMax)
+ if high == nil {
+ high = max // => slice[:max:max]
+ }
+ st.emit(&ast.SliceExpr{
+ X: x,
+ Low: st.toExpr(kLow),
+ High: high,
+ Max: max,
+ })
+ }
+ // definitely non-constant
+
+ case *ast.TypeAssertExpr:
+ _ = st.expr(e.X)
+ if e.Type != nil {
+ _ = st.expr(e.Type)
+ }
+
+ case *ast.CallExpr:
+ _ = st.expr(e.Fun)
+ if tv, ok := st.info.Types[e.Fun]; ok && tv.IsType() {
+ // conversion T(x)
+ //
+ // Possible "value out of range".
+ kX := st.expr(e.Args[0])
+ if kX != nil && isBasic(tv.Type, types.IsConstType) {
+ conv := convert(makeIdent(st.typename(tv.Type)), st.toExpr(kX))
+ if is[ast.Expr](kX) {
+ st.emit(conv)
+ }
+ return conv
+ }
+ return nil // definitely non-constant
+ }
+
+ // call f(x)
+
+ all := true // all args are possibly-constant
+ kArgs := make([]ast.Expr, len(e.Args))
+ for i, arg := range e.Args {
+ if kArg := st.expr(arg); kArg != nil {
+ kArgs[i] = st.toExpr(kArg)
+ } else {
+ all = false
+ }
+ }
+
+ // Calls to built-ins with fallibly constant arguments
+ // may become constant. All other calls are either
+ // constant or non-constant
+ if id, ok := e.Fun.(*ast.Ident); ok && all && tv.Value == nil {
+ if builtin, ok := st.info.Uses[id].(*types.Builtin); ok {
+ switch builtin.Name() {
+ case "len", "imag", "real", "complex", "min", "max":
+ return &ast.CallExpr{
+ Fun: id,
+ Args: kArgs,
+ Ellipsis: e.Ellipsis,
+ }
+ }
+ }
+ }
+
+ case *ast.StarExpr: // *T, *ptr
+ _ = st.expr(e.X)
+
+ case *ast.UnaryExpr:
+ // + - ! ^ & <- ~
+ //
+ // Possible "negation of minint".
+ // Emit constraint: -x
+ kX := st.expr(e.X)
+ if kX != nil && !is[types.TypeAndValue](kX) {
+ if e.Op == token.SUB {
+ st.emit(&ast.UnaryExpr{
+ Op: e.Op,
+ X: st.toExpr(kX),
+ })
+ }
+
+ return &ast.UnaryExpr{
+ Op: e.Op,
+ X: st.toExpr(kX),
+ }
+ }
+
+ case *ast.BinaryExpr:
+ kX := st.expr(e.X)
+ kY := st.expr(e.Y)
+ switch e.Op {
+ case token.QUO, token.REM:
+ // x/y, x%y
+ //
+ // Possible "integer division by zero" or
+ // "minint / -1" overflow.
+ // Emit constraint: x/y or 1/y
+ if kY != nil {
+ if kX == nil {
+ kX = makeIntLit(1)
+ }
+ st.emit(&ast.BinaryExpr{
+ Op: e.Op,
+ X: st.toExpr(kX),
+ Y: st.toExpr(kY),
+ })
+ }
+
+ case token.ADD, token.SUB, token.MUL:
+ // x+y, x-y, x*y
+ //
+ // Possible "arithmetic overflow".
+ // Emit constraint: x+y
+ if kX != nil && kY != nil {
+ st.emit(&ast.BinaryExpr{
+ Op: e.Op,
+ X: st.toExpr(kX),
+ Y: st.toExpr(kY),
+ })
+ }
+
+ case token.SHL, token.SHR:
+ // x << y, x >> y
+ //
+ // Possible "constant shift too large".
+ // Either operand may be too large individually,
+ // and they may be too large together.
+ // Emit constraint:
+ // x << y (if both maybe-constant)
+ // x << 0 (if y is non-constant)
+ // 1 << y (if x is non-constant)
+ if kX != nil || kY != nil {
+ x := st.toExpr(kX)
+ if x == nil {
+ x = makeIntLit(1)
+ }
+ y := st.toExpr(kY)
+ if y == nil {
+ y = makeIntLit(0)
+ }
+ st.emit(&ast.BinaryExpr{
+ Op: e.Op,
+ X: x,
+ Y: y,
+ })
+ }
+
+ case token.LSS, token.GTR, token.EQL, token.NEQ, token.LEQ, token.GEQ:
+ // < > == != <= <=
+ //
+ // A "x cmp y" expression with constant operands x, y is
+ // itself constant, but I can't see how a constant bool
+ // could be fallible: the compiler doesn't reject duplicate
+ // boolean cases in a switch, presumably because boolean
+ // switches are less like n-way branches and more like
+ // sequential if-else chains with possibly overlapping
+ // conditions; and there is (sadly) no way to convert a
+ // boolean constant to an int constant.
+ }
+ if kX != nil && kY != nil {
+ return &ast.BinaryExpr{
+ Op: e.Op,
+ X: st.toExpr(kX),
+ Y: st.toExpr(kY),
+ }
+ }
+
+ // types
+ //
+ // We need to visit types (and even type parameters)
+ // in order to reach all the places where things could go wrong:
+ //
+ // const (
+ // s = ""
+ // i = 0
+ // )
+ // type C[T [unsafe.Sizeof(func() { _ = s[i] })]int] bool
+
+ case *ast.IndexListExpr:
+ _ = st.expr(e.X)
+ for _, expr := range e.Indices {
+ _ = st.expr(expr)
+ }
+
+ case *ast.Ellipsis:
+ if e.Elt != nil {
+ _ = st.expr(e.Elt)
+ }
+
+ case *ast.ArrayType:
+ if e.Len != nil {
+ _ = st.expr(e.Len)
+ }
+ _ = st.expr(e.Elt)
+
+ case *ast.StructType:
+ st.fieldTypes(e.Fields)
+
+ case *ast.FuncType:
+ st.fieldTypes(e.TypeParams)
+ st.fieldTypes(e.Params)
+ st.fieldTypes(e.Results)
+
+ case *ast.InterfaceType:
+ st.fieldTypes(e.Methods)
+
+ case *ast.MapType:
+ _ = st.expr(e.Key)
+ _ = st.expr(e.Value)
+
+ case *ast.ChanType:
+ _ = st.expr(e.Value)
+ }
+ // res is nil (definitely non-constant) unless the deferred
+ // constant override at the top of the function fires.
+ return
+}
+
+// toExpr converts the result of visitExpr to a falcon expression.
+// (We don't do this in visitExpr as we first need to discriminate
+// constants from maybe-constants.)
+func (st *falconState) toExpr(x any) ast.Expr {
+ switch x := x.(type) {
+ case nil:
+ // Definitely non-constant.
+ return nil
+
+ case types.TypeAndValue:
+ // Constant: render its value as a literal.
+ lit := makeLiteral(x.Value)
+ if !isBasic(x.Type, types.IsUntyped) {
+ // convert to "typed" type
+ lit = &ast.CallExpr{
+ Fun: makeIdent(st.typename(x.Type)),
+ Args: []ast.Expr{lit},
+ }
+ }
+ return lit
+
+ case ast.Expr:
+ // Already a fallible-constant expression tree.
+ return x
+
+ default:
+ panic(x)
+ }
+}
+
+// makeLiteral returns an expression denoting the constant value v,
+// using only forms (literals and operators) that cannot be affected
+// by shadowing of identifiers.
+func makeLiteral(v constant.Value) ast.Expr {
+ switch v.Kind() {
+ case constant.Bool:
+ // Rather than refer to the true or false built-ins,
+ // which could be shadowed by poorly chosen parameter
+ // names, we use 0 == 0 for true and 0 != 0 for false.
+ op := token.EQL
+ if !constant.BoolVal(v) {
+ op = token.NEQ
+ }
+ return &ast.BinaryExpr{
+ Op: op,
+ X: makeIntLit(0),
+ Y: makeIntLit(0),
+ }
+
+ case constant.String:
+ return &ast.BasicLit{
+ Kind: token.STRING,
+ Value: v.ExactString(),
+ }
+
+ case constant.Int:
+ return &ast.BasicLit{
+ Kind: token.INT,
+ Value: v.ExactString(),
+ }
+
+ case constant.Float:
+ return &ast.BasicLit{
+ Kind: token.FLOAT,
+ Value: v.ExactString(),
+ }
+
+ case constant.Complex:
+ // The components could be float or int.
+ y := makeLiteral(constant.Imag(v))
+ y.(*ast.BasicLit).Value += "i" // ugh
+ if re := constant.Real(v); !consteq(re, kZeroInt) {
+ // complex: x + yi
+ y = &ast.BinaryExpr{
+ Op: token.ADD,
+ X: makeLiteral(re),
+ Y: y,
+ }
+ }
+ return y
+
+ default:
+ panic(v.Kind())
+ }
+}
+
+// makeIntLit returns a decimal integer literal for x.
+func makeIntLit(x int64) *ast.BasicLit {
+ return &ast.BasicLit{
+ Kind: token.INT,
+ Value: strconv.FormatInt(x, 10),
+ }
+}
+
+// isBasic reports whether the underlying type of t is a basic type
+// having at least one of the given info flags.
+func isBasic(t types.Type, info types.BasicInfo) bool {
+ basic, ok := t.Underlying().(*types.Basic)
+ return ok && basic.Info()&info != 0
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Copied, with considerable changes, from go/parser/resolver.go
+// at af53bd2c03.
+
+package inline
+
+import (
+ "go/ast"
+ "go/token"
+)
+
+// freeishNames computes an approximation to the free names of the AST
+// at node n based solely on syntax, inserting values into the map.
+//
+// In the absence of composite literals, the set of free names is exact. Composite
+// literals introduce an ambiguity that can only be resolved with type information:
+// whether F is a field name or a value in `T{F: ...}`.
+// If includeComplitIdents is true, this function conservatively assumes
+// T is not a struct type, so freeishNames overapproximates: the resulting
+// set may contain spurious entries that are not free lexical references
+// but are references to struct fields.
+// If includeComplitIdents is false, this function assumes that T *is*
+// a struct type, so freeishNames underapproximates: the resulting set
+// may omit names that are free lexical references.
+//
+// The code is based on go/parser.resolveFile, but heavily simplified. Crucial
+// differences are:
+// - Instead of resolving names to their objects, this function merely records
+// whether they are free.
+// - Labels are ignored: they do not refer to values.
+// - This is never called on FuncDecls or ImportSpecs, so the function
+// panics if it sees one.
+func freeishNames(free map[string]bool, n ast.Node, includeComplitIdents bool) {
+ v := &freeVisitor{free: free, includeComplitIdents: includeComplitIdents}
+ // Begin with a scope, even though n might not be a form that establishes a scope.
+ // For example, n might be:
+ // x := ...
+ // Then we need to add the first x to some scope.
+ v.openScope()
+ ast.Walk(v, n)
+ v.closeScope()
+ // Every openScope during the walk must have been matched by a closeScope.
+ assert(v.scope == nil, "unbalanced scopes")
+}
+
+// A freeVisitor holds state for a free-name analysis.
+// It implements ast.Visitor; see Visit.
+type freeVisitor struct {
+ scope *scope // the current innermost scope
+ free map[string]bool // free names seen so far
+ includeComplitIdents bool // include identifier key in composite literals
+}
+
+// scope contains all the names defined in a lexical scope.
+// It is like ast.Scope, but without deprecation warnings.
+// A nil *scope is a valid empty scope (see defined).
+type scope struct {
+ names map[string]bool
+ outer *scope
+}
+
+// defined reports whether name is declared in s or any enclosing scope.
+func (s *scope) defined(name string) bool {
+ for ; s != nil; s = s.outer {
+ if s.names[name] {
+ return true
+ }
+ }
+ return false
+}
+
+// Visit implements ast.Visitor. It maintains the scope stack for
+// each scope-introducing construct, records unbound identifiers via
+// resolve, and returns a non-nil visitor (continuing ast.Walk's
+// generic traversal) only in the default case.
+func (v *freeVisitor) Visit(n ast.Node) ast.Visitor {
+	switch n := n.(type) {
+
+	// Expressions.
+	case *ast.Ident:
+		v.resolve(n)
+
+	case *ast.FuncLit:
+		v.openScope()
+		defer v.closeScope()
+		v.walkFuncType(n.Type)
+		v.walkBody(n.Body)
+
+	case *ast.SelectorExpr:
+		v.walk(n.X)
+		// Skip n.Sel: it cannot be free.
+
+	case *ast.StructType:
+		v.openScope()
+		defer v.closeScope()
+		v.walkFieldList(n.Fields)
+
+	case *ast.FuncType:
+		v.openScope()
+		defer v.closeScope()
+		v.walkFuncType(n)
+
+	case *ast.CompositeLit:
+		v.walk(n.Type)
+		for _, e := range n.Elts {
+			if kv, _ := e.(*ast.KeyValueExpr); kv != nil {
+				if ident, _ := kv.Key.(*ast.Ident); ident != nil {
+					// It is not possible from syntax alone to know whether
+					// an identifier used as a composite literal key is
+					// a struct field (if n.Type is a struct) or a value
+					// (if n.Type is a map, slice or array).
+					if v.includeComplitIdents {
+						// Over-approximate by treating both cases as potentially
+						// free names.
+						v.resolve(ident)
+					} else {
+						// Under-approximate by ignoring potentially free names.
+					}
+				} else {
+					v.walk(kv.Key)
+				}
+				v.walk(kv.Value)
+			} else {
+				v.walk(e)
+			}
+		}
+
+	case *ast.InterfaceType:
+		v.openScope()
+		defer v.closeScope()
+		v.walkFieldList(n.Methods)
+
+	// Statements
+	case *ast.AssignStmt:
+		// Walk RHS before declaring LHS: in "x := f(x)", the
+		// argument x refers to the outer binding.
+		walkSlice(v, n.Rhs)
+		if n.Tok == token.DEFINE {
+			v.shortVarDecl(n.Lhs)
+		} else {
+			walkSlice(v, n.Lhs)
+		}
+
+	case *ast.LabeledStmt:
+		// ignore labels
+		// TODO(jba): consider labels?
+		v.walk(n.Stmt)
+
+	case *ast.BranchStmt:
+		// Ignore labels.
+		// TODO(jba): consider labels?
+
+	case *ast.BlockStmt:
+		v.openScope()
+		defer v.closeScope()
+		walkSlice(v, n.List)
+
+	case *ast.IfStmt:
+		v.openScope()
+		defer v.closeScope()
+		v.walk(n.Init)
+		v.walk(n.Cond)
+		v.walk(n.Body)
+		v.walk(n.Else)
+
+	case *ast.CaseClause:
+		// The case list is resolved in the switch's scope,
+		// not the clause's own scope.
+		walkSlice(v, n.List)
+		v.openScope()
+		defer v.closeScope()
+		walkSlice(v, n.Body)
+
+	case *ast.SwitchStmt:
+		v.openScope()
+		defer v.closeScope()
+		v.walk(n.Init)
+		v.walk(n.Tag)
+		v.walkBody(n.Body)
+
+	case *ast.TypeSwitchStmt:
+		if n.Init != nil {
+			v.openScope()
+			defer v.closeScope()
+			v.walk(n.Init)
+		}
+		v.openScope()
+		defer v.closeScope()
+		v.walk(n.Assign)
+		// We can use walkBody here because we don't track label scopes.
+		v.walkBody(n.Body)
+
+	case *ast.CommClause:
+		v.openScope()
+		defer v.closeScope()
+		v.walk(n.Comm)
+		walkSlice(v, n.Body)
+
+	case *ast.SelectStmt:
+		v.walkBody(n.Body)
+
+	case *ast.ForStmt:
+		v.openScope()
+		defer v.closeScope()
+		v.walk(n.Init)
+		v.walk(n.Cond)
+		v.walk(n.Post)
+		v.walk(n.Body)
+
+	case *ast.RangeStmt:
+		v.openScope()
+		defer v.closeScope()
+		v.walk(n.X)
+		var lhs []ast.Expr
+		if n.Key != nil {
+			lhs = append(lhs, n.Key)
+		}
+		if n.Value != nil {
+			lhs = append(lhs, n.Value)
+		}
+		if len(lhs) > 0 {
+			if n.Tok == token.DEFINE {
+				v.shortVarDecl(lhs)
+			} else {
+				walkSlice(v, lhs)
+			}
+		}
+		v.walk(n.Body)
+
+	// Declarations
+	case *ast.GenDecl:
+		switch n.Tok {
+		case token.CONST, token.VAR:
+			for _, spec := range n.Specs {
+				spec := spec.(*ast.ValueSpec)
+				walkSlice(v, spec.Values)
+				if spec.Type != nil {
+					v.walk(spec.Type)
+				}
+				v.declare(spec.Names...)
+			}
+		case token.TYPE:
+			for _, spec := range n.Specs {
+				spec := spec.(*ast.TypeSpec)
+				// Go spec: The scope of a type identifier declared inside a
+				// function begins at the identifier in the TypeSpec and ends
+				// at the end of the innermost containing block.
+				v.declare(spec.Name)
+				if spec.TypeParams != nil {
+					v.openScope()
+					defer v.closeScope()
+					v.walkTypeParams(spec.TypeParams)
+				}
+				v.walk(spec.Type)
+			}
+
+		case token.IMPORT:
+			panic("encountered import declaration in free analysis")
+		}
+
+	case *ast.FuncDecl:
+		panic("encountered top-level function declaration in free analysis")
+
+	default:
+		return v
+	}
+
+	return nil
+}
+
+// openScope pushes a fresh, empty innermost scope.
+func (r *freeVisitor) openScope() {
+	r.scope = &scope{names: map[string]bool{}, outer: r.scope}
+}
+
+// closeScope pops the innermost scope, restoring its parent.
+func (r *freeVisitor) closeScope() {
+	r.scope = r.scope.outer
+}
+
+// walk visits n, tolerating nil so that callers need not check
+// optional children (e.g. a missing loop Init or Cond).
+func (r *freeVisitor) walk(n ast.Node) {
+	if n == nil {
+		return
+	}
+	ast.Walk(r, n)
+}
+
+// walkFuncType walks a function type. It is used for explicit
+// function types, like this:
+//
+//	type RunFunc func(context.Context) error
+//
+// and function literals, like this:
+//
+//	func(a, b int) int { return a + b}
+//
+// neither of which have type parameters.
+// Function declarations do involve type parameters, but we don't
+// handle them.
+func (r *freeVisitor) walkFuncType(typ *ast.FuncType) {
+	// The order here doesn't really matter, because names in
+	// a field list cannot appear in types.
+	// (The situation is different for type parameters, for which
+	// see [freeVisitor.walkTypeParams].)
+	// Resolve all field types first, then declare all field names.
+	r.resolveFieldList(typ.Params)
+	r.resolveFieldList(typ.Results)
+	r.declareFieldList(typ.Params)
+	r.declareFieldList(typ.Results)
+}
+
+// walkTypeParams is like walkFieldList, but declares type parameters eagerly so
+// that they may be resolved in the constraint expressions held in the field
+// Type.
+// (Hence declaration precedes resolution here — the reverse of walkFieldList.)
+func (r *freeVisitor) walkTypeParams(list *ast.FieldList) {
+	r.declareFieldList(list)
+	r.resolveFieldList(list)
+}
+
+// walkBody walks the statements of body, which may be nil.
+// It does not open a scope; callers that need one (e.g. FuncLit)
+// open it themselves.
+func (r *freeVisitor) walkBody(body *ast.BlockStmt) {
+	if body != nil {
+		walkSlice(r, body.List)
+	}
+}
+
+// walkFieldList resolves the field types of list (which may contain
+// free references) and then declares the field names.
+// A nil list is a no-op.
+func (r *freeVisitor) walkFieldList(list *ast.FieldList) {
+	if list != nil {
+		r.resolveFieldList(list) // .Type may contain free references
+		r.declareFieldList(list) // .Names bind new names
+	}
+}
+
+// shortVarDecl declares the identifiers on the left side of a ":=".
+//
+// The spec permits ":=" to redeclare variables originally declared
+// in the same block, but for free-name analysis a redeclaration is
+// indistinguishable from a fresh declaration, so every identifier
+// is simply declared.
+func (r *freeVisitor) shortVarDecl(lhs []ast.Expr) {
+	for _, expr := range lhs {
+		// Each operand must be an identifier in a well-formed
+		// program; tolerate anything else.
+		id, ok := expr.(*ast.Ident)
+		if !ok {
+			continue
+		}
+		r.declare(id)
+	}
+}
+
+// walkSlice walks each node of list, in order.
+func walkSlice[S ~[]E, E ast.Node](r *freeVisitor, list S) {
+	for i := range list {
+		r.walk(list[i])
+	}
+}
+
+// resolveFieldList resolves the types of the fields in list.
+// The companion method declareFieldList declares the names of the fields.
+func (r *freeVisitor) resolveFieldList(list *ast.FieldList) {
+	if list != nil {
+		for _, field := range list.List {
+			r.walk(field.Type)
+		}
+	}
+}
+
+// declareFieldList declares the names of the fields in list.
+// (Names in a FieldList always establish new bindings.)
+// The companion method resolveFieldList resolves the types of the fields.
+func (r *freeVisitor) declareFieldList(list *ast.FieldList) {
+	if list != nil {
+		for _, field := range list.List {
+			r.declare(field.Names...)
+		}
+	}
+}
+
+// resolve marks ident as free unless it is blank or bound in some
+// enclosing scope.
+// TODO(jba): rename: no resolution is happening.
+func (r *freeVisitor) resolve(ident *ast.Ident) {
+	name := ident.Name
+	if name == "_" || r.scope.defined(name) {
+		return
+	}
+	r.free[name] = true
+}
+
+// declare adds each non-blank ident to the current scope.
+func (r *freeVisitor) declare(idents ...*ast.Ident) {
+	for _, ident := range idents {
+		if name := ident.Name; name != "_" {
+			r.scope.names[name] = true
+		}
+	}
+}
--- /dev/null
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/format"
+ "go/parser"
+ "go/printer"
+ "go/token"
+ "go/types"
+ "maps"
+ pathpkg "path"
+ "reflect"
+ "slices"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/types/typeutil"
+ internalastutil "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/packagepath"
+ "golang.org/x/tools/internal/typeparams"
+ "golang.org/x/tools/internal/typesinternal"
+)
+
+// A Caller describes the function call and its enclosing context.
+//
+// The client is responsible for populating this struct and passing it to Inline.
+type Caller struct {
+	Fset    *token.FileSet
+	Types   *types.Package
+	Info    *types.Info
+	File    *ast.File
+	Call    *ast.CallExpr
+	Content []byte // source of the file containing the call
+
+	// Fields computed internally during inlining:
+	path          []ast.Node    // path from call to root of file syntax tree
+	enclosingFunc *ast.FuncDecl // top-level function/method enclosing the call, if any
+}
+
+// logger is the type of a printf-style logging callback.
+type logger = func(string, ...any)
+
+// Options specifies parameters affecting the inliner algorithm.
+// All fields are optional.
+type Options struct {
+	// Logf records the decision-making process.
+	// A nil Logf is replaced by a no-op logger.
+	Logf          logger // log output function, records decision-making process
+	IgnoreEffects bool   // ignore potential side effects of arguments (unsound)
+}
+
+// Result holds the result of code transformation.
+// It is returned by Inline.
+type Result struct {
+	Content     []byte // formatted, transformed content of caller file
+	Literalized bool   // chosen strategy replaced callee() with func(){...}()
+	BindingDecl bool   // transformation added "var params = args" declaration
+
+	// TODO(adonovan): provide an API for clients that want structured
+	// output: a list of import additions and deletions plus one or more
+	// localized diffs (or even AST transformations, though ownership and
+	// mutation are tricky) near the call site.
+}
+
+// Inline inlines the called function (callee) into the function call (caller)
+// and returns the updated, formatted content of the caller source file.
+//
+// Inline does not mutate any public fields of Caller or Callee.
+func Inline(caller *Caller, callee *Callee, opts *Options) (*Result, error) {
+ copy := *opts // shallow copy
+ opts = ©
+ // Set default options.
+ if opts.Logf == nil {
+ opts.Logf = func(string, ...any) {}
+ }
+
+ st := &state{
+ caller: caller,
+ callee: callee,
+ opts: opts,
+ }
+ return st.inline()
+}
+
+// state holds the working state of the inliner.
+type state struct {
+	caller *Caller
+	callee *Callee
+	opts   *Options // options with defaults applied; opts.Logf is non-nil
+}
+
+// inline computes and applies the inlining transformation, returning
+// the new content of the caller's file. It delegates strategy choice
+// to inlineCall, then rewrites the file in multiple passes: splicing
+// the formatted replacement over the old node, adding then removing
+// imports, and reparsing after each textual change.
+func (st *state) inline() (*Result, error) {
+	logf, caller, callee := st.opts.Logf, st.caller, st.callee
+
+	logf("inline %s @ %v",
+		debugFormatNode(caller.Fset, caller.Call),
+		caller.Fset.PositionFor(caller.Call.Lparen, false))
+
+	if !consistentOffsets(caller) {
+		return nil, fmt.Errorf("internal error: caller syntax positions are inconsistent with file content (did you forget to use FileSet.PositionFor when computing the file name?)")
+	}
+
+	// Break the string literal so we can use inlining in this file. :)
+	if ast.IsGenerated(caller.File) &&
+		bytes.Contains(caller.Content, []byte("// Code generated by "+"cmd/cgo; DO NOT EDIT.")) {
+		return nil, fmt.Errorf("cannot inline calls from files that import \"C\"")
+	}
+
+	res, err := st.inlineCall()
+	if err != nil {
+		return nil, err
+	}
+
+	// Replace the call (or some node that encloses it) by new syntax.
+	assert(res.old != nil, "old is nil")
+	assert(res.new != nil, "new is nil")
+
+	// A single return operand inlined to a unary
+	// expression context may need parens. Otherwise:
+	//    func two() int { return 1+1 }
+	//    print(-two())  =>  print(-1+1) // oops!
+	//
+	// Usually it is not necessary to insert ParenExprs
+	// as the formatter is smart enough to insert them as
+	// needed by the context. But the res.{old,new}
+	// substitution is done by formatting res.new in isolation
+	// and then splicing its text over res.old, so the
+	// formatter doesn't see the parent node and cannot do
+	// the right thing. (One solution would be to always
+	// format the enclosing node of old, but that requires
+	// non-lossy comment handling, #20744.)
+	//
+	// So, we must analyze the call's context
+	// to see whether ambiguity is possible.
+	// For example, if the context is x[y:z], then
+	// the x subtree is subject to precedence ambiguity
+	// (replacing x by p+q would give p+q[y:z] which is wrong)
+	// but the y and z subtrees are safe.
+	if needsParens(caller.path, res.old, res.new) {
+		res.new = &ast.ParenExpr{X: res.new.(ast.Expr)}
+	}
+
+	// Some reduction strategies return a new block holding the
+	// callee's statements. The block's braces may be elided when
+	// there is no conflict between names declared in the block
+	// with those declared by the parent block, and no risk of
+	// a caller's goto jumping forward across a declaration.
+	//
+	// This elision is only safe when the ExprStmt is beneath a
+	// BlockStmt, CaseClause.Body, or CommClause.Body;
+	// (see "statement theory").
+	//
+	// The inlining analysis may have already determined that eliding braces is
+	// safe. Otherwise, we analyze its safety here.
+	elideBraces := res.elideBraces
+	if !elideBraces {
+		if newBlock, ok := res.new.(*ast.BlockStmt); ok {
+			i := slices.Index(caller.path, res.old)
+			parent := caller.path[i+1]
+			var body []ast.Stmt
+			switch parent := parent.(type) {
+			case *ast.BlockStmt:
+				body = parent.List
+			case *ast.CommClause:
+				body = parent.Body
+			case *ast.CaseClause:
+				body = parent.Body
+			}
+			if body != nil {
+				callerNames := declares(body)
+
+				// If BlockStmt is a function body,
+				// include its receiver, params, and results.
+				addFieldNames := func(fields *ast.FieldList) {
+					if fields != nil {
+						for _, field := range fields.List {
+							for _, id := range field.Names {
+								callerNames[id.Name] = true
+							}
+						}
+					}
+				}
+				switch f := caller.path[i+2].(type) {
+				case *ast.FuncDecl:
+					addFieldNames(f.Recv)
+					addFieldNames(f.Type.Params)
+					addFieldNames(f.Type.Results)
+				case *ast.FuncLit:
+					addFieldNames(f.Type.Params)
+					addFieldNames(f.Type.Results)
+				}
+
+				if len(callerLabels(caller.path)) > 0 {
+					// TODO(adonovan): be more precise and reject
+					// only forward gotos across the inlined block.
+					logf("keeping block braces: caller uses control labels")
+				} else if intersects(declares(newBlock.List), callerNames) {
+					logf("keeping block braces: avoids name conflict")
+				} else {
+					elideBraces = true
+				}
+			}
+		}
+	}
+
+	// File rewriting. This proceeds in multiple passes, in order to maximally
+	// preserve comment positioning. (This could be greatly simplified once
+	// comments are stored in the tree.)
+	//
+	// Don't call replaceNode(caller.File, res.old, res.new)
+	// as it mutates the caller's syntax tree.
+	// Instead, splice the file, replacing the extent of the "old"
+	// node by a formatting of the "new" node, and re-parse.
+	// We'll fix up the imports on this new tree, and format again.
+	//
+	// Inv: f is the result of parsing content, using fset.
+	var (
+		content = caller.Content
+		fset    = caller.Fset
+		f       *ast.File // parsed below
+	)
+	reparse := func() error {
+		const mode = parser.ParseComments | parser.SkipObjectResolution | parser.AllErrors
+		f, err = parser.ParseFile(fset, "callee.go", content, mode)
+		if err != nil {
+			// Something has gone very wrong.
+			logf("failed to reparse <<%s>>: %v", string(content), err) // debugging
+			return err
+		}
+		return nil
+	}
+	{
+		start := offsetOf(fset, res.old.Pos())
+		end := offsetOf(fset, res.old.End())
+		var out bytes.Buffer
+		out.Write(content[:start])
+		// TODO(adonovan): might it make more sense to use
+		// callee.Fset when formatting res.new?
+		// The new tree is a mix of (cloned) caller nodes for
+		// the argument expressions and callee nodes for the
+		// function body. In essence the question is: which
+		// is more likely to have comments?
+		// Usually the callee body will be larger and more
+		// statement-heavy than the arguments, but a
+		// strategy may widen the scope of the replacement
+		// (res.old) from CallExpr to, say, its enclosing
+		// block, so the caller nodes dominate.
+		// Precise comment handling would make this a
+		// non-issue. Formatting wouldn't really need a
+		// FileSet at all.
+		if elideBraces {
+			for i, stmt := range res.new.(*ast.BlockStmt).List {
+				if i > 0 {
+					out.WriteByte('\n')
+				}
+				if err := format.Node(&out, fset, stmt); err != nil {
+					return nil, err
+				}
+			}
+		} else {
+			if err := format.Node(&out, fset, res.new); err != nil {
+				return nil, err
+			}
+		}
+		out.Write(content[end:])
+		content = out.Bytes()
+		if err := reparse(); err != nil {
+			return nil, err
+		}
+	}
+
+	// Add new imports that are still used.
+	newImports := trimNewImports(res.newImports, res.new)
+	// Insert new imports after last existing import,
+	// to avoid migration of pre-import comments.
+	// The imports will be organized below.
+	if len(newImports) > 0 {
+		// If we have imports to add, do so independent of the rest of the file.
+		// Otherwise, the length of the new imports may consume floating comments,
+		// causing them to be printed inside the imports block.
+		var (
+			importDecl    *ast.GenDecl
+			comments      []*ast.CommentGroup // relevant comments.
+			before, after []byte              // pre- and post-amble for the imports block.
+		)
+		if len(f.Imports) > 0 {
+			// Append specs to existing import decl
+			importDecl = f.Decls[0].(*ast.GenDecl)
+			for _, comment := range f.Comments {
+				// Filter comments. Don't use CommentMap.Filter here, because we don't
+				// want to include comments that document the import decl itself, for
+				// example:
+				//
+				//  // We don't want this comment to be duplicated.
+				//  import (
+				//    "something"
+				//  )
+				if importDecl.Pos() <= comment.Pos() && comment.Pos() < importDecl.End() {
+					comments = append(comments, comment)
+				}
+			}
+			before = content[:offsetOf(fset, importDecl.Pos())]
+			importDecl.Doc = nil // present in before
+			after = content[offsetOf(fset, importDecl.End()):]
+		} else {
+			// Insert new import decl.
+			importDecl = &ast.GenDecl{Tok: token.IMPORT}
+			f.Decls = prepend[ast.Decl](importDecl, f.Decls...)
+
+			// Make room for the new declaration after the package declaration.
+			pkgEnd := f.Name.End()
+			file := fset.File(pkgEnd)
+			if file == nil {
+				logf("internal error: missing pkg file")
+				return nil, fmt.Errorf("missing pkg file for %s", f.Name.Name)
+			}
+			// Preserve any comments after the package declaration, by splicing in
+			// the new import block after the end of the package declaration line.
+			line := file.Line(pkgEnd)
+			if line < len(file.Lines()) { // line numbers are 1-based
+				nextLinePos := file.LineStart(line + 1)
+				nextLine := offsetOf(fset, nextLinePos)
+				before = slices.Concat(content[:nextLine], []byte("\n"))
+				after = slices.Concat([]byte("\n\n"), content[nextLine:])
+			} else {
+				before = slices.Concat(content, []byte("\n\n"))
+			}
+		}
+		// Add new imports.
+		// Set their position to after the last position of the old imports, to keep
+		// comments on the old imports from moving.
+		lastPos := token.NoPos
+		if lastSpec := last(importDecl.Specs); lastSpec != nil {
+			lastPos = lastSpec.Pos()
+			if c := lastSpec.(*ast.ImportSpec).Comment; c != nil {
+				lastPos = c.Pos()
+			}
+		}
+		for _, imp := range newImports {
+			// Check that the new imports are accessible.
+			path, _ := strconv.Unquote(imp.spec.Path.Value)
+			if !packagepath.CanImport(caller.Types.Path(), path) {
+				return nil, fmt.Errorf("can't inline function %v as its body refers to inaccessible package %q", callee, path)
+			}
+			if lastPos.IsValid() {
+				lastPos++
+				imp.spec.Path.ValuePos = lastPos
+			}
+			importDecl.Specs = append(importDecl.Specs, imp.spec)
+		}
+
+		var out bytes.Buffer
+		out.Write(before)
+		commented := &printer.CommentedNode{
+			Node:     importDecl,
+			Comments: comments,
+		}
+
+		if err := format.Node(&out, fset, commented); err != nil {
+			logf("failed to format new importDecl: %v", err) // debugging
+			return nil, err
+		}
+		out.Write(after)
+		content = out.Bytes()
+		if err := reparse(); err != nil {
+			return nil, err
+		}
+	}
+	// Delete imports referenced only by caller.Call.Fun.
+	for _, oldImport := range res.oldImports {
+		specToDelete := oldImport.spec
+		name := ""
+		if specToDelete.Name != nil {
+			name = specToDelete.Name.Name
+		}
+		path, _ := strconv.Unquote(specToDelete.Path.Value)
+		astutil.DeleteNamedImport(caller.Fset, f, name, path)
+	}
+
+	var out bytes.Buffer
+	if err := format.Node(&out, caller.Fset, f); err != nil {
+		return nil, err
+	}
+	newSrc := out.Bytes()
+
+	literalized := false
+	if call, ok := res.new.(*ast.CallExpr); ok && is[*ast.FuncLit](call.Fun) {
+		literalized = true
+	}
+
+	return &Result{
+		Content:     newSrc,
+		Literalized: literalized,
+		BindingDecl: res.bindingDecl,
+	}, nil
+}
+
+// An oldImport is an import that will be deleted from the caller file
+// because, after inlining, it would be referenced only by caller.Call.Fun.
+type oldImport struct {
+	pkgName *types.PkgName
+	spec    *ast.ImportSpec
+}
+
+// A newImport is an import that will be added to the caller file
+// to satisfy a free reference in the inlined callee body.
+type newImport struct {
+	pkgName string // chosen local name for the package
+	spec    *ast.ImportSpec
+}
+
+// importState tracks information about imports: the caller's
+// preexisting ones, plus those to be added or removed by inlining.
+type importState struct {
+	logf       func(string, ...any)
+	caller     *Caller
+	importMap  map[string][]string // from package paths in the caller's file to local names
+	newImports []newImport         // for references to free names in callee; to be added to the file
+	oldImports []oldImport         // referenced only by caller.Call.Fun; to be removed from the file
+}
+
+// newImportState returns an importState with initial information about the caller's imports.
+// It records which package paths are already importable under which local
+// names (importMap), and which imports may become unnecessary after
+// inlining (oldImports).
+func newImportState(logf func(string, ...any), caller *Caller, callee *gobCallee) *importState {
+	// For simplicity we ignore existing dot imports, so that a qualified
+	// identifier (QI) in the callee is always represented by a QI in the caller,
+	// allowing us to treat a QI like a selection on a package name.
+	is := &importState{
+		logf:      logf,
+		caller:    caller,
+		importMap: make(map[string][]string),
+	}
+
+	for _, imp := range caller.File.Imports {
+		if pkgName, ok := importedPkgName(caller.Info, imp); ok &&
+			pkgName.Name() != "." &&
+			pkgName.Name() != "_" {
+
+			// If the import's sole use is in caller.Call.Fun of the form p.F(...),
+			// where p.F is a qualified identifier, the p import may not be
+			// necessary.
+			//
+			// Only the qualified identifier case matters, as other references to
+			// imported package names in the Call.Fun expression (e.g.
+			// x.after(3*time.Second).f() or time.Second.String()) will remain after
+			// inlining, as arguments.
+			//
+			// If that is the case, proactively check if any of the callee FreeObjs
+			// need this import. Doing so eagerly simplifies the resulting logic.
+			needed := true
+			sel, ok := ast.Unparen(caller.Call.Fun).(*ast.SelectorExpr)
+			if ok && soleUse(caller.Info, pkgName) == sel.X {
+				needed = false // no longer needed by caller
+				// Check to see if any of the inlined free objects need this package.
+				for _, obj := range callee.FreeObjs {
+					if obj.PkgPath == pkgName.Imported().Path() && obj.Shadow[pkgName.Name()] == 0 {
+						needed = true // needed by callee
+						break
+					}
+				}
+			}
+
+			// Exclude imports not needed by the caller or callee after inlining; the second
+			// return value holds these.
+			if needed {
+				path := pkgName.Imported().Path()
+				is.importMap[path] = append(is.importMap[path], pkgName.Name())
+			} else {
+				is.oldImports = append(is.oldImports, oldImport{pkgName: pkgName, spec: imp})
+			}
+		}
+	}
+	return is
+}
+
+// importName returns an existing import name that is usable in the
+// given shadowing context, or "" if there is none. It is used to
+// determine the set of new imports in localName, and is also used
+// for writing out names in inlining strategies below.
+func (i *importState) importName(pkgPath string, shadow shadowMap) string {
+	for _, name := range i.importMap[pkgPath] {
+		// Reject names shadowed in the callee.
+		if shadow[name] != 0 {
+			continue
+		}
+		// Accept the name if, in the caller, it is either unbound
+		// or still bound to a package: i.e. the import preexisted,
+		// or was newly added (no PkgName) and is not shadowed by
+		// some other declaration.
+		obj := i.caller.lookup(name)
+		if obj == nil || is[*types.PkgName](obj) {
+			return name
+		}
+	}
+	return ""
+}
+
+// localName returns the local name for a given imported package path,
+// adding one if it doesn't exist.
+func (i *importState) localName(pkgPath, pkgName string, shadow shadowMap) string {
+	// Does an import already exist that works in this shadowing context?
+	if name := i.importName(pkgPath, shadow); name != "" {
+		return name
+	}
+
+	// newlyAdded reports whether name was already chosen for an
+	// import added by this inlining.
+	newlyAdded := func(name string) bool {
+		return slices.ContainsFunc(i.newImports, func(n newImport) bool { return n.pkgName == name })
+	}
+
+	// shadowedInCaller reports whether a candidate package name
+	// already refers to a declaration in the caller.
+	shadowedInCaller := func(name string) bool {
+		obj := i.caller.lookup(name)
+		if obj == nil {
+			return false
+		}
+		// If obj will be removed, the name is available.
+		return !slices.ContainsFunc(i.oldImports, func(o oldImport) bool { return o.pkgName == obj })
+	}
+
+	// import added by callee
+	//
+	// Choose local PkgName based on last segment of
+	// package path plus, if needed, a numeric suffix to
+	// ensure uniqueness.
+	//
+	// "init" is not a legal PkgName.
+	//
+	// TODO(rfindley): is it worth preserving local package names for callee
+	// imports? Are they likely to be better or worse than the name we choose
+	// here?
+	base := pkgName
+	name := base
+	for n := 0; shadow[name] != 0 || shadowedInCaller(name) || newlyAdded(name) || name == "init"; n++ {
+		name = fmt.Sprintf("%s%d", base, n)
+	}
+	i.logf("adding import %s %q", name, pkgPath)
+	spec := &ast.ImportSpec{
+		Path: &ast.BasicLit{
+			Kind:  token.STRING,
+			Value: strconv.Quote(pkgPath),
+		},
+	}
+	// Use explicit pkgname (out of necessity) when it differs from the declared name,
+	// or (for good style) when it differs from base(pkgpath).
+	if name != pkgName || name != pathpkg.Base(pkgPath) {
+		spec.Name = makeIdent(name)
+	}
+	i.newImports = append(i.newImports, newImport{
+		pkgName: name,
+		spec:    spec,
+	})
+	i.importMap[pkgPath] = append(i.importMap[pkgPath], name)
+	return name
+}
+
+// trimNewImports returns the subset of newImports that are still
+// referenced by new, the expression to be inlined.
+//
+// The imports gathered by [importState.localName] cover every package
+// the callee mentions, but inlining may have eliminated some of those
+// references. For example, if the callee was
+//
+//	func F(x int) (p.T) {... /* no mention of p */ ...}
+//
+// and the call was inlined by assignment ("v := ..."), the reference
+// to package p drops away. An import is therefore retained only if
+// its local package name occurs free in new.
+//
+// Free-name tracking is precise except for identifier keys in
+// composite literals, which name either a struct field or a value.
+// Neither is a package name, so such keys are irrelevant to unused
+// imports and we instruct freeishNames to omit them.
+func trimNewImports(newImports []newImport, new ast.Node) []newImport {
+	free := make(map[string]bool)
+	freeishNames(free, new, false) // false: omit composite-literal ident keys
+	var kept []newImport
+	for _, imp := range newImports {
+		if free[imp.pkgName] {
+			kept = append(kept, imp)
+		}
+	}
+	return kept
+}
+
+// An inlineCallResult describes the replacement computed by inlineCall:
+// the node to be replaced (old), its replacement (new), and the import
+// edits implied by the change.
+type inlineCallResult struct {
+	newImports []newImport // to add
+	oldImports []oldImport // to remove
+
+	// If elideBraces is set, old is an ast.Stmt and new is an ast.BlockStmt to
+	// be spliced in. This allows the inlining analysis to assert that inlining
+	// the block is OK; if elideBraces is unset and old is an ast.Stmt and new is
+	// an ast.BlockStmt, braces may still be elided if the post-processing
+	// analysis determines that it is safe to do so.
+	//
+	// Ideally, it would not be necessary for the inlining analysis to "reach
+	// through" to the post-processing pass in this way. Instead, inlining could
+	// just set old to be an ast.BlockStmt and rewrite the entire BlockStmt, but
+	// unfortunately in order to preserve comments, it is important that inlining
+	// replace as little syntax as possible.
+	elideBraces bool
+	bindingDecl bool     // transformation inserted "var params = args" declaration
+	old, new    ast.Node // e.g. replace call expr by callee function body expression
+}
+
+// inlineCall returns a pair of an old node (the call, or something
+// enclosing it) and a new node (its replacement, which may be a
+// combination of caller, callee, and new nodes), along with the set
+// of new imports needed.
+//
+// TODO(adonovan): rethink the 'result' interface. The assumption of a
+// one-to-one replacement seems fragile. One can easily imagine the
+// transformation replacing the call and adding new variable
+// declarations, for example, or replacing a call statement by zero or
+// many statements.)
+// NOTE(rfindley): we've sort-of done this, with the 'elideBraces' flag that
+// allows inlining a statement list. However, due to loss of comments, more
+// sophisticated rewrites are challenging.
+//
+// TODO(adonovan): in earlier drafts, the transformation was expressed
+// by splicing substrings of the two source files because syntax
+// trees don't preserve comments faithfully (see #20744), but such
+// transformations don't compose. The current implementation is
+// tree-based but is very lossy wrt comments. It would make a good
+// candidate for evaluating an alternative fully self-contained tree
+// representation, such as any proposed solution to #20744, or even
+// dst or some private fork of go/ast.)
+// TODO(rfindley): see if we can reduce the amount of comment lossiness by
+// using printer.CommentedNode, which has been useful elsewhere.
+//
+// TODO(rfindley): inlineCall is getting very long, and very stateful, making
+// it very hard to read. The following refactoring may improve readability and
+// maintainability:
+// - Rename 'state' to 'callsite', since that is what it encapsulates.
+// - Add results of pre-processing analysis into the callsite struct, such as
+// the effective importMap, new/old imports, arguments, etc. Essentially
+// anything that resulted from initial analysis of the call site, and which
+// may be useful to inlining strategies.
+// - Delegate this call site analysis to a constructor or initializer, such
+// as 'analyzeCallsite', so that it does not consume bandwidth in the
+// 'inlineCall' logical flow.
+// - Once analyzeCallsite returns, the callsite is immutable, much in the
+// same way as the Callee and Caller are immutable.
+// - Decide on a standard interface for strategies (and substrategies), such
+// that they may be delegated to a separate method on callsite.
+//
+// In this way, the logical flow of inline call will clearly follow the
+// following structure:
+// 1. Analyze the call site.
+// 2. Try strategies, in order, until one succeeds.
+// 3. Process the results.
+//
+// If any expensive analysis may be avoided by earlier strategies, it can be
+// encapsulated in its own type and passed to subsequent strategies.
+func (st *state) inlineCall() (*inlineCallResult, error) {
+ logf, caller, callee := st.opts.Logf, st.caller, &st.callee.impl
+
+ checkInfoFields(caller.Info)
+
+ // Inlining of dynamic calls is not currently supported,
+ // even for local closure calls. (This would be a lot of work.)
+ calleeSymbol := typeutil.StaticCallee(caller.Info, caller.Call)
+ if calleeSymbol == nil {
+ // e.g. interface method
+ return nil, fmt.Errorf("cannot inline: not a static function call")
+ }
+
+ // Reject cross-package inlining if callee has
+ // free references to unexported symbols.
+ samePkg := caller.Types.Path() == callee.PkgPath
+ if !samePkg && len(callee.Unexported) > 0 {
+ return nil, fmt.Errorf("cannot inline call to %s because body refers to non-exported %s",
+ callee.Name, callee.Unexported[0])
+ }
+
+ // -- analyze callee's free references in caller context --
+
+ // Compute syntax path enclosing Call, innermost first (Path[0]=Call),
+ // and outermost enclosing function, if any.
+ caller.path, _ = astutil.PathEnclosingInterval(caller.File, caller.Call.Pos(), caller.Call.End())
+ for _, n := range caller.path {
+ if decl, ok := n.(*ast.FuncDecl); ok {
+ caller.enclosingFunc = decl
+ break
+ }
+ }
+
+ // If call is within a function, analyze all its
+ // local vars for the "single assignment" property.
+ // (Taking the address &v counts as a potential assignment.)
+ var assign1 func(v *types.Var) bool // reports whether v a single-assignment local var
+ {
+ updatedLocals := make(map[*types.Var]bool)
+ if caller.enclosingFunc != nil {
+ escape(caller.Info, caller.enclosingFunc, func(v *types.Var, _ bool) {
+ updatedLocals[v] = true
+ })
+ logf("multiple-assignment vars: %v", updatedLocals)
+ }
+ assign1 = func(v *types.Var) bool { return !updatedLocals[v] }
+ }
+
+ // Extract information about the caller's imports.
+ istate := newImportState(logf, caller, callee)
+
+ // Compute the renaming of the callee's free identifiers.
+ objRenames, err := st.renameFreeObjs(istate)
+ if err != nil {
+ return nil, err
+ }
+
+ res := &inlineCallResult{
+ newImports: istate.newImports,
+ oldImports: istate.oldImports,
+ }
+
+ // Parse callee function declaration.
+ calleeFset, calleeDecl, err := parseCompact(callee.Content)
+ if err != nil {
+ return nil, err // "can't happen"
+ }
+
+ // replaceCalleeID replaces an identifier in the callee. See [replacer] for
+ // more detailed semantics.
+ replaceCalleeID := func(offset int, repl ast.Expr, unpackVariadic bool) {
+ path, id := findIdent(calleeDecl, calleeDecl.Pos()+token.Pos(offset))
+ logf("- replace id %q @ #%d to %q", id.Name, offset, debugFormatNode(calleeFset, repl))
+ // Replace f([]T{a, b, c}...) with f(a, b, c).
+ if lit, ok := repl.(*ast.CompositeLit); ok && unpackVariadic && len(path) > 0 {
+ if call, ok := last(path).(*ast.CallExpr); ok &&
+ call.Ellipsis.IsValid() &&
+ id == last(call.Args) {
+
+ call.Args = append(call.Args[:len(call.Args)-1], lit.Elts...)
+ call.Ellipsis = token.NoPos
+ return
+ }
+ }
+ replaceNode(calleeDecl, id, repl)
+ }
+
+ // Generate replacements for each free identifier.
+ // (The same tree may be spliced in multiple times, resulting in a DAG.)
+ for _, ref := range callee.FreeRefs {
+ if repl := objRenames[ref.Object]; repl != nil {
+ replaceCalleeID(ref.Offset, repl, false)
+ }
+ }
+
+ // Gather the effective call arguments, including the receiver.
+ // Later, elements will be eliminated (=> nil) by parameter substitution.
+ args, err := st.arguments(caller, calleeDecl, assign1)
+ if err != nil {
+ return nil, err // e.g. implicit field selection cannot be made explicit
+ }
+
+ // Gather effective parameter tuple, including the receiver if any.
+ // Simplify variadic parameters to slices (in all cases but one).
+ var params []*parameter // including receiver; nil => parameter substituted
+ {
+ sig := calleeSymbol.Type().(*types.Signature)
+ if sig.Recv() != nil {
+ params = append(params, ¶meter{
+ obj: sig.Recv(),
+ fieldType: calleeDecl.Recv.List[0].Type,
+ info: callee.Params[0],
+ })
+ }
+
+ // Flatten the list of syntactic types.
+ var types []ast.Expr
+ for _, field := range calleeDecl.Type.Params.List {
+ if field.Names == nil {
+ types = append(types, field.Type)
+ } else {
+ for range field.Names {
+ types = append(types, field.Type)
+ }
+ }
+ }
+
+ for i := 0; i < sig.Params().Len(); i++ {
+ params = append(params, ¶meter{
+ obj: sig.Params().At(i),
+ fieldType: types[i],
+ info: callee.Params[len(params)],
+ })
+ }
+
+ // Variadic function?
+ //
+ // There are three possible types of call:
+ // - ordinary f(a1, ..., aN)
+ // - ellipsis f(a1, ..., slice...)
+ // - spread f(recv?, g()) where g() is a tuple.
+ // The first two are desugared to non-variadic calls
+ // with an ordinary slice parameter;
+ // the third is tricky and cannot be reduced, and (if
+ // a receiver is present) cannot even be literalized.
+ // Fortunately it is vanishingly rare.
+ //
+ // TODO(adonovan): extract this to a function.
+ if sig.Variadic() {
+ lastParam := last(params)
+ if len(args) > 0 && last(args).spread {
+ // spread call to variadic: tricky
+ lastParam.variadic = true
+ } else {
+ // ordinary/ellipsis call to variadic
+
+ // simplify decl: func(T...) -> func([]T)
+ lastParamField := last(calleeDecl.Type.Params.List)
+ lastParamField.Type = &ast.ArrayType{
+ Elt: lastParamField.Type.(*ast.Ellipsis).Elt,
+ }
+
+ if caller.Call.Ellipsis.IsValid() {
+ // ellipsis call: f(slice...) -> f(slice)
+ // nop
+ } else {
+ // ordinary call: f(a1, ... aN) -> f([]T{a1, ..., aN})
+ //
+ // Substitution of []T{...} in the callee body may lead to
+ // g([]T{a1, ..., aN}...), which we simplify to g(a1, ..., an)
+ // later; see replaceCalleeID.
+ n := len(params) - 1
+ ordinary, extra := args[:n], args[n:]
+ var elts []ast.Expr
+ freevars := make(map[string]bool)
+ pure, effects := true, false
+ for _, arg := range extra {
+ elts = append(elts, arg.expr)
+ pure = pure && arg.pure
+ effects = effects || arg.effects
+ maps.Copy(freevars, arg.freevars)
+ }
+ args = append(ordinary, &argument{
+ expr: &ast.CompositeLit{
+ Type: lastParamField.Type,
+ Elts: elts,
+ },
+ typ: lastParam.obj.Type(),
+ constant: nil,
+ pure: pure,
+ effects: effects,
+ duplicable: false,
+ freevars: freevars,
+ variadic: true,
+ })
+ }
+ }
+ }
+ }
+
+ typeArgs := st.typeArguments(caller.Call)
+ if len(typeArgs) != len(callee.TypeParams) {
+ return nil, fmt.Errorf("cannot inline: type parameter inference is not yet supported")
+ }
+ if err := substituteTypeParams(logf, callee.TypeParams, typeArgs, params, replaceCalleeID); err != nil {
+ return nil, err
+ }
+
+ // Log effective arguments.
+ for i, arg := range args {
+ logf("arg #%d: %s pure=%t effects=%t duplicable=%t free=%v type=%v",
+ i, debugFormatNode(caller.Fset, arg.expr),
+ arg.pure, arg.effects, arg.duplicable, arg.freevars, arg.typ)
+ }
+
+ // Note: computation below should be expressed in terms of
+ // the args and params slices, not the raw material.
+
+ // Perform parameter substitution.
+ // May eliminate some elements of params/args.
+ substitute(logf, caller, params, args, callee.Effects, callee.Falcon, replaceCalleeID)
+
+ // Update the callee's signature syntax.
+ updateCalleeParams(calleeDecl, params)
+
+ // Create a var (param = arg; ...) decl for use by some strategies.
+ bindingDecl := createBindingDecl(logf, caller, args, calleeDecl, callee.Results)
+
+ var remainingArgs []ast.Expr
+ for _, arg := range args {
+ if arg != nil {
+ remainingArgs = append(remainingArgs, arg.expr)
+ }
+ }
+
+ // -- let the inlining strategies begin --
+ //
+ // When we commit to a strategy, we log a message of the form:
+ //
+ // "strategy: reduce expr-context call to { return expr }"
+ //
+ // This is a terse way of saying:
+ //
+ // we plan to reduce a call
+ // that appears in expression context
+ // to a function whose body is of the form { return expr }
+
+ // TODO(adonovan): split this huge function into a sequence of
+ // function calls with an error sentinel that means "try the
+ // next strategy", and make sure each strategy writes to the
+ // log the reason it didn't match.
+
+ // Special case: eliminate a call to a function whose body is empty.
+ // (=> callee has no results and caller is a statement.)
+ //
+ // func f(params) {}
+ // f(args)
+ // => _, _ = args
+ //
+ if len(calleeDecl.Body.List) == 0 {
+ logf("strategy: reduce call to empty body")
+
+ // Evaluate the arguments for effects and delete the call entirely.
+ // Note(golang/go#71486): stmt can be nil if the call is in a go or defer
+ // statement.
+ // TODO: discard go or defer statements as well.
+ if stmt := callStmt(caller.path, false); stmt != nil {
+ res.old = stmt
+ if nargs := len(remainingArgs); nargs > 0 {
+ // Emit "_, _ = args" to discard results.
+
+ // TODO(adonovan): if args is the []T{a1, ..., an}
+ // literal synthesized during variadic simplification,
+ // consider unwrapping it to its (pure) elements.
+ // Perhaps there's no harm doing this for any slice literal.
+
+ // Make correction for spread calls
+ // f(g()) or recv.f(g()) where g() is a tuple.
+ if last := last(args); last != nil && last.spread {
+ nspread := last.typ.(*types.Tuple).Len()
+ if len(args) > 1 { // [recv, g()]
+ // A single AssignStmt cannot discard both, so use a 2-spec var decl.
+ res.new = &ast.GenDecl{
+ Tok: token.VAR,
+ Specs: []ast.Spec{
+ &ast.ValueSpec{
+ Names: []*ast.Ident{makeIdent("_")},
+ Values: []ast.Expr{args[0].expr},
+ },
+ &ast.ValueSpec{
+ Names: blanks[*ast.Ident](nspread),
+ Values: []ast.Expr{args[1].expr},
+ },
+ },
+ }
+ return res, nil
+ }
+
+ // Sole argument is spread call.
+ nargs = nspread
+ }
+
+ res.new = &ast.AssignStmt{
+ Lhs: blanks[ast.Expr](nargs),
+ Tok: token.ASSIGN,
+ Rhs: remainingArgs,
+ }
+
+ } else {
+ // No remaining arguments: delete call statement entirely
+ res.new = &ast.EmptyStmt{}
+ }
+ return res, nil
+ }
+ }
+
+ // If all parameters have been substituted and no result
+ // variable is referenced, we don't need a binding decl.
+ // This may enable better reduction strategies.
+ allResultsUnreferenced := forall(callee.Results, func(i int, r *paramInfo) bool { return len(r.Refs) == 0 })
+ needBindingDecl := !allResultsUnreferenced ||
+ exists(params, func(i int, p *parameter) bool { return p != nil })
+
+ // The two strategies below overlap for a tail call of {return exprs}:
+ // The expr-context reduction is nice because it keeps the
+ // caller's return stmt and merely switches its operand,
+ // without introducing a new block, but it doesn't work with
+ // implicit return conversions.
+ //
+ // TODO(adonovan): unify these cases more cleanly, allowing return-
+ // operand replacement and implicit conversions, by adding
+ // conversions around each return operand (if not a spread return).
+
+ // Special case: call to { return exprs }.
+ //
+ // Reduces to:
+ // { var (bindings); _, _ = exprs }
+ // or _, _ = exprs
+ // or expr
+ //
+ // If:
+ // - the body is just "return expr" with trivial implicit conversions,
+ // or the caller's return type matches the callee's,
+ // - all parameters and result vars can be eliminated
+ // or replaced by a binding decl,
+ // then the call expression can be replaced by the
+ // callee's body expression, suitably substituted.
+ if len(calleeDecl.Body.List) == 1 &&
+ is[*ast.ReturnStmt](calleeDecl.Body.List[0]) &&
+ len(calleeDecl.Body.List[0].(*ast.ReturnStmt).Results) > 0 { // not a bare return
+ results := calleeDecl.Body.List[0].(*ast.ReturnStmt).Results
+
+ parent, grandparent := callContext(caller.path)
+
+ // statement context
+ if stmt, ok := parent.(*ast.ExprStmt); ok &&
+ (!needBindingDecl || bindingDecl != nil) {
+ logf("strategy: reduce stmt-context call to { return exprs }")
+ clearPositions(calleeDecl.Body)
+
+ if callee.ValidForCallStmt {
+ logf("callee body is valid as statement")
+ // Inv: len(results) == 1
+ if !needBindingDecl {
+ // Reduces to: expr
+ res.old = caller.Call
+ res.new = results[0]
+ } else {
+ // Reduces to: { var (bindings); expr }
+ res.bindingDecl = true
+ res.old = stmt
+ res.new = &ast.BlockStmt{
+ List: []ast.Stmt{
+ bindingDecl.stmt,
+ &ast.ExprStmt{X: results[0]},
+ },
+ }
+ }
+ } else {
+ logf("callee body is not valid as statement")
+ // The call is a standalone statement, but the
+ // callee body is not suitable as a standalone statement
+ // (f() or <-ch), explicitly discard the results:
+ // Reduces to: _, _ = exprs
+ discard := &ast.AssignStmt{
+ Lhs: blanks[ast.Expr](callee.NumResults),
+ Tok: token.ASSIGN,
+ Rhs: results,
+ }
+ res.old = stmt
+ if !needBindingDecl {
+ // Reduces to: _, _ = exprs
+ res.new = discard
+ } else {
+ // Reduces to: { var (bindings); _, _ = exprs }
+ res.bindingDecl = true
+ res.new = &ast.BlockStmt{
+ List: []ast.Stmt{
+ bindingDecl.stmt,
+ discard,
+ },
+ }
+ }
+ }
+ return res, nil
+ }
+
+ // Assignment context.
+ //
+ // If there is no binding decl, or if the binding decl declares no names,
+ // an assignment a, b := f() can be reduced to a, b := x, y.
+ if stmt, ok := parent.(*ast.AssignStmt); ok &&
+ is[*ast.BlockStmt](grandparent) &&
+ (!needBindingDecl || (bindingDecl != nil && len(bindingDecl.names) == 0)) {
+
+ // Reduces to: { var (bindings); lhs... := rhs... }
+ if newStmts, ok := st.assignStmts(stmt, results, istate.importName); ok {
+ logf("strategy: reduce assign-context call to { return exprs }")
+
+ clearPositions(calleeDecl.Body)
+
+ block := &ast.BlockStmt{
+ List: newStmts,
+ }
+ if needBindingDecl {
+ res.bindingDecl = true
+ block.List = prepend(bindingDecl.stmt, block.List...)
+ }
+
+ // assignStmts does not introduce new bindings, and replacing an
+ // assignment only works if the replacement occurs in the same scope.
+ // Therefore, we must ensure that braces are elided.
+ res.elideBraces = true
+ res.old = stmt
+ res.new = block
+ return res, nil
+ }
+ }
+
+ // expression context
+ if !needBindingDecl {
+ clearPositions(calleeDecl.Body)
+
+ anyNonTrivialReturns := hasNonTrivialReturn(callee.Returns)
+
+ if callee.NumResults == 1 {
+ logf("strategy: reduce expr-context call to { return expr }")
+ // (includes some simple tail-calls)
+
+ // Make implicit return conversion explicit.
+ if anyNonTrivialReturns {
+ results[0] = convert(calleeDecl.Type.Results.List[0].Type, results[0])
+ }
+
+ res.old = caller.Call
+ res.new = results[0]
+ return res, nil
+
+ } else if !anyNonTrivialReturns {
+ logf("strategy: reduce spread-context call to { return expr }")
+ // There is no general way to reify conversions in a spread
+ // return, hence the requirement above.
+ //
+ // TODO(adonovan): allow this reduction when no
+ // conversion is required by the context.
+
+ // The call returns multiple results but is
+ // not a standalone call statement. It must
+ // be the RHS of a spread assignment:
+ // var x, y = f()
+ // x, y := f()
+ // x, y = f()
+ // or the sole argument to a spread call:
+ // printf(f())
+ // or spread return statement:
+ // return f()
+ res.old = parent
+ switch context := parent.(type) {
+ case *ast.AssignStmt:
+ // Inv: the call must be in Rhs[0], not Lhs.
+ assign := shallowCopy(context)
+ assign.Rhs = results
+ res.new = assign
+ case *ast.ValueSpec:
+ // Inv: the call must be in Values[0], not Names.
+ spec := shallowCopy(context)
+ spec.Values = results
+ res.new = spec
+ case *ast.CallExpr:
+ // Inv: the call must be in Args[0], not Fun.
+ call := shallowCopy(context)
+ call.Args = results
+ res.new = call
+ case *ast.ReturnStmt:
+ // Inv: the call must be Results[0].
+ ret := shallowCopy(context)
+ ret.Results = results
+ res.new = ret
+ default:
+ return nil, fmt.Errorf("internal error: unexpected context %T for spread call", context)
+ }
+ return res, nil
+ }
+ }
+ }
+
+ // Special case: tail-call.
+ //
+ // Inlining:
+ // return f(args)
+ // where:
+ // func f(params) (results) { body }
+ // reduces to:
+ // { var (bindings); body }
+ // { body }
+ // so long as:
+ // - all parameters can be eliminated or replaced by a binding decl,
+ // - call is a tail-call;
+ // - all returns in body have trivial result conversions,
+ // or the caller's return type matches the callee's,
+ // - there is no label conflict;
+ // - no result variable is referenced by name,
+ // or implicitly by a bare return.
+ //
+ // The body may use defer, arbitrary control flow, and
+ // multiple returns.
+ //
+ // TODO(adonovan): add a strategy for a 'void tail
+ // call', i.e. a call statement prior to an (explicit
+ // or implicit) return.
+ parent, _ := callContext(caller.path)
+ if ret, ok := parent.(*ast.ReturnStmt); ok &&
+ len(ret.Results) == 1 &&
+ tailCallSafeReturn(caller, calleeSymbol, callee) &&
+ !callee.HasBareReturn &&
+ (!needBindingDecl || bindingDecl != nil) &&
+ !hasLabelConflict(caller.path, callee.Labels) &&
+ allResultsUnreferenced {
+ logf("strategy: reduce tail-call")
+ body := calleeDecl.Body
+ clearPositions(body)
+ if needBindingDecl {
+ res.bindingDecl = true
+ body.List = prepend(bindingDecl.stmt, body.List...)
+ }
+ res.old = ret
+ res.new = body
+ return res, nil
+ }
+
+ // Special case: call to void function
+ //
+ // Inlining:
+ // f(args)
+ // where:
+ // func f(params) { stmts }
+ // reduces to:
+ // { var (bindings); stmts }
+ // { stmts }
+ // so long as:
+ // - callee is a void function (no returns)
+ // - callee does not use defer
+ // - there is no label conflict between caller and callee
+ // - all parameters and result vars can be eliminated
+ // or replaced by a binding decl,
+ // - caller ExprStmt is in unrestricted statement context.
+ if stmt := callStmt(caller.path, true); stmt != nil &&
+ (!needBindingDecl || bindingDecl != nil) &&
+ !callee.HasDefer &&
+ !hasLabelConflict(caller.path, callee.Labels) &&
+ len(callee.Returns) == 0 {
+ logf("strategy: reduce stmt-context call to { stmts }")
+ body := calleeDecl.Body
+ var repl ast.Stmt = body
+ clearPositions(repl)
+ if needBindingDecl {
+ body.List = prepend(bindingDecl.stmt, body.List...)
+ }
+ res.old = stmt
+ res.new = repl
+ return res, nil
+ }
+
+ // TODO(adonovan): parameterless call to { stmts; return expr }
+ // from one of these contexts:
+ // x, y = f()
+ // x, y := f()
+ // var x, y = f()
+ // =>
+ // var (x T1, y T2); { stmts; x, y = expr }
+ //
+ // Because the params are no longer declared simultaneously
+ // we need to check that (for example) x ∉ freevars(T2),
+ // in addition to the usual checks for arg/result conversions,
+ // complex control, etc.
+ // Also test cases where expr is an n-ary call (spread returns).
+
+ // Literalization isn't quite infallible.
+ // Consider a spread call to a method in which
+ // no parameters are eliminated, e.g.
+ // new(T).f(g())
+ // where
+ // func (recv *T) f(x, y int) { body }
+ // func g() (int, int)
+ // This would be literalized to:
+ // func (recv *T, x, y int) { body }(new(T), g()),
+ // which is not a valid argument list because g() must appear alone.
+ // Reject this case for now.
+ if len(args) == 2 && args[0] != nil && args[1] != nil && is[*types.Tuple](args[1].typ) {
+ return nil, fmt.Errorf("can't yet inline spread call to method")
+ }
+
+ // Infallible general case: literalization.
+ //
+ // func(params) { body }(args)
+ //
+ logf("strategy: literalization")
+ funcLit := &ast.FuncLit{
+ Type: calleeDecl.Type,
+ Body: calleeDecl.Body,
+ }
+ // clear positions before prepending the binding decl below, since the
+ // binding decl contains syntax from the caller and we must not mutate the
+ // caller. (This was a prior bug.)
+ clearPositions(funcLit)
+
+ // Literalization can still make use of a binding
+ // decl as it gives a more natural reading order:
+ //
+ // func() { var params = args; body }()
+ //
+ // TODO(adonovan): relax the allResultsUnreferenced requirement
+ // by adding a parameter-only (no named results) binding decl.
+ if bindingDecl != nil && allResultsUnreferenced {
+ funcLit.Type.Params.List = nil
+ remainingArgs = nil
+ res.bindingDecl = true
+ funcLit.Body.List = prepend(bindingDecl.stmt, funcLit.Body.List...)
+ }
+
+ // Emit a new call to a function literal in place of
+ // the callee name, with appropriate replacements.
+ newCall := &ast.CallExpr{
+ Fun: funcLit,
+ Ellipsis: token.NoPos, // f(slice...) is always simplified
+ Args: remainingArgs,
+ }
+ res.old = caller.Call
+ res.new = newCall
+ return res, nil
+}
+
+// renameFreeObjs computes the renaming of the callee's free identifiers.
+// It returns a slice of names (identifiers or selector expressions) corresponding
+// to the callee's free objects (gobCallee.FreeObjs).
func (st *state) renameFreeObjs(istate *importState) ([]ast.Expr, error) {
	caller, callee := st.caller, &st.callee.impl
	objRenames := make([]ast.Expr, len(callee.FreeObjs)) // nil => no change
	for i, obj := range callee.FreeObjs {
		// obj is a free object of the callee.
		//
		// Possible cases are:
		// - builtin function, type, or value (e.g. nil, zero)
		//   => check not shadowed in caller.
		// - package-level var/func/const/types
		//   => same package: check not shadowed in caller.
		//   => otherwise: import other package, form a qualified identifier.
		//      (Unexported cross-package references were rejected already.)
		// - type parameter
		//   => not yet supported
		// - pkgname
		//   => import other package and use its local name.
		//
		// There can be no free references to labels, fields, or methods.

		// Note that we must consider potential shadowing both
		// at the caller side (caller.lookup) and, when
		// choosing new PkgNames, within the callee (obj.shadow).

		var newName ast.Expr
		if obj.Kind == "pkgname" {
			// Use locally appropriate import, creating as needed.
			n := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow)
			newName = makeIdent(n) // imported package
		} else if !obj.ValidPos {
			// Built-in function, type, or value (e.g. nil, zero):
			// check not shadowed at caller.
			found := caller.lookup(obj.Name) // always finds something
			if found.Pos().IsValid() {
				return nil, fmt.Errorf("cannot inline, because the callee refers to built-in %q, which in the caller is shadowed by a %s (declared at line %d)",
					obj.Name, objectKind(found),
					caller.Fset.PositionFor(found.Pos(), false).Line)
			}
			// (newName remains nil: an unshadowed built-in needs no rewriting.)

		} else {
			// Must be reference to package-level var/func/const/type,
			// since type parameters are not yet supported.
			qualify := false
			if obj.PkgPath == callee.PkgPath {
				// reference within callee package
				if caller.Types.Path() == callee.PkgPath {
					// Caller and callee are in same package.
					// Check caller has not shadowed the decl.
					//
					// This may fail if the callee is "fake", such as for signature
					// refactoring where the callee is modified to be a trivial wrapper
					// around the refactored signature.
					found := caller.lookup(obj.Name)
					if found != nil && !isPkgLevel(found) {
						return nil, fmt.Errorf("cannot inline, because the callee refers to %s %q, which in the caller is shadowed by a %s (declared at line %d)",
							obj.Kind, obj.Name,
							objectKind(found),
							caller.Fset.PositionFor(found.Pos(), false).Line)
					}
				} else {
					// Cross-package reference.
					qualify = true
				}
			} else {
				// Reference to a package-level declaration
				// in another package, without a qualified identifier:
				// it must be a dot import.
				qualify = true
			}

			// Form a qualified identifier, pkg.Name.
			if qualify {
				pkgName := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow)
				newName = &ast.SelectorExpr{
					X:   makeIdent(pkgName),
					Sel: makeIdent(obj.Name),
				}
			}
		}
		objRenames[i] = newName // nil => leave the reference unchanged
	}
	return objRenames, nil
}
+
// An argument records information about one effective argument of the
// call (including the receiver, if any), computed before any syntax
// rewriting takes place; see (*state).arguments.
type argument struct {
	expr          ast.Expr
	typ           types.Type      // may be tuple for sole non-receiver arg in spread call
	constant      constant.Value  // value of argument if constant
	spread        bool            // final arg is call() assigned to multiple params
	pure          bool            // expr is pure (doesn't read variables)
	effects       bool            // expr has effects (updates variables)
	duplicable    bool            // expr may be duplicated
	freevars      map[string]bool // free names of expr
	variadic      bool            // is explicit []T{...} for eliminated variadic
	desugaredRecv bool            // is *recv or &recv, where operator was elided
}
+
+// typeArguments returns the type arguments of the call.
+// It only collects the arguments that are explicitly provided; it does
+// not attempt type inference.
+func (st *state) typeArguments(call *ast.CallExpr) []*argument {
+ var exprs []ast.Expr
+ switch d := ast.Unparen(call.Fun).(type) {
+ case *ast.IndexExpr:
+ exprs = []ast.Expr{d.Index}
+ case *ast.IndexListExpr:
+ exprs = d.Indices
+ default:
+ // No type arguments
+ return nil
+ }
+ var args []*argument
+ for _, e := range exprs {
+ arg := &argument{expr: e, freevars: freeVars(st.caller.Info, e)}
+ // Wrap the instantiating type in parens when it's not an
+ // ident or qualified ident to prevent "if x == struct{}"
+ // parsing ambiguity, or "T(x)" where T = "*int" or "func()"
+ // from misparsing.
+ // TODO(adonovan): this fails in cases where parens are disallowed, such as
+ // in the composite literal expression T{k: v}.
+ if _, ok := arg.expr.(*ast.Ident); !ok {
+ arg.expr = &ast.ParenExpr{X: arg.expr}
+ }
+ args = append(args, arg)
+ }
+ return args
+}
+
+// arguments returns the effective arguments of the call.
+//
+// If the receiver argument and parameter have
+// different pointerness, make the "&" or "*" explicit.
+//
+// Also, if x.f() is shorthand for promoted method x.y.f(),
+// make the .y explicit in T.f(x.y, ...).
+//
+// Beware that:
+//
+// - a method can only be called through a selection, but only
+// the first of these two forms needs special treatment:
+//
+// expr.f(args) -> ([&*]expr, args) MethodVal
+// T.f(recv, args) -> ( expr, args) MethodExpr
+//
+// - the presence of a value in receiver-position in the call
+// is a property of the caller, not the callee. A method
+// (calleeDecl.Recv != nil) may be called like an ordinary
+// function.
+//
+// - the types.Signatures seen by the caller (from
+// StaticCallee) and by the callee (from decl type)
+// differ in this case.
+//
+// In a spread call f(g()), the sole ordinary argument g(),
+// always last in args, has a tuple type.
+//
+// We compute type-based predicates like pure, duplicable,
+// freevars, etc, now, before we start modifying syntax.
func (st *state) arguments(caller *Caller, calleeDecl *ast.FuncDecl, assign1 func(*types.Var) bool) ([]*argument, error) {
	var args []*argument

	callArgs := caller.Call.Args
	// Gather the receiver argument, if the callee is a method.
	if calleeDecl.Recv != nil {
		if len(st.callee.impl.TypeParams) > 0 {
			return nil, fmt.Errorf("cannot inline: generic methods not yet supported")
		}
		sel := ast.Unparen(caller.Call.Fun).(*ast.SelectorExpr)
		seln := caller.Info.Selections[sel]
		var recvArg ast.Expr
		switch seln.Kind() {
		case types.MethodVal: // recv.f(callArgs)
			recvArg = sel.X
		case types.MethodExpr: // T.f(recv, callArgs)
			recvArg = callArgs[0]
			callArgs = callArgs[1:]
		}
		// (recvArg remains nil for other selection kinds.)
		if recvArg != nil {
			// Compute all the type-based predicates now,
			// before we start meddling with the syntax;
			// the meddling will update them.
			arg := &argument{
				expr:       recvArg,
				typ:        caller.Info.TypeOf(recvArg),
				constant:   caller.Info.Types[recvArg].Value,
				pure:       pure(caller.Info, assign1, recvArg),
				effects:    st.effects(caller.Info, recvArg),
				duplicable: duplicable(caller.Info, recvArg),
				freevars:   freeVars(caller.Info, recvArg),
			}
			recvArg = nil // prevent accidental use

			// Move receiver argument recv.f(args) to argument list f(&recv, args).
			args = append(args, arg)

			// Make field selections explicit (recv.f -> recv.y.f),
			// updating arg.{expr,typ}.
			indices := seln.Index()
			for _, index := range indices[:len(indices)-1] {
				fld := typeparams.CoreType(typeparams.Deref(arg.typ)).(*types.Struct).Field(index)
				if fld.Pkg() != caller.Types && !fld.Exported() {
					return nil, fmt.Errorf("in %s, implicit reference to unexported field .%s cannot be made explicit",
						debugFormatNode(caller.Fset, caller.Call.Fun),
						fld.Name())
				}
				if isPointer(arg.typ) {
					arg.pure = false // implicit *ptr operation => impure
				}
				arg.expr = &ast.SelectorExpr{
					X:   arg.expr,
					Sel: makeIdent(fld.Name()),
				}
				arg.typ = fld.Type()
				arg.duplicable = false
			}

			// Make * or & explicit.
			argIsPtr := isPointer(arg.typ)
			paramIsPtr := isPointer(seln.Obj().Type().Underlying().(*types.Signature).Recv().Type())
			if !argIsPtr && paramIsPtr {
				// &recv
				arg.expr = &ast.UnaryExpr{Op: token.AND, X: arg.expr}
				arg.typ = types.NewPointer(arg.typ)
				arg.desugaredRecv = true
			} else if argIsPtr && !paramIsPtr {
				// *recv
				arg.expr = &ast.StarExpr{X: arg.expr}
				arg.typ = typeparams.Deref(arg.typ)
				arg.duplicable = false
				arg.pure = false
				arg.desugaredRecv = true
			}
		}
	}
	// Gather the ordinary (non-receiver) arguments.
	for _, expr := range callArgs {
		tv := caller.Info.Types[expr]
		args = append(args, &argument{
			expr:       expr,
			typ:        tv.Type,
			constant:   tv.Value,
			spread:     is[*types.Tuple](tv.Type), // => last
			pure:       pure(caller.Info, assign1, expr),
			effects:    st.effects(caller.Info, expr),
			duplicable: duplicable(caller.Info, expr),
			freevars:   freeVars(caller.Info, expr),
		})
	}

	// Re-typecheck each constant argument expression in a neutral context.
	//
	// In a call such as func(int16){}(1), the type checker infers
	// the type "int16", not "untyped int", for the argument 1,
	// because it has incorporated information from the left-hand
	// side of the assignment implicit in parameter passing, but
	// of course in a different context, the expression 1 may have
	// a different type.
	//
	// So, we must use CheckExpr to recompute the type of the
	// argument in a neutral context to find its inherent type.
	// (This is arguably a bug in go/types, but I'm pretty certain
	// I requested it be this way long ago... -adonovan)
	//
	// This is only needed for constants. Other implicit
	// assignment conversions, such as unnamed-to-named struct or
	// chan to <-chan, do not result in the type-checker imposing
	// the LHS type on the RHS value.
	for _, arg := range args {
		if arg.constant == nil {
			continue
		}
		info := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)}
		if err := types.CheckExpr(caller.Fset, caller.Types, caller.Call.Pos(), arg.expr, info); err != nil {
			return nil, err
		}
		arg.typ = info.TypeOf(arg.expr)
	}

	return args, nil
}
+
// A parameter combines everything known about one effective parameter
// of the callee (including the receiver): its type-checker object, the
// syntax of its type from the callee declaration, and the analysis
// results recorded by AnalyzeCallee.
type parameter struct {
	obj       *types.Var // parameter var from caller's signature
	fieldType ast.Expr   // syntax of type, from calleeDecl.Type.{Recv,Params}
	info      *paramInfo // information from AnalyzeCallee
	variadic  bool       // (final) parameter is unsimplified ...T
}
+
// A replacer replaces an identifier at the given offset in the callee.
// (The offset is relative to the start of the callee's declaration.)
// The replacement tree must not belong to the caller; use cloneNode as needed.
// If unpackVariadic is set, the replacement is a composite resulting from
// variadic elimination, and may be unpacked into variadic calls.
type replacer = func(offset int, repl ast.Expr, unpackVariadic bool)
+
+// substituteTypeParams replaces type parameters in the callee with the corresponding type arguments
+// from the call.
func substituteTypeParams(logf logger, typeParams []*paramInfo, typeArgs []*argument, params []*parameter, replace replacer) error {
	assert(len(typeParams) == len(typeArgs), "mismatched number of type params/args")
	for i, paramInfo := range typeParams {
		arg := typeArgs[i]
		// Perform a simplified, conservative shadow analysis: fail if there is any shadowing.
		for free := range arg.freevars {
			if paramInfo.Shadow[free] != 0 {
				return fmt.Errorf("cannot inline: type argument #%d (type parameter %s) is shadowed", i, paramInfo.Name)
			}
		}
		logf("replacing type param %s with %s", paramInfo.Name, debugFormatNode(token.NewFileSet(), arg.expr))
		for _, ref := range paramInfo.Refs {
			// Clone the argument for each splice site so the same
			// syntax tree is not shared between replacements.
			replace(ref.Offset, internalastutil.CloneNode(arg.expr), false)
		}
		// Also replace parameter field types.
		// TODO(jba): find a way to do this that is not so slow and clumsy.
		// Ideally, we'd walk each p.fieldType once, replacing all type params together.
		for _, p := range params {
			if id, ok := p.fieldType.(*ast.Ident); ok && id.Name == paramInfo.Name {
				p.fieldType = arg.expr
			} else {
				for _, id := range identsNamed(p.fieldType, paramInfo.Name) {
					replaceNode(p.fieldType, id, arg.expr)
				}
			}
		}
	}
	return nil
}
+
+func identsNamed(n ast.Node, name string) []*ast.Ident {
+ var ids []*ast.Ident
+ ast.Inspect(n, func(n ast.Node) bool {
+ if id, ok := n.(*ast.Ident); ok && id.Name == name {
+ ids = append(ids, id)
+ }
+ return true
+ })
+ return ids
+}
+
+// substitute implements parameter elimination by substitution.
+//
+// It considers each parameter and its corresponding argument in turn
+// and evaluates these conditions:
+//
+// - the parameter is neither address-taken nor assigned;
+// - the argument is pure;
+// - if the parameter refcount is zero, the argument must
+// not contain the last use of a local var;
+// - if the parameter refcount is > 1, the argument must be duplicable;
+// - the argument (or types.Default(argument) if it's untyped) has
+// the same type as the parameter.
+//
+// If all conditions are met then the parameter can be substituted and
+// each reference to it replaced by the argument. In that case, the
+// replaceCalleeID function is called for each reference to the
+// parameter, and is provided with its relative offset and replacement
+// expression (argument), and the corresponding elements of params and
+// args are replaced by nil.
+func substitute(logf logger, caller *Caller, params []*parameter, args []*argument, effects []int, falcon falconResult, replace replacer) {
+ // Inv:
+ // in calls to variadic, len(args) >= len(params)-1
+ // in spread calls to non-variadic, len(args) < len(params)
+ // in spread calls to variadic, len(args) <= len(params)
+ // (In spread calls len(args) = 1, or 2 if call has receiver.)
+ // Non-spread variadics have been simplified away already,
+ // so the args[i] lookup is safe if we stop after the spread arg.
+ assert(len(args) <= len(params), "too many arguments")
+
+ // Collect candidates for substitution.
+ //
+ // An argument is a candidate if it is not otherwise rejected, and all of
+ // its free variables that are shadowed are shadowed only by other
+ // parameters.
+ //
+ // Therefore, substitution candidates are represented by a graph, where edges
+ // lead from each argument to the other arguments that, if substituted, would
+ // allow the argument to be substituted. We collect these edges in the
+ // [substGraph]. Any argument that is known not to be substitutable is
+ // elided from the graph.
+ // Arguments in this graph with no edges are substitutable independent of
+ // other nodes, though they may be removed due to falcon or effects analysis.
+ sg := make(substGraph)
+next:
+ for i, param := range params {
+ arg := args[i]
+
+ // Check argument against parameter.
+ //
+ // Beware: don't use types.Info on arg since
+ // the syntax may be synthetic (not created by parser)
+ // and thus lacking positions and types;
+ // do it earlier (see pure/duplicable/freevars).
+
+ if arg.spread {
+ // spread => last argument, but not always last parameter
+ logf("keeping param %q and following ones: argument %s is spread",
+ param.info.Name, debugFormatNode(caller.Fset, arg.expr))
+ return // give up
+ }
+ assert(!param.variadic, "unsimplified variadic parameter")
+ if param.info.Escapes {
+ logf("keeping param %q: escapes from callee", param.info.Name)
+ continue
+ }
+ if param.info.Assigned {
+ logf("keeping param %q: assigned by callee", param.info.Name)
+ continue // callee needs the parameter variable
+ }
+ if len(param.info.Refs) > 1 && !arg.duplicable {
+ logf("keeping param %q: argument is not duplicable", param.info.Name)
+ continue // incorrect or poor style to duplicate an expression
+ }
+ if len(param.info.Refs) == 0 {
+ if arg.effects {
+ logf("keeping param %q: though unreferenced, it has effects", param.info.Name)
+ continue
+ }
+
+ // If the caller is within a function body,
+ // eliminating an unreferenced parameter might
+ // remove the last reference to a caller local var.
+ if caller.enclosingFunc != nil {
+ for free := range arg.freevars {
+ // TODO(rfindley): we can get this 100% right by looking for
+ // references among other arguments which have non-zero references
+ // within the callee.
+ if v, ok := caller.lookup(free).(*types.Var); ok && within(v.Pos(), caller.enclosingFunc.Body) && !isUsedOutsideCall(caller, v) {
+
+ // Check to see if the substituted var is used within other args
+ // whose corresponding params ARE used in the callee
+ usedElsewhere := func() bool {
+ for i, param := range params {
+ if i < len(args) && len(param.info.Refs) > 0 { // excludes original param
+ for name := range args[i].freevars {
+ if caller.lookup(name) == v {
+ return true
+ }
+ }
+ }
+ }
+ return false
+ }
+ if !usedElsewhere() {
+ logf("keeping param %q: arg contains perhaps the last reference to caller local %v @ %v",
+ param.info.Name, v, caller.Fset.PositionFor(v.Pos(), false))
+ continue next
+ }
+ }
+ }
+ }
+ }
+
+ // Arg is a potential substitution candidate: analyze its shadowing.
+ //
+ // Consider inlining a call f(z, 1) to
+ //
+ // func f(x, y int) int { z := y; return x + y + z }
+ //
+ // we can't replace x in the body by z (or any
+ // expression that has z as a free identifier) because there's an
+ // intervening declaration of z that would shadow the caller's one.
+ //
+ // However, we *could* replace x in the body by y, as long as the y
+ // parameter is also removed by substitution.
+
+ sg[arg] = nil // Absent shadowing, the arg is substitutable.
+ for free := range arg.freevars {
+ switch s := param.info.Shadow[free]; {
+ case s < 0:
+ // Shadowed by a non-parameter symbol, so arg is not substitutable.
+ delete(sg, arg)
+ case s > 0:
+ // Shadowed by a parameter; arg may be substitutable, if only shadowed
+ // by other substitutable parameters.
+ if s > len(args) {
+ // Defensive: this should not happen in the current factoring, since
+ // spread arguments are already handled.
+ delete(sg, arg)
+ }
+ if edges, ok := sg[arg]; ok {
+ sg[arg] = append(edges, args[s-1])
+ }
+ }
+ }
+ }
+
+ // Process the initial state of the substitution graph.
+ sg.prune()
+
+ // Now we check various conditions on the substituted argument set as a
+ // whole. These conditions reject substitution candidates, but since their
+ // analysis depends on the full set of candidates, we do not process side
+ // effects of their candidate rejection until after the analysis completes,
+ // in a call to prune. After pruning, we must re-run the analysis to check
+ // for additional rejections.
+ //
+ // Here's an example of that in practice:
+ //
+ // var a [3]int
+ //
+ // func falcon(x, y, z int) {
+ // _ = x + a[y+z]
+ // }
+ //
+ // func _() {
+ // var y int
+ // const x, z = 1, 2
+ // falcon(y, x, z)
+ // }
+ //
+ // In this example, arguments 0 and 1 are shadowed by each other's
+ // corresponding parameter, and so each can be substituted only if they are
+ // both substituted. But the fallible constant analysis finds a violated
+ // constraint: x + z = 3, and so the constant array index would cause a
+ // compile-time error if argument 1 (x) were substituted. Therefore,
+ // following the falcon analysis, we must also prune argument 0.
+ //
+ // As far as I (rfindley) can tell, the falcon analysis should always succeed
+ // after the first pass, as it's not possible for additional bindings to
+ // cause new constraint failures. Nevertheless, we re-run it to be sure.
+ //
+ // However, the same cannot be said of the effects analysis, as demonstrated
+ // by this example:
+ //
+ // func effects(w, x, y, z int) {
+ // _ = x + w + y + z
+ // }
+ //
+ // func _() {
+ // v := 0
+ // w := func() int { v++; return 0 }
+ // x := func() int { v++; return 0 }
+ // y := func() int { v++; return 0 }
+ // effects(x(), w(), y(), x()) //@ inline(re"effects", effects)
+ // }
+ //
+ // In this example, arguments 0, 1, and 3 are related by the substitution
+ // graph. The first effects analysis implies that arguments 0 and 1 must be
+ // bound, and therefore argument 3 must be bound. But then a subsequent
+ // effects analysis forces argument 2 to also be bound.
+
+ // Reject constant arguments as substitution candidates if they cause
+ // violation of falcon constraints.
+ //
+ // Keep redoing the analysis until we no longer reject additional arguments,
+ // as the set of substituted parameters affects the falcon environment.
+ for checkFalconConstraints(logf, params, args, falcon, sg) {
+ sg.prune()
+ }
+
+ // As a final step, introduce bindings to resolve any
+ // evaluation order hazards. This must be done last, as
+ // additional subsequent bindings could introduce new hazards.
+ //
+ // As with the falcon analysis, keep redoing the analysis until no more
+ // arguments are rejected.
+ for resolveEffects(logf, args, effects, sg) {
+ sg.prune()
+ }
+
+ // The remaining candidates are safe to substitute.
+ for i, param := range params {
+ if arg := args[i]; sg.has(arg) {
+
+ // It is safe to substitute param and replace it with arg.
+ // The formatter introduces parens as needed for precedence.
+ //
+ // Because arg.expr belongs to the caller,
+ // we clone it before splicing it into the callee tree.
+ logf("replacing parameter %q by argument %q",
+ param.info.Name, debugFormatNode(caller.Fset, arg.expr))
+ for _, ref := range param.info.Refs {
+ // Apply any transformations necessary for this reference.
+ argExpr := arg.expr
+
+ // If the reference itself is being selected, and we applied desugaring
+ // (an explicit &x or *x), we can undo that desugaring here as it is
+ // not necessary for a selector. We don't need to check addressability
+ // here because if we desugared, the receiver must have been
+ // addressable.
+ if ref.IsSelectionOperand && arg.desugaredRecv {
+ switch e := argExpr.(type) {
+ case *ast.UnaryExpr:
+ argExpr = e.X
+ case *ast.StarExpr:
+ argExpr = e.X
+ }
+ }
+
+ // If the reference requires exact type agreement between parameter and
+ // argument, wrap the argument in an explicit conversion if
+ // substitution might materially change its type. (We already did the
+ // necessary shadowing check on the parameter type syntax.)
+ //
+ // The types must agree in any of these cases:
+ // - the argument affects type inference;
+ // - the reference's concrete type is assigned to an interface type;
+ // - the reference is not an assignment, nor a trivial conversion of an untyped constant.
+ //
+ // In all other cases, no explicit conversion is necessary as either
+ // the type does not matter, or must have already agreed for well-typed
+ // code.
+ //
+ // This is only needed for substituted arguments. All other arguments
+ // are given explicit types in either a binding decl or when using the
+ // literalization strategy.
+ //
+ // If the types are identical, we can eliminate
+ // redundant type conversions such as this:
+ //
+ // Callee:
+ // func f(i int32) { fmt.Println(i) }
+ // Caller:
+ // func g() { f(int32(1)) }
+ // Inlined as:
+ // func g() { fmt.Println(int32(int32(1))) }
+ //
+ // Recall that non-trivial does not imply non-identical for constant
+ // conversions; however, at this point state.arguments has already
+ // re-typechecked the constant and set arg.type to its (possibly
+ // "untyped") inherent type, so the conversion from untyped 1 to int32
+ // is non-trivial even though both arg and param have identical types
+ // (int32).
+ needType := ref.AffectsInference ||
+ (ref.Assignable && ref.IfaceAssignment && !param.info.IsInterface) ||
+ (!ref.Assignable && !trivialConversion(arg.constant, arg.typ, param.obj.Type()))
+
+ if needType &&
+ !types.Identical(types.Default(arg.typ), param.obj.Type()) {
+
+ // If arg.expr is already a conversion to interface type, strip it.
+ if call, ok := argExpr.(*ast.CallExpr); ok && len(call.Args) == 1 {
+ if typ, ok := isConversion(caller.Info, call); ok && isNonTypeParamInterface(typ) {
+ argExpr = call.Args[0]
+ }
+ }
+
+ argExpr = convert(param.fieldType, argExpr)
+ logf("param %q (offset %d): adding explicit %s -> %s conversion around argument",
+ param.info.Name, ref.Offset, arg.typ, param.obj.Type())
+ }
+ replace(ref.Offset, internalastutil.CloneNode(argExpr).(ast.Expr), arg.variadic)
+ }
+ params[i] = nil // substituted
+ args[i] = nil // substituted
+ }
+ }
+}
+
+// isConversion reports whether call is a type conversion T(x),
+// returning (T, true) if so, and (nil, false) otherwise.
+func isConversion(info *types.Info, call *ast.CallExpr) (types.Type, bool) {
+ tv, ok := info.Types[call.Fun]
+ if !ok || !tv.IsType() {
+ return nil, false
+ }
+ return tv.Type, true
+}
+
+// isNonTypeParamInterface reports whether t is an interface type
+// that is not a type parameter.
+func isNonTypeParamInterface(t types.Type) bool {
+ if typeparams.IsTypeParam(t) {
+ return false
+ }
+ return types.IsInterface(t)
+}
+
+// isUsedOutsideCall reports whether v is used outside of caller.Call, within
+// the body of caller.enclosingFunc.
+func isUsedOutsideCall(caller *Caller, v *types.Var) bool {
+ used := false
+ ast.Inspect(caller.enclosingFunc.Body, func(n ast.Node) bool {
+ // Skip the call itself: uses within it don't count.
+ if n == caller.Call {
+ return false
+ }
+ switch n := n.(type) {
+ case *ast.Ident:
+ if use := caller.Info.Uses[n]; use == v {
+ used = true
+ }
+ case *ast.FuncType:
+ // All params are used.
+ // (A (re)declaration of v in a parameter list appears in
+ // Defs, not Uses, so it is handled separately here.)
+ for _, fld := range n.Params.List {
+ for _, n := range fld.Names {
+ if def := caller.Info.Defs[n]; def == v {
+ used = true
+ }
+ }
+ }
+ }
+ return !used // keep going until we find a use
+ })
+ return used
+}
+
+// checkFalconConstraints checks whether constant arguments
+// are safe to substitute (e.g. s[i] -> ""[0] is not safe.)
+//
+// Any failed constraint causes us to reject all constant arguments as
+// substitution candidates (by clearing args[i].substitution=false).
+//
+// TODO(adonovan): we could obtain a finer result rejecting only the
+// freevars of each failed constraint, and processing constraints in
+// order of increasing arity, but failures are quite rare.
+func checkFalconConstraints(logf logger, params []*parameter, args []*argument, falcon falconResult, sg substGraph) bool {
+ // Create a dummy package, as this is the only
+ // way to create an environment for CheckExpr.
+ pkg := types.NewPackage("falcon", "falcon")
+
+ // Declare types used by constraints.
+ for _, typ := range falcon.Types {
+ logf("falcon env: type %s %s", typ.Name, types.Typ[typ.Kind])
+ pkg.Scope().Insert(types.NewTypeName(token.NoPos, pkg, typ.Name, types.Typ[typ.Kind]))
+ }
+
+ // Declare constants and variables for the parameters.
+ nconst := 0
+ for i, param := range params {
+ name := param.info.Name
+ if name == "" {
+ continue // unreferenced
+ }
+ arg := args[i]
+ if arg.constant != nil && sg.has(arg) && param.info.FalconType != "" {
+ t := pkg.Scope().Lookup(param.info.FalconType).Type()
+ pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, t, arg.constant))
+ logf("falcon env: const %s %s = %v", name, param.info.FalconType, arg.constant)
+ nconst++
+ } else {
+ v := types.NewVar(token.NoPos, pkg, name, arg.typ)
+ typesinternal.SetVarKind(v, typesinternal.PackageVar)
+ pkg.Scope().Insert(v)
+ logf("falcon env: var %s %s", name, arg.typ)
+ }
+ }
+ if nconst == 0 {
+ return false // nothing to do
+ }
+
+ // Parse and evaluate the constraints in the environment.
+ fset := token.NewFileSet()
+ removed := false
+ for _, falcon := range falcon.Constraints {
+ expr, err := parser.ParseExprFrom(fset, "falcon", falcon, 0)
+ if err != nil {
+ panic(fmt.Sprintf("failed to parse falcon constraint %s: %v", falcon, err))
+ }
+ if err := types.CheckExpr(fset, pkg, token.NoPos, expr, nil); err != nil {
+ logf("falcon: constraint %s violated: %v", falcon, err)
+ for j, arg := range args {
+ if arg.constant != nil && sg.has(arg) {
+ logf("keeping param %q due falcon violation", params[j].info.Name)
+ removed = sg.remove(arg) || removed
+ }
+ }
+ break
+ }
+ logf("falcon: constraint %s satisfied", falcon)
+ }
+ return removed
+}
+
+// resolveEffects marks arguments as non-substitutable to resolve
+// hazards resulting from the callee evaluation order described by the
+// effects list.
+//
+// To do this, each argument is categorized as a read (R), write (W),
+// or pure. A hazard occurs when the order of evaluation of a W
+// changes with respect to any R or W. Pure arguments can be
+// effectively ignored, as they can be safely evaluated in any order.
+//
+// The callee effects list contains the index of each parameter in the
+// order it is first evaluated during execution of the callee. In
+// addition, the two special values R∞ and W∞ indicate the relative
+// position of the callee's first non-parameter read and its first
+// effect (or other unknown behavior).
+// For example, the list [0 2 1 R∞ 3 W∞] for func(a, b, c, d)
+// indicates that the callee referenced parameters a, c, and b,
+// followed by an arbitrary read, then parameter d, and finally
+// unknown behavior.
+//
+// When an argument is marked as not substitutable, we say that it is
+// 'bound', in the sense that its evaluation occurs in a binding decl
+// or literalized call. Such bindings always occur in the original
+// callee parameter order.
+//
+// In this context, "resolving hazards" means binding arguments so
+// that they are evaluated in a valid, hazard-free order. A trivial
+// solution to this problem would be to bind all arguments, but of
+// course that's not useful. The goal is to bind as few arguments as
+// possible.
+//
+// The algorithm proceeds by inspecting arguments in reverse parameter
+// order (right to left), preserving the invariant that every
+// higher-ordered argument is either already substituted or does not
+// need to be substituted. At each iteration, if there is an
+// evaluation hazard in the callee effects relative to the current
+// argument, the argument must be bound. Subsequently, if the argument
+// is bound for any reason, each lower-ordered argument must also be
+// bound if either the argument or lower-order argument is a
+// W---otherwise the binding itself would introduce a hazard.
+//
+// Thus, after each iteration, there are no hazards relative to the
+// current argument. Subsequent iterations cannot introduce hazards
+// with that argument because they can result only in additional
+// binding of lower-ordered arguments.
+func resolveEffects(logf logger, args []*argument, effects []int, sg substGraph) bool {
+ // effectStr renders an effect for logging, e.g. "R1", "W∞".
+ effectStr := func(effects bool, idx int) string {
+ i := fmt.Sprint(idx)
+ if idx == len(args) {
+ i = "∞"
+ }
+ return string("RW"[btoi(effects)]) + i
+ }
+ removed := false
+ for i := len(args) - 1; i >= 0; i-- {
+ argi := args[i]
+ if sg.has(argi) && !argi.pure {
+ // i is not bound: check whether it must be bound due to hazards.
+ idx := slices.Index(effects, i)
+ if idx >= 0 {
+ for _, j := range effects[:idx] {
+ var (
+ ji int // effective param index
+ jw bool // j is a write
+ )
+ if j == winf || j == rinf {
+ jw = j == winf
+ ji = len(args)
+ } else {
+ jw = args[j].effects
+ ji = j
+ }
+ if ji > i && (jw || argi.effects) { // out of order evaluation
+ logf("binding argument %s: preceded by %s",
+ effectStr(argi.effects, i), effectStr(jw, ji))
+
+ removed = sg.remove(argi) || removed
+ break
+ }
+ }
+ }
+ }
+ if !sg.has(argi) {
+ // i is bound: bind any lower-ordered non-pure argument
+ // whose relative order with i matters.
+ for j := 0; j < i; j++ {
+ argj := args[j]
+ if argj.pure {
+ continue
+ }
+ if (argi.effects || argj.effects) && sg.has(argj) {
+ logf("binding argument %s: %s is bound",
+ effectStr(argj.effects, j), effectStr(argi.effects, i))
+
+ removed = sg.remove(argj) || removed
+ }
+ }
+ }
+ }
+ return removed
+}
+
+// A substGraph is a directed graph representing arguments that may be
+// substituted, provided all of their related arguments (or "dependencies") are
+// also substituted. The candidate arguments for substitution are the keys in
+// this graph, and the edges represent shadowing of free variables of the key
+// by parameters corresponding to the dependency arguments.
+//
+// Any argument not present as a map key is known not to be substitutable. Some
+// arguments may have edges leading to other arguments that are not present in
+// the graph. In this case, those arguments also cannot be substituted, because
+// they have free variables that are shadowed by parameters that cannot be
+// substituted. Calling [substGraph.prune] removes these arguments from the
+// graph.
+//
+// The 'prune' operation is not built into the 'remove' step both because
+// analyses (falcon, effects) need local information about each argument
+// independent of dependencies, and for the efficiency of pruning once en masse
+// after each analysis.
+type substGraph map[*argument][]*argument
+
+// has reports whether arg is (still) a candidate for substitution.
+func (g substGraph) has(arg *argument) bool {
+ if _, present := g[arg]; present {
+ return true
+ }
+ return false
+}
+
+// remove marks arg as not substitutable, reporting whether the arg was
+// previously substitutable.
+//
+// remove does not itself reject other arguments that become
+// unsubstitutable once their dependency arg is gone; call
+// [substGraph.prune] afterwards to propagate that effect.
+func (g substGraph) remove(arg *argument) bool {
+ _, present := g[arg]
+ if present {
+ delete(g, arg)
+ }
+ return present
+}
+
+// prune updates the graph to remove any keys that reach other arguments not
+// present in the graph.
+func (g substGraph) prune() {
+ // visit visits the forward transitive closure of arg and reports whether any
+ // missing argument was encountered, removing all nodes on the path to it
+ // from arg.
+ //
+ // The seen map is used for cycle breaking. In the presence of cycles, visit
+ // may report a false positive for an intermediate argument. For example,
+ // consider the following graph, where only a and b are candidates for
+ // substitution (meaning, only a and b are present in the graph).
+ //
+ // a ↔ b
+ // ↓
+ // [c]
+ //
+ // In this case, starting a visit from a, visit(b, seen) may report 'true',
+ // because c has not yet been considered. For this reason, we must guarantee
+ // that visit is called with an empty seen map at least once for each node.
+ var visit func(*argument, map[*argument]unit) bool
+ visit = func(arg *argument, seen map[*argument]unit) bool {
+ deps, ok := g[arg]
+ if !ok {
+ return false
+ }
+ if _, ok := seen[arg]; !ok {
+ seen[arg] = unit{}
+ for _, dep := range deps {
+ if !visit(dep, seen) {
+ // A dependency (transitively) reaches a missing
+ // argument, so arg cannot be substituted either.
+ delete(g, arg)
+ return false
+ }
+ }
+ }
+ return true
+ }
+ for arg := range g {
+ // Remove any argument that is, or transitively depends upon,
+ // an unsubstitutable argument.
+ //
+ // Each visitation gets a fresh cycle-breaking set.
+ visit(arg, make(map[*argument]unit))
+ }
+}
+
+// updateCalleeParams updates the calleeDecl syntax to remove
+// substituted parameters and move the receiver (if any) to the head
+// of the ordinary parameters.
+func updateCalleeParams(calleeDecl *ast.FuncDecl, params []*parameter) {
+ // The logic is fiddly because of the three forms of ast.Field:
+ //
+ // func(int), func(x int), func(x, y int)
+ //
+ // Also, ensure that all remaining parameters are named
+ // to avoid a mix of named/unnamed when joining (recv, params...).
+ // func (T) f(int, bool) -> (_ T, _ int, _ bool)
+ // (Strictly, we need to do this only for methods and only when
+ // the namednesses of Recv and Params differ; that might be tidier.)
+
+ paramIdx := 0 // index in original parameter list (incl. receiver)
+ var newParams []*ast.Field
+ filterParams := func(field *ast.Field) {
+ var names []*ast.Ident
+ if field.Names == nil {
+ // Unnamed parameter field (e.g. func f(int))
+ if params[paramIdx] != nil {
+ // Give it an explicit name "_" since we will
+ // make the receiver (if any) a regular parameter
+ // and one cannot mix named and unnamed parameters.
+ names = append(names, makeIdent("_"))
+ }
+ paramIdx++
+ } else {
+ // Named parameter field e.g. func f(x, y int)
+ // Remove substituted parameters in place.
+ // If all were substituted, delete field.
+ for _, id := range field.Names {
+ if pinfo := params[paramIdx]; pinfo != nil {
+ // Rename unreferenced parameters with "_".
+ // This is crucial for binding decls, since
+ // unlike parameters, they are subject to
+ // "unreferenced var" checks.
+ if len(pinfo.info.Refs) == 0 {
+ id = makeIdent("_")
+ }
+ names = append(names, id)
+ }
+ paramIdx++
+ }
+ }
+ if names != nil {
+ newParams = append(newParams, &ast.Field{
+ Names: names,
+ Type: field.Type,
+ })
+ }
+ }
+ if calleeDecl.Recv != nil {
+ filterParams(calleeDecl.Recv.List[0])
+ calleeDecl.Recv = nil
+ }
+ for _, field := range calleeDecl.Type.Params.List {
+ filterParams(field)
+ }
+ calleeDecl.Type.Params.List = newParams
+}
+
+// bindingDeclInfo records information about the binding decl produced by
+// createBindingDecl.
+type bindingDeclInfo struct {
+ names map[string]bool // names bound by the binding decl, excluding "_"; possibly empty
+ stmt ast.Stmt // the binding decl itself
+}
+
+// createBindingDecl constructs a "binding decl" that implements
+// parameter assignment and declares any named result variables
+// referenced by the callee. It returns nil if there were no
+// unsubstituted parameters.
+//
+// It may not always be possible to create the decl (e.g. due to
+// shadowing), in which case it also returns nil; but if it succeeds,
+// the declaration may be used by reduction strategies to relax the
+// requirement that all parameters have been substituted.
+//
+// For example, a call:
+//
+// f(a0, a1, a2)
+//
+// where:
+//
+// func f(p0, p1 T0, p2 T1) { body }
+//
+// reduces to:
+//
+// {
+// var (
+// p0, p1 T0 = a0, a1
+// p2 T1 = a2
+// )
+// body
+// }
+//
+// so long as p0, p1 ∉ freevars(T1) or freevars(a2), and so on,
+// because each spec is statically resolved in sequence and
+// dynamically assigned in sequence. By contrast, all
+// parameters are resolved simultaneously and assigned
+// simultaneously.
+//
+// The pX names should already be blank ("_") if the parameter
+// is unreferenced; this avoids "unreferenced local var" checks.
+//
+// Strategies may impose additional checks on return
+// conversions, labels, defer, etc.
+func createBindingDecl(logf logger, caller *Caller, args []*argument, calleeDecl *ast.FuncDecl, results []*paramInfo) *bindingDeclInfo {
+ // Spread calls are tricky as they may not align with the
+ // parameters' field groupings nor types.
+ // For example, given
+ // func g() (int, string)
+ // the call
+ // f(g())
+ // is legal with these decls of f:
+ // func f(int, string)
+ // func f(x, y any)
+ // func f(x, y ...any)
+ // TODO(adonovan): support binding decls for spread calls by
+ // splitting parameter groupings as needed.
+ if lastArg := last(args); lastArg != nil && lastArg.spread {
+ logf("binding decls not yet supported for spread calls")
+ return nil
+ }
+
+ var (
+ specs []ast.Spec
+ names = make(map[string]bool) // names defined by previous specs
+ )
+ // shadow reports whether any name referenced by spec is
+ // shadowed by a name declared by a previous spec (since,
+ // unlike parameters, each spec of a var decl is within the
+ // scope of the previous specs).
+ shadow := func(spec *ast.ValueSpec) bool {
+ // Compute union of free names of type and values
+ // and detect shadowing. Values is the arguments
+ // (caller syntax), so we can use type info.
+ // But Type is the untyped callee syntax,
+ // so we have to use a syntax-only algorithm.
+ free := make(map[string]bool)
+ for _, value := range spec.Values {
+ for name := range freeVars(caller.Info, value) {
+ free[name] = true
+ }
+ }
+ const includeComplitIdents = true
+ freeishNames(free, spec.Type, includeComplitIdents)
+ for name := range free {
+ if names[name] {
+ logf("binding decl would shadow free name %q", name)
+ return true
+ }
+ }
+ for _, id := range spec.Names {
+ if id.Name != "_" {
+ names[id.Name] = true
+ }
+ }
+ return false
+ }
+
+ // parameters
+ //
+ // Bind parameters that were not eliminated through
+ // substitution. (Non-nil arguments correspond to the
+ // remaining parameters in calleeDecl.)
+ var values []ast.Expr
+ for _, arg := range args {
+ if arg != nil {
+ values = append(values, arg.expr)
+ }
+ }
+ for _, field := range calleeDecl.Type.Params.List {
+ // Each field (param group) becomes a ValueSpec.
+ spec := &ast.ValueSpec{
+ Names: cleanNodes(field.Names),
+ Type: cleanNode(field.Type),
+ Values: values[:len(field.Names)],
+ }
+ // Consume this group's values.
+ values = values[len(field.Names):]
+ if shadow(spec) {
+ return nil
+ }
+ specs = append(specs, spec)
+ }
+ assert(len(values) == 0, "args/params mismatch")
+
+ // results
+ //
+ // Add specs to declare any named result
+ // variables that are referenced by the body.
+ if calleeDecl.Type.Results != nil {
+ resultIdx := 0
+ for _, field := range calleeDecl.Type.Results.List {
+ if field.Names == nil {
+ resultIdx++
+ continue // unnamed field
+ }
+ var names []*ast.Ident
+ for _, id := range field.Names {
+ if len(results[resultIdx].Refs) > 0 {
+ names = append(names, id)
+ }
+ resultIdx++
+ }
+ if len(names) > 0 {
+ spec := &ast.ValueSpec{
+ Names: cleanNodes(names),
+ Type: cleanNode(field.Type),
+ }
+ if shadow(spec) {
+ return nil
+ }
+ specs = append(specs, spec)
+ }
+ }
+ }
+
+ if len(specs) == 0 {
+ logf("binding decl not needed: all parameters substituted")
+ return nil
+ }
+
+ stmt := &ast.DeclStmt{
+ Decl: &ast.GenDecl{
+ Tok: token.VAR,
+ Specs: specs,
+ },
+ }
+ logf("binding decl: %s", debugFormatNode(caller.Fset, stmt))
+ return &bindingDeclInfo{names: names, stmt: stmt}
+}
+
+// lookup resolves name in the lexical environment of the call site,
+// returning nil if it is not found.
+func (caller *Caller) lookup(name string) types.Object {
+ pos := caller.Call.Pos()
+ for _, node := range caller.path {
+ scope := scopeFor(caller.Info, node)
+ if scope == nil {
+ continue
+ }
+ if _, obj := scope.LookupParent(name, pos); obj != nil {
+ return obj
+ }
+ }
+ return nil
+}
+
+// scopeFor returns the type-checker scope recorded for n.
+// The scope of a function body (which includes the params)
+// is associated with the function's type, not its body,
+// so function nodes are mapped to their type first.
+func scopeFor(info *types.Info, n ast.Node) *types.Scope {
+ switch f := n.(type) {
+ case *ast.FuncDecl:
+ return info.Scopes[f.Type]
+ case *ast.FuncLit:
+ return info.Scopes[f.Type]
+ }
+ return info.Scopes[n]
+}
+
+// -- predicates over expressions --
+
+// freeVars returns the names of all free identifiers of e:
+// those lexically referenced by it but not defined within it.
+// (Fields and methods are not included.)
+func freeVars(info *types.Info, e ast.Expr) map[string]bool {
+ names := make(map[string]bool)
+ ast.Inspect(e, func(n ast.Node) bool {
+ id, ok := n.(*ast.Ident)
+ if !ok {
+ return true
+ }
+ // Skip field references, so that T{f: 0} is not a ref to f.
+ if obj, ok := info.Uses[id]; ok && !within(obj.Pos(), e) && !isField(obj) {
+ names[obj.Name()] = true
+ }
+ return true
+ })
+ return names
+}
+
+// effects reports whether an expression might change the state of the
+// program (through function calls and channel receives) and affect
+// the evaluation of subsequent expressions.
+func (st *state) effects(info *types.Info, expr ast.Expr) bool {
+ effects := false
+ ast.Inspect(expr, func(n ast.Node) bool {
+ switch n := n.(type) {
+ case *ast.FuncLit:
+ return false // prune descent
+
+ case *ast.CallExpr:
+ if info.Types[n.Fun].IsType() {
+ // A conversion T(x) has only the effect of its operand.
+ } else if !typesinternal.CallsPureBuiltin(info, n) {
+ // A handful of built-ins have no effect
+ // beyond those of their arguments.
+ // All other calls (including append, copy, recover)
+ // have unknown effects.
+ //
+ // As with 'pure', there is room for
+ // improvement by inspecting the callee.
+ effects = true
+ }
+
+ case *ast.UnaryExpr:
+ // Channel receive is the only unary operator with effects.
+ if n.Op == token.ARROW { // <-ch
+ effects = true
+ }
+ }
+ return true
+ })
+
+ // Even if consideration of effects is not desired,
+ // we continue to compute, log, and discard them.
+ if st.opts.IgnoreEffects && effects {
+ effects = false
+ st.opts.Logf("ignoring potential effects of argument %s",
+ debugFormatNode(st.caller.Fset, expr))
+ }
+
+ return effects
+}
+
+// pure reports whether an expression has the same result no matter
+// when it is executed relative to other expressions, so it can be
+// commuted with any other expression or statement without changing
+// its meaning.
+//
+// An expression is considered impure if it reads the contents of any
+// variable, with the exception of "single assignment" local variables
+// (as classified by the provided callback), which are never updated
+// after their initialization.
+//
+// Pure does not imply duplicable: for example, new(T) and T{} are
+// pure expressions but both return a different value each time they
+// are evaluated, so they are not safe to duplicate.
+//
+// Purity does not imply freedom from run-time panics. We assume that
+// target programs do not encounter run-time panics nor depend on them
+// for correct operation.
+//
+// TODO(adonovan): add unit tests of this function.
+func pure(info *types.Info, assign1 func(*types.Var) bool, e ast.Expr) bool {
+	// pure is the recursive worker; declared as a variable to permit self-recursion.
+	var pure func(e ast.Expr) bool
+	pure = func(e ast.Expr) bool {
+		switch e := e.(type) {
+		case *ast.ParenExpr:
+			return pure(e.X)
+
+		case *ast.Ident:
+			if v, ok := info.Uses[e].(*types.Var); ok {
+				// In general variables are impure
+				// as they may be updated, but
+				// single-assignment local variables
+				// never change value.
+				//
+				// We assume all package-level variables
+				// may be updated, but for non-exported
+				// ones we could do better by analyzing
+				// the complete package.
+				return !isPkgLevel(v) && assign1(v)
+			}
+
+			// All other kinds of reference are pure.
+			return true
+
+		case *ast.FuncLit:
+			// A function literal may allocate a closure that
+			// references mutable variables, but mutation
+			// cannot be observed without calling the function,
+			// and calls are considered impure.
+			return true
+
+		case *ast.BasicLit:
+			return true
+
+		case *ast.UnaryExpr: // + - ! ^ & but not <-
+			return e.Op != token.ARROW && pure(e.X)
+
+		case *ast.BinaryExpr: // arithmetic, shifts, comparisons, &&/||
+			return pure(e.X) && pure(e.Y)
+
+		case *ast.CallExpr:
+			// A conversion is as pure as its operand.
+			if info.Types[e.Fun].IsType() {
+				return pure(e.Args[0])
+			}
+
+			// Calls to some built-ins are as pure as their arguments.
+			if typesinternal.CallsPureBuiltin(info, e) {
+				for _, arg := range e.Args {
+					if !pure(arg) {
+						return false
+					}
+				}
+				return true
+			}
+
+			// All other calls are impure, so we can
+			// reject them without even looking at e.Fun.
+			//
+			// More sophisticated analysis could infer purity in
+			// commonly used functions such as strings.Contains;
+			// perhaps we could offer the client a hook so that
+			// go/analysis-based implementation could exploit the
+			// results of a purity analysis. But that would make
+			// the inliner's choices harder to explain.
+			return false
+
+		case *ast.CompositeLit:
+			// T{...} is as pure as its elements.
+			for _, elt := range e.Elts {
+				if kv, ok := elt.(*ast.KeyValueExpr); ok {
+					if !pure(kv.Value) {
+						return false
+					}
+					// A struct field name is not an expression to
+					// evaluate; only map/slice/array keys need checking.
+					if id, ok := kv.Key.(*ast.Ident); ok {
+						if v, ok := info.Uses[id].(*types.Var); ok && v.IsField() {
+							continue // struct {field: value}
+						}
+					}
+					// map/slice/array {key: value}
+					if !pure(kv.Key) {
+						return false
+					}
+
+				} else if !pure(elt) {
+					return false
+				}
+			}
+			return true
+
+		case *ast.SelectorExpr:
+			if seln, ok := info.Selections[e]; ok {
+				// See types.SelectionKind for background.
+				switch seln.Kind() {
+				case types.MethodExpr:
+					// A method expression T.f acts like a
+					// reference to a func decl, so it is pure.
+					return true
+
+				case types.MethodVal, types.FieldVal:
+					// A field or method selection x.f is pure
+					// if x is pure and the selection does
+					// not indirect a pointer.
+					return !indirectSelection(seln) && pure(e.X)
+
+				default:
+					panic(seln)
+				}
+			} else {
+				// A qualified identifier is
+				// treated like an unqualified one.
+				return pure(e.Sel)
+			}
+
+		case *ast.StarExpr:
+			return false // *ptr depends on the state of the heap
+
+		default:
+			// Conservatively treat all other syntax
+			// (e.g. IndexExpr, SliceExpr, TypeAssertExpr) as impure.
+			return false
+		}
+	}
+	return pure(e)
+}
+
+// duplicable reports whether it is appropriate for the expression to
+// be freely duplicated.
+//
+// Given the declaration
+//
+// func f(x T) T { return x + g() + x }
+//
+// an argument y is considered duplicable if we would wish to see a
+// call f(y) simplified to y+g()+y. This is true for identifiers,
+// integer literals, unary negation, and selectors x.f where x is not
+// a pointer. But we would not wish to duplicate expressions that:
+// - have side effects (e.g. nearly all calls),
+// - are not referentially transparent (e.g. &T{}, ptr.field, *ptr), or
+// - are long (e.g. "huge string literal").
+func duplicable(info *types.Info, e ast.Expr) bool {
+	switch e := e.(type) {
+	case *ast.ParenExpr:
+		return duplicable(info, e.X)
+
+	case *ast.Ident:
+		return true
+
+	case *ast.BasicLit:
+		v := info.Types[e].Value
+		switch e.Kind {
+		case token.INT:
+			return true // any int
+		case token.STRING:
+			return consteq(v, kZeroString) // only ""
+		case token.FLOAT:
+			return consteq(v, kZeroFloat) || consteq(v, kOneFloat) // only 0.0 or 1.0
+		}
+		// Other literal kinds (CHAR, IMAG) fall through to "not duplicable".
+
+	case *ast.UnaryExpr: // e.g. +1, -1
+		return (e.Op == token.ADD || e.Op == token.SUB) && duplicable(info, e.X)
+
+	case *ast.CompositeLit:
+		// Empty struct or array literals T{} are duplicable.
+		// (Non-empty literals are too verbose, and slice/map
+		// literals allocate indirect variables.)
+		if len(e.Elts) == 0 {
+			switch info.TypeOf(e).Underlying().(type) {
+			case *types.Struct, *types.Array:
+				return true
+			}
+		}
+		return false
+
+	case *ast.CallExpr:
+		// Treat type conversions as duplicable if they do not observably allocate.
+		// The only cases of observable allocations are
+		// the `[]byte(string)` and `[]rune(string)` conversions.
+		//
+		// Duplicating string([]byte) conversions increases
+		// allocation but doesn't change behavior, but the
+		// reverse, []byte(string), allocates a distinct array,
+		// which is observable.
+
+		if !info.Types[e.Fun].IsType() { // check whether e.Fun is a type conversion
+			return false
+		}
+
+		fun := info.TypeOf(e.Fun)
+		arg := info.TypeOf(e.Args[0])
+
+		switch fun := fun.Underlying().(type) {
+		case *types.Slice:
+			// Do not mark []byte(string) and []rune(string) as duplicable.
+			elem, ok := fun.Elem().Underlying().(*types.Basic)
+			if ok && (elem.Kind() == types.Rune || elem.Kind() == types.Byte) {
+				from, ok := arg.Underlying().(*types.Basic)
+				isString := ok && from.Info()&types.IsString != 0
+				return !isString
+			}
+		case *types.TypeParam:
+			return false // be conservative
+		}
+		return true
+
+	case *ast.SelectorExpr:
+		if seln, ok := info.Selections[e]; ok {
+			// A field or method selection x.f is referentially
+			// transparent if it does not indirect a pointer.
+			return !indirectSelection(seln)
+		}
+		// A qualified identifier pkg.Name is referentially transparent.
+		return true
+	}
+	// All other expressions (calls, index ops, etc.) are not duplicable.
+	return false
+}
+
+// consteq reports whether two constant values are equal.
+func consteq(x, y constant.Value) bool {
+	eq := constant.Compare(x, token.EQL, y)
+	return eq
+}
+
+// Canonical constant values used by duplicable to recognize
+// literals that are cheap enough to repeat (e.g. "", 0.0, 1.0).
+var (
+	kZeroInt    = constant.MakeInt64(0)
+	kZeroString = constant.MakeString("")
+	kZeroFloat  = constant.MakeFloat64(0.0)
+	kOneFloat   = constant.MakeFloat64(1.0)
+)
+
+// -- inline helpers --
+
+// assert panics with msg if cond is false.
+func assert(cond bool, msg string) {
+	if cond {
+		return
+	}
+	panic(msg)
+}
+
+// blanks returns a slice of n > 0 blank identifiers.
+func blanks[E ast.Expr](n int) []E {
+	if n == 0 {
+		panic("blanks(0)")
+	}
+	out := make([]E, 0, n)
+	for range n {
+		// Build each "_" directly; the double conversion via
+		// ast.Expr is needed to satisfy the type parameter. (ugh)
+		out = append(out, ast.Expr(&ast.Ident{Name: "_"}).(E))
+	}
+	return out
+}
+
+// makeIdent returns a new unresolved identifier with the given name.
+func makeIdent(name string) *ast.Ident {
+	id := new(ast.Ident)
+	id.Name = name
+	return id
+}
+
+// importedPkgName returns the PkgName object declared by an ImportSpec.
+// TODO(adonovan): make this a method of types.Info (#62037).
+func importedPkgName(info *types.Info, imp *ast.ImportSpec) (*types.PkgName, bool) {
+ var obj types.Object
+ if imp.Name != nil {
+ obj = info.Defs[imp.Name]
+ } else {
+ obj = info.Implicits[imp]
+ }
+ pkgname, ok := obj.(*types.PkgName)
+ return pkgname, ok
+}
+
+// isPkgLevel reports whether obj is declared at package level
+// (i.e. the package scope resolves obj's name to obj itself).
+func isPkgLevel(obj types.Object) bool {
+	// TODO(adonovan): consider using the simpler obj.Parent() ==
+	// obj.Pkg().Scope() instead. But be sure to test carefully
+	// with instantiations of generics.
+	scope := obj.Pkg().Scope()
+	return scope.Lookup(obj.Name()) == obj
+}
+
+// callContext returns the two nodes immediately enclosing the call
+// (specified as a PathEnclosingInterval), ignoring parens.
+func callContext(callPath []ast.Node) (parent, grandparent ast.Node) {
+	_ = callPath[0].(*ast.CallExpr) // sanity check
+	for _, n := range callPath[1:] {
+		if _, isParen := n.(*ast.ParenExpr); isParen {
+			continue // parens are transparent
+		}
+		if parent == nil {
+			parent = n
+			continue
+		}
+		return parent, n
+	}
+	return parent, nil
+}
+
+// hasLabelConflict reports whether the set of labels of the function
+// enclosing the call (specified as a PathEnclosingInterval)
+// intersects with the set of callee labels.
+func hasLabelConflict(callPath []ast.Node, calleeLabels []string) bool {
+	used := callerLabels(callPath)
+	return slices.ContainsFunc(calleeLabels, func(label string) bool {
+		return used[label] // conflict if the caller also uses this label
+	})
+}
+
+// callerLabels returns the set of control labels in the function (if
+// any) enclosing the call (specified as a PathEnclosingInterval).
+func callerLabels(callPath []ast.Node) map[string]bool {
+	var body *ast.BlockStmt
+	switch f := callerFunc(callPath).(type) {
+	case *ast.FuncDecl:
+		body = f.Body
+	case *ast.FuncLit:
+		body = f.Body
+	}
+	if body == nil {
+		return nil // call is not within a function
+	}
+	var labels map[string]bool
+	ast.Inspect(body, func(n ast.Node) bool {
+		switch n := n.(type) {
+		case *ast.FuncLit:
+			return false // don't descend into nested functions
+		case *ast.LabeledStmt:
+			if labels == nil {
+				labels = make(map[string]bool)
+			}
+			labels[n.Label.Name] = true
+		}
+		return true
+	})
+	return labels
+}
+
+// callerFunc returns the innermost Func{Decl,Lit} node enclosing the
+// call (specified as a PathEnclosingInterval).
+func callerFunc(callPath []ast.Node) ast.Node {
+ _ = callPath[0].(*ast.CallExpr) // sanity check
+ for _, n := range callPath[1:] {
+ if is[*ast.FuncDecl](n) || is[*ast.FuncLit](n) {
+ return n
+ }
+ }
+ return nil
+}
+
+// callStmt reports whether the function call (specified
+// as a PathEnclosingInterval) appears within an ExprStmt,
+// and returns it if so.
+//
+// If unrestricted, callStmt returns nil if the ExprStmt f() appears
+// in a restricted context (such as "if f(); cond {") where it cannot
+// be replaced by an arbitrary statement. (See "statement theory".)
+func callStmt(callPath []ast.Node, unrestricted bool) *ast.ExprStmt {
+	parent, _ := callContext(callPath)
+	stmt, ok := parent.(*ast.ExprStmt)
+	if ok && unrestricted {
+		// Find the parent of the ExprStmt within callPath
+		// to see what syntactic context the statement occupies.
+		switch callPath[slices.Index(callPath, ast.Node(stmt))+1].(type) {
+		case *ast.LabeledStmt,
+			*ast.BlockStmt,
+			*ast.CaseClause,
+			*ast.CommClause:
+			// unrestricted
+		default:
+			// TODO(adonovan): handle restricted
+			// XYZStmt.Init contexts (but not ForStmt.Post)
+			// by creating a block around the if/for/switch:
+			// "if f(); cond {" -> "{ stmts; if cond {"
+
+			return nil // restricted
+		}
+	}
+	return stmt
+}
+
+// Statement theory
+//
+// These are all the places a statement may appear in the AST:
+//
+// LabeledStmt.Stmt Stmt -- any
+// BlockStmt.List []Stmt -- any (but see switch/select)
+// IfStmt.Init Stmt? -- simple
+// IfStmt.Body BlockStmt
+// IfStmt.Else Stmt? -- IfStmt or BlockStmt
+// CaseClause.Body []Stmt -- any
+// SwitchStmt.Init Stmt? -- simple
+// SwitchStmt.Body BlockStmt -- CaseClauses only
+// TypeSwitchStmt.Init Stmt? -- simple
+// TypeSwitchStmt.Assign Stmt -- AssignStmt(TypeAssertExpr) or ExprStmt(TypeAssertExpr)
+// TypeSwitchStmt.Body BlockStmt -- CaseClauses only
+// CommClause.Comm Stmt? -- SendStmt or ExprStmt(UnaryExpr) or AssignStmt(UnaryExpr)
+// CommClause.Body []Stmt -- any
+// SelectStmt.Body BlockStmt -- CommClauses only
+// ForStmt.Init Stmt? -- simple
+// ForStmt.Post Stmt? -- simple
+// ForStmt.Body BlockStmt
+// RangeStmt.Body BlockStmt
+//
+// simple = AssignStmt | SendStmt | IncDecStmt | ExprStmt.
+//
+// A BlockStmt cannot replace an ExprStmt in
+// {If,Switch,TypeSwitch}Stmt.Init or ForStmt.Post.
+// That is allowed only within:
+// LabeledStmt.Stmt Stmt
+// BlockStmt.List []Stmt
+// CaseClause.Body []Stmt
+// CommClause.Body []Stmt
+
+// replaceNode performs a destructive update of the tree rooted at
+// root, replacing each occurrence of "from" with "to". If to is nil and
+// the element is within a slice, the slice element is removed.
+//
+// The root itself cannot be replaced; an attempt will panic.
+//
+// This function must not be called on the caller's syntax tree.
+//
+// TODO(adonovan): polish this up and move it to astutil package.
+// TODO(adonovan): needs a unit test.
+func replaceNode(root ast.Node, from, to ast.Node) {
+	if from == nil {
+		panic("from == nil")
+	}
+	if reflect.ValueOf(from).IsNil() {
+		panic(fmt.Sprintf("from == (%T)(nil)", from))
+	}
+	if from == root {
+		panic("from == root")
+	}
+	found := false
+	var parent reflect.Value // parent variable of interface type, containing a pointer
+	var visit func(reflect.Value)
+	visit = func(v reflect.Value) {
+		switch v.Kind() {
+		case reflect.Pointer:
+			if v.Interface() == from {
+				found = true
+
+				// If v is a struct field or array element
+				// (e.g. Field.Comment or Field.Names[i])
+				// then it is addressable (a pointer variable).
+				//
+				// But if it was the value of an interface
+				// (e.g. *ast.Ident within ast.Node)
+				// then it is non-addressable, and we need
+				// to set the enclosing interface (parent).
+				if !v.CanAddr() {
+					v = parent
+				}
+
+				// to=nil => use zero value
+				var toV reflect.Value
+				if to != nil {
+					toV = reflect.ValueOf(to)
+				} else {
+					toV = reflect.Zero(v.Type()) // e.g. ast.Expr(nil)
+				}
+				v.Set(toV)
+
+			} else if !v.IsNil() {
+				switch v.Interface().(type) {
+				case *ast.Object, *ast.Scope:
+					// Skip fields of types potentially involved in cycles.
+				default:
+					visit(v.Elem())
+				}
+			}
+
+		case reflect.Struct:
+			for i := range v.Type().NumField() {
+				visit(v.Field(i))
+			}
+
+		case reflect.Slice:
+			compact := false
+			for i := range v.Len() {
+				visit(v.Index(i))
+				if v.Index(i).IsNil() {
+					compact = true
+				}
+			}
+			if compact {
+				// Elements were deleted. Eliminate nils.
+				// (Do this in a second pass to avoid
+				// unnecessary writes in the common case.)
+				j := 0
+				for i := range v.Len() {
+					if !v.Index(i).IsNil() {
+						v.Index(j).Set(v.Index(i))
+						j++
+					}
+				}
+				v.SetLen(j)
+			}
+		case reflect.Interface:
+			parent = v
+			visit(v.Elem())
+
+		case reflect.Array, reflect.Chan, reflect.Func, reflect.Map, reflect.UnsafePointer:
+			panic(v) // unreachable in AST
+		default:
+			// bool, string, number: nop
+		}
+		// parent is only meaningful while visiting the immediate
+		// child of an interface value; invalidate it on the way out.
+		parent = reflect.Value{}
+	}
+	visit(reflect.ValueOf(root))
+	if !found {
+		panic(fmt.Sprintf("%T not found", from))
+	}
+}
+
+// cleanNode returns a clone of node with positions cleared.
+//
+// It should be used for any callee nodes that are formatted using the caller
+// file set.
+func cleanNode[T ast.Node](node T) T {
+	cloned := internalastutil.CloneNode(node)
+	clearPositions(cloned)
+	return cloned
+}
+
+// cleanNodes applies cleanNode to each element of nodes.
+func cleanNodes[T ast.Node](nodes []T) []T {
+	var out []T // nil for empty input, like the input convention elsewhere
+	for i := range nodes {
+		out = append(out, cleanNode(nodes[i]))
+	}
+	return out
+}
+
+// clearPositions destroys token.Pos information within the tree rooted at root,
+// as positions in callee trees may cause caller comments to be emitted prematurely.
+//
+// In general it isn't safe to clear a valid Pos because some of them
+// (e.g. CallExpr.Ellipsis, TypeSpec.Assign) are significant to
+// go/printer, so this function sets each non-zero Pos to 1, which
+// suffices to avoid advancing the printer's comment cursor.
+//
+// This function mutates its argument; do not invoke on caller syntax.
+//
+// TODO(adonovan): remove this horrendous workaround when #20744 is finally fixed.
+func clearPositions(root ast.Node) {
+	posType := reflect.TypeFor[token.Pos]()
+	ast.Inspect(root, func(n ast.Node) bool {
+		if n != nil {
+			v := reflect.ValueOf(n).Elem() // deref the pointer to struct
+			fields := v.Type().NumField()
+			for i := range fields {
+				f := v.Field(i)
+				// Clearing Pos arbitrarily is destructive,
+				// as its presence may be semantically significant
+				// (e.g. CallExpr.Ellipsis, TypeSpec.Assign)
+				// or affect formatting preferences (e.g. GenDecl.Lparen).
+				//
+				// Note: for proper formatting, it may be necessary to be selective
+				// about which positions we set to 1 vs which we set to token.NoPos.
+				// (e.g. we can set most to token.NoPos, save the few that are
+				// significant).
+				if f.Type() == posType {
+					// Preserve NoPos; map every valid position to 1.
+					if f.Interface() != token.NoPos {
+						f.Set(reflect.ValueOf(token.Pos(1)))
+					}
+				}
+			}
+		}
+		return true
+	})
+}
+
+// findIdent finds the Ident beneath root that has the given pos.
+// It returns the path to the ident (excluding the ident), and the ident
+// itself, where the path is the sequence of ast.Nodes encountered in a
+// depth-first search to find ident.
+func findIdent(root ast.Node, pos token.Pos) ([]ast.Node, *ast.Ident) {
+	// TODO(adonovan): opt: skip subtrees that don't contain pos.
+	var (
+		path  []ast.Node // current stack of ancestors during traversal
+		found *ast.Ident
+	)
+	ast.Inspect(root, func(n ast.Node) bool {
+		if found != nil {
+			return false // already found; prune the rest
+		}
+		if n == nil {
+			// Post-order event: pop the ancestor stack.
+			path = path[:len(path)-1]
+			return false
+		}
+		if id, ok := n.(*ast.Ident); ok {
+			if id.Pos() == pos {
+				// Return true (not false) so path is left intact:
+				// an Ident has no children to traverse.
+				found = id
+				return true
+			}
+		}
+		path = append(path, n)
+		return true
+	})
+	if found == nil {
+		panic(fmt.Sprintf("findIdent %d not found in %s",
+			pos, debugFormatNode(token.NewFileSet(), root)))
+	}
+	return path, found
+}
+
+// prepend returns a new slice with elem followed by the elements of slice.
+func prepend[T any](elem T, slice ...T) []T {
+	out := make([]T, 0, len(slice)+1)
+	out = append(out, elem)
+	return append(out, slice...)
+}
+
+// debugFormatNode formats a node or returns a formatting error.
+// Its sloppy treatment of errors is appropriate only for logging.
+func debugFormatNode(fset *token.FileSet, n ast.Node) string {
+ var out strings.Builder
+ if err := format.Node(&out, fset, n); err != nil {
+ out.WriteString(err.Error())
+ }
+ return out.String()
+}
+
+func shallowCopy[T any](ptr *T) *T {
+ copy := *ptr
+ return ©
+}
+
+// forall reports whether f holds for every element of list (∀).
+func forall[T any](list []T, f func(i int, x T) bool) bool {
+	for i := range list {
+		if !f(i, list[i]) {
+			return false
+		}
+	}
+	return true
+}
+
+// exists reports whether f holds for at least one element of list (∃).
+func exists[T any](list []T, f func(i int, x T) bool) bool {
+	for i := range list {
+		if f(i, list[i]) {
+			return true
+		}
+	}
+	return false
+}
+
+// last returns the last element of a slice, or zero if empty.
+func last[T any](slice []T) T {
+ n := len(slice)
+ if n > 0 {
+ return slice[n-1]
+ }
+ return *new(T)
+}
+
+// consistentOffsets reports whether the portion of caller.Content
+// that corresponds to caller.Call can be parsed as a call expression.
+// If not, the client has provided inconsistent information, possibly
+// because they forgot to ignore line directives when computing the
+// filename enclosing the call.
+// This is just a heuristic.
+func consistentOffsets(caller *Caller) bool {
+	start := offsetOf(caller.Fset, caller.Call.Pos())
+	end := offsetOf(caller.Fset, caller.Call.End())
+	// start must be strictly positive: a call cannot begin at
+	// offset 0 of a valid Go file (the package clause comes first).
+	if !(0 < start && start < end && end <= len(caller.Content)) {
+		return false
+	}
+	expr, err := parser.ParseExpr(string(caller.Content[start:end]))
+	if err != nil {
+		return false
+	}
+	return is[*ast.CallExpr](expr)
+}
+
+// needsParens reports whether parens are required to avoid ambiguity
+// around the new node replacing the specified old node (which is some
+// ancestor of the CallExpr identified by its PathEnclosingInterval).
+func needsParens(callPath []ast.Node, old, new ast.Node) bool {
+	// Find enclosing old node and its parent.
+	i := slices.Index(callPath, old)
+	if i == -1 {
+		panic("not found")
+	}
+
+	// There is no precedence ambiguity when replacing
+	// (e.g.) a statement enclosing the call.
+	if !is[ast.Expr](old) {
+		return false
+	}
+
+	// An expression beneath a non-expression
+	// has no precedence ambiguity.
+	parent, ok := callPath[i+1].(ast.Expr)
+	if !ok {
+		return false
+	}
+
+	// precedence returns the operator precedence of n,
+	// or -1 if n is not a unary or binary expression.
+	precedence := func(n ast.Node) int {
+		switch n := n.(type) {
+		case *ast.UnaryExpr, *ast.StarExpr:
+			return token.UnaryPrec
+		case *ast.BinaryExpr:
+			return n.Op.Precedence()
+		}
+		return -1
+	}
+
+	// Parens are not required if the new node
+	// is not unary or binary.
+	newprec := precedence(new)
+	if newprec < 0 {
+		return false
+	}
+
+	// Parens are required if parent and child are both
+	// unary or binary and the parent has higher precedence.
+	if precedence(parent) > newprec {
+		return true
+	}
+
+	// Was the old node the operand of a postfix operator?
+	//  f().sel
+	//  f()[i:j]
+	//  f()[i]
+	//  f().(T)
+	//  f()(x)
+	switch parent := parent.(type) {
+	case *ast.SelectorExpr:
+		return parent.X == old
+	case *ast.IndexExpr:
+		return parent.X == old
+	case *ast.SliceExpr:
+		return parent.X == old
+	case *ast.TypeAssertExpr:
+		return parent.X == old
+	case *ast.CallExpr:
+		return parent.Fun == old
+	}
+	return false
+}
+
+// declares returns the set of lexical names declared by a
+// sequence of statements from the same block, excluding sub-blocks.
+// (Lexical names do not include control labels.)
+func declares(stmts []ast.Stmt) map[string]bool {
+	names := make(map[string]bool)
+	add := func(id *ast.Ident) { names[id.Name] = true }
+	for _, stmt := range stmts {
+		switch stmt := stmt.(type) {
+		case *ast.DeclStmt:
+			for _, spec := range stmt.Decl.(*ast.GenDecl).Specs {
+				switch spec := spec.(type) {
+				case *ast.ValueSpec:
+					for _, id := range spec.Names {
+						add(id)
+					}
+				case *ast.TypeSpec:
+					add(spec.Name)
+				}
+			}
+
+		case *ast.AssignStmt:
+			if stmt.Tok == token.DEFINE {
+				for _, lhs := range stmt.Lhs {
+					add(lhs.(*ast.Ident))
+				}
+			}
+		}
+	}
+	delete(names, "_") // the blank identifier declares nothing
+	return names
+}
+
+// An importNameFunc is used to query local import names in the caller, in a
+// particular shadowing context.
+//
+// The shadow map contains additional names shadowed in the inlined code, at
+// the position the local import name is to be used. The shadow map only needs
+// to contain newly introduced names in the inlined code; names shadowed at the
+// caller are handled automatically.
+//
+// It returns "" if no import name is available for pkgPath.
+type importNameFunc = func(pkgPath string, shadow shadowMap) string
+
+// assignStmts rewrites a statement assigning the results of a call into zero
+// or more statements that assign its return operands, or (nil, false) if no
+// such rewrite is possible. The set of bindings created by the result of
+// assignStmts is the same as the set of bindings created by the callerStmt.
+//
+// The callee must contain exactly one return statement.
+//
+// This is (once again) a surprisingly complex task. For example, depending on
+// types and existing bindings, the assignment
+//
+// a, b := f()
+//
+// could be rewritten as:
+//
+// a, b := 1, 2
+//
+// but may need to be written as:
+//
+// a, b := int8(1), int32(2)
+//
+// In the case where the return statement within f is a spread call to another
+// function g(), we cannot explicitly convert the return values inline, and so
+// it may be necessary to split the declaration and assignment of variables
+// into separate statements:
+//
+// a, b := g()
+//
+// or
+//
+// var a int32
+// a, b = g()
+//
+// or
+//
+// var (
+// a int8
+// b int32
+// )
+// a, b = g()
+//
+// Note: assignStmts may return (nil, true) if it determines that the rewritten
+// assignment consists only of _ = nil assignments.
+func (st *state) assignStmts(callerStmt *ast.AssignStmt, returnOperands []ast.Expr, importName importNameFunc) ([]ast.Stmt, bool) {
+	logf, caller, callee := st.opts.Logf, st.caller, &st.callee.impl
+
+	assert(len(callee.Returns) == 1, "unexpected multiple returns")
+	resultInfo := callee.Returns[0]
+
+	// When constructing assign statements, we need to make sure that we don't
+	// modify types on the left-hand side, such as would happen if the type of a
+	// RHS expression does not match the corresponding LHS type at the caller
+	// (due to untyped conversion or interface widening).
+	//
+	// This turns out to be remarkably tricky to handle correctly.
+	//
+	// Substrategies below are labeled as `Substrategy <name>:`.
+
+	// Collect LHS information.
+	var (
+		lhs    []ast.Expr                              // shallow copy of the LHS slice, for mutation
+		defs   = make([]*ast.Ident, len(callerStmt.Lhs)) // indexes in lhs of defining identifiers
+		blanks = make([]bool, len(callerStmt.Lhs))       // indexes in lhs of blank identifiers
+		byType typeutil.Map                              // map of distinct types -> indexes, for writing specs later
+	)
+	for i, expr := range callerStmt.Lhs {
+		lhs = append(lhs, expr)
+		if name, ok := expr.(*ast.Ident); ok {
+			if name.Name == "_" {
+				blanks[i] = true
+				continue // no type
+			}
+
+			if obj, isDef := caller.Info.Defs[name]; isDef {
+				defs[i] = name
+				typ := obj.Type()
+				idxs, _ := byType.At(typ).([]int)
+				idxs = append(idxs, i)
+				byType.Set(typ, idxs)
+			}
+		}
+	}
+
+	// Collect RHS information
+	//
+	// The RHS is either a parallel assignment or spread assignment, but by
+	// looping over both callerStmt.Rhs and returnOperands we handle both.
+	var (
+		rhs             []ast.Expr            // new RHS of assignment, owned by the inliner
+		callIdx         = -1                  // index of the call among the original RHS
+		nilBlankAssigns = make(map[int]unit)  // indexes in rhs of _ = nil assignments, which can be deleted
+		freeNames       = make(map[string]bool) // free(ish) names among rhs expressions
+		nonTrivial      = make(map[int]bool)  // indexes in rhs of nontrivial result conversions
+	)
+	// Conservatively include composite-literal identifiers when
+	// computing free names.
+	const includeComplitIdents = true
+
+	for i, expr := range callerStmt.Rhs {
+		if expr == caller.Call {
+			assert(callIdx == -1, "malformed (duplicative) AST")
+			callIdx = i
+			for j, returnOperand := range returnOperands {
+				freeishNames(freeNames, returnOperand, includeComplitIdents)
+				rhs = append(rhs, returnOperand)
+				if resultInfo[j]&nonTrivialResult != 0 {
+					nonTrivial[i+j] = true
+				}
+				if blanks[i+j] && resultInfo[j]&untypedNilResult != 0 {
+					nilBlankAssigns[i+j] = unit{}
+				}
+			}
+		} else {
+			// We must clone before clearing positions, since e came from the caller.
+			expr = internalastutil.CloneNode(expr)
+			clearPositions(expr)
+			freeishNames(freeNames, expr, includeComplitIdents)
+			rhs = append(rhs, expr)
+		}
+	}
+	assert(callIdx >= 0, "failed to find call in RHS")
+
+	// Substrategy "splice": Check to see if we can simply splice in the result
+	// expressions from the callee, such as simplifying
+	//
+	//	x, y := f()
+	//
+	// to
+	//
+	//	x, y := e1, e2
+	//
+	// where the types of x and y match the types of e1 and e2.
+	//
+	// This works as long as we don't need to write any additional type
+	// information.
+	if len(nonTrivial) == 0 { // no non-trivial conversions to worry about
+
+		logf("substrategy: splice assignment")
+		return []ast.Stmt{&ast.AssignStmt{
+			Lhs:    lhs,
+			Tok:    callerStmt.Tok,
+			TokPos: callerStmt.TokPos,
+			Rhs:    rhs,
+		}}, true
+	}
+
+	// Inlining techniques below will need to write type information in order to
+	// preserve the correct types of LHS identifiers.
+	//
+	// typeExpr is a simple helper to write out type expressions. It currently
+	// handles (possibly qualified) type names.
+	//
+	// TODO(rfindley):
+	//  1. expand this to handle more type expressions.
+	//  2. refactor to share logic with callee rewriting.
+	universeAny := types.Universe.Lookup("any")
+	typeExpr := func(typ types.Type, shadow shadowMap) ast.Expr {
+		var (
+			typeName string
+			obj      *types.TypeName // nil for basic types
+		)
+		if tname := typesinternal.TypeNameFor(typ); tname != nil {
+			obj = tname
+			typeName = tname.Name()
+		}
+
+		// Special case: check for universe "any".
+		// TODO(golang/go#66921): this may become unnecessary if any becomes a proper alias.
+		if typ == universeAny.Type() {
+			typeName = "any"
+		}
+
+		if typeName == "" {
+			return nil
+		}
+
+		if obj == nil || obj.Pkg() == nil || obj.Pkg() == caller.Types { // local type or builtin
+			if shadow[typeName] != 0 {
+				logf("cannot write shadowed type name %q", typeName)
+				return nil
+			}
+			// Verify that the name still resolves to the same type at the caller.
+			obj, _ := caller.lookup(typeName).(*types.TypeName)
+			if obj != nil && types.Identical(obj.Type(), typ) {
+				return ast.NewIdent(typeName)
+			}
+		} else if pkgName := importName(obj.Pkg().Path(), shadow); pkgName != "" {
+			return &ast.SelectorExpr{
+				X:   ast.NewIdent(pkgName),
+				Sel: ast.NewIdent(typeName),
+			}
+		}
+		return nil
+	}
+
+	// Substrategy "spread": in the case of a spread call (func f() (T1, T2) return
+	// g()), since we didn't hit the 'splice' substrategy, there must be some
+	// non-declaring expression on the LHS. Simplify this by pre-declaring
+	// variables, rewriting
+	//
+	//	x, y := f()
+	//
+	// to
+	//
+	//	var x int
+	//	x, y = g()
+	//
+	// Which works as long as the predeclared variables do not overlap with free
+	// names on the RHS.
+	if len(rhs) != len(lhs) {
+		assert(len(rhs) == 1 && len(returnOperands) == 1, "expected spread call")
+
+		for _, id := range defs {
+			if id != nil && freeNames[id.Name] {
+				// By predeclaring variables, we're changing them to be in scope of the
+				// RHS. We can't do this if their names are free on the RHS.
+				return nil, false
+			}
+		}
+
+		// Write out the specs, being careful to avoid shadowing free names in
+		// their type expressions.
+		var (
+			specs    []ast.Spec
+			specIdxs []int
+			shadow   = make(shadowMap)
+		)
+		failed := false
+		byType.Iterate(func(typ types.Type, v any) {
+			if failed {
+				return
+			}
+			idxs := v.([]int)
+			specIdxs = append(specIdxs, idxs[0])
+			texpr := typeExpr(typ, shadow)
+			if texpr == nil {
+				failed = true
+				return
+			}
+			spec := &ast.ValueSpec{
+				Type: texpr,
+			}
+			for _, idx := range idxs {
+				spec.Names = append(spec.Names, ast.NewIdent(defs[idx].Name))
+			}
+			specs = append(specs, spec)
+		})
+		if failed {
+			return nil, false
+		}
+		logf("substrategy: spread assignment")
+		return []ast.Stmt{
+			&ast.DeclStmt{
+				Decl: &ast.GenDecl{
+					Tok:   token.VAR,
+					Specs: specs,
+				},
+			},
+			&ast.AssignStmt{
+				Lhs: callerStmt.Lhs,
+				Tok: token.ASSIGN,
+				Rhs: returnOperands,
+			},
+		}, true
+	}
+
+	assert(len(lhs) == len(rhs), "mismatching LHS and RHS")
+
+	// Substrategy "convert": write out RHS expressions with explicit type conversions
+	// as necessary, rewriting
+	//
+	//	x, y := f()
+	//
+	// to
+	//
+	//	x, y := 1, int32(2)
+	//
+	// As required to preserve types.
+	//
+	// In the special case of _ = nil, which is disallowed by the type checker
+	// (since nil has no default type), we delete the assignment.
+	var origIdxs []int // maps back to original indexes after lhs and rhs are pruned
+	i := 0
+	for j := range lhs {
+		if _, ok := nilBlankAssigns[j]; !ok {
+			lhs[i] = lhs[j]
+			rhs[i] = rhs[j]
+			origIdxs = append(origIdxs, j)
+			i++
+		}
+	}
+	lhs = lhs[:i]
+	rhs = rhs[:i]
+
+	if len(lhs) == 0 {
+		logf("trivial assignment after pruning nil blanks assigns")
+		// After pruning, we have no remaining assignments.
+		// Signal this by returning a non-nil slice of statements.
+		return nil, true
+	}
+
+	// Write out explicit conversions as necessary.
+	//
+	// A conversion is necessary if the LHS is being defined, and the RHS return
+	// involved a nontrivial implicit conversion.
+	for i, expr := range rhs {
+		idx := origIdxs[i]
+		if nonTrivial[idx] && defs[idx] != nil {
+			typ := caller.Info.TypeOf(lhs[i])
+			texpr := typeExpr(typ, nil)
+			if texpr == nil {
+				return nil, false
+			}
+			if _, ok := texpr.(*ast.StarExpr); ok {
+				// TODO(rfindley): is this necessary? Doesn't the formatter add these parens?
+				texpr = &ast.ParenExpr{X: texpr} // *T -> (*T) so that (*T)(x) is valid
+			}
+			rhs[i] = &ast.CallExpr{
+				Fun:  texpr,
+				Args: []ast.Expr{expr},
+			}
+		}
+	}
+	logf("substrategy: convert assignment")
+	return []ast.Stmt{&ast.AssignStmt{
+		Lhs: lhs,
+		Tok: callerStmt.Tok,
+		Rhs: rhs,
+	}}, true
+}
+
+// tailCallSafeReturn reports whether the callee's return statements may be safely
+// used to return from the function enclosing the caller (which must exist).
+func tailCallSafeReturn(caller *Caller, calleeSymbol *types.Func, callee *gobCallee) bool {
+	// It is safe if all callee returns involve only trivial conversions.
+	if !hasNonTrivialReturn(callee.Returns) {
+		return true
+	}
+
+	var callerType types.Type
+	// Find type of innermost function enclosing call.
+	// (Beware: Caller.enclosingFunc is the outermost.)
+	// The doc comment requires that such a function exists,
+	// so callerType is non-nil after the loop.
+loop:
+	for _, n := range caller.path {
+		switch f := n.(type) {
+		case *ast.FuncDecl:
+			callerType = caller.Info.ObjectOf(f.Name).Type()
+			break loop
+		case *ast.FuncLit:
+			callerType = caller.Info.TypeOf(f)
+			break loop
+		}
+	}
+
+	// Non-trivial return conversions in the callee are permitted
+	// if the same non-trivial conversion would occur after inlining,
+	// i.e. if the caller and callee results tuples are identical.
+	callerResults := callerType.(*types.Signature).Results()
+	calleeResults := calleeSymbol.Type().(*types.Signature).Results()
+	return types.Identical(callerResults, calleeResults)
+}
+
+// hasNonTrivialReturn reports whether any of the returns involve a nontrivial
+// implicit conversion of a result expression.
+func hasNonTrivialReturn(returnInfo [][]returnOperandFlags) bool {
+	for _, operands := range returnInfo {
+		if slices.ContainsFunc(operands, func(flags returnOperandFlags) bool {
+			return flags&nonTrivialResult != 0
+		}) {
+			return true
+		}
+	}
+	return false
+}
+
+// soleUse returns the ident that refers to obj, if there is exactly one.
+func soleUse(info *types.Info, obj types.Object) (sole *ast.Ident) {
+ // This is not efficient, but it is called infrequently.
+ for id, obj2 := range info.Uses {
+ if obj2 == obj {
+ if sole != nil {
+ return nil // not unique
+ }
+ sole = id
+ }
+ }
+ return sole
+}
+
+type unit struct{} // zero-width value type, for representing sets as maps (map[K]unit)
--- /dev/null
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package inline
+
+// This file defines various common helpers.
+
+import (
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "reflect"
+ "strings"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
// is reports whether the dynamic type of x is T
// (or implements T, if T is an interface type).
func is[T any](x any) bool {
	if _, ok := x.(T); ok {
		return true
	}
	return false
}
+
// btoi converts a boolean to an int: 1 for true, 0 for false.
func btoi(b bool) int {
	if b {
		return 1
	}
	return 0
}
+
+func offsetOf(fset *token.FileSet, pos token.Pos) int {
+ return fset.PositionFor(pos, false).Offset
+}
+
+// objectKind returns an object's kind (e.g. var, func, const, typename).
+func objectKind(obj types.Object) string {
+ return strings.TrimPrefix(strings.ToLower(reflect.TypeOf(obj).String()), "*types.")
+}
+
+// within reports whether pos is within the half-open interval [n.Pos, n.End).
+func within(pos token.Pos, n ast.Node) bool {
+ return n.Pos() <= pos && pos < n.End()
+}
+
+// trivialConversion reports whether it is safe to omit the implicit
+// value-to-variable conversion that occurs in argument passing or
+// result return. The only case currently allowed is converting from
+// untyped constant to its default type (e.g. 0 to int).
+//
+// The reason for this check is that converting from A to B to C may
+// yield a different result than converting A directly to C: consider
+// 0 to int32 to any.
+//
+// trivialConversion under-approximates trivial conversions, as unfortunately
+// go/types does not record the type of an expression *before* it is implicitly
+// converted, and therefore it cannot distinguish typed constant
+// expressions from untyped constant expressions. For example, in the
+// expression `c + 2`, where c is a uint32 constant, trivialConversion does not
+// detect that the default type of this expression is actually uint32, not untyped
+// int.
+//
+// We could, of course, do better here by reverse engineering some of go/types'
+// constant handling. That may or may not be worthwhile.
+//
+// Example: in func f() int32 { return 0 },
+// the type recorded for 0 is int32, not untyped int;
+// although it is Identical to the result var,
+// the conversion is non-trivial.
+func trivialConversion(fromValue constant.Value, from, to types.Type) bool {
+ if fromValue != nil {
+ var defaultType types.Type
+ switch fromValue.Kind() {
+ case constant.Bool:
+ defaultType = types.Typ[types.Bool]
+ case constant.String:
+ defaultType = types.Typ[types.String]
+ case constant.Int:
+ defaultType = types.Typ[types.Int]
+ case constant.Float:
+ defaultType = types.Typ[types.Float64]
+ case constant.Complex:
+ defaultType = types.Typ[types.Complex128]
+ default:
+ return false
+ }
+ return types.Identical(defaultType, to)
+ }
+ return types.Identical(from, to)
+}
+
+func checkInfoFields(info *types.Info) {
+ assert(info.Defs != nil, "types.Info.Defs is nil")
+ assert(info.Implicits != nil, "types.Info.Implicits is nil")
+ assert(info.Scopes != nil, "types.Info.Scopes is nil")
+ assert(info.Selections != nil, "types.Info.Selections is nil")
+ assert(info.Types != nil, "types.Info.Types is nil")
+ assert(info.Uses != nil, "types.Info.Uses is nil")
+}
+
// intersects reports whether the key sets of maps x and y
// have at least one element in common.
func intersects[K comparable, T1, T2 any](x map[K]T1, y map[K]T2) bool {
	// Iterate over the smaller map, probing the larger.
	if len(x) <= len(y) {
		for k := range x {
			if _, ok := y[k]; ok {
				return true
			}
		}
		return false
	}
	for k := range y {
		if _, ok := x[k]; ok {
			return true
		}
	}
	return false
}
+
+// convert returns syntax for the conversion T(x).
+func convert(T, x ast.Expr) *ast.CallExpr {
+ // The formatter generally adds parens as needed,
+ // but before go1.22 it had a bug (#63362) for
+ // channel types that requires this workaround.
+ if ch, ok := T.(*ast.ChanType); ok && ch.Dir == ast.RECV {
+ T = &ast.ParenExpr{X: T}
+ }
+ return &ast.CallExpr{
+ Fun: T,
+ Args: []ast.Expr{x},
+ }
+}
+
+// isPointer reports whether t's core type is a pointer.
+func isPointer(t types.Type) bool {
+ return is[*types.Pointer](typeparams.CoreType(t))
+}
+
+// indirectSelection is like seln.Indirect() without bug #8353.
+func indirectSelection(seln *types.Selection) bool {
+ // Work around bug #8353 in Selection.Indirect when Kind=MethodVal.
+ if seln.Kind() == types.MethodVal {
+ tArg, indirect := effectiveReceiver(seln)
+ if indirect {
+ return true
+ }
+
+ tParam := seln.Obj().Type().Underlying().(*types.Signature).Recv().Type()
+ return isPointer(tArg) && !isPointer(tParam) // implicit *
+ }
+
+ return seln.Indirect()
+}
+
+// effectiveReceiver returns the effective type of the method
+// receiver after all implicit field selections (but not implicit * or
+// & operations) have been applied.
+//
+// The boolean indicates whether any implicit field selection was indirect.
+func effectiveReceiver(seln *types.Selection) (types.Type, bool) {
+ assert(seln.Kind() == types.MethodVal, "not MethodVal")
+ t := seln.Recv()
+ indices := seln.Index()
+ indirect := false
+ for _, index := range indices[:len(indices)-1] {
+ if isPointer(t) {
+ indirect = true
+ t = typeparams.MustDeref(t)
+ }
+ t = typeparams.CoreType(t).(*types.Struct).Field(index).Type()
+ }
+ return t, indirect
+}
golang.org/x/tools/go/analysis/passes/hostport
golang.org/x/tools/go/analysis/passes/httpresponse
golang.org/x/tools/go/analysis/passes/ifaceassert
+golang.org/x/tools/go/analysis/passes/inline
golang.org/x/tools/go/analysis/passes/inspect
+golang.org/x/tools/go/analysis/passes/internal/gofixdirective
golang.org/x/tools/go/analysis/passes/loopclosure
golang.org/x/tools/go/analysis/passes/lostcancel
+golang.org/x/tools/go/analysis/passes/modernize
golang.org/x/tools/go/analysis/passes/nilfunc
golang.org/x/tools/go/analysis/passes/printf
golang.org/x/tools/go/analysis/passes/shift
golang.org/x/tools/go/analysis/passes/unusedresult
golang.org/x/tools/go/analysis/passes/waitgroup
golang.org/x/tools/go/analysis/unitchecker
+golang.org/x/tools/go/ast/astutil
golang.org/x/tools/go/ast/edge
golang.org/x/tools/go/ast/inspector
golang.org/x/tools/go/cfg
golang.org/x/tools/go/types/typeutil
golang.org/x/tools/internal/aliases
golang.org/x/tools/internal/analysisinternal
+golang.org/x/tools/internal/analysisinternal/generated
golang.org/x/tools/internal/analysisinternal/typeindex
golang.org/x/tools/internal/astutil
golang.org/x/tools/internal/bisect
golang.org/x/tools/internal/diff/lcs
golang.org/x/tools/internal/facts
golang.org/x/tools/internal/fmtstr
+golang.org/x/tools/internal/goplsexport
golang.org/x/tools/internal/moreiters
golang.org/x/tools/internal/packagepath
golang.org/x/tools/internal/refactor
+golang.org/x/tools/internal/refactor/inline
golang.org/x/tools/internal/stdlib
golang.org/x/tools/internal/typeparams
golang.org/x/tools/internal/typesinternal
package main
+// TODO(adonovan): replace this test with a script test
+// in cmd/go/testdata/script/vet_suite.txt like we do
+// for 'go fix'.
+
import (
"bytes"
"errors"