Cypherpunks repositories - gostls13.git/commitdiff
vendor/golang.org/x/tools: update to 1ad6f3d
author Alan Donovan <adonovan@google.com>
Mon, 1 Dec 2025 15:26:29 +0000 (10:26 -0500)
committer Alan Donovan <adonovan@google.com>
Mon, 1 Dec 2025 16:00:02 +0000 (08:00 -0800)
cmd$ go get golang.org/x/tools@1ad6f3d
cmd$ GOWORK=off go mod tidy
cmd$ GOWORK=off go mod vendor

This merge pulls in the following commits, which include several fixes
needed for go1.26, marked by an asterisk. None of the unmarked commits
affects vendored packages, so it is safe (and simpler) to merge rather
than cherry-pick via a release branch.

tools$ git log --oneline 68724afed209...1ad6f3d02713
*4a3f2f81eb go/analysis/passes/printf: panic when function literal is assigned to the blank identifier
*d5d7d21fe7 gopls/internal/cache: fix %q verb use with wrong type
*92a094998a go/analysis/passes/modernize: rangeint: handle usages of loop label
*ffbdcac342 go/analysis/passes/modernize: stditerators: add reflect iters
*2e3e83a050 internal/refactor/inline: preserve local package name used by callee
 d32ec34454 gopls/internal/protocol/generate: move injections to tables.go
 98d172d8bd gopls/internal/protocol: add form field in type CodeAction
 e1317381e4 go/packages: suppress test on (e.g.) wasm
*e31ed53b51 internal/stdlib: regenerate
*6f1f89817d internal/analysis/driverutil: include end positions in -json output
 7839abf5e8 gopls/internal/metadata: document when Module can be nil
 98aa9a7d0b gopls/internal/cache: make unimported completions deterministic
 4c5faddb0f internal/modindex: unescape import paths
 c2c902c441 gopls/completion: avoid nil dereference
*4bf3169c8a go/analysis/passes/modernize: waitgroup: highlight "go func" part
 ba5189b063 gopls/internal/template: fix printf mistake in test
*a7d12506a0 go/analysis/passes/printf: clarify checkForward
 c7a1a29f93 internal/pkgbits: fix printf mistake in test
 af205c0a29 gopls/doc/release/v0.21.0.md: tweaks

Change-Id: I23c991987afeb2db3e0f98f76f8ee5000c8a6e02
Reviewed-on: https://go-review.googlesource.com/c/go/+/725460
Auto-Submit: Alan Donovan <adonovan@google.com>
TryBot-Bypass: Alan Donovan <adonovan@google.com>
Reviewed-by: Dmitri Shuralyov <dmitshur@golang.org>
Reviewed-by: Dmitri Shuralyov <dmitshur@google.com>
Commit-Queue: Alan Donovan <adonovan@google.com>

19 files changed:
src/cmd/go.mod
src/cmd/go.sum
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/inline/inline.go
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/forvar.go
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/rangeint.go
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/stditerators.go
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/waitgroup.go
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go
src/cmd/vendor/golang.org/x/tools/internal/analysis/driverutil/fix.go
src/cmd/vendor/golang.org/x/tools/internal/analysis/driverutil/print.go
src/cmd/vendor/golang.org/x/tools/internal/refactor/delete.go
src/cmd/vendor/golang.org/x/tools/internal/refactor/edit.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/internal/refactor/imports.go
src/cmd/vendor/golang.org/x/tools/internal/refactor/inline/inline.go
src/cmd/vendor/golang.org/x/tools/internal/refactor/refactor.go
src/cmd/vendor/golang.org/x/tools/internal/stdlib/deps.go
src/cmd/vendor/golang.org/x/tools/internal/stdlib/manifest.go
src/cmd/vendor/golang.org/x/tools/internal/stdlib/stdlib.go
src/cmd/vendor/modules.txt

diff --git a/src/cmd/go.mod b/src/cmd/go.mod
index 3915c16da33a5528994c322fb6f09a1af6ac5e70..a23387699df0d120c8557724afcafbbeec100d56 100644 (file)
@@ -11,7 +11,7 @@ require (
        golang.org/x/sys v0.38.0
        golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54
        golang.org/x/term v0.34.0
-       golang.org/x/tools v0.39.1-0.20251120214200-68724afed209
+       golang.org/x/tools v0.39.1-0.20251130212600-1ad6f3d02713
 )
 
 require (
diff --git a/src/cmd/go.sum b/src/cmd/go.sum
index 100ea28a7febe975372c06c84ec7a78c90b93269..5a49e61a4a35a2e877b7be27076cfb4d1b12ab30 100644 (file)
@@ -22,7 +22,7 @@ golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
 golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
 golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
 golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
-golang.org/x/tools v0.39.1-0.20251120214200-68724afed209 h1:BGuEUnbWU1H+VhF4Z52lwCvzRT8Q/Z7kJC3okSME58w=
-golang.org/x/tools v0.39.1-0.20251120214200-68724afed209/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
+golang.org/x/tools v0.39.1-0.20251130212600-1ad6f3d02713 h1:i4GzAuZW4RuKXltwKyLYAfk7E1TSKQBxRAI7XKfLjSk=
+golang.org/x/tools v0.39.1-0.20251130212600-1ad6f3d02713/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
 rsc.io/markdown v0.0.0-20240306144322-0bf8f97ee8ef h1:mqLYrXCXYEZOop9/Dbo6RPX11539nwiCNBb1icVPmw8=
 rsc.io/markdown v0.0.0-20240306144322-0bf8f97ee8ef/go.mod h1:8xcPgWmwlZONN1D9bjxtHEjrUtSEa3fakVF8iaewYKQ=
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/inline/inline.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/inline/inline.go
index c0b75202589c9f5afecd32f59ab397f3b37ac825..9049145e2252472ecbe2d9cd008321f38aa9e893 100644 (file)
@@ -7,7 +7,6 @@ package inline
 import (
        "fmt"
        "go/ast"
-       "go/token"
        "go/types"
        "slices"
        "strings"
@@ -23,7 +22,6 @@ import (
        "golang.org/x/tools/internal/analysis/analyzerutil"
        typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
        "golang.org/x/tools/internal/astutil"
-       "golang.org/x/tools/internal/diff"
        "golang.org/x/tools/internal/moreiters"
        "golang.org/x/tools/internal/packagepath"
        "golang.org/x/tools/internal/refactor"
@@ -204,19 +202,12 @@ func (a *analyzer) inlineCall(call *ast.CallExpr, cur inspector.Cursor) {
                var edits []analysis.TextEdit
                if !lazyEdits {
                        // Inline the call.
-                       content, err := a.readFile(call)
-                       if err != nil {
-                               a.pass.Reportf(call.Lparen, "invalid inlining candidate: cannot read source file: %v", err)
-                               return
-                       }
-                       curFile := astutil.EnclosingFile(cur)
                        caller := &inline.Caller{
-                               Fset:    a.pass.Fset,
-                               Types:   a.pass.Pkg,
-                               Info:    a.pass.TypesInfo,
-                               File:    curFile,
-                               Call:    call,
-                               Content: content,
+                               Fset:  a.pass.Fset,
+                               Types: a.pass.Pkg,
+                               Info:  a.pass.TypesInfo,
+                               File:  astutil.EnclosingFile(cur),
+                               Call:  call,
                                CountUses: func(pkgname *types.PkgName) int {
                                        return moreiters.Len(a.index.Uses(pkgname))
                                },
@@ -245,15 +236,7 @@ func (a *analyzer) inlineCall(call *ast.CallExpr, cur inspector.Cursor) {
                                // The flag allows them to decline such fixes.
                                return
                        }
-                       got := res.Content
-
-                       for _, edit := range diff.Bytes(content, got) {
-                               edits = append(edits, analysis.TextEdit{
-                                       Pos:     curFile.FileStart + token.Pos(edit.Start),
-                                       End:     curFile.FileStart + token.Pos(edit.End),
-                                       NewText: []byte(edit.New),
-                               })
-                       }
+                       edits = res.Edits
                }
 
                a.pass.Report(analysis.Diagnostic{
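
For illustration, a minimal standalone sketch (not part of this commit) of what applying such Pos/End/NewText edits looks like in a driver: token.Pos values are mapped to byte offsets through the FileSet and the replacement text is spliced in. The textEdit type and apply helper below are stand-ins, not the real analysis.TextEdit plumbing; this is the reconstruction step the analyzer above no longer performs via diff.Bytes, since the inliner now returns edits directly.

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
    )

    // textEdit mirrors the Pos/End/NewText shape of an analysis text edit.
    type textEdit struct {
        Pos, End token.Pos
        NewText  []byte
    }

    // apply splices one edit into src by converting positions to offsets.
    func apply(fset *token.FileSet, src []byte, e textEdit) []byte {
        start := fset.Position(e.Pos).Offset
        end := fset.Position(e.End).Offset
        var out []byte
        out = append(out, src[:start]...)
        out = append(out, e.NewText...)
        return append(out, src[end:]...)
    }

    func main() {
        src := []byte("package p\n\nfunc f() int { return add(1, 2) }\n")
        fset := token.NewFileSet()
        f, err := parser.ParseFile(fset, "p.go", src, 0)
        if err != nil {
            panic(err)
        }

        // Find the call expression and replace it, as an inlining edit would.
        var call *ast.CallExpr
        ast.Inspect(f, func(n ast.Node) bool {
            if c, ok := n.(*ast.CallExpr); ok {
                call = c
                return false
            }
            return true
        })
        edit := textEdit{Pos: call.Pos(), End: call.End(), NewText: []byte("1 + 2")}
        fmt.Print(string(apply(fset, src, edit))) // func f() int { return 1 + 2 }
    }
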
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/forvar.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/forvar.go
index 67f60acaaf388537f520ea1556bd906afa8ac405..ba54daebbfc65fb5df2a1a49df8673980013b04d 100644 (file)
@@ -35,7 +35,7 @@ var ForVarAnalyzer = &analysis.Analyzer{
 // where the two idents are the same,
 // and the ident is defined (:=) as a variable in the for statement.
 // (Note that this 'fix' does not work for three clause loops
-// because the Go specfilesUsingGoVersionsays "The variable used by each subsequent iteration
+// because the Go spec says "The variable used by each subsequent iteration
 // is declared implicitly before executing the post statement and initialized to the
 // value of the previous iteration's variable at that moment.")
 //
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/rangeint.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/rangeint.go
index 6b1edf38b37b2873be571b4ab337c9c33228e011..c42ec58ec3a7a7c19bfc3a758125709de7d48c78 100644 (file)
@@ -161,7 +161,22 @@ func rangeint(pass *analysis.Pass) (any, error) {
                                                // don't offer a fix, as a range loop
                                                // leaves i with a different final value (limit-1).
                                                if init.Tok == token.ASSIGN {
-                                                       for curId := range curLoop.Parent().Preorder((*ast.Ident)(nil)) {
+                                                       // Find the nearest ancestor that is not a label.
+                                                       // Otherwise, checking for i usage outside of a for
+                                                       // loop might not function properly further below.
+                                                       // This is because the i usage might be a child of
+                                                       // the loop's parent's parent, for example:
+                                                       //     var i int
+                                                       // Loop:
+                                                       //     for i = 0; i < 10; i++ { break loop }
+                                                       //     // i is in the sibling of the label, not the loop
+                                                       //     fmt.Println(i)
+                                                       //
+                                                       ancestor := curLoop.Parent()
+                                                       for is[*ast.LabeledStmt](ancestor.Node()) {
+                                                               ancestor = ancestor.Parent()
+                                                       }
+                                                       for curId := range ancestor.Preorder((*ast.Ident)(nil)) {
                                                                id := curId.Node().(*ast.Ident)
                                                                if info.Uses[id] == v {
                                                                        // Is i used after loop?
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/stditerators.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/stditerators.go
index cc59580671466c2bc12951b8ad7652eb48a4e819..f7318b123daba1c16c10e32897995c50eb8a7613 100644 (file)
@@ -43,23 +43,29 @@ func init() {
 // iter.Seq.
 var stditeratorsTable = [...]struct {
        pkgpath, typename, lenmethod, atmethod, itermethod, elemname string
+
+       seqn int // 1 or 2 => "for x" or "for _, x"
 }{
        // Example: in go/types, (*Tuple).Variables returns an
        // iterator that replaces a loop over (*Tuple).{Len,At}.
        // The loop variable is named "v".
-       {"go/types", "Interface", "NumEmbeddeds", "EmbeddedType", "EmbeddedTypes", "etyp"},
-       {"go/types", "Interface", "NumExplicitMethods", "ExplicitMethod", "ExplicitMethods", "method"},
-       {"go/types", "Interface", "NumMethods", "Method", "Methods", "method"},
-       {"go/types", "MethodSet", "Len", "At", "Methods", "method"},
-       {"go/types", "Named", "NumMethods", "Method", "Methods", "method"},
-       {"go/types", "Scope", "NumChildren", "Child", "Children", "child"},
-       {"go/types", "Struct", "NumFields", "Field", "Fields", "field"},
-       {"go/types", "Tuple", "Len", "At", "Variables", "v"},
-       {"go/types", "TypeList", "Len", "At", "Types", "t"},
-       {"go/types", "TypeParamList", "Len", "At", "TypeParams", "tparam"},
-       {"go/types", "Union", "Len", "Term", "Terms", "term"},
-       // TODO(adonovan): support Seq2. Bonus: transform uses of both key and value.
-       // {"reflect", "Value", "NumFields", "Field", "Fields", "field"},
+       {"go/types", "Interface", "NumEmbeddeds", "EmbeddedType", "EmbeddedTypes", "etyp", 1},
+       {"go/types", "Interface", "NumExplicitMethods", "ExplicitMethod", "ExplicitMethods", "method", 1},
+       {"go/types", "Interface", "NumMethods", "Method", "Methods", "method", 1},
+       {"go/types", "MethodSet", "Len", "At", "Methods", "method", 1},
+       {"go/types", "Named", "NumMethods", "Method", "Methods", "method", 1},
+       {"go/types", "Scope", "NumChildren", "Child", "Children", "child", 1},
+       {"go/types", "Struct", "NumFields", "Field", "Fields", "field", 1},
+       {"go/types", "Tuple", "Len", "At", "Variables", "v", 1},
+       {"go/types", "TypeList", "Len", "At", "Types", "t", 1},
+       {"go/types", "TypeParamList", "Len", "At", "TypeParams", "tparam", 1},
+       {"go/types", "Union", "Len", "Term", "Terms", "term", 1},
+       {"reflect", "Type", "NumField", "Field", "Fields", "field", 1},
+       {"reflect", "Type", "NumMethod", "Method", "Methods", "method", 1},
+       {"reflect", "Type", "NumIn", "In", "Ins", "in", 1},
+       {"reflect", "Type", "NumOut", "Out", "Outs", "out", 1},
+       {"reflect", "Value", "NumField", "Field", "Fields", "field", 2},
+       {"reflect", "Value", "NumMethod", "Method", "Methods", "method", 2},
 }
 
 // stditerators suggests fixes to replace loops using Len/At-style
@@ -86,6 +92,19 @@ var stditeratorsTable = [...]struct {
 // the user hasn't intentionally chosen not to use an
 // iterator for that reason? We don't want to go fix to
 // undo optimizations. Do we need a suppression mechanism?
+//
+// TODO(adonovan): recognize the more complex patterns that
+// could make full use of both components of an iter.Seq2, e.g.
+//
+//     for i := 0; i < v.NumField(); i++ {
+//             use(v.Field(i), v.Type().Field(i))
+//     }
+//
+// =>
+//
+//     for structField, field := range v.Fields() {
+//             use(structField, field)
+//     }
 func stditerators(pass *analysis.Pass) (any, error) {
        var (
                index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
@@ -228,15 +247,17 @@ func stditerators(pass *analysis.Pass) (any, error) {
                                        indexVar = v
                                        curBody = curFor.ChildAt(edge.ForStmt_Body, -1)
                                        elem, elemVar = chooseName(curBody, lenSel.X, indexVar)
+                                       elemPrefix := cond(row.seqn == 2, "_, ", "")
 
-                                       //      for i    := 0; i < x.Len(); i++ {
-                                       //          ----    -------  ---  -----
-                                       //      for elem := range  x.All()      {
+                                       //      for i       := 0; i < x.Len(); i++ {
+                                       //          ----       -------  ---  -----
+                                       //      for elem    := range  x.All()      {
+                                       // or   for _, elem := ...
                                        edits = []analysis.TextEdit{
                                                {
                                                        Pos:     v.Pos(),
                                                        End:     v.Pos() + token.Pos(len(v.Name())),
-                                                       NewText: []byte(elem),
+                                                       NewText: []byte(elemPrefix + elem),
                                                },
                                                {
                                                        Pos:     loop.Init.(*ast.AssignStmt).Rhs[0].Pos(),
@@ -271,6 +292,7 @@ func stditerators(pass *analysis.Pass) (any, error) {
                                        indexVar = info.Defs[id].(*types.Var)
                                        curBody = curRange.ChildAt(edge.RangeStmt_Body, -1)
                                        elem, elemVar = chooseName(curBody, lenSel.X, indexVar)
+                                       elemPrefix := cond(row.seqn == 2, "_, ", "")
 
                                        //      for i    := range x.Len() {
                                        //          ----            ---
@@ -279,7 +301,7 @@ func stditerators(pass *analysis.Pass) (any, error) {
                                                {
                                                        Pos:     loop.Key.Pos(),
                                                        End:     loop.Key.End(),
-                                                       NewText: []byte(elem),
+                                                       NewText: []byte(elemPrefix + elem),
                                                },
                                                {
                                                        Pos:     lenSel.Sel.Pos(),
@@ -344,8 +366,8 @@ func stditerators(pass *analysis.Pass) (any, error) {
                        // (In the long run, version filters are not highly selective,
                        // so there's no need to do them first, especially as this check
                        // may be somewhat expensive.)
-                       if v, ok := methodGoVersion(row.pkgpath, row.typename, row.itermethod); !ok {
-                               panic("no version found")
+                       if v, err := methodGoVersion(row.pkgpath, row.typename, row.itermethod); err != nil {
+                               panic(err)
                        } else if !analyzerutil.FileUsesGoVersion(pass, astutil.EnclosingFile(curLenCall), v.String()) {
                                continue nextCall
                        }
@@ -371,7 +393,7 @@ func stditerators(pass *analysis.Pass) (any, error) {
 
 // methodGoVersion reports the version at which the method
 // (pkgpath.recvtype).method appeared in the standard library.
-func methodGoVersion(pkgpath, recvtype, method string) (stdlib.Version, bool) {
+func methodGoVersion(pkgpath, recvtype, method string) (stdlib.Version, error) {
        // TODO(adonovan): opt: this might be inefficient for large packages
        // like go/types. If so, memoize using a map (and kill two birds with
        // one stone by also memoizing the 'within' check above).
@@ -379,9 +401,9 @@ func methodGoVersion(pkgpath, recvtype, method string) (stdlib.Version, bool) {
                if sym.Kind == stdlib.Method {
                        _, recv, name := sym.SplitMethod()
                        if recv == recvtype && name == method {
-                               return sym.Version, true
+                               return sym.Version, nil
                        }
                }
        }
-       return 0, false
+       return 0, fmt.Errorf("methodGoVersion: %s.%s.%s missing from stdlib manifest", pkgpath, recvtype, method)
 }
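
For illustration (not in the diff), the rewrite this analyzer suggests for a seqn==1 row such as {"go/types", "Tuple", "Len", "At", "Variables", "v", 1}. The go/types iterator methods such as (*types.Tuple).Variables have been available since Go 1.23; the reflect rows added above assume the corresponding new reflect iterator methods named in the table.

    package main

    import (
        "fmt"
        "go/token"
        "go/types"
    )

    func main() {
        params := types.NewTuple(
            types.NewVar(token.NoPos, nil, "x", types.Typ[types.Int]),
            types.NewVar(token.NoPos, nil, "y", types.Typ[types.String]),
        )

        // Before: the Len/At indexing loop that stditerators matches.
        for i := 0; i < params.Len(); i++ {
            fmt.Println(params.At(i).Name())
        }

        // After: the suggested fix, ranging over the table's itermethod
        // ("Variables"). For a seqn==2 row the loop variable would be
        // written as "_, v", per the elemPrefix logic above.
        for v := range params.Variables() {
            fmt.Println(v.Name())
        }
    }
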
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/waitgroup.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/waitgroup.go
index 19564c69b60991c480660a670b02d67ed05f961d..abf5885cee28cc00eaf60dc5ce53bc98d6e79b6c 100644 (file)
@@ -137,8 +137,10 @@ func waitgroup(pass *analysis.Pass) (any, error) {
                }
 
                pass.Report(analysis.Diagnostic{
-                       Pos:     addCall.Pos(),
-                       End:     goStmt.End(),
+                       // go func() {
+                       // ~~~~~~~~~
+                       Pos:     goStmt.Pos(),
+                       End:     lit.Type.End(),
                        Message: "Goroutine creation can be simplified using WaitGroup.Go",
                        SuggestedFixes: []analysis.SuggestedFix{{
                                Message: "Simplify by using WaitGroup.Go",
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go
index fd9fe16472338d3584f3f622c8c34c7749a7892c..1afb07c452bdf909c56dec57df8f98f986dd159f 100644 (file)
@@ -137,6 +137,7 @@ type wrapper struct {
        callers []printfCaller
 }
 
+// printfCaller is a candidate print{,f} forwarding call from candidate wrapper w.
 type printfCaller struct {
        w    *wrapper
        call *ast.CallExpr // forwarding call (nil for implicit interface method -> impl calls)
@@ -246,7 +247,7 @@ func findPrintLike(pass *analysis.Pass, res *Result) {
                        switch lhs := lhs.(type) {
                        case *ast.Ident:
                                // variable: wrapf = func(...)
-                               v = info.ObjectOf(lhs).(*types.Var)
+                               v, _ = info.ObjectOf(lhs).(*types.Var)
                        case *ast.SelectorExpr:
                                if sel, ok := info.Selections[lhs]; ok {
                                        // struct field: x.wrapf = func(...)
@@ -291,35 +292,35 @@ func findPrintLike(pass *analysis.Pass, res *Result) {
        //   var _ Logger = myLogger{}
        impls := methodImplementations(pass)
 
+       // doCall records a call from one wrapper to another.
+       doCall := func(w *wrapper, callee types.Object, call *ast.CallExpr) {
+               // Call from one wrapper candidate to another?
+               // Record the edge so that if callee is found to be
+               // a true wrapper, w will be too.
+               if w2, ok := byObj[callee]; ok {
+                       w2.callers = append(w2.callers, printfCaller{w, call})
+               }
+
+               // Is the candidate a true wrapper, because it calls
+               // a known print{,f}-like function from the allowlist
+               // or an imported fact, or another wrapper found
+               // to be a true wrapper?
+               // If so, convert all w's callers to kind.
+               kind := callKind(pass, callee, res)
+               if kind != KindNone {
+                       propagate(pass, w, call, kind, res)
+               }
+       }
+
        // Pass 2: scan the body of each wrapper function
        // for calls to other printf-like functions.
        for _, w := range wrappers {
 
-               // doCall records a call from one wrapper to another.
-               doCall := func(callee types.Object, call *ast.CallExpr) {
-                       // Call from one wrapper candidate to another?
-                       // Record the edge so that if callee is found to be
-                       // a true wrapper, w will be too.
-                       if w2, ok := byObj[callee]; ok {
-                               w2.callers = append(w2.callers, printfCaller{w, call})
-                       }
-
-                       // Is the candidate a true wrapper, because it calls
-                       // a known print{,f}-like function from the allowlist
-                       // or an imported fact, or another wrapper found
-                       // to be a true wrapper?
-                       // If so, convert all w's callers to kind.
-                       kind := callKind(pass, callee, res)
-                       if kind != KindNone {
-                               checkForward(pass, w, call, kind, res)
-                       }
-               }
-
                // An interface method has no body, but acts
                // like an implicit call to each implementing method.
                if w.curBody.Inspector() == nil {
                        for impl := range impls[w.obj.(*types.Func)] {
-                               doCall(impl, nil)
+                               doCall(w, impl, nil)
                        }
                        continue // (no body)
                }
@@ -360,7 +361,7 @@ func findPrintLike(pass *analysis.Pass, res *Result) {
                        case *ast.CallExpr:
                                if len(n.Args) > 0 && match(info, n.Args[len(n.Args)-1], w.args) {
                                        if callee := typeutil.Callee(pass.TypesInfo, n); callee != nil {
-                                               doCall(callee, n)
+                                               doCall(w, callee, n)
                                        }
                                }
                        }
@@ -414,44 +415,15 @@ func match(info *types.Info, arg ast.Expr, param *types.Var) bool {
        return ok && info.ObjectOf(id) == param
 }
 
-// checkForward checks whether a forwarding wrapper is forwarding correctly.
-// If so, it propagates changes in wrapper kind information backwards
-// through through the wrapper.callers graph of forwarding calls.
-//
-// If not, it reports a diagnostic that the user wrote
-// fmt.Printf(format, args) instead of fmt.Printf(format, args...).
-func checkForward(pass *analysis.Pass, w *wrapper, call *ast.CallExpr, kind Kind, res *Result) {
+// propagate propagates changes in wrapper (non-None) kind information backwards
+// through through the wrapper.callers graph of well-formed forwarding calls.
+func propagate(pass *analysis.Pass, w *wrapper, call *ast.CallExpr, kind Kind, res *Result) {
        // Check correct call forwarding.
-       // (Interface methods forward correctly by construction.)
-       if call != nil {
-               matched := kind == KindPrint ||
-                       kind != KindNone && len(call.Args) >= 2 && match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format)
-               if !matched {
-                       return
-               }
-
-               if !call.Ellipsis.IsValid() {
-                       typ, ok := pass.TypesInfo.Types[call.Fun].Type.(*types.Signature)
-                       if !ok {
-                               return
-                       }
-                       if len(call.Args) > typ.Params().Len() {
-                               // If we're passing more arguments than what the
-                               // print/printf function can take, adding an ellipsis
-                               // would break the program. For example:
-                               //
-                               //   func foo(arg1 string, arg2 ...interface{}) {
-                               //       fmt.Printf("%s %v", arg1, arg2)
-                               //   }
-                               return
-                       }
-                       desc := "printf"
-                       if kind == KindPrint {
-                               desc = "print"
-                       }
-                       pass.ReportRangef(call, "missing ... in args forwarded to %s-like function", desc)
-                       return
-               }
+       //
+       // Interface methods (call==nil) forward
+       // correctly by construction.
+       if call != nil && !checkForward(pass, w, call, kind) {
+               return
        }
 
        // If the candidate's print{,f} status becomes known,
@@ -471,9 +443,48 @@ func checkForward(pass *analysis.Pass, w *wrapper, call *ast.CallExpr, kind Kind
 
                // Propagate kind back to known callers.
                for _, caller := range w.callers {
-                       checkForward(pass, caller.w, caller.call, kind, res)
+                       propagate(pass, caller.w, caller.call, kind, res)
+               }
+       }
+}
+
+// checkForward checks whether a call from wrapper w is a well-formed
+// forwarding call of the specified (non-None) kind.
+//
+// If not, it reports a diagnostic that the user wrote
+// fmt.Printf(format, args) instead of fmt.Printf(format, args...).
+func checkForward(pass *analysis.Pass, w *wrapper, call *ast.CallExpr, kind Kind) bool {
+       // Printf/Errorf calls must delegate the format string.
+       switch kind {
+       case KindPrintf, KindErrorf:
+               if len(call.Args) < 2 || !match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format) {
+                       return false
                }
        }
+
+       // The args... delegation must be variadic.
+       // (That args is actually delegated was
+       // established before the root call to doCall.)
+       if !call.Ellipsis.IsValid() {
+               typ, ok := pass.TypesInfo.Types[call.Fun].Type.(*types.Signature)
+               if !ok {
+                       return false
+               }
+               if len(call.Args) > typ.Params().Len() {
+                       // If we're passing more arguments than what the
+                       // print/printf function can take, adding an ellipsis
+                       // would break the program. For example:
+                       //
+                       //   func foo(arg1 string, arg2 ...interface{}) {
+                       //       fmt.Printf("%s %v", arg1, arg2)
+                       //   }
+                       return false
+               }
+               pass.ReportRangef(call, "missing ... in args forwarded to %s-like function", kind)
+               return false
+       }
+
+       return true
 }
 
 func origin(obj types.Object) types.Object {
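
An editorial sketch (not part of the diff) of the two cases the propagate/checkForward split distinguishes: a well-formed forwarding wrapper, and one that forgets the ellipsis and therefore receives the "missing ... in args" diagnostic instead of printf-wrapper status.

    package main

    import "fmt"

    // logf forwards both the format string and the variadic args (with
    // "..."), so the printf analyzer treats it as a printf-like wrapper
    // and goes on to check its callers' format strings.
    func logf(format string, args ...any) {
        fmt.Printf(format, args...)
    }

    // badf forwards args without "...": checkForward reports
    // "missing ... in args forwarded to printf-like function"
    // and badf is not recorded as a printf wrapper.
    func badf(format string, args ...any) {
        fmt.Printf(format, args)
    }

    func main() {
        logf("%d\n", 42)
        badf("%d\n", 42)
    }
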
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/analysis/driverutil/fix.go b/src/cmd/vendor/golang.org/x/tools/internal/analysis/driverutil/fix.go
index ef06cf9bde2d7501f4ab0feb4a1d0ac4c169ca59..37b09588a7a1b8d0aef66320d66ad86f65f6a009 100644 (file)
@@ -339,6 +339,9 @@ fixloop:
 // information for the fixed file and thus cannot accurately tell
 // whether k is among the free names of T{k: 0}, which requires
 // knowledge of whether T is a struct type.
+//
+// Like [imports.Process] (the core of x/tools/cmd/goimports), it also
+// merges import decls.
 func FormatSourceRemoveImports(pkg *types.Package, src []byte) ([]byte, error) {
        // This function was reduced from the "strict entire file"
        // path through [format.Source].
@@ -353,6 +356,10 @@ func FormatSourceRemoveImports(pkg *types.Package, src []byte) ([]byte, error) {
 
        removeUnneededImports(fset, pkg, file)
 
+       // TODO(adonovan): to generate cleaner edits when adding an import,
+       // consider adding a call to imports.mergeImports; however, it does
+       // cause comments to migrate.
+
        // printerNormalizeNumbers means to canonicalize number literal prefixes
        // and exponents while printing. See https://golang.org/doc/go1.13#gofmt.
        //
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/analysis/driverutil/print.go b/src/cmd/vendor/golang.org/x/tools/internal/analysis/driverutil/print.go
index 7fc42a5ef7bb555c3b797003576a32aeeb914742..5458846857d518d74ee870246bc008798cbf485d 100644 (file)
@@ -7,6 +7,7 @@ package driverutil
 // This file defined output helpers common to all drivers.
 
 import (
+       "cmp"
        "encoding/json"
        "fmt"
        "go/token"
@@ -76,11 +77,10 @@ type JSONSuggestedFix struct {
 }
 
 // A JSONDiagnostic describes the JSON schema of an analysis.Diagnostic.
-//
-// TODO(matloob): include End position if present.
 type JSONDiagnostic struct {
        Category       string                   `json:"category,omitempty"`
        Posn           string                   `json:"posn"` // e.g. "file.go:line:column"
+       End            string                   `json:"end"`  // (ditto)
        Message        string                   `json:"message"`
        SuggestedFixes []JSONSuggestedFix       `json:"suggested_fixes,omitempty"`
        Related        []JSONRelatedInformation `json:"related,omitempty"`
@@ -88,10 +88,9 @@ type JSONDiagnostic struct {
 
 // A JSONRelated describes a secondary position and message related to
 // a primary diagnostic.
-//
-// TODO(adonovan): include End position if present.
 type JSONRelatedInformation struct {
        Posn    string `json:"posn"` // e.g. "file.go:line:column"
+       End     string `json:"end"`  // (ditto)
        Message string `json:"message"`
 }
 
@@ -127,12 +126,14 @@ func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis.
                        for _, r := range f.Related {
                                related = append(related, JSONRelatedInformation{
                                        Posn:    fset.Position(r.Pos).String(),
+                                       End:     fset.Position(cmp.Or(r.End, r.Pos)).String(),
                                        Message: r.Message,
                                })
                        }
                        jdiag := JSONDiagnostic{
                                Category:       f.Category,
                                Posn:           fset.Position(f.Pos).String(),
+                               End:            fset.Position(cmp.Or(f.End, f.Pos)).String(),
                                Message:        f.Message,
                                SuggestedFixes: fixes,
                                Related:        related,
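
For illustration (not from the commit): the shape of the -json diagnostic output once end positions are included, using a trimmed-down stand-in for JSONDiagnostic (the real type also carries suggested fixes and related information).

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // jsonDiagnostic is a stand-in with the fields shown in the hunk above.
    type jsonDiagnostic struct {
        Category string `json:"category,omitempty"`
        Posn     string `json:"posn"`
        End      string `json:"end"`
        Message  string `json:"message"`
    }

    func main() {
        d := jsonDiagnostic{
            Category: "printf",
            Posn:     "main.go:10:2",
            // cmp.Or falls back to Pos when Diagnostic.End is unset,
            // so "end" is always populated.
            End:     "main.go:10:27",
            Message: "missing ... in args forwarded to printf-like function",
        }
        out, _ := json.MarshalIndent(d, "", "  ")
        fmt.Println(string(out))
    }
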
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/refactor/delete.go b/src/cmd/vendor/golang.org/x/tools/internal/refactor/delete.go
index 9b96b1dbf1fcdf695d0e23bfbe965ad87430a9f8..54d0b5f0386fe8cfc8065d69507db07bf1ff427b 100644 (file)
@@ -13,7 +13,6 @@ import (
        "go/types"
        "slices"
 
-       "golang.org/x/tools/go/analysis"
        "golang.org/x/tools/go/ast/edge"
        "golang.org/x/tools/go/ast/inspector"
        "golang.org/x/tools/internal/astutil"
@@ -32,7 +31,7 @@ import (
 //
 // If it cannot make the necessary edits, such as for a function
 // parameter or result, it returns nil.
-func DeleteVar(tokFile *token.File, info *types.Info, curId inspector.Cursor) []analysis.TextEdit {
+func DeleteVar(tokFile *token.File, info *types.Info, curId inspector.Cursor) []Edit {
        switch ek, _ := curId.ParentEdge(); ek {
        case edge.ValueSpec_Names:
                return deleteVarFromValueSpec(tokFile, info, curId)
@@ -52,7 +51,7 @@ func DeleteVar(tokFile *token.File, info *types.Info, curId inspector.Cursor) []
 // Precondition: curId is Ident beneath ValueSpec.Names beneath GenDecl.
 //
 // See also [deleteVarFromAssignStmt], which has parallel structure.
-func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent inspector.Cursor) []analysis.TextEdit {
+func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent inspector.Cursor) []Edit {
        var (
                id      = curIdent.Node().(*ast.Ident)
                curSpec = curIdent.Parent()
@@ -95,7 +94,7 @@ func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent insp
                        pos = spec.Names[index].Pos()
                        end = spec.Names[index+1].Pos()
                }
-               return []analysis.TextEdit{{
+               return []Edit{{
                        Pos: pos,
                        End: end,
                }}
@@ -111,7 +110,7 @@ func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent insp
                        //
                        // var _, lhs1 = rhs0, rhs1
                        //      ------       ------
-                       return []analysis.TextEdit{
+                       return []Edit{
                                {
                                        Pos: spec.Names[index-1].End(),
                                        End: spec.Names[index].End(),
@@ -126,7 +125,7 @@ func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent insp
                        //
                        // var lhs0, _ = rhs0, rhs1
                        //     ------    ------
-                       return []analysis.TextEdit{
+                       return []Edit{
                                {
                                        Pos: spec.Names[index].Pos(),
                                        End: spec.Names[index+1].Pos(),
@@ -141,7 +140,7 @@ func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent insp
 
        // We cannot delete the RHS.
        // Blank out the LHS.
-       return []analysis.TextEdit{{
+       return []Edit{{
                Pos:     id.Pos(),
                End:     id.End(),
                NewText: []byte("_"),
@@ -151,7 +150,7 @@ func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent insp
 // Precondition: curId is Ident beneath AssignStmt.Lhs.
 //
 // See also [deleteVarFromValueSpec], which has parallel structure.
-func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent inspector.Cursor) []analysis.TextEdit {
+func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent inspector.Cursor) []Edit {
        var (
                id      = curIdent.Node().(*ast.Ident)
                curStmt = curIdent.Parent()
@@ -192,7 +191,7 @@ func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent ins
                        //
                        // _, lhs1 := rhs0, rhs1
                        //  ------        ------
-                       return []analysis.TextEdit{
+                       return []Edit{
                                {
                                        Pos: assign.Lhs[index-1].End(),
                                        End: assign.Lhs[index].End(),
@@ -207,7 +206,7 @@ func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent ins
                        //
                        // lhs0, _ := rhs0, rhs1
                        // ------     ------
-                       return []analysis.TextEdit{
+                       return []Edit{
                                {
                                        Pos: assign.Lhs[index].Pos(),
                                        End: assign.Lhs[index+1].Pos(),
@@ -222,7 +221,7 @@ func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent ins
 
        // We cannot delete the RHS.
        // Blank out the LHS.
-       edits := []analysis.TextEdit{{
+       edits := []Edit{{
                Pos:     id.Pos(),
                End:     id.End(),
                NewText: []byte("_"),
@@ -233,7 +232,7 @@ func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent ins
        // assignment to avoid a "no new variables on left
        // side of :=" error.
        if !declaresOtherNames {
-               edits = append(edits, analysis.TextEdit{
+               edits = append(edits, Edit{
                        Pos:     assign.TokPos,
                        End:     assign.TokPos + token.Pos(len(":=")),
                        NewText: []byte("="),
@@ -246,7 +245,7 @@ func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent ins
 // DeleteSpec returns edits to delete the {Type,Value}Spec identified by curSpec.
 //
 // TODO(adonovan): add test suite. Test for consts as well.
-func DeleteSpec(tokFile *token.File, curSpec inspector.Cursor) []analysis.TextEdit {
+func DeleteSpec(tokFile *token.File, curSpec inspector.Cursor) []Edit {
        var (
                spec    = curSpec.Node().(ast.Spec)
                curDecl = curSpec.Parent()
@@ -277,7 +276,7 @@ func DeleteSpec(tokFile *token.File, curSpec inspector.Cursor) []analysis.TextEd
                //         -----
                end = decl.Specs[index+1].Pos()
        }
-       return []analysis.TextEdit{{
+       return []Edit{{
                Pos: pos,
                End: end,
        }}
@@ -286,7 +285,7 @@ func DeleteSpec(tokFile *token.File, curSpec inspector.Cursor) []analysis.TextEd
 // DeleteDecl returns edits to delete the ast.Decl identified by curDecl.
 //
 // TODO(adonovan): add test suite.
-func DeleteDecl(tokFile *token.File, curDecl inspector.Cursor) []analysis.TextEdit {
+func DeleteDecl(tokFile *token.File, curDecl inspector.Cursor) []Edit {
        decl := curDecl.Node().(ast.Decl)
 
        ek, _ := curDecl.ParentEdge()
@@ -321,7 +320,7 @@ func DeleteDecl(tokFile *token.File, curDecl inspector.Cursor) []analysis.TextEd
                        }
                }
 
-               return []analysis.TextEdit{{
+               return []Edit{{
                        Pos: pos,
                        End: end,
                }}
@@ -366,7 +365,7 @@ func filterPos(nds []*ast.Comment, start, end token.Pos) (token.Pos, token.Pos,
 // it removes whole lines like
 //
 //     stmt // comment
-func DeleteStmt(file *token.File, curStmt inspector.Cursor) []analysis.TextEdit {
+func DeleteStmt(file *token.File, curStmt inspector.Cursor) []Edit {
        // if the stmt is on a line by itself, or a range of lines, delete the whole thing
        // including comments. Except for the heads of switches, type
        // switches, and for-statements that's the usual case. Complexity occurs where
@@ -516,13 +515,13 @@ Big:
                }
        }
 
-       return []analysis.TextEdit{{Pos: leftEdit, End: rightEdit}}
+       return []Edit{{Pos: leftEdit, End: rightEdit}}
 }
 
 // DeleteUnusedVars computes the edits required to delete the
 // declarations of any local variables whose last uses are in the
 // curDelend subtree, which is about to be deleted.
-func DeleteUnusedVars(index *typeindex.Index, info *types.Info, tokFile *token.File, curDelend inspector.Cursor) []analysis.TextEdit {
+func DeleteUnusedVars(index *typeindex.Index, info *types.Info, tokFile *token.File, curDelend inspector.Cursor) []Edit {
        // TODO(adonovan): we might want to generalize this by
        // splitting the two phases below, so that we can gather
        // across a whole sequence of deletions then finally compute the
@@ -539,7 +538,7 @@ func DeleteUnusedVars(index *typeindex.Index, info *types.Info, tokFile *token.F
        }
 
        // Delete declaration of each var that became unused.
-       var edits []analysis.TextEdit
+       var edits []Edit
        for v, count := range delcount {
                if len(slices.Collect(index.Uses(v))) == count {
                        if curDefId, ok := index.Def(v); ok {
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/refactor/edit.go b/src/cmd/vendor/golang.org/x/tools/internal/refactor/edit.go
new file mode 100644 (file)
index 0000000..42be9a5
--- /dev/null
@@ -0,0 +1,15 @@
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package refactor
+
+// This is the only file in this package that should import analysis.
+//
+// TODO(adonovan): consider unaliasing the type to break the
+// dependency. (The ergonomics of slice append are unfortunate.)
+
+import "golang.org/x/tools/go/analysis"
+
+// An Edit describes a deletion and/or an insertion.
+type Edit = analysis.TextEdit
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/refactor/imports.go b/src/cmd/vendor/golang.org/x/tools/internal/refactor/imports.go
index b5440d896b9bfc6dde4d525bcb9baa9d9876c708..e1860ab06598802e469d893399c02e87b5d544b7 100644 (file)
@@ -7,13 +7,12 @@ package refactor
 // This file defines operations for computing edits to imports.
 
 import (
-       "fmt"
        "go/ast"
        "go/token"
        "go/types"
        pathpkg "path"
+       "strconv"
 
-       "golang.org/x/tools/go/analysis"
        "golang.org/x/tools/internal/packagepath"
 )
 
@@ -35,7 +34,7 @@ import (
 // package declares member.
 //
 // AddImport does not mutate its arguments.
-func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member string, pos token.Pos) (prefix string, edits []analysis.TextEdit) {
+func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member string, pos token.Pos) (prefix string, edits []Edit) {
        // Find innermost enclosing lexical block.
        scope := info.Scopes[file].Innermost(pos)
        if scope == nil {
@@ -69,33 +68,53 @@ func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member
        newName := preferredName
        if preferredName != "_" {
                newName = FreshName(scope, pos, preferredName)
+               prefix = newName + "."
        }
 
-       // Create a new import declaration either before the first existing
-       // declaration (which must exist), including its comments; or
-       // inside the declaration, if it is an import group.
-       //
        // Use a renaming import whenever the preferred name is not
        // available, or the chosen name does not match the last
        // segment of its path.
-       newText := fmt.Sprintf("%q", pkgpath)
-       if newName != preferredName || newName != pathpkg.Base(pkgpath) {
-               newText = fmt.Sprintf("%s %q", newName, pkgpath)
+       if newName == preferredName && newName == pathpkg.Base(pkgpath) {
+               newName = ""
+       }
+
+       return prefix, AddImportEdits(file, newName, pkgpath)
+}
+
+// AddImportEdits returns the edits to add an import of the specified
+// package, without any analysis of whether this is necessary or safe.
+// If name is nonempty, it is used as an explicit [ImportSpec.Name].
+//
+// A sequence of calls to AddImportEdits that each add the file's
+// first import (or in a file that does not have a grouped import) may
+// result in multiple import declarations, rather than a single one
+// with multiple ImportSpecs. However, a subsequent run of
+// x/tools/cmd/goimports ([imports.Process]) will combine them.
+//
+// AddImportEdits does not mutate the AST.
+func AddImportEdits(file *ast.File, name, pkgpath string) []Edit {
+       newText := strconv.Quote(pkgpath)
+       if name != "" {
+               newText = name + " " + newText
        }
 
+       // Create a new import declaration either before the first existing
+       // declaration (which must exist), including its comments; or
+       // inside the declaration, if it is an import group.
        decl0 := file.Decls[0]
-       var before ast.Node = decl0
+       before := decl0.Pos()
        switch decl0 := decl0.(type) {
        case *ast.GenDecl:
                if decl0.Doc != nil {
-                       before = decl0.Doc
+                       before = decl0.Doc.Pos()
                }
        case *ast.FuncDecl:
                if decl0.Doc != nil {
-                       before = decl0.Doc
+                       before = decl0.Doc.Pos()
                }
        }
-       if gd, ok := before.(*ast.GenDecl); ok && gd.Tok == token.IMPORT && gd.Rparen.IsValid() {
+       var pos token.Pos
+       if gd, ok := decl0.(*ast.GenDecl); ok && gd.Tok == token.IMPORT && gd.Rparen.IsValid() {
                // Have existing grouped import ( ... ) decl.
                if packagepath.IsStdPackage(pkgpath) && len(gd.Specs) > 0 {
                        // Add spec for a std package before
@@ -116,10 +135,13 @@ func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member
                // No import decl, or non-grouped import.
                // Add a new import decl before first decl.
                // (gofmt will merge multiple import decls.)
-               pos = before.Pos()
+               //
+               // TODO(adonovan): do better here; plunder the
+               // mergeImports logic from [imports.Process].
+               pos = before
                newText = "import " + newText + "\n\n"
        }
-       return newName + ".", []analysis.TextEdit{{
+       return []Edit{{
                Pos:     pos,
                End:     pos,
                NewText: []byte(newText),
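
Because internal/refactor cannot be imported from outside x/tools, here is a standalone sketch of the idea behind AddImportEdits: compute a purely textual insertion that adds a spec to an existing grouped import declaration. Placement here is simplified to "just before the closing parenthesis"; the real code chooses a position relative to existing specs and handles the no-group cases shown above.

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
        "strconv"
    )

    func main() {
        src := "package p\n\nimport (\n\t\"fmt\"\n)\n\nfunc f() { fmt.Println() }\n"
        fset := token.NewFileSet()
        f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
        if err != nil {
            panic(err)
        }

        // Quote the path (a renaming import would prefix a name), then
        // splice it into the grouped import declaration as a text edit.
        newText := "\t" + strconv.Quote("os") + "\n"
        gd := f.Decls[0].(*ast.GenDecl)
        insert := fset.Position(gd.Rparen).Offset
        fmt.Print(src[:insert] + newText + src[insert:])
    }
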
diff --git a/src/cmd/vendor/golang.org/x/tools/internal/refactor/inline/inline.go b/src/cmd/vendor/golang.org/x/tools/internal/refactor/inline/inline.go
index af1252cee86c5aab999f299070be39982e66fc95..f7e37fd7da8483bb1677450061f8f4b7c2997373 100644 (file)
@@ -11,14 +11,12 @@ import (
        "go/constant"
        "go/format"
        "go/parser"
-       "go/printer"
        "go/token"
        "go/types"
        "maps"
        pathpkg "path"
        "reflect"
        "slices"
-       "strconv"
        "strings"
 
        "golang.org/x/tools/go/ast/astutil"
@@ -26,6 +24,7 @@ import (
        internalastutil "golang.org/x/tools/internal/astutil"
        "golang.org/x/tools/internal/astutil/free"
        "golang.org/x/tools/internal/packagepath"
+       "golang.org/x/tools/internal/refactor"
        "golang.org/x/tools/internal/typeparams"
        "golang.org/x/tools/internal/typesinternal"
        "golang.org/x/tools/internal/versions"
@@ -35,12 +34,11 @@ import (
 //
 // The client is responsible for populating this struct and passing it to Inline.
 type Caller struct {
-       Fset    *token.FileSet
-       Types   *types.Package
-       Info    *types.Info
-       File    *ast.File
-       Call    *ast.CallExpr
-       Content []byte // source of file containing (TODO(adonovan): see comment at Result.Content)
+       Fset  *token.FileSet
+       Types *types.Package
+       Info  *types.Info
+       File  *ast.File
+       Call  *ast.CallExpr
 
        // CountUses is an optional optimized computation of
        // the number of times pkgname appears in Info.Uses.
@@ -61,26 +59,9 @@ type Options struct {
 
 // Result holds the result of code transformation.
 type Result struct {
-       // TODO(adonovan): the only textual results that should be
-       // needed are (1) an edit in the vicinity of the call (either
-       // to the CallExpr or one of its ancestors), and optionally
-       // (2) an edit to the import declaration.
-       // Change the inliner API to return a list of edits,
-       // and not to accept a Caller.Content, as it is only
-       // temptation to use such algorithmically expensive
-       // operations as reformatting the entire file, which is
-       // a significant source of non-linear dynamic behavior;
-       // see https://go.dev/issue/75773.
-       // This will require a sequence of changes to the tests
-       // and the inliner algorithm itself.
-       Content     []byte // formatted, transformed content of caller file
-       Literalized bool   // chosen strategy replaced callee() with func(){...}()
-       BindingDecl bool   // transformation added "var params = args" declaration
-
-       // TODO(adonovan): provide an API for clients that want structured
-       // output: a list of import additions and deletions plus one or more
-       // localized diffs (or even AST transformations, though ownership and
-       // mutation are tricky) near the call site.
+       Edits       []refactor.Edit // edits around CallExpr and imports
+       Literalized bool            // chosen strategy replaced callee() with func(){...}()
+       BindingDecl bool            // transformation added "var params = args" declaration
 }
 
 // Inline inlines the called function (callee) into the function call (caller)
@@ -117,14 +98,8 @@ func (st *state) inline() (*Result, error) {
                debugFormatNode(caller.Fset, caller.Call),
                caller.Fset.PositionFor(caller.Call.Lparen, false))
 
-       if !consistentOffsets(caller) {
-               return nil, fmt.Errorf("internal error: caller syntax positions are inconsistent with file content (did you forget to use FileSet.PositionFor when computing the file name?)")
-       }
-
-       // Break the string literal so we can use inlining in this file. :)
-       if ast.IsGenerated(caller.File) &&
-               bytes.Contains(caller.Content, []byte("// Code generated by "+"cmd/cgo; DO NOT EDIT.")) {
-               return nil, fmt.Errorf("cannot inline calls from files that import \"C\"")
+       if ast.IsGenerated(caller.File) {
+               return nil, fmt.Errorf("cannot inline calls from generated files")
        }
 
        res, err := st.inlineCall()
@@ -224,37 +199,10 @@ func (st *state) inline() (*Result, error) {
                }
        }
 
-       // File rewriting. This proceeds in multiple passes, in order to maximally
-       // preserve comment positioning. (This could be greatly simplified once
-       // comments are stored in the tree.)
-       //
-       // Don't call replaceNode(caller.File, res.old, res.new)
-       // as it mutates the caller's syntax tree.
-       // Instead, splice the file, replacing the extent of the "old"
-       // node by a formatting of the "new" node, and re-parse.
-       // We'll fix up the imports on this new tree, and format again.
-       //
-       // Inv: f is the result of parsing content, using fset.
-       var (
-               content = caller.Content
-               fset    = caller.Fset
-               f       *ast.File // parsed below
-       )
-       reparse := func() error {
-               const mode = parser.ParseComments | parser.SkipObjectResolution | parser.AllErrors
-               f, err = parser.ParseFile(fset, "callee.go", content, mode)
-               if err != nil {
-                       // Something has gone very wrong.
-                       logf("failed to reparse <<%s>>: %v", string(content), err) // debugging
-                       return err
-               }
-               return nil
-       }
+       var edits []refactor.Edit
+
+       // Format the cloned callee.
        {
-               start := offsetOf(fset, res.old.Pos())
-               end := offsetOf(fset, res.old.End())
-               var out bytes.Buffer
-               out.Write(content[:start])
                // TODO(adonovan): might it make more sense to use
                // callee.Fset when formatting res.new?
                // The new tree is a mix of (cloned) caller nodes for
@@ -269,148 +217,106 @@ func (st *state) inline() (*Result, error) {
                // Precise comment handling would make this a
                // non-issue. Formatting wouldn't really need a
                // FileSet at all.
+
+               var out bytes.Buffer
                if elideBraces {
                        for i, stmt := range res.new.(*ast.BlockStmt).List {
                                if i > 0 {
                                        out.WriteByte('\n')
                                }
-                               if err := format.Node(&out, fset, stmt); err != nil {
+                               if err := format.Node(&out, caller.Fset, stmt); err != nil {
                                        return nil, err
                                }
                        }
                } else {
-                       if err := format.Node(&out, fset, res.new); err != nil {
+                       if err := format.Node(&out, caller.Fset, res.new); err != nil {
                                return nil, err
                        }
                }
-               out.Write(content[end:])
-               content = out.Bytes()
-               if err := reparse(); err != nil {
-                       return nil, err
-               }
-       }
 
-       // Add new imports that are still used.
-       newImports := trimNewImports(res.newImports, res.new)
-       // Insert new imports after last existing import,
-       // to avoid migration of pre-import comments.
-       // The imports will be organized below.
-       if len(newImports) > 0 {
-               // If we have imports to add, do so independent of the rest of the file.
-               // Otherwise, the length of the new imports may consume floating comments,
-               // causing them to be printed inside the imports block.
-               var (
-                       importDecl    *ast.GenDecl
-                       comments      []*ast.CommentGroup // relevant comments.
-                       before, after []byte              // pre- and post-amble for the imports block.
-               )
-               if len(f.Imports) > 0 {
-                       // Append specs to existing import decl
-                       importDecl = f.Decls[0].(*ast.GenDecl)
-                       for _, comment := range f.Comments {
-                               // Filter comments. Don't use CommentMap.Filter here, because we don't
-                               // want to include comments that document the import decl itself, for
-                               // example:
-                               //
-                               //  // We don't want this comment to be duplicated.
-                               //  import (
-                               //    "something"
-                               //  )
-                               if importDecl.Pos() <= comment.Pos() && comment.Pos() < importDecl.End() {
-                                       comments = append(comments, comment)
-                               }
-                       }
-                       before = content[:offsetOf(fset, importDecl.Pos())]
-                       importDecl.Doc = nil // present in before
-                       after = content[offsetOf(fset, importDecl.End()):]
-               } else {
-                       // Insert new import decl.
-                       importDecl = &ast.GenDecl{Tok: token.IMPORT}
-                       f.Decls = prepend[ast.Decl](importDecl, f.Decls...)
-
-                       // Make room for the new declaration after the package declaration.
-                       pkgEnd := f.Name.End()
-                       file := fset.File(pkgEnd)
-                       if file == nil {
-                               logf("internal error: missing pkg file")
-                               return nil, fmt.Errorf("missing pkg file for %s", f.Name.Name)
-                       }
-                       // Preserve any comments after the package declaration, by splicing in
-                       // the new import block after the end of the package declaration line.
-                       line := file.Line(pkgEnd)
-                       if line < len(file.Lines()) { // line numbers are 1-based
-                               nextLinePos := file.LineStart(line + 1)
-                               nextLine := offsetOf(fset, nextLinePos)
-                               before = slices.Concat(content[:nextLine], []byte("\n"))
-                               after = slices.Concat([]byte("\n\n"), content[nextLine:])
-                       } else {
-                               before = slices.Concat(content, []byte("\n\n"))
-                       }
-               }
-               // Add new imports.
-               // Set their position to after the last position of the old imports, to keep
-               // comments on the old imports from moving.
-               lastPos := token.NoPos
-               if lastSpec := last(importDecl.Specs); lastSpec != nil {
-                       lastPos = lastSpec.Pos()
-                       if c := lastSpec.(*ast.ImportSpec).Comment; c != nil {
-                               lastPos = c.Pos()
-                       }
-               }
-               for _, imp := range newImports {
-                       // Check that the new imports are accessible.
-                       path, _ := strconv.Unquote(imp.spec.Path.Value)
-                       if !packagepath.CanImport(caller.Types.Path(), path) {
-                               return nil, fmt.Errorf("can't inline function %v as its body refers to inaccessible package %q", callee, path)
-                       }
-                       if lastPos.IsValid() {
-                               lastPos++
-                               imp.spec.Path.ValuePos = lastPos
-                       }
-                       importDecl.Specs = append(importDecl.Specs, imp.spec)
-               }
+               edits = append(edits, refactor.Edit{
+                       Pos:     res.old.Pos(),
+                       End:     res.old.End(),
+                       NewText: out.Bytes(),
+               })
+       }
 
-               var out bytes.Buffer
-               out.Write(before)
-               commented := &printer.CommentedNode{
-                       Node:     importDecl,
-                       Comments: comments,
+       // Add new imports.
+       //
+       // It's possible that not all are needed (e.g. for type names
+       // that melted away), but we'll let the client (such as an
+       // analysis driver) clean it up since it must remove unused
+       // imports anyway.
+       for _, imp := range res.newImports {
+               // Check that the new imports are accessible.
+               if !packagepath.CanImport(caller.Types.Path(), imp.path) {
+                       return nil, fmt.Errorf("can't inline function %v as its body refers to inaccessible package %q", callee, imp.path)
                }
 
-               if err := format.Node(&out, fset, commented); err != nil {
-                       logf("failed to format new importDecl: %v", err) // debugging
-                       return nil, err
-               }
-               out.Write(after)
-               content = out.Bytes()
-               if err := reparse(); err != nil {
-                       return nil, err
-               }
-       }
-       // Delete imports referenced only by caller.Call.Fun.
-       for _, oldImport := range res.oldImports {
-               specToDelete := oldImport.spec
+               // We've already validated the import, so we call
+               // AddImportEdits directly to compute the edit.
                name := ""
-               if specToDelete.Name != nil {
-                       name = specToDelete.Name.Name
+               if imp.explicit {
+                       name = imp.name
                }
-               path, _ := strconv.Unquote(specToDelete.Path.Value)
-               astutil.DeleteNamedImport(caller.Fset, f, name, path)
-       }
-
-       var out bytes.Buffer
-       if err := format.Node(&out, caller.Fset, f); err != nil {
-               return nil, err
+               edits = append(edits, refactor.AddImportEdits(caller.File, name, imp.path)...)
        }
-       newSrc := out.Bytes()
 
        literalized := false
        if call, ok := res.new.(*ast.CallExpr); ok && is[*ast.FuncLit](call.Fun) {
                literalized = true
        }
 
+       // Delete imports referenced only by caller.Call.Fun.
+       //
+       // It would be ambiguous to let the client (e.g. analysis driver)
+       // remove unneeded imports in this case, because it is common
+       // to inline a call from "dir1/a".F to "dir2/a".F, which
+       // leaves two imports of packages named 'a', both providing a.F.
+       //
+       // However, the only two import deletion tools at our disposal
+       // are astutil.DeleteNamedImport, which mutates the AST, and
+       // refactor.Delete{Spec,Decl}, which need a Cursor. So we need
+       // to reinvent the wheel here.
+       for _, oldImport := range res.oldImports {
+               spec := oldImport.spec
+
+               // Include adjacent comments.
+               pos := spec.Pos()
+               if doc := spec.Doc; doc != nil {
+                       pos = doc.Pos()
+               }
+               end := spec.End()
+               if doc := spec.Comment; doc != nil {
+                       end = doc.End()
+               }
+
+               // Find the enclosing import decl.
+               // If it's paren-less, we must delete it too.
+               for _, decl := range caller.File.Decls {
+                       decl, ok := decl.(*ast.GenDecl)
+                       if !(ok && decl.Tok == token.IMPORT) {
+                               break // stop at first non-import decl
+                       }
+                       if internalastutil.NodeContainsPos(decl, spec.Pos()) && !decl.Rparen.IsValid() {
+                               // Include adjacent comments.
+                               pos = decl.Pos()
+                               if doc := decl.Doc; doc != nil {
+                                       pos = doc.Pos()
+                               }
+                               end = decl.End()
+                               break
+                       }
+               }
+
+               edits = append(edits, refactor.Edit{
+                       Pos: pos,
+                       End: end,
+               })
+       }
+
        return &Result{
-               Content:     newSrc,
+               Edits:       edits,
                Literalized: literalized,
                BindingDecl: res.bindingDecl,
        }, nil
@@ -424,8 +330,9 @@ type oldImport struct {
 
 // A newImport is an import that will be added to the caller file.
 type newImport struct {
-       pkgName string
-       spec    *ast.ImportSpec
+       name     string
+       path     string
+       explicit bool // use name as ImportSpec.Name
 }
 
 // importState tracks information about imports.
@@ -526,16 +433,12 @@ func (i *importState) importName(pkgPath string, shadow shadowMap) string {
        return ""
 }
 
-// localName returns the local name for a given imported package path,
-// adding one if it doesn't exists.
-func (i *importState) localName(pkgPath, pkgName string, shadow shadowMap) string {
-       // Does an import already exist that works in this shadowing context?
-       if name := i.importName(pkgPath, shadow); name != "" {
-               return name
-       }
-
+// findNewLocalName returns a new local package name to use in a particular shadowing context.
+// It considers the existing local name used by the callee, or constructs a new local name
+// based on the package name.
+func (i *importState) findNewLocalName(pkgName, calleePkgName string, shadow shadowMap) string {
        newlyAdded := func(name string) bool {
-               return slices.ContainsFunc(i.newImports, func(n newImport) bool { return n.pkgName == name })
+               return slices.ContainsFunc(i.newImports, func(n newImport) bool { return n.name == name })
        }
 
        // shadowedInCaller reports whether a candidate package name
@@ -551,76 +454,46 @@ func (i *importState) localName(pkgPath, pkgName string, shadow shadowMap) strin
 
        // import added by callee
        //
-       // Choose local PkgName based on last segment of
-       // package path plus, if needed, a numeric suffix to
-       // ensure uniqueness.
+       // Try to preserve the local package name used by the callee first.
        //
-       // "init" is not a legal PkgName.
+       // If that is shadowed, choose a local package name based on the last segment
+       // of the package path plus, if needed, a numeric suffix to ensure uniqueness.
        //
-       // TODO(rfindley): is it worth preserving local package names for callee
-       // imports? Are they likely to be better or worse than the name we choose
-       // here?
+       // "init" is not a legal PkgName.
+       if shadow[calleePkgName] == 0 && !shadowedInCaller(calleePkgName) && !newlyAdded(calleePkgName) && calleePkgName != "init" {
+               return calleePkgName
+       }
+
        base := pkgName
        name := base
        for n := 0; shadow[name] != 0 || shadowedInCaller(name) || newlyAdded(name) || name == "init"; n++ {
                name = fmt.Sprintf("%s%d", base, n)
        }
-       i.logf("adding import %s %q", name, pkgPath)
-       spec := &ast.ImportSpec{
-               Path: &ast.BasicLit{
-                       Kind:  token.STRING,
-                       Value: strconv.Quote(pkgPath),
-               },
+
+       return name
+}
+
+// localName returns the local name for a given imported package path,
+// adding one if it doesn't exist.
+func (i *importState) localName(pkgPath, pkgName, calleePkgName string, shadow shadowMap) string {
+       // Does an import already exist that works in this shadowing context?
+       if name := i.importName(pkgPath, shadow); name != "" {
+               return name
        }
+
+       name := i.findNewLocalName(pkgName, calleePkgName, shadow)
+       i.logf("adding import %s %q", name, pkgPath)
        // Use explicit pkgname (out of necessity) when it differs from the declared name,
        // or (for good style) when it differs from base(pkgpath).
-       if name != pkgName || name != pathpkg.Base(pkgPath) {
-               spec.Name = makeIdent(name)
-       }
        i.newImports = append(i.newImports, newImport{
-               pkgName: name,
-               spec:    spec,
+               name:     name,
+               path:     pkgPath,
+               explicit: name != pkgName || name != pathpkg.Base(pkgPath),
        })
        i.importMap[pkgPath] = append(i.importMap[pkgPath], name)
        return name
 }
 
-// trimNewImports removes imports that are no longer needed.
-//
-// The list of new imports as constructed by calls to [importState.localName]
-// includes all of the packages referenced by the callee.
-// But in the process of inlining, we may have dropped some of those references.
-// For example, if the callee looked like this:
-//
-//     func F(x int) (p.T) {... /* no mention of p */ ...}
-//
-// and we inlined by assignment:
-//
-//     v := ...
-//
-// then the reference to package p drops away.
-//
-// Remove the excess imports by seeing which remain in new, the expression
-// to be inlined.
-// We can find those by looking at the free names in new.
-// The list of free names cannot include spurious package names.
-// Free-name tracking is precise except for the case of an identifier
-// key in a composite literal, which names either a field or a value.
-// Neither fields nor values are package names.
-// Since they are not relevant to removing unused imports, we instruct
-// freeishNames to omit composite-literal keys that are identifiers.
-func trimNewImports(newImports []newImport, new ast.Node) []newImport {
-       const omitComplitIdents = false
-       free := free.Names(new, omitComplitIdents)
-       var res []newImport
-       for _, ni := range newImports {
-               if free[ni.pkgName] {
-                       res = append(res, ni)
-               }
-       }
-       return res
-}
-
 type inlineCallResult struct {
        newImports []newImport // to add
        oldImports []oldImport // to remove
@@ -655,14 +528,6 @@ type inlineCallResult struct {
 // allows inlining a statement list. However, due to loss of comments, more
 // sophisticated rewrites are challenging.
 //
-// TODO(adonovan): in earlier drafts, the transformation was expressed
-// by splicing substrings of the two source files because syntax
-// trees don't preserve comments faithfully (see #20744), but such
-// transformations don't compose. The current implementation is
-// tree-based but is very lossy wrt comments. It would make a good
-// candidate for evaluating an alternative fully self-contained tree
-// representation, such as any proposed solution to #20744, or even
-// dst or some private fork of go/ast.)
 // TODO(rfindley): see if we can reduce the amount of comment lossiness by
 // using printer.CommentedNode, which has been useful elsewhere.
 //
@@ -1381,7 +1246,7 @@ func (st *state) renameFreeObjs(istate *importState) ([]ast.Expr, error) {
                var newName ast.Expr
                if obj.Kind == "pkgname" {
                        // Use locally appropriate import, creating as needed.
-                       n := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow)
+                       n := istate.localName(obj.PkgPath, obj.PkgName, obj.Name, obj.Shadow)
                        newName = makeIdent(n) // imported package
                } else if !obj.ValidPos {
                        // Built-in function, type, or value (e.g. nil, zero):
@@ -1426,7 +1291,7 @@ func (st *state) renameFreeObjs(istate *importState) ([]ast.Expr, error) {
 
                        // Form a qualified identifier, pkg.Name.
                        if qualify {
-                               pkgName := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow)
+                               pkgName := istate.localName(obj.PkgPath, obj.PkgName, obj.PkgName, obj.Shadow)
                                newName = &ast.SelectorExpr{
                                        X:   makeIdent(pkgName),
                                        Sel: makeIdent(obj.Name),
@@ -3272,25 +3137,6 @@ func last[T any](slice []T) T {
        return *new(T)
 }
 
-// consistentOffsets reports whether the portion of caller.Content
-// that corresponds to caller.Call can be parsed as a call expression.
-// If not, the client has provided inconsistent information, possibly
-// because they forgot to ignore line directives when computing the
-// filename enclosing the call.
-// This is just a heuristic.
-func consistentOffsets(caller *Caller) bool {
-       start := offsetOf(caller.Fset, caller.Call.Pos())
-       end := offsetOf(caller.Fset, caller.Call.End())
-       if !(0 < start && start < end && end <= len(caller.Content)) {
-               return false
-       }
-       expr, err := parser.ParseExpr(string(caller.Content[start:end]))
-       if err != nil {
-               return false
-       }
-       return is[*ast.CallExpr](expr)
-}
-
 // needsParens reports whether parens are required to avoid ambiguity
 // around the new node replacing the specified old node (which is some
 // ancestor of the CallExpr identified by its PathEnclosingInterval).
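
For orientation: the name-collision strategy in findNewLocalName above boils down to trying the callee's own local package name first and otherwise appending a numeric suffix to the package name until nothing conflicts ("init" is never legal). A minimal standalone sketch of that loop follows; pickName and the taken predicate are hypothetical stand-ins for the shadow/shadowedInCaller/newlyAdded checks in the diff, not code from it.

	package nameexample

	import "fmt"

	// pickName returns preferred if it is free, otherwise base, base0, base1, ...
	// until a candidate is not taken. "init" is rejected because it is not a
	// legal package name, mirroring the check above.
	func pickName(preferred, base string, taken func(string) bool) string {
		if preferred != "init" && !taken(preferred) {
			return preferred
		}
		name := base
		for n := 0; taken(name) || name == "init"; n++ {
			name = fmt.Sprintf("%s%d", base, n)
		}
		return name
	}

	// Example: if "json" and "json0" are already in scope,
	// pickName("json", "json", taken) yields "json1".
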
index 26bc079808f0fdca25860612d4d172817fdaa22c..8664377f8549353d2bc9302ac9916061bb0ded8a 100644 (file)
@@ -5,8 +5,7 @@
 // Package refactor provides operators to compute common textual edits
 // for refactoring tools.
 //
-// This package should not use features of the analysis API
-// other than [analysis.TextEdit].
+// This package should not use features of the analysis API other than [Edit].
 package refactor
 
 import (
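
With this change the inliner's Result carries a list of refactor.Edit values (Pos, End, NewText) rather than whole replacement file content, and a pure deletion is simply an edit with an empty NewText. A rough sketch of how a caller might splice such edits back into the original source follows; applyEdits, the mirrored Edit struct, and the offset handling are illustrative assumptions, not APIs from the packages above.

	package editexample

	import (
		"bytes"
		"go/token"
		"sort"
	)

	// Edit mirrors the three fields used above: replace the source
	// range [Pos, End) with NewText (empty NewText means deletion).
	type Edit struct {
		Pos, End token.Pos
		NewText  []byte
	}

	// applyEdits splices non-overlapping edits into content, converting
	// token positions to byte offsets through the file set.
	func applyEdits(fset *token.FileSet, content []byte, edits []Edit) []byte {
		sort.Slice(edits, func(i, j int) bool { return edits[i].Pos < edits[j].Pos })
		var out bytes.Buffer
		last := 0
		for _, e := range edits {
			start := fset.Position(e.Pos).Offset
			end := fset.Position(e.End).Offset
			out.Write(content[last:start]) // unchanged prefix
			out.Write(e.NewText)           // replacement text
			last = end
		}
		out.Write(content[last:]) // unchanged suffix
		return out.Bytes()
	}
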
index 581784da4351b59740e02f3c71075a3be959c7bb..f7b9c12865a227c8643ef8d42af28e1fac8c5675 100644 (file)
@@ -12,360 +12,364 @@ type pkginfo struct {
 }
 
 var deps = [...]pkginfo{
-       {"archive/tar", "\x03n\x03E<\x01\n\x01$\x01\x01\x02\x05\b\x02\x01\x02\x02\f"},
-       {"archive/zip", "\x02\x04d\a\x03\x12\x021<\x01+\x05\x01\x0f\x03\x02\x0e\x04"},
-       {"bufio", "\x03n\x84\x01D\x14"},
-       {"bytes", "q*Z\x03\fG\x02\x02"},
+       {"archive/tar", "\x03p\x03F=\x01\n\x01$\x01\x01\x02\x05\b\x02\x01\x02\x02\f"},
+       {"archive/zip", "\x02\x04f\a\x03\x13\x021=\x01+\x05\x01\x0f\x03\x02\x0e\x04"},
+       {"bufio", "\x03p\x86\x01D\x14"},
+       {"bytes", "s+[\x03\fG\x02\x02"},
        {"cmp", ""},
-       {"compress/bzip2", "\x02\x02\xf1\x01A"},
-       {"compress/flate", "\x02o\x03\x81\x01\f\x033\x01\x03"},
-       {"compress/gzip", "\x02\x04d\a\x03\x14mT"},
-       {"compress/lzw", "\x02o\x03\x81\x01"},
-       {"compress/zlib", "\x02\x04d\a\x03\x12\x01n"},
-       {"container/heap", "\xb7\x02"},
+       {"compress/bzip2", "\x02\x02\xf5\x01A"},
+       {"compress/flate", "\x02q\x03\x83\x01\f\x033\x01\x03"},
+       {"compress/gzip", "\x02\x04f\a\x03\x15nT"},
+       {"compress/lzw", "\x02q\x03\x83\x01"},
+       {"compress/zlib", "\x02\x04f\a\x03\x13\x01o"},
+       {"container/heap", "\xbb\x02"},
        {"container/list", ""},
        {"container/ring", ""},
-       {"context", "q[o\x01\r"},
-       {"crypto", "\x86\x01oC"},
-       {"crypto/aes", "\x10\n\t\x95\x02"},
-       {"crypto/cipher", "\x03 \x01\x01\x1f\x11\x1c+Y"},
-       {"crypto/des", "\x10\x15\x1f-+\x9c\x01\x03"},
-       {"crypto/dsa", "D\x04)\x84\x01\r"},
-       {"crypto/ecdh", "\x03\v\f\x10\x04\x16\x04\r\x1c\x84\x01"},
-       {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x10\a\v\x06\x01\x04\f\x01\x1c\x84\x01\r\x05K\x01"},
-       {"crypto/ed25519", "\x0e\x1e\x11\a\n\a\x1c\x84\x01C"},
-       {"crypto/elliptic", "2?\x84\x01\r9"},
+       {"context", "s\\p\x01\r"},
+       {"crypto", "\x89\x01pC"},
+       {"crypto/aes", "\x10\n\t\x99\x02"},
+       {"crypto/cipher", "\x03 \x01\x01 \x12\x1c,Z"},
+       {"crypto/des", "\x10\x15 .,\x9d\x01\x03"},
+       {"crypto/dsa", "E\x04*\x86\x01\r"},
+       {"crypto/ecdh", "\x03\v\f\x10\x04\x17\x04\x0e\x1c\x86\x01"},
+       {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x10\b\v\x06\x01\x04\r\x01\x1c\x86\x01\r\x05K\x01"},
+       {"crypto/ed25519", "\x0e\x1e\x12\a\v\a\x1c\x86\x01C"},
+       {"crypto/elliptic", "3@\x86\x01\r9"},
        {"crypto/fips140", "\"\x05"},
-       {"crypto/hkdf", "/\x14\x01-\x15"},
-       {"crypto/hmac", "\x1a\x16\x13\x01\x111"},
-       {"crypto/internal/boring", "\x0e\x02\ri"},
-       {"crypto/internal/boring/bbig", "\x1a\xe8\x01M"},
-       {"crypto/internal/boring/bcache", "\xbc\x02\x13"},
+       {"crypto/hkdf", "/\x15\x01.\x16"},
+       {"crypto/hmac", "\x1a\x16\x14\x01\x122"},
+       {"crypto/internal/boring", "\x0e\x02\rl"},
+       {"crypto/internal/boring/bbig", "\x1a\xec\x01M"},
+       {"crypto/internal/boring/bcache", "\xc0\x02\x13"},
        {"crypto/internal/boring/sig", ""},
        {"crypto/internal/constanttime", ""},
-       {"crypto/internal/cryptotest", "\x03\r\n\b%\x0e\x19\x06\x12\x12 \x04\x06\t\x18\x01\x11\x11\x1b\x01\a\x05\b\x03\x05\v"},
-       {"crypto/internal/entropy", "I"},
-       {"crypto/internal/entropy/v1.0.0", "B/\x93\x018\x13"},
-       {"crypto/internal/fips140", "A0\xbd\x01\v\x16"},
-       {"crypto/internal/fips140/aes", "\x03\x1f\x03\x02\x13\x05\x01\x01\x06*\x93\x014"},
-       {"crypto/internal/fips140/aes/gcm", "\"\x01\x02\x02\x02\x11\x05\x01\a*\x90\x01"},
-       {"crypto/internal/fips140/alias", "\xcf\x02"},
-       {"crypto/internal/fips140/bigmod", "'\x18\x01\a*\x93\x01"},
-       {"crypto/internal/fips140/check", "\"\x0e\x06\t\x02\xb4\x01Z"},
-       {"crypto/internal/fips140/check/checktest", "'\x87\x02!"},
-       {"crypto/internal/fips140/drbg", "\x03\x1e\x01\x01\x04\x13\x05\t\x01(\x84\x01\x0f7\x01"},
-       {"crypto/internal/fips140/ecdh", "\x03\x1f\x05\x02\t\r2\x84\x01\x0f7"},
-       {"crypto/internal/fips140/ecdsa", "\x03\x1f\x04\x01\x02\a\x02\x069\x15oF"},
-       {"crypto/internal/fips140/ed25519", "\x03\x1f\x05\x02\x04\v9\xc7\x01\x03"},
-       {"crypto/internal/fips140/edwards25519", "\x1e\t\a\x112\x93\x017"},
-       {"crypto/internal/fips140/edwards25519/field", "'\x13\x052\x93\x01"},
-       {"crypto/internal/fips140/hkdf", "\x03\x1f\x05\t\x06;\x15"},
-       {"crypto/internal/fips140/hmac", "\x03\x1f\x14\x01\x019\x15"},
-       {"crypto/internal/fips140/mlkem", "\x03\x1f\x05\x02\x0e\x03\x052\xca\x01"},
-       {"crypto/internal/fips140/nistec", "\x1e\t\f\f2\x93\x01*\r\x14"},
-       {"crypto/internal/fips140/nistec/fiat", "'\x137\x93\x01"},
-       {"crypto/internal/fips140/pbkdf2", "\x03\x1f\x05\t\x06;\x15"},
-       {"crypto/internal/fips140/rsa", "\x03\x1b\x04\x04\x01\x02\r\x01\x01\x027\x15oF"},
-       {"crypto/internal/fips140/sha256", "\x03\x1f\x1d\x01\a*\x15~"},
-       {"crypto/internal/fips140/sha3", "\x03\x1f\x18\x05\x011\x93\x01K"},
-       {"crypto/internal/fips140/sha512", "\x03\x1f\x1d\x01\a*\x15~"},
-       {"crypto/internal/fips140/ssh", "'_"},
-       {"crypto/internal/fips140/subtle", "\x1e\a\x1a\xc5\x01"},
-       {"crypto/internal/fips140/tls12", "\x03\x1f\x05\t\x06\x029\x15"},
-       {"crypto/internal/fips140/tls13", "\x03\x1f\x05\b\a\t2\x15"},
-       {"crypto/internal/fips140cache", "\xae\x02\r&"},
+       {"crypto/internal/cryptotest", "\x03\r\n\b&\x0f\x19\x06\x13\x12 \x04\x06\t\x19\x01\x11\x11\x1b\x01\a\x05\b\x03\x05\v"},
+       {"crypto/internal/entropy", "J"},
+       {"crypto/internal/entropy/v1.0.0", "C0\x95\x018\x13"},
+       {"crypto/internal/fips140", "B1\xbf\x01\v\x16"},
+       {"crypto/internal/fips140/aes", "\x03\x1f\x03\x02\x14\x05\x01\x01\x06+\x95\x014"},
+       {"crypto/internal/fips140/aes/gcm", "\"\x01\x02\x02\x02\x12\x05\x01\a+\x92\x01"},
+       {"crypto/internal/fips140/alias", "\xd3\x02"},
+       {"crypto/internal/fips140/bigmod", "'\x19\x01\a+\x95\x01"},
+       {"crypto/internal/fips140/check", "\"\x0e\a\t\x02\xb7\x01Z"},
+       {"crypto/internal/fips140/check/checktest", "'\x8b\x02!"},
+       {"crypto/internal/fips140/drbg", "\x03\x1e\x01\x01\x04\x14\x05\t\x01)\x86\x01\x0f7\x01"},
+       {"crypto/internal/fips140/ecdh", "\x03\x1f\x05\x02\n\r3\x86\x01\x0f7"},
+       {"crypto/internal/fips140/ecdsa", "\x03\x1f\x04\x01\x02\a\x03\x06:\x16pF"},
+       {"crypto/internal/fips140/ed25519", "\x03\x1f\x05\x02\x04\f:\xc9\x01\x03"},
+       {"crypto/internal/fips140/edwards25519", "\x1e\t\a\x123\x95\x017"},
+       {"crypto/internal/fips140/edwards25519/field", "'\x14\x053\x95\x01"},
+       {"crypto/internal/fips140/hkdf", "\x03\x1f\x05\t\a<\x16"},
+       {"crypto/internal/fips140/hmac", "\x03\x1f\x15\x01\x01:\x16"},
+       {"crypto/internal/fips140/mldsa", "\x03\x1b\x04\x05\x02\x0e\x01\x03\x053\x95\x017"},
+       {"crypto/internal/fips140/mlkem", "\x03\x1f\x05\x02\x0f\x03\x053\xcc\x01"},
+       {"crypto/internal/fips140/nistec", "\x1e\t\r\f3\x95\x01*\r\x14"},
+       {"crypto/internal/fips140/nistec/fiat", "'\x148\x95\x01"},
+       {"crypto/internal/fips140/pbkdf2", "\x03\x1f\x05\t\a<\x16"},
+       {"crypto/internal/fips140/rsa", "\x03\x1b\x04\x04\x01\x02\x0e\x01\x01\x028\x16pF"},
+       {"crypto/internal/fips140/sha256", "\x03\x1f\x1e\x01\a+\x16\x7f"},
+       {"crypto/internal/fips140/sha3", "\x03\x1f\x19\x05\x012\x95\x01K"},
+       {"crypto/internal/fips140/sha512", "\x03\x1f\x1e\x01\a+\x16\x7f"},
+       {"crypto/internal/fips140/ssh", "'b"},
+       {"crypto/internal/fips140/subtle", "\x1e\a\x1b\xc8\x01"},
+       {"crypto/internal/fips140/tls12", "\x03\x1f\x05\t\a\x02:\x16"},
+       {"crypto/internal/fips140/tls13", "\x03\x1f\x05\b\b\t3\x16"},
+       {"crypto/internal/fips140cache", "\xb2\x02\r&"},
        {"crypto/internal/fips140deps", ""},
-       {"crypto/internal/fips140deps/byteorder", "\x9c\x01"},
-       {"crypto/internal/fips140deps/cpu", "\xb1\x01\a"},
-       {"crypto/internal/fips140deps/godebug", "\xb9\x01"},
-       {"crypto/internal/fips140deps/time", "\xc9\x02"},
-       {"crypto/internal/fips140hash", "7\x1c3\xc9\x01"},
-       {"crypto/internal/fips140only", ")\r\x01\x01N3<"},
+       {"crypto/internal/fips140deps/byteorder", "\x9f\x01"},
+       {"crypto/internal/fips140deps/cpu", "\xb4\x01\a"},
+       {"crypto/internal/fips140deps/godebug", "\xbc\x01"},
+       {"crypto/internal/fips140deps/time", "\xcd\x02"},
+       {"crypto/internal/fips140hash", "8\x1d4\xca\x01"},
+       {"crypto/internal/fips140only", ")\x0e\x01\x01P3="},
        {"crypto/internal/fips140test", ""},
-       {"crypto/internal/hpke", "\x0e\x01\x01\x03\x056#+hM"},
-       {"crypto/internal/impl", "\xb9\x02"},
-       {"crypto/internal/randutil", "\xf5\x01\x12"},
-       {"crypto/internal/sysrand", "qo! \r\r\x01\x01\f\x06"},
-       {"crypto/internal/sysrand/internal/seccomp", "q"},
-       {"crypto/md5", "\x0e6-\x15\x16h"},
-       {"crypto/mlkem", "1"},
-       {"crypto/pbkdf2", "4\x0f\x01-\x15"},
-       {"crypto/rand", "\x1a\b\a\x1b\x04\x01(\x84\x01\rM"},
-       {"crypto/rc4", "%\x1f-\xc7\x01"},
-       {"crypto/rsa", "\x0e\f\x01\v\x0f\x0e\x01\x04\x06\a\x1c\x03\x123<\f\x01"},
-       {"crypto/sha1", "\x0e\f*\x03*\x15\x16\x15S"},
-       {"crypto/sha256", "\x0e\f\x1cP"},
-       {"crypto/sha3", "\x0e)O\xc9\x01"},
-       {"crypto/sha512", "\x0e\f\x1eN"},
-       {"crypto/subtle", "\x1e\x1c\x9c\x01X"},
-       {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x02\x01\x01\t\x01\r\n\x01\n\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x12\x16\x15\b<\x16\x16\r\b\x01\x01\x01\x02\x01\r\x06\x02\x01\x0f"},
-       {"crypto/tls/internal/fips140tls", "\x17\xa5\x02"},
-       {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x015\x05\x01\x01\x02\x05\x0e\x06\x02\x02\x03E\x039\x01\x02\b\x01\x01\x02\a\x10\x05\x01\x06\x02\x05\b\x02\x01\x02\x0e\x02\x01\x01\x02\x03\x01"},
-       {"crypto/x509/pkix", "g\x06\a\x8e\x01G"},
-       {"database/sql", "\x03\nN\x16\x03\x81\x01\v\a\"\x05\b\x02\x03\x01\r\x02\x02\x02"},
-       {"database/sql/driver", "\rd\x03\xb5\x01\x0f\x11"},
-       {"debug/buildinfo", "\x03[\x02\x01\x01\b\a\x03e\x1a\x02\x01+\x0f\x1f"},
-       {"debug/dwarf", "\x03g\a\x03\x81\x011\x11\x01\x01"},
-       {"debug/elf", "\x03\x06T\r\a\x03e\x1b\x01\f \x17\x01\x16"},
-       {"debug/gosym", "\x03g\n\xc3\x01\x01\x01\x02"},
-       {"debug/macho", "\x03\x06T\r\ne\x1c,\x17\x01"},
-       {"debug/pe", "\x03\x06T\r\a\x03e\x1c,\x17\x01\x16"},
-       {"debug/plan9obj", "j\a\x03e\x1c,"},
-       {"embed", "q*A\x19\x01S"},
+       {"crypto/internal/hpke", "\x03\v\x01\x01\x03\x055\x03\x04\x01\x01\x16\a\x03\x13\xcc\x01"},
+       {"crypto/internal/impl", "\xbd\x02"},
+       {"crypto/internal/randutil", "\xf9\x01\x12"},
+       {"crypto/internal/sysrand", "sq! \r\r\x01\x01\f\x06"},
+       {"crypto/internal/sysrand/internal/seccomp", "s"},
+       {"crypto/md5", "\x0e7.\x16\x16i"},
+       {"crypto/mlkem", "\x0e$"},
+       {"crypto/mlkem/mlkemtest", "2\x1b&"},
+       {"crypto/pbkdf2", "5\x0f\x01.\x16"},
+       {"crypto/rand", "\x1a\b\a\x1c\x04\x01)\x86\x01\rM"},
+       {"crypto/rc4", "% .\xc9\x01"},
+       {"crypto/rsa", "\x0e\f\x01\v\x10\x0e\x01\x04\a\a\x1c\x03\x133=\f\x01"},
+       {"crypto/sha1", "\x0e\f+\x03+\x16\x16\x15T"},
+       {"crypto/sha256", "\x0e\f\x1dR"},
+       {"crypto/sha3", "\x0e*Q\xca\x01"},
+       {"crypto/sha512", "\x0e\f\x1fP"},
+       {"crypto/subtle", "\x1e\x1d\x9f\x01X"},
+       {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x02\x01\x01\t\x01\x0e\n\x01\n\x05\x04\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x13\x16\x15\b=\x16\x16\r\b\x01\x01\x01\x02\x01\r\x06\x02\x01\x0f"},
+       {"crypto/tls/internal/fips140tls", "\x17\xa9\x02"},
+       {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x016\x06\x01\x01\x02\x05\x0e\x06\x02\x02\x03F\x03:\x01\x02\b\x01\x01\x02\a\x10\x05\x01\x06\a\b\x02\x01\x02\x0e\x02\x01\x01\x02\x03\x01"},
+       {"crypto/x509/pkix", "i\x06\a\x90\x01G"},
+       {"database/sql", "\x03\nP\x16\x03\x83\x01\v\a\"\x05\b\x02\x03\x01\r\x02\x02\x02"},
+       {"database/sql/driver", "\rf\x03\xb7\x01\x0f\x11"},
+       {"debug/buildinfo", "\x03]\x02\x01\x01\b\a\x03g\x1a\x02\x01+\x0f\x1f"},
+       {"debug/dwarf", "\x03i\a\x03\x83\x011\x11\x01\x01"},
+       {"debug/elf", "\x03\x06V\r\a\x03g\x1b\x01\f \x17\x01\x16"},
+       {"debug/gosym", "\x03i\n\xc5\x01\x01\x01\x02"},
+       {"debug/macho", "\x03\x06V\r\ng\x1c,\x17\x01"},
+       {"debug/pe", "\x03\x06V\r\a\x03g\x1c,\x17\x01\x16"},
+       {"debug/plan9obj", "l\a\x03g\x1c,"},
+       {"embed", "s+B\x19\x01S"},
        {"embed/internal/embedtest", ""},
        {"encoding", ""},
-       {"encoding/ascii85", "\xf5\x01C"},
-       {"encoding/asn1", "\x03n\x03e(\x01'\r\x02\x01\x10\x03\x01"},
-       {"encoding/base32", "\xf5\x01A\x02"},
-       {"encoding/base64", "\x9c\x01YA\x02"},
-       {"encoding/binary", "q\x84\x01\f(\r\x05"},
-       {"encoding/csv", "\x02\x01n\x03\x81\x01D\x12\x02"},
-       {"encoding/gob", "\x02c\x05\a\x03e\x1c\v\x01\x03\x1d\b\x12\x01\x0f\x02"},
-       {"encoding/hex", "q\x03\x81\x01A\x03"},
-       {"encoding/json", "\x03\x01a\x04\b\x03\x81\x01\f(\r\x02\x01\x02\x10\x01\x01\x02"},
-       {"encoding/pem", "\x03f\b\x84\x01A\x03"},
-       {"encoding/xml", "\x02\x01b\f\x03\x81\x014\x05\n\x01\x02\x10\x02"},
-       {"errors", "\xcc\x01\x83\x01"},
-       {"expvar", "nK@\b\v\x15\r\b\x02\x03\x01\x11"},
-       {"flag", "e\f\x03\x81\x01,\b\x05\b\x02\x01\x10"},
-       {"fmt", "qE&\x19\f \b\r\x02\x03\x12"},
-       {"go/ast", "\x03\x01p\x0e\x01r\x03)\b\r\x02\x01\x12\x02"},
-       {"go/build", "\x02\x01n\x03\x01\x02\x02\a\x02\x01\x17\x1f\x04\x02\b\x1b\x13\x01+\x01\x04\x01\a\b\x02\x01\x12\x02\x02"},
-       {"go/build/constraint", "q\xc7\x01\x01\x12\x02"},
-       {"go/constant", "t\x0f~\x01\x024\x01\x02\x12"},
-       {"go/doc", "\x04p\x01\x05\t=51\x10\x02\x01\x12\x02"},
-       {"go/doc/comment", "\x03q\xc2\x01\x01\x01\x01\x12\x02"},
-       {"go/format", "\x03q\x01\v\x01\x02rD"},
-       {"go/importer", "v\a\x01\x01\x04\x01q9"},
-       {"go/internal/gccgoimporter", "\x02\x01[\x13\x03\x04\v\x01o\x02,\x01\x05\x11\x01\f\b"},
-       {"go/internal/gcimporter", "\x02r\x0f\x010\x05\r/,\x15\x03\x02"},
-       {"go/internal/srcimporter", "t\x01\x01\n\x03\x01q,\x01\x05\x12\x02\x14"},
-       {"go/parser", "\x03n\x03\x01\x02\v\x01r\x01+\x06\x12"},
-       {"go/printer", "t\x01\x02\x03\tr\f \x15\x02\x01\x02\v\x05\x02"},
-       {"go/scanner", "\x03q\x0fr2\x10\x01\x13\x02"},
-       {"go/token", "\x04p\x84\x01>\x02\x03\x01\x0f\x02"},
-       {"go/types", "\x03\x01\x06g\x03\x01\x03\b\x03\x024\x062\x04\x03\t \x06\a\b\x01\x01\x01\x02\x01\x0f\x02\x02"},
-       {"go/version", "\xbe\x01{"},
-       {"hash", "\xf5\x01"},
-       {"hash/adler32", "q\x15\x16"},
-       {"hash/crc32", "q\x15\x16\x15\x8a\x01\x01\x13"},
-       {"hash/crc64", "q\x15\x16\x9f\x01"},
-       {"hash/fnv", "q\x15\x16h"},
-       {"hash/maphash", "\x86\x01\x11<|"},
-       {"html", "\xb9\x02\x02\x12"},
-       {"html/template", "\x03k\x06\x18-<\x01\n!\x05\x01\x02\x03\f\x01\x02\f\x01\x03\x02"},
-       {"image", "\x02o\x1ef\x0f4\x03\x01"},
+       {"encoding/ascii85", "\xf9\x01C"},
+       {"encoding/asn1", "\x03p\x03g(\x01'\r\x02\x01\x10\x03\x01"},
+       {"encoding/base32", "\xf9\x01A\x02"},
+       {"encoding/base64", "\x9f\x01ZA\x02"},
+       {"encoding/binary", "s\x86\x01\f(\r\x05"},
+       {"encoding/csv", "\x02\x01p\x03\x83\x01D\x12\x02"},
+       {"encoding/gob", "\x02e\x05\a\x03g\x1c\v\x01\x03\x1d\b\x12\x01\x0f\x02"},
+       {"encoding/hex", "s\x03\x83\x01A\x03"},
+       {"encoding/json", "\x03\x01c\x04\b\x03\x83\x01\f(\r\x02\x01\x02\x10\x01\x01\x02"},
+       {"encoding/pem", "\x03h\b\x86\x01A\x03"},
+       {"encoding/xml", "\x02\x01d\f\x03\x83\x014\x05\n\x01\x02\x10\x02"},
+       {"errors", "\xcf\x01\x84\x01"},
+       {"expvar", "pLA\b\v\x15\r\b\x02\x03\x01\x11"},
+       {"flag", "g\f\x03\x83\x01,\b\x05\b\x02\x01\x10"},
+       {"fmt", "sF'\x19\f \b\r\x02\x03\x12"},
+       {"go/ast", "\x03\x01r\x0f\x01s\x03)\b\r\x02\x01\x12\x02"},
+       {"go/build", "\x02\x01p\x03\x01\x02\x02\b\x02\x01\x17\x1f\x04\x02\b\x1c\x13\x01+\x01\x04\x01\a\b\x02\x01\x12\x02\x02"},
+       {"go/build/constraint", "s\xc9\x01\x01\x12\x02"},
+       {"go/constant", "v\x10\x7f\x01\x024\x01\x02\x12"},
+       {"go/doc", "\x04r\x01\x05\n=61\x10\x02\x01\x12\x02"},
+       {"go/doc/comment", "\x03s\xc4\x01\x01\x01\x01\x12\x02"},
+       {"go/format", "\x03s\x01\f\x01\x02sD"},
+       {"go/importer", "x\a\x01\x02\x04\x01r9"},
+       {"go/internal/gccgoimporter", "\x02\x01]\x13\x03\x04\f\x01p\x02,\x01\x05\x11\x01\f\b"},
+       {"go/internal/gcimporter", "\x02t\x10\x010\x05\r0,\x15\x03\x02"},
+       {"go/internal/scannerhooks", "\x86\x01"},
+       {"go/internal/srcimporter", "v\x01\x01\v\x03\x01r,\x01\x05\x12\x02\x14"},
+       {"go/parser", "\x03p\x03\x01\x02\b\x04\x01s\x01+\x06\x12"},
+       {"go/printer", "v\x01\x02\x03\ns\f \x15\x02\x01\x02\v\x05\x02"},
+       {"go/scanner", "\x03s\v\x05s2\x10\x01\x13\x02"},
+       {"go/token", "\x04r\x86\x01>\x02\x03\x01\x0f\x02"},
+       {"go/types", "\x03\x01\x06i\x03\x01\x03\t\x03\x024\x063\x04\x03\t \x06\a\b\x01\x01\x01\x02\x01\x0f\x02\x02"},
+       {"go/version", "\xc1\x01|"},
+       {"hash", "\xf9\x01"},
+       {"hash/adler32", "s\x16\x16"},
+       {"hash/crc32", "s\x16\x16\x15\x8b\x01\x01\x13"},
+       {"hash/crc64", "s\x16\x16\xa0\x01"},
+       {"hash/fnv", "s\x16\x16i"},
+       {"hash/maphash", "\x89\x01\x11<}"},
+       {"html", "\xbd\x02\x02\x12"},
+       {"html/template", "\x03m\x06\x19-=\x01\n!\x05\x01\x02\x03\f\x01\x02\f\x01\x03\x02"},
+       {"image", "\x02q\x1fg\x0f4\x03\x01"},
        {"image/color", ""},
-       {"image/color/palette", "\x8f\x01"},
-       {"image/draw", "\x8e\x01\x01\x04"},
-       {"image/gif", "\x02\x01\x05i\x03\x1a\x01\x01\x01\vY"},
-       {"image/internal/imageutil", "\x8e\x01"},
-       {"image/jpeg", "\x02o\x1d\x01\x04b"},
-       {"image/png", "\x02\aa\n\x12\x02\x06\x01fC"},
-       {"index/suffixarray", "\x03g\a\x84\x01\f+\n\x01"},
-       {"internal/abi", "\xb8\x01\x97\x01"},
-       {"internal/asan", "\xcf\x02"},
-       {"internal/bisect", "\xae\x02\r\x01"},
-       {"internal/buildcfg", "tGf\x06\x02\x05\n\x01"},
-       {"internal/bytealg", "\xb1\x01\x9e\x01"},
+       {"image/color/palette", "\x92\x01"},
+       {"image/draw", "\x91\x01\x01\x04"},
+       {"image/gif", "\x02\x01\x05k\x03\x1b\x01\x01\x01\vZ\x0f"},
+       {"image/internal/imageutil", "\x91\x01"},
+       {"image/jpeg", "\x02q\x1e\x01\x04c"},
+       {"image/png", "\x02\ac\n\x13\x02\x06\x01gC"},
+       {"index/suffixarray", "\x03i\a\x86\x01\f+\n\x01"},
+       {"internal/abi", "\xbb\x01\x98\x01"},
+       {"internal/asan", "\xd3\x02"},
+       {"internal/bisect", "\xb2\x02\r\x01"},
+       {"internal/buildcfg", "vHg\x06\x02\x05\n\x01"},
+       {"internal/bytealg", "\xb4\x01\x9f\x01"},
        {"internal/byteorder", ""},
        {"internal/cfg", ""},
-       {"internal/cgrouptest", "tZS\x06\x0f\x02\x01\x04\x01"},
-       {"internal/chacha8rand", "\x9c\x01\x15\a\x97\x01"},
+       {"internal/cgrouptest", "v[T\x06\x0f\x02\x01\x04\x01"},
+       {"internal/chacha8rand", "\x9f\x01\x15\a\x98\x01"},
        {"internal/copyright", ""},
        {"internal/coverage", ""},
        {"internal/coverage/calloc", ""},
-       {"internal/coverage/cfile", "n\x06\x16\x17\x01\x02\x01\x01\x01\x01\x01\x01\x01\"\x02&,\x06\a\n\x01\x03\r\x06"},
-       {"internal/coverage/cformat", "\x04p-\x04P\v6\x01\x02\r"},
-       {"internal/coverage/cmerge", "t-`"},
-       {"internal/coverage/decodecounter", "j\n-\v\x02G,\x17\x17"},
-       {"internal/coverage/decodemeta", "\x02h\n\x16\x17\v\x02G,"},
-       {"internal/coverage/encodecounter", "\x02h\n-\f\x01\x02E\v!\x15"},
-       {"internal/coverage/encodemeta", "\x02\x01g\n\x12\x04\x17\r\x02E,."},
-       {"internal/coverage/pods", "\x04p-\x80\x01\x06\x05\n\x02\x01"},
-       {"internal/coverage/rtcov", "\xcf\x02"},
-       {"internal/coverage/slicereader", "j\n\x81\x01Z"},
-       {"internal/coverage/slicewriter", "t\x81\x01"},
-       {"internal/coverage/stringtab", "t8\x04E"},
+       {"internal/coverage/cfile", "p\x06\x17\x17\x01\x02\x01\x01\x01\x01\x01\x01\x01\"\x02',\x06\a\n\x01\x03\r\x06"},
+       {"internal/coverage/cformat", "\x04r.\x04Q\v6\x01\x02\r"},
+       {"internal/coverage/cmerge", "v.a"},
+       {"internal/coverage/decodecounter", "l\n.\v\x02H,\x17\x17"},
+       {"internal/coverage/decodemeta", "\x02j\n\x17\x17\v\x02H,"},
+       {"internal/coverage/encodecounter", "\x02j\n.\f\x01\x02F\v!\x15"},
+       {"internal/coverage/encodemeta", "\x02\x01i\n\x13\x04\x17\r\x02F,."},
+       {"internal/coverage/pods", "\x04r.\x81\x01\x06\x05\n\x02\x01"},
+       {"internal/coverage/rtcov", "\xd3\x02"},
+       {"internal/coverage/slicereader", "l\n\x83\x01Z"},
+       {"internal/coverage/slicewriter", "v\x83\x01"},
+       {"internal/coverage/stringtab", "v9\x04F"},
        {"internal/coverage/test", ""},
        {"internal/coverage/uleb128", ""},
-       {"internal/cpu", "\xcf\x02"},
-       {"internal/dag", "\x04p\xc2\x01\x03"},
-       {"internal/diff", "\x03q\xc3\x01\x02"},
-       {"internal/exportdata", "\x02\x01n\x03\x02c\x1c,\x01\x05\x11\x01\x02"},
-       {"internal/filepathlite", "q*A\x1a@"},
-       {"internal/fmtsort", "\x04\xa5\x02\r"},
-       {"internal/fuzz", "\x03\nE\x18\x04\x03\x03\x01\v\x036<\f\x03\x1d\x01\x05\x02\x05\n\x01\x02\x01\x01\f\x04\x02"},
+       {"internal/cpu", "\xd3\x02"},
+       {"internal/dag", "\x04r\xc4\x01\x03"},
+       {"internal/diff", "\x03s\xc5\x01\x02"},
+       {"internal/exportdata", "\x02\x01p\x03\x02e\x1c,\x01\x05\x11\x01\x02"},
+       {"internal/filepathlite", "s+B\x1a@"},
+       {"internal/fmtsort", "\x04\xa9\x02\r"},
+       {"internal/fuzz", "\x03\nG\x18\x04\x03\x03\x01\f\x036=\f\x03\x1d\x01\x05\x02\x05\n\x01\x02\x01\x01\f\x04\x02"},
        {"internal/goarch", ""},
-       {"internal/godebug", "\x99\x01!\x81\x01\x01\x13"},
+       {"internal/godebug", "\x9c\x01!\x82\x01\x01\x13"},
        {"internal/godebugs", ""},
        {"internal/goexperiment", ""},
        {"internal/goos", ""},
-       {"internal/goroot", "\xa1\x02\x01\x05\x12\x02"},
+       {"internal/goroot", "\xa5\x02\x01\x05\x12\x02"},
        {"internal/gover", "\x04"},
        {"internal/goversion", ""},
-       {"internal/lazyregexp", "\xa1\x02\v\r\x02"},
-       {"internal/lazytemplate", "\xf5\x01,\x18\x02\f"},
-       {"internal/msan", "\xcf\x02"},
+       {"internal/lazyregexp", "\xa5\x02\v\r\x02"},
+       {"internal/lazytemplate", "\xf9\x01,\x18\x02\f"},
+       {"internal/msan", "\xd3\x02"},
        {"internal/nettrace", ""},
-       {"internal/obscuretestdata", "i\x8c\x01,"},
-       {"internal/oserror", "q"},
-       {"internal/pkgbits", "\x03O\x18\a\x03\x04\vr\r\x1f\r\n\x01"},
+       {"internal/obscuretestdata", "k\x8e\x01,"},
+       {"internal/oserror", "s"},
+       {"internal/pkgbits", "\x03Q\x18\a\x03\x04\fs\r\x1f\r\n\x01"},
        {"internal/platform", ""},
-       {"internal/poll", "qj\x05\x159\r\x01\x01\f\x06"},
-       {"internal/profile", "\x03\x04j\x03\x81\x017\n\x01\x01\x01\x10"},
+       {"internal/poll", "sl\x05\x159\r\x01\x01\f\x06"},
+       {"internal/profile", "\x03\x04l\x03\x83\x017\n\x01\x01\x01\x10"},
        {"internal/profilerecord", ""},
-       {"internal/race", "\x97\x01\xb8\x01"},
-       {"internal/reflectlite", "\x97\x01!:<!"},
-       {"internal/runtime/atomic", "\xb8\x01\x97\x01"},
-       {"internal/runtime/cgroup", "\x9b\x01<\x04t"},
-       {"internal/runtime/exithook", "\xcd\x01\x82\x01"},
-       {"internal/runtime/gc", "\xb8\x01"},
-       {"internal/runtime/gc/internal/gen", "\n`\n\x17j\x04\v\x1d\b\x10\x02"},
-       {"internal/runtime/gc/scan", "\xb1\x01\a\x18\x06y"},
-       {"internal/runtime/maps", "\x97\x01\x01 \n\t\t\x02y"},
-       {"internal/runtime/math", "\xb8\x01"},
+       {"internal/race", "\x9a\x01\xb9\x01"},
+       {"internal/reflectlite", "\x9a\x01!;<!"},
+       {"internal/runtime/atomic", "\xbb\x01\x98\x01"},
+       {"internal/runtime/cgroup", "\x9e\x01=\x04t"},
+       {"internal/runtime/exithook", "\xd0\x01\x83\x01"},
+       {"internal/runtime/gc", "\xbb\x01"},
+       {"internal/runtime/gc/internal/gen", "\nb\n\x18k\x04\v\x1d\b\x10\x02"},
+       {"internal/runtime/gc/scan", "\xb4\x01\a\x18\ay"},
+       {"internal/runtime/maps", "\x9a\x01\x01 \n\t\t\x03y"},
+       {"internal/runtime/math", "\xbb\x01"},
+       {"internal/runtime/pprof/label", ""},
        {"internal/runtime/startlinetest", ""},
-       {"internal/runtime/sys", "\xb8\x01\x04"},
-       {"internal/runtime/syscall/linux", "\xb8\x01\x97\x01"},
+       {"internal/runtime/sys", "\xbb\x01\x04"},
+       {"internal/runtime/syscall/linux", "\xbb\x01\x98\x01"},
        {"internal/runtime/wasitest", ""},
-       {"internal/saferio", "\xf5\x01Z"},
-       {"internal/singleflight", "\xbb\x02"},
-       {"internal/strconv", "\x84\x02K"},
-       {"internal/stringslite", "\x9b\x01\xb4\x01"},
-       {"internal/sync", "\x97\x01!\x13q\x13"},
-       {"internal/synctest", "\x97\x01\xb8\x01"},
-       {"internal/syscall/execenv", "\xbd\x02"},
-       {"internal/syscall/unix", "\xae\x02\x0e\x01\x12"},
-       {"internal/sysinfo", "\x02\x01\xae\x01D,\x18\x02"},
+       {"internal/saferio", "\xf9\x01Z"},
+       {"internal/singleflight", "\xbf\x02"},
+       {"internal/strconv", "\x88\x02K"},
+       {"internal/stringslite", "\x9e\x01\xb5\x01"},
+       {"internal/sync", "\x9a\x01!\x13r\x13"},
+       {"internal/synctest", "\x9a\x01\xb9\x01"},
+       {"internal/syscall/execenv", "\xc1\x02"},
+       {"internal/syscall/unix", "\xb2\x02\x0e\x01\x12"},
+       {"internal/sysinfo", "\x02\x01\xb1\x01E,\x18\x02"},
        {"internal/syslist", ""},
-       {"internal/testenv", "\x03\nd\x02\x01)\x1b\x0f/+\x01\x05\a\n\x01\x02\x02\x01\v"},
-       {"internal/testhash", "\x03\x83\x01o\x118\v"},
-       {"internal/testlog", "\xbb\x02\x01\x13"},
-       {"internal/testpty", "q\x03\xad\x01"},
-       {"internal/trace", "\x02\x01\x01\x06`\a\x03u\x03\x03\x06\x03\t5\x01\x01\x01\x10\x06"},
-       {"internal/trace/internal/testgen", "\x03g\ns\x03\x02\x03\x011\v\r\x10"},
-       {"internal/trace/internal/tracev1", "\x03\x01f\a\x03{\x06\f5\x01"},
-       {"internal/trace/raw", "\x02h\nx\x03\x06C\x01\x12"},
-       {"internal/trace/testtrace", "\x02\x01n\x03o\x04\x03\x05\x01\x05,\v\x02\b\x02\x01\x05"},
+       {"internal/testenv", "\x03\nf\x02\x01*\x1b\x0f0+\x01\x05\a\n\x01\x02\x02\x01\v"},
+       {"internal/testhash", "\x03\x86\x01p\x118\v"},
+       {"internal/testlog", "\xbf\x02\x01\x13"},
+       {"internal/testpty", "s\x03\xaf\x01"},
+       {"internal/trace", "\x02\x01\x01\x06b\a\x03w\x03\x03\x06\x03\t5\x01\x01\x01\x10\x06"},
+       {"internal/trace/internal/testgen", "\x03i\nu\x03\x02\x03\x011\v\r\x10"},
+       {"internal/trace/internal/tracev1", "\x03\x01h\a\x03}\x06\f5\x01"},
+       {"internal/trace/raw", "\x02j\nz\x03\x06C\x01\x12"},
+       {"internal/trace/testtrace", "\x02\x01p\x03q\x04\x03\x05\x01\x05,\v\x02\b\x02\x01\x05"},
        {"internal/trace/tracev2", ""},
-       {"internal/trace/traceviewer", "\x02a\v\x06\x19<\x1e\a\a\x04\b\v\x15\x01\x05\a\n\x01\x02\x0e"},
+       {"internal/trace/traceviewer", "\x02c\v\x06\x1a<\x1f\a\a\x04\b\v\x15\x01\x05\a\n\x01\x02\x0e"},
        {"internal/trace/traceviewer/format", ""},
-       {"internal/trace/version", "tx\t"},
-       {"internal/txtar", "\x03q\xad\x01\x18"},
-       {"internal/types/errors", "\xb8\x02"},
-       {"internal/unsafeheader", "\xcf\x02"},
-       {"internal/xcoff", "]\r\a\x03e\x1c,\x17\x01"},
-       {"internal/zstd", "j\a\x03\x81\x01\x0f"},
-       {"io", "q\xca\x01"},
-       {"io/fs", "q**01\x10\x13\x04"},
-       {"io/ioutil", "\xf5\x01\x01+\x15\x03"},
-       {"iter", "\xcb\x01c!"},
-       {"log", "t\x81\x01\x05'\r\r\x01\r"},
+       {"internal/trace/version", "vz\t"},
+       {"internal/txtar", "\x03s\xaf\x01\x18"},
+       {"internal/types/errors", "\xbc\x02"},
+       {"internal/unsafeheader", "\xd3\x02"},
+       {"internal/xcoff", "_\r\a\x03g\x1c,\x17\x01"},
+       {"internal/zstd", "l\a\x03\x83\x01\x0f"},
+       {"io", "s\xcc\x01"},
+       {"io/fs", "s+*11\x10\x13\x04"},
+       {"io/ioutil", "\xf9\x01\x01+\x15\x03"},
+       {"iter", "\xce\x01d!"},
+       {"log", "v\x83\x01\x05'\r\r\x01\r"},
        {"log/internal", ""},
-       {"log/slog", "\x03\nX\t\x03\x03\x81\x01\x04\x01\x02\x02\x03(\x05\b\x02\x01\x02\x01\r\x02\x02\x02"},
+       {"log/slog", "\x03\nZ\t\x03\x03\x83\x01\x04\x01\x02\x02\x03(\x05\b\x02\x01\x02\x01\r\x02\x02\x02"},
        {"log/slog/internal", ""},
-       {"log/slog/internal/benchmarks", "\rd\x03\x81\x01\x06\x03:\x11"},
-       {"log/slog/internal/buffer", "\xbb\x02"},
-       {"log/syslog", "q\x03\x85\x01\x12\x16\x18\x02\x0e"},
-       {"maps", "\xf8\x01W"},
-       {"math", "\xb1\x01SK"},
-       {"math/big", "\x03n\x03(\x15D\f\x03\x020\x02\x01\x02\x14"},
-       {"math/big/internal/asmgen", "\x03\x01p\x90\x012\x03"},
-       {"math/bits", "\xcf\x02"},
-       {"math/cmplx", "\x81\x02\x03"},
-       {"math/rand", "\xb9\x01H:\x01\x13"},
-       {"math/rand/v2", "q+\x03b\x03K"},
-       {"mime", "\x02\x01f\b\x03\x81\x01\v!\x15\x03\x02\x10\x02"},
-       {"mime/multipart", "\x02\x01K#\x03E<\v\x01\a\x02\x15\x02\x06\x0f\x02\x01\x16"},
-       {"mime/quotedprintable", "\x02\x01q\x81\x01"},
-       {"net", "\x04\td*\x1e\n\x05\x12\x01\x01\x04\x15\x01%\x06\r\b\x05\x01\x01\f\x06\a"},
-       {"net/http", "\x02\x01\x03\x01\x04\x02A\b\x13\x01\a\x03E<\x01\x03\a\x01\x03\x02\x02\x01\x02\x06\x02\x01\x01\n\x01\x01\x05\x01\x02\x05\b\x01\x01\x01\x02\x01\r\x02\x02\x02\b\x01\x01\x01"},
-       {"net/http/cgi", "\x02T\x1b\x03\x81\x01\x04\a\v\x01\x13\x01\x01\x01\x04\x01\x05\x02\b\x02\x01\x10\x0e"},
-       {"net/http/cookiejar", "\x04m\x03\x97\x01\x01\b\f\x16\x03\x02\x0e\x04"},
-       {"net/http/fcgi", "\x02\x01\n]\a\x03\x81\x01\x16\x01\x01\x14\x18\x02\x0e"},
-       {"net/http/httptest", "\x02\x01\nI\x02\x1b\x01\x81\x01\x04\x12\x01\n\t\x02\x17\x01\x02\x0e\x0e"},
-       {"net/http/httptrace", "\rImH\x14\n "},
-       {"net/http/httputil", "\x02\x01\nd\x03\x81\x01\x04\x0f\x03\x01\x05\x02\x01\v\x01\x19\x02\x0e\x0e"},
-       {"net/http/internal", "\x02\x01n\x03\x81\x01"},
-       {"net/http/internal/ascii", "\xb9\x02\x12"},
-       {"net/http/internal/httpcommon", "\rd\x03\x9d\x01\x0e\x01\x17\x01\x01\x02\x1c\x02"},
-       {"net/http/internal/testcert", "\xb9\x02"},
-       {"net/http/pprof", "\x02\x01\ng\x18-\x02\x0e,\x04\x13\x14\x01\r\x04\x03\x01\x02\x01\x10"},
+       {"log/slog/internal/benchmarks", "\rf\x03\x83\x01\x06\x03:\x11"},
+       {"log/slog/internal/buffer", "\xbf\x02"},
+       {"log/syslog", "s\x03\x87\x01\x12\x16\x18\x02\x0e"},
+       {"maps", "\xfc\x01W"},
+       {"math", "\xb4\x01TK"},
+       {"math/big", "\x03p\x03)\x15E\f\x03\x020\x02\x01\x02\x14"},
+       {"math/big/internal/asmgen", "\x03\x01r\x92\x012\x03"},
+       {"math/bits", "\xd3\x02"},
+       {"math/cmplx", "\x85\x02\x03"},
+       {"math/rand", "\xbc\x01I:\x01\x13"},
+       {"math/rand/v2", "s,\x03c\x03K"},
+       {"mime", "\x02\x01h\b\x03\x83\x01\v!\x15\x03\x02\x10\x02"},
+       {"mime/multipart", "\x02\x01M#\x03F=\v\x01\a\x02\x15\x02\x06\x0f\x02\x01\x16"},
+       {"mime/quotedprintable", "\x02\x01s\x83\x01"},
+       {"net", "\x04\tf+\x1e\n\x05\x13\x01\x01\x04\x15\x01%\x06\r\b\x05\x01\x01\f\x06\a"},
+       {"net/http", "\x02\x01\x03\x01\x04\x02C\b\x13\x01\a\x03F=\x01\x03\a\x01\x03\x02\x02\x01\x02\x06\x02\x01\x01\n\x01\x01\x05\x01\x02\x05\b\x01\x01\x01\x02\x01\r\x02\x02\x02\b\x01\x01\x01"},
+       {"net/http/cgi", "\x02V\x1b\x03\x83\x01\x04\a\v\x01\x13\x01\x01\x01\x04\x01\x05\x02\b\x02\x01\x10\x0e"},
+       {"net/http/cookiejar", "\x04o\x03\x99\x01\x01\b\a\x05\x16\x03\x02\x0e\x04"},
+       {"net/http/fcgi", "\x02\x01\n_\a\x03\x83\x01\x16\x01\x01\x14\x18\x02\x0e"},
+       {"net/http/httptest", "\x02\x01\nK\x02\x1b\x01\x83\x01\x04\x12\x01\n\t\x02\x17\x01\x02\x0e\x0e"},
+       {"net/http/httptrace", "\rKnI\x14\n "},
+       {"net/http/httputil", "\x02\x01\nf\x03\x83\x01\x04\x0f\x03\x01\x05\x02\x01\v\x01\x19\x02\x01\r\x0e"},
+       {"net/http/internal", "\x02\x01p\x03\x83\x01"},
+       {"net/http/internal/ascii", "\xbd\x02\x12"},
+       {"net/http/internal/httpcommon", "\rf\x03\x9f\x01\x0e\x01\x17\x01\x01\x02\x1c\x02"},
+       {"net/http/internal/testcert", "\xbd\x02"},
+       {"net/http/pprof", "\x02\x01\ni\x19-\x02\x0e-\x04\x13\x14\x01\r\x04\x03\x01\x02\x01\x10"},
        {"net/internal/cgotest", ""},
-       {"net/internal/socktest", "t\xc7\x01\x02"},
-       {"net/mail", "\x02o\x03\x81\x01\x04\x0f\x03\x14\x1a\x02\x0e\x04"},
-       {"net/netip", "\x04m*\x01e\x034\x16"},
-       {"net/rpc", "\x02j\x05\x03\x0f\nh\x04\x12\x01\x1d\r\x03\x02"},
-       {"net/rpc/jsonrpc", "n\x03\x03\x81\x01\x16\x11\x1f"},
-       {"net/smtp", "\x192\v\x13\b\x03\x81\x01\x16\x14\x1a"},
-       {"net/textproto", "\x02\x01n\x03\x81\x01\f\n-\x01\x02\x14"},
-       {"net/url", "q\x03\xa7\x01\v\x10\x02\x01\x16"},
-       {"os", "q*\x01\x19\x03\x10\x13\x01\x03\x01\x05\x10\x018\b\x05\x01\x01\f\x06"},
-       {"os/exec", "\x03\ndH&\x01\x15\x01+\x06\a\n\x01\x04\f"},
-       {"os/exec/internal/fdtest", "\xbd\x02"},
-       {"os/signal", "\r\x94\x02\x15\x05\x02"},
-       {"os/user", "\x02\x01n\x03\x81\x01,\r\n\x01\x02"},
-       {"path", "q*\xb2\x01"},
-       {"path/filepath", "q*\x1aA+\r\b\x03\x04\x10"},
-       {"plugin", "q"},
-       {"reflect", "q&\x04\x1d\x13\b\x03\x05\x17\x06\t-\n\x03\x10\x02\x02"},
+       {"net/internal/socktest", "v\xc9\x01\x02"},
+       {"net/mail", "\x02q\x03\x83\x01\x04\x0f\x03\x14\x1a\x02\x0e\x04"},
+       {"net/netip", "\x04o+\x01f\x034\x16"},
+       {"net/rpc", "\x02l\x05\x03\x10\ni\x04\x12\x01\x1d\r\x03\x02"},
+       {"net/rpc/jsonrpc", "p\x03\x03\x83\x01\x16\x11\x1f"},
+       {"net/smtp", "\x193\f\x13\b\x03\x83\x01\x16\x14\x1a"},
+       {"net/textproto", "\x02\x01p\x03\x83\x01\f\n-\x01\x02\x14"},
+       {"net/url", "s\x03Fc\v\x10\x02\x01\x16"},
+       {"os", "s+\x01\x19\x03\x10\x14\x01\x03\x01\x05\x10\x018\b\x05\x01\x01\f\x06"},
+       {"os/exec", "\x03\nfI'\x01\x15\x01+\x06\a\n\x01\x04\f"},
+       {"os/exec/internal/fdtest", "\xc1\x02"},
+       {"os/signal", "\r\x98\x02\x15\x05\x02"},
+       {"os/user", "\x02\x01p\x03\x83\x01,\r\n\x01\x02"},
+       {"path", "s+\xb3\x01"},
+       {"path/filepath", "s+\x1aB+\r\b\x03\x04\x10"},
+       {"plugin", "s"},
+       {"reflect", "s'\x04\x1d\x13\b\x04\x05\x17\x06\t-\n\x03\x10\x02\x02"},
        {"reflect/internal/example1", ""},
        {"reflect/internal/example2", ""},
-       {"regexp", "\x03\xf2\x018\t\x02\x01\x02\x10\x02"},
-       {"regexp/syntax", "\xb6\x02\x01\x01\x01\x02\x10\x02"},
-       {"runtime", "\x97\x01\x04\x01\x03\f\x06\a\x02\x01\x01\x0e\x03\x01\x01\x01\x02\x01\x01\x02\x01\x04\x01\x10c"},
-       {"runtime/coverage", "\xa3\x01R"},
-       {"runtime/debug", "tTY\r\b\x02\x01\x10\x06"},
-       {"runtime/metrics", "\xba\x01G-!"},
-       {"runtime/pprof", "\x02\x01\x01\x03\x06]\a\x03#$\x0f+\f \r\b\x01\x01\x01\x02\x02\t\x03\x06"},
-       {"runtime/race", "\xb4\x02"},
+       {"regexp", "\x03\xf6\x018\t\x02\x01\x02\x10\x02"},
+       {"regexp/syntax", "\xba\x02\x01\x01\x01\x02\x10\x02"},
+       {"runtime", "\x9a\x01\x04\x01\x03\f\x06\a\x02\x01\x01\x0e\x03\x01\x01\x01\x02\x01\x01\x01\x02\x01\x04\x01\x10\x18K"},
+       {"runtime/coverage", "\xa6\x01S"},
+       {"runtime/debug", "vUZ\r\b\x02\x01\x10\x06"},
+       {"runtime/metrics", "\xbd\x01H-!"},
+       {"runtime/pprof", "\x02\x01\x01\x03\x06_\a\x03$$\x0f\v!\f \r\b\x01\x01\x01\x02\x02\t\x03\x06"},
+       {"runtime/race", "\xb8\x02"},
        {"runtime/race/internal/amd64v1", ""},
-       {"runtime/trace", "\rd\x03x\t9\b\x05\x01\r\x06"},
-       {"slices", "\x04\xf4\x01\fK"},
-       {"sort", "\xcc\x0182"},
-       {"strconv", "q*@\x01q"},
-       {"strings", "q&\x04A\x19\x03\f7\x10\x02\x02"},
+       {"runtime/trace", "\rf\x03z\t9\b\x05\x01\r\x06"},
+       {"slices", "\x04\xf8\x01\fK"},
+       {"sort", "\xcf\x0192"},
+       {"strconv", "s+A\x01q"},
+       {"strings", "s'\x04B\x19\x03\f7\x10\x02\x02"},
        {"structs", ""},
-       {"sync", "\xcb\x01\x12\x01P\x0e\x13"},
-       {"sync/atomic", "\xcf\x02"},
-       {"syscall", "q'\x03\x01\x1c\n\x03\x06\f\x04S\b\x05\x01\x13"},
-       {"testing", "\x03\nd\x02\x01W\x16\x14\f\x05\x1b\x06\x02\x05\x02\x05\x01\x02\x01\x02\x01\r\x02\x04"},
-       {"testing/fstest", "q\x03\x81\x01\x01\n&\x10\x03\b\b"},
-       {"testing/internal/testdeps", "\x02\v\xaa\x01.\x10,\x03\x05\x03\x06\a\x02\x0e"},
-       {"testing/iotest", "\x03n\x03\x81\x01\x04"},
-       {"testing/quick", "s\x01\x8d\x01\x05#\x10\x10"},
-       {"testing/slogtest", "\rd\x03\x87\x01.\x05\x10\v"},
-       {"testing/synctest", "\xde\x01`\x11"},
-       {"text/scanner", "\x03q\x81\x01,*\x02"},
-       {"text/tabwriter", "t\x81\x01X"},
-       {"text/template", "q\x03B?\x01\n \x01\x05\x01\x02\x05\v\x02\r\x03\x02"},
-       {"text/template/parse", "\x03q\xba\x01\n\x01\x12\x02"},
-       {"time", "q*\x1e#(*\r\x02\x12"},
-       {"time/tzdata", "q\xcc\x01\x12"},
+       {"sync", "\xce\x01\x13\x01P\x0e\x13"},
+       {"sync/atomic", "\xd3\x02"},
+       {"syscall", "s(\x03\x01\x1c\n\x03\x06\r\x04S\b\x05\x01\x13"},
+       {"testing", "\x03\nf\x02\x01X\x17\x14\f\x05\x1b\x06\x02\x05\x02\x05\x01\x02\x01\x02\x01\r\x02\x04"},
+       {"testing/fstest", "s\x03\x83\x01\x01\n&\x10\x03\b\b"},
+       {"testing/internal/testdeps", "\x02\v\xad\x01/\x10,\x03\x05\x03\x06\a\x02\x0e"},
+       {"testing/iotest", "\x03p\x03\x83\x01\x04"},
+       {"testing/quick", "u\x01\x8f\x01\x05#\x10\x10"},
+       {"testing/slogtest", "\rf\x03\x89\x01.\x05\x10\v"},
+       {"testing/synctest", "\xe2\x01`\x11"},
+       {"text/scanner", "\x03s\x83\x01,*\x02"},
+       {"text/tabwriter", "v\x83\x01X"},
+       {"text/template", "s\x03C@\x01\n \x01\x05\x01\x02\x05\v\x02\r\x03\x02"},
+       {"text/template/parse", "\x03s\xbc\x01\n\x01\x12\x02"},
+       {"time", "s+\x1e$(*\r\x02\x12"},
+       {"time/tzdata", "s\xce\x01\x12"},
        {"unicode", ""},
        {"unicode/utf16", ""},
        {"unicode/utf8", ""},
-       {"unique", "\x97\x01!$\x01Q\r\x01\x13\x12"},
+       {"unique", "\x9a\x01!%\x01Q\r\x01\x13\x12"},
        {"unsafe", ""},
-       {"vendor/golang.org/x/crypto/chacha20", "\x10Z\a\x93\x01*&"},
-       {"vendor/golang.org/x/crypto/chacha20poly1305", "\x10Z\a\xdf\x01\x04\x01\a"},
-       {"vendor/golang.org/x/crypto/cryptobyte", "g\n\x03\x8e\x01' \n"},
+       {"vendor/golang.org/x/crypto/chacha20", "\x10\\\a\x95\x01*&"},
+       {"vendor/golang.org/x/crypto/chacha20poly1305", "\x10\\\a\xe1\x01\x04\x01\a"},
+       {"vendor/golang.org/x/crypto/cryptobyte", "i\n\x03\x90\x01' \n"},
        {"vendor/golang.org/x/crypto/cryptobyte/asn1", ""},
-       {"vendor/golang.org/x/crypto/internal/alias", "\xcf\x02"},
-       {"vendor/golang.org/x/crypto/internal/poly1305", "U\x15\x9a\x01"},
-       {"vendor/golang.org/x/net/dns/dnsmessage", "q"},
-       {"vendor/golang.org/x/net/http/httpguts", "\x8b\x02\x14\x1a\x14\r"},
-       {"vendor/golang.org/x/net/http/httpproxy", "q\x03\x97\x01\x10\x05\x01\x18\x14\r"},
-       {"vendor/golang.org/x/net/http2/hpack", "\x03n\x03\x81\x01F"},
-       {"vendor/golang.org/x/net/idna", "t\x8d\x018\x14\x10\x02\x01"},
-       {"vendor/golang.org/x/net/nettest", "\x03g\a\x03\x81\x01\x11\x05\x16\x01\f\n\x01\x02\x02\x01\v"},
-       {"vendor/golang.org/x/sys/cpu", "\xa1\x02\r\n\x01\x16"},
-       {"vendor/golang.org/x/text/secure/bidirule", "q\xdc\x01\x11\x01"},
-       {"vendor/golang.org/x/text/transform", "\x03n\x84\x01X"},
-       {"vendor/golang.org/x/text/unicode/bidi", "\x03\bi\x85\x01>\x16"},
-       {"vendor/golang.org/x/text/unicode/norm", "j\n\x81\x01F\x12\x11"},
-       {"weak", "\x97\x01\x97\x01!"},
+       {"vendor/golang.org/x/crypto/internal/alias", "\xd3\x02"},
+       {"vendor/golang.org/x/crypto/internal/poly1305", "W\x15\x9c\x01"},
+       {"vendor/golang.org/x/net/dns/dnsmessage", "s\xc7\x01"},
+       {"vendor/golang.org/x/net/http/httpguts", "\x8f\x02\x14\x1a\x14\r"},
+       {"vendor/golang.org/x/net/http/httpproxy", "s\x03\x99\x01\x10\x05\x01\x18\x14\r"},
+       {"vendor/golang.org/x/net/http2/hpack", "\x03p\x03\x83\x01F"},
+       {"vendor/golang.org/x/net/idna", "v\x8f\x018\x14\x10\x02\x01"},
+       {"vendor/golang.org/x/net/nettest", "\x03i\a\x03\x83\x01\x11\x05\x16\x01\f\n\x01\x02\x02\x01\v"},
+       {"vendor/golang.org/x/sys/cpu", "\xa5\x02\r\n\x01\x16"},
+       {"vendor/golang.org/x/text/secure/bidirule", "s\xde\x01\x11\x01"},
+       {"vendor/golang.org/x/text/transform", "\x03p\x86\x01X"},
+       {"vendor/golang.org/x/text/unicode/bidi", "\x03\bk\x87\x01>\x16"},
+       {"vendor/golang.org/x/text/unicode/norm", "l\n\x83\x01F\x12\x11"},
+       {"weak", "\x9a\x01\x98\x01!"},
 }
 
 // bootstrap is the list of bootstrap packages extracted from cmd/dist.
@@ -385,6 +389,7 @@ var bootstrap = map[string]bool{
        "cmd/compile/internal/arm64":              true,
        "cmd/compile/internal/base":               true,
        "cmd/compile/internal/bitvec":             true,
+       "cmd/compile/internal/bloop":              true,
        "cmd/compile/internal/compare":            true,
        "cmd/compile/internal/coverage":           true,
        "cmd/compile/internal/deadlocals":         true,
@@ -413,6 +418,7 @@ var bootstrap = map[string]bool{
        "cmd/compile/internal/riscv64":            true,
        "cmd/compile/internal/rttype":             true,
        "cmd/compile/internal/s390x":              true,
+       "cmd/compile/internal/slice":              true,
        "cmd/compile/internal/ssa":                true,
        "cmd/compile/internal/ssagen":             true,
        "cmd/compile/internal/staticdata":         true,
index 362f23c436c548c064a65ed26a0bc1274db52362..f1e24625a7adabd42ef1bd999ceecc62cf106e1c 100644 (file)
@@ -16,6 +16,14 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Writer).Flush", Method, 0, ""},
                {"(*Writer).Write", Method, 0, ""},
                {"(*Writer).WriteHeader", Method, 0, ""},
+               {"(FileInfoNames).Gname", Method, 23, ""},
+               {"(FileInfoNames).IsDir", Method, 23, ""},
+               {"(FileInfoNames).ModTime", Method, 23, ""},
+               {"(FileInfoNames).Mode", Method, 23, ""},
+               {"(FileInfoNames).Name", Method, 23, ""},
+               {"(FileInfoNames).Size", Method, 23, ""},
+               {"(FileInfoNames).Sys", Method, 23, ""},
+               {"(FileInfoNames).Uname", Method, 23, ""},
                {"(Format).String", Method, 10, ""},
                {"ErrFieldTooLong", Var, 0, ""},
                {"ErrHeader", Var, 0, ""},
@@ -338,6 +346,9 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Writer).Write", Method, 0, ""},
                {"(CorruptInputError).Error", Method, 0, ""},
                {"(InternalError).Error", Method, 0, ""},
+               {"(Reader).Read", Method, 0, ""},
+               {"(Reader).ReadByte", Method, 0, ""},
+               {"(Resetter).Reset", Method, 4, ""},
                {"BestCompression", Const, 0, ""},
                {"BestSpeed", Const, 0, ""},
                {"CorruptInputError", Type, 0, ""},
@@ -409,6 +420,7 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Writer).Flush", Method, 0, ""},
                {"(*Writer).Reset", Method, 2, ""},
                {"(*Writer).Write", Method, 0, ""},
+               {"(Resetter).Reset", Method, 4, ""},
                {"BestCompression", Const, 0, ""},
                {"BestSpeed", Const, 0, ""},
                {"DefaultCompression", Const, 0, ""},
@@ -426,6 +438,11 @@ var PackageSymbols = map[string][]Symbol{
                {"Writer", Type, 0, ""},
        },
        "container/heap": {
+               {"(Interface).Len", Method, 0, ""},
+               {"(Interface).Less", Method, 0, ""},
+               {"(Interface).Pop", Method, 0, ""},
+               {"(Interface).Push", Method, 0, ""},
+               {"(Interface).Swap", Method, 0, ""},
                {"Fix", Func, 2, "func(h Interface, i int)"},
                {"Init", Func, 0, "func(h Interface)"},
                {"Interface", Type, 0, ""},
@@ -469,6 +486,10 @@ var PackageSymbols = map[string][]Symbol{
                {"Ring.Value", Field, 0, ""},
        },
        "context": {
+               {"(Context).Deadline", Method, 7, ""},
+               {"(Context).Done", Method, 7, ""},
+               {"(Context).Err", Method, 7, ""},
+               {"(Context).Value", Method, 7, ""},
                {"AfterFunc", Func, 21, "func(ctx Context, f func()) (stop func() bool)"},
                {"Background", Func, 7, "func() Context"},
                {"CancelCauseFunc", Type, 20, ""},
@@ -488,17 +509,31 @@ var PackageSymbols = map[string][]Symbol{
                {"WithoutCancel", Func, 21, "func(parent Context) Context"},
        },
        "crypto": {
+               {"(Decapsulator).Decapsulate", Method, 26, ""},
+               {"(Decapsulator).Encapsulator", Method, 26, ""},
+               {"(Decrypter).Decrypt", Method, 5, ""},
+               {"(Decrypter).Public", Method, 5, ""},
+               {"(Encapsulator).Bytes", Method, 26, ""},
+               {"(Encapsulator).Encapsulate", Method, 26, ""},
                {"(Hash).Available", Method, 0, ""},
                {"(Hash).HashFunc", Method, 4, ""},
                {"(Hash).New", Method, 0, ""},
                {"(Hash).Size", Method, 0, ""},
                {"(Hash).String", Method, 15, ""},
+               {"(MessageSigner).Public", Method, 25, ""},
+               {"(MessageSigner).Sign", Method, 25, ""},
+               {"(MessageSigner).SignMessage", Method, 25, ""},
+               {"(Signer).Public", Method, 4, ""},
+               {"(Signer).Sign", Method, 4, ""},
+               {"(SignerOpts).HashFunc", Method, 4, ""},
                {"BLAKE2b_256", Const, 9, ""},
                {"BLAKE2b_384", Const, 9, ""},
                {"BLAKE2b_512", Const, 9, ""},
                {"BLAKE2s_256", Const, 9, ""},
+               {"Decapsulator", Type, 26, ""},
                {"Decrypter", Type, 5, ""},
                {"DecrypterOpts", Type, 5, ""},
+               {"Encapsulator", Type, 26, ""},
                {"Hash", Type, 0, ""},
                {"MD4", Const, 0, ""},
                {"MD5", Const, 0, ""},
@@ -530,6 +565,16 @@ var PackageSymbols = map[string][]Symbol{
                {"NewCipher", Func, 0, "func(key []byte) (cipher.Block, error)"},
        },
        "crypto/cipher": {
+               {"(AEAD).NonceSize", Method, 2, ""},
+               {"(AEAD).Open", Method, 2, ""},
+               {"(AEAD).Overhead", Method, 2, ""},
+               {"(AEAD).Seal", Method, 2, ""},
+               {"(Block).BlockSize", Method, 0, ""},
+               {"(Block).Decrypt", Method, 0, ""},
+               {"(Block).Encrypt", Method, 0, ""},
+               {"(BlockMode).BlockSize", Method, 0, ""},
+               {"(BlockMode).CryptBlocks", Method, 0, ""},
+               {"(Stream).XORKeyStream", Method, 0, ""},
                {"(StreamReader).Read", Method, 0, ""},
                {"(StreamWriter).Close", Method, 0, ""},
                {"(StreamWriter).Write", Method, 0, ""},
@@ -594,7 +639,13 @@ var PackageSymbols = map[string][]Symbol{
                {"(*PublicKey).Bytes", Method, 20, ""},
                {"(*PublicKey).Curve", Method, 20, ""},
                {"(*PublicKey).Equal", Method, 20, ""},
-               {"Curve", Type, 20, ""},
+               {"(Curve).GenerateKey", Method, 20, ""},
+               {"(Curve).NewPrivateKey", Method, 20, ""},
+               {"(Curve).NewPublicKey", Method, 20, ""},
+               {"(KeyExchanger).Curve", Method, 26, ""},
+               {"(KeyExchanger).ECDH", Method, 26, ""},
+               {"(KeyExchanger).PublicKey", Method, 26, ""},
+               {"KeyExchanger", Type, 26, ""},
                {"P256", Func, 20, "func() Curve"},
                {"P384", Func, 20, "func() Curve"},
                {"P521", Func, 20, "func() Curve"},
@@ -667,6 +718,12 @@ var PackageSymbols = map[string][]Symbol{
                {"(*CurveParams).Params", Method, 0, ""},
                {"(*CurveParams).ScalarBaseMult", Method, 0, ""},
                {"(*CurveParams).ScalarMult", Method, 0, ""},
+               {"(Curve).Add", Method, 0, ""},
+               {"(Curve).Double", Method, 0, ""},
+               {"(Curve).IsOnCurve", Method, 0, ""},
+               {"(Curve).Params", Method, 0, ""},
+               {"(Curve).ScalarBaseMult", Method, 0, ""},
+               {"(Curve).ScalarMult", Method, 0, ""},
                {"Curve", Type, 0, ""},
                {"CurveParams", Type, 0, ""},
                {"CurveParams.B", Field, 0, ""},
@@ -688,6 +745,7 @@ var PackageSymbols = map[string][]Symbol{
        },
        "crypto/fips140": {
                {"Enabled", Func, 24, "func() bool"},
+               {"Version", Func, 26, "func() string"},
        },
        "crypto/hkdf": {
                {"Expand", Func, 24, "func[H hash.Hash](h func() H, pseudorandomKey []byte, info string, keyLength int) ([]byte, error)"},
@@ -708,9 +766,11 @@ var PackageSymbols = map[string][]Symbol{
                {"(*DecapsulationKey1024).Bytes", Method, 24, ""},
                {"(*DecapsulationKey1024).Decapsulate", Method, 24, ""},
                {"(*DecapsulationKey1024).EncapsulationKey", Method, 24, ""},
+               {"(*DecapsulationKey1024).Encapsulator", Method, 26, ""},
                {"(*DecapsulationKey768).Bytes", Method, 24, ""},
                {"(*DecapsulationKey768).Decapsulate", Method, 24, ""},
                {"(*DecapsulationKey768).EncapsulationKey", Method, 24, ""},
+               {"(*DecapsulationKey768).Encapsulator", Method, 26, ""},
                {"(*EncapsulationKey1024).Bytes", Method, 24, ""},
                {"(*EncapsulationKey1024).Encapsulate", Method, 24, ""},
                {"(*EncapsulationKey768).Bytes", Method, 24, ""},
@@ -732,6 +792,10 @@ var PackageSymbols = map[string][]Symbol{
                {"SeedSize", Const, 24, ""},
                {"SharedKeySize", Const, 24, ""},
        },
+       "crypto/mlkem/mlkemtest": {
+               {"Encapsulate1024", Func, 26, "func(ek *mlkem.EncapsulationKey1024, random []byte) (sharedKey []byte, ciphertext []byte, err error)"},
+               {"Encapsulate768", Func, 26, "func(ek *mlkem.EncapsulationKey768, random []byte) (sharedKey []byte, ciphertext []byte, err error)"},
+       },
        "crypto/pbkdf2": {
                {"Key", Func, 24, "func[Hash hash.Hash](h func() Hash, password string, salt []byte, iter int, keyLength int) ([]byte, error)"},
        },
@@ -769,6 +833,7 @@ var PackageSymbols = map[string][]Symbol{
                {"DecryptPKCS1v15", Func, 0, "func(random io.Reader, priv *PrivateKey, ciphertext []byte) ([]byte, error)"},
                {"DecryptPKCS1v15SessionKey", Func, 0, "func(random io.Reader, priv *PrivateKey, ciphertext []byte, key []byte) error"},
                {"EncryptOAEP", Func, 0, "func(hash hash.Hash, random io.Reader, pub *PublicKey, msg []byte, label []byte) ([]byte, error)"},
+               {"EncryptOAEPWithOptions", Func, 26, "func(random io.Reader, pub *PublicKey, msg []byte, opts *OAEPOptions) ([]byte, error)"},
                {"EncryptPKCS1v15", Func, 0, "func(random io.Reader, pub *PublicKey, msg []byte) ([]byte, error)"},
                {"ErrDecryption", Var, 0, ""},
                {"ErrMessageTooLong", Var, 0, ""},
@@ -921,6 +986,8 @@ var PackageSymbols = map[string][]Symbol{
                {"(*SessionState).Bytes", Method, 21, ""},
                {"(AlertError).Error", Method, 21, ""},
                {"(ClientAuthType).String", Method, 15, ""},
+               {"(ClientSessionCache).Get", Method, 3, ""},
+               {"(ClientSessionCache).Put", Method, 3, ""},
                {"(CurveID).String", Method, 15, ""},
                {"(QUICEncryptionLevel).String", Method, 21, ""},
                {"(RecordHeaderError).Error", Method, 6, ""},
@@ -953,6 +1020,7 @@ var PackageSymbols = map[string][]Symbol{
                {"ClientHelloInfo.CipherSuites", Field, 4, ""},
                {"ClientHelloInfo.Conn", Field, 8, ""},
                {"ClientHelloInfo.Extensions", Field, 24, ""},
+               {"ClientHelloInfo.HelloRetryRequest", Field, 26, ""},
                {"ClientHelloInfo.ServerName", Field, 4, ""},
                {"ClientHelloInfo.SignatureSchemes", Field, 8, ""},
                {"ClientHelloInfo.SupportedCurves", Field, 4, ""},
@@ -1001,6 +1069,7 @@ var PackageSymbols = map[string][]Symbol{
                {"ConnectionState.DidResume", Field, 1, ""},
                {"ConnectionState.ECHAccepted", Field, 23, ""},
                {"ConnectionState.HandshakeComplete", Field, 0, ""},
+               {"ConnectionState.HelloRetryRequest", Field, 26, ""},
                {"ConnectionState.NegotiatedProtocol", Field, 0, ""},
                {"ConnectionState.NegotiatedProtocolIsMutual", Field, 0, ""},
                {"ConnectionState.OCSPResponse", Field, 5, ""},
@@ -1055,8 +1124,10 @@ var PackageSymbols = map[string][]Symbol{
                {"QUICEncryptionLevelEarly", Const, 21, ""},
                {"QUICEncryptionLevelHandshake", Const, 21, ""},
                {"QUICEncryptionLevelInitial", Const, 21, ""},
+               {"QUICErrorEvent", Const, 26, ""},
                {"QUICEvent", Type, 21, ""},
                {"QUICEvent.Data", Field, 21, ""},
+               {"QUICEvent.Err", Field, 26, ""},
                {"QUICEvent.Kind", Field, 21, ""},
                {"QUICEvent.Level", Field, 21, ""},
                {"QUICEvent.SessionState", Field, 23, ""},
@@ -1151,8 +1222,10 @@ var PackageSymbols = map[string][]Symbol{
                {"(*RevocationList).CheckSignatureFrom", Method, 19, ""},
                {"(CertificateInvalidError).Error", Method, 0, ""},
                {"(ConstraintViolationError).Error", Method, 0, ""},
+               {"(ExtKeyUsage).String", Method, 26, ""},
                {"(HostnameError).Error", Method, 0, ""},
                {"(InsecureAlgorithmError).Error", Method, 6, ""},
+               {"(KeyUsage).String", Method, 26, ""},
                {"(OID).AppendBinary", Method, 24, ""},
                {"(OID).AppendText", Method, 24, ""},
                {"(OID).Equal", Method, 22, ""},
@@ -1516,6 +1589,9 @@ var PackageSymbols = map[string][]Symbol{
                {"(NullInt64).Value", Method, 0, ""},
                {"(NullString).Value", Method, 0, ""},
                {"(NullTime).Value", Method, 13, ""},
+               {"(Result).LastInsertId", Method, 0, ""},
+               {"(Result).RowsAffected", Method, 0, ""},
+               {"(Scanner).Scan", Method, 0, ""},
                {"ColumnType", Type, 8, ""},
                {"Conn", Type, 9, ""},
                {"DB", Type, 0, ""},
@@ -1547,8 +1623,6 @@ var PackageSymbols = map[string][]Symbol{
                {"NamedArg.Name", Field, 8, ""},
                {"NamedArg.Value", Field, 8, ""},
                {"Null", Type, 22, ""},
-               {"Null.V", Field, 22, ""},
-               {"Null.Valid", Field, 22, ""},
                {"NullBool", Type, 0, ""},
                {"NullBool.Bool", Field, 0, ""},
                {"NullBool.Valid", Field, 0, ""},
@@ -1591,10 +1665,72 @@ var PackageSymbols = map[string][]Symbol{
                {"TxOptions.ReadOnly", Field, 8, ""},
        },
        "database/sql/driver": {
+               {"(ColumnConverter).ColumnConverter", Method, 0, ""},
+               {"(Conn).Begin", Method, 0, ""},
+               {"(Conn).Close", Method, 0, ""},
+               {"(Conn).Prepare", Method, 0, ""},
+               {"(ConnBeginTx).BeginTx", Method, 8, ""},
+               {"(ConnPrepareContext).PrepareContext", Method, 8, ""},
+               {"(Connector).Connect", Method, 10, ""},
+               {"(Connector).Driver", Method, 10, ""},
+               {"(Driver).Open", Method, 0, ""},
+               {"(DriverContext).OpenConnector", Method, 10, ""},
+               {"(Execer).Exec", Method, 0, ""},
+               {"(ExecerContext).ExecContext", Method, 8, ""},
+               {"(NamedValueChecker).CheckNamedValue", Method, 9, ""},
                {"(NotNull).ConvertValue", Method, 0, ""},
                {"(Null).ConvertValue", Method, 0, ""},
+               {"(Pinger).Ping", Method, 8, ""},
+               {"(Queryer).Query", Method, 1, ""},
+               {"(QueryerContext).QueryContext", Method, 8, ""},
+               {"(Result).LastInsertId", Method, 0, ""},
+               {"(Result).RowsAffected", Method, 0, ""},
+               {"(Rows).Close", Method, 0, ""},
+               {"(Rows).Columns", Method, 0, ""},
+               {"(Rows).Next", Method, 0, ""},
                {"(RowsAffected).LastInsertId", Method, 0, ""},
                {"(RowsAffected).RowsAffected", Method, 0, ""},
+               {"(RowsColumnScanner).Close", Method, 26, ""},
+               {"(RowsColumnScanner).Columns", Method, 26, ""},
+               {"(RowsColumnScanner).Next", Method, 26, ""},
+               {"(RowsColumnScanner).ScanColumn", Method, 26, ""},
+               {"(RowsColumnTypeDatabaseTypeName).Close", Method, 8, ""},
+               {"(RowsColumnTypeDatabaseTypeName).ColumnTypeDatabaseTypeName", Method, 8, ""},
+               {"(RowsColumnTypeDatabaseTypeName).Columns", Method, 8, ""},
+               {"(RowsColumnTypeDatabaseTypeName).Next", Method, 8, ""},
+               {"(RowsColumnTypeLength).Close", Method, 8, ""},
+               {"(RowsColumnTypeLength).ColumnTypeLength", Method, 8, ""},
+               {"(RowsColumnTypeLength).Columns", Method, 8, ""},
+               {"(RowsColumnTypeLength).Next", Method, 8, ""},
+               {"(RowsColumnTypeNullable).Close", Method, 8, ""},
+               {"(RowsColumnTypeNullable).ColumnTypeNullable", Method, 8, ""},
+               {"(RowsColumnTypeNullable).Columns", Method, 8, ""},
+               {"(RowsColumnTypeNullable).Next", Method, 8, ""},
+               {"(RowsColumnTypePrecisionScale).Close", Method, 8, ""},
+               {"(RowsColumnTypePrecisionScale).ColumnTypePrecisionScale", Method, 8, ""},
+               {"(RowsColumnTypePrecisionScale).Columns", Method, 8, ""},
+               {"(RowsColumnTypePrecisionScale).Next", Method, 8, ""},
+               {"(RowsColumnTypeScanType).Close", Method, 8, ""},
+               {"(RowsColumnTypeScanType).ColumnTypeScanType", Method, 8, ""},
+               {"(RowsColumnTypeScanType).Columns", Method, 8, ""},
+               {"(RowsColumnTypeScanType).Next", Method, 8, ""},
+               {"(RowsNextResultSet).Close", Method, 8, ""},
+               {"(RowsNextResultSet).Columns", Method, 8, ""},
+               {"(RowsNextResultSet).HasNextResultSet", Method, 8, ""},
+               {"(RowsNextResultSet).Next", Method, 8, ""},
+               {"(RowsNextResultSet).NextResultSet", Method, 8, ""},
+               {"(SessionResetter).ResetSession", Method, 10, ""},
+               {"(Stmt).Close", Method, 0, ""},
+               {"(Stmt).Exec", Method, 0, ""},
+               {"(Stmt).NumInput", Method, 0, ""},
+               {"(Stmt).Query", Method, 0, ""},
+               {"(StmtExecContext).ExecContext", Method, 8, ""},
+               {"(StmtQueryContext).QueryContext", Method, 8, ""},
+               {"(Tx).Commit", Method, 0, ""},
+               {"(Tx).Rollback", Method, 0, ""},
+               {"(Validator).IsValid", Method, 15, ""},
+               {"(ValueConverter).ConvertValue", Method, 0, ""},
+               {"(Valuer).Value", Method, 0, ""},
                {"Bool", Var, 0, ""},
                {"ColumnConverter", Type, 0, ""},
                {"Conn", Type, 0, ""},
@@ -1756,6 +1892,9 @@ var PackageSymbols = map[string][]Symbol{
                {"(DecodeError).Error", Method, 0, ""},
                {"(Tag).GoString", Method, 0, ""},
                {"(Tag).String", Method, 0, ""},
+               {"(Type).Common", Method, 0, ""},
+               {"(Type).Size", Method, 0, ""},
+               {"(Type).String", Method, 0, ""},
                {"AddrType", Type, 0, ""},
                {"AddrType.BasicType", Field, 0, ""},
                {"ArrayType", Type, 0, ""},
@@ -3163,6 +3302,7 @@ var PackageSymbols = map[string][]Symbol{
                {"R_LARCH_B16", Const, 20, ""},
                {"R_LARCH_B21", Const, 20, ""},
                {"R_LARCH_B26", Const, 20, ""},
+               {"R_LARCH_CALL36", Const, 26, ""},
                {"R_LARCH_CFA", Const, 22, ""},
                {"R_LARCH_COPY", Const, 19, ""},
                {"R_LARCH_DELETE", Const, 22, ""},
@@ -3220,11 +3360,25 @@ var PackageSymbols = map[string][]Symbol{
                {"R_LARCH_SUB64", Const, 19, ""},
                {"R_LARCH_SUB8", Const, 19, ""},
                {"R_LARCH_SUB_ULEB128", Const, 22, ""},
+               {"R_LARCH_TLS_DESC32", Const, 26, ""},
+               {"R_LARCH_TLS_DESC64", Const, 26, ""},
+               {"R_LARCH_TLS_DESC64_HI12", Const, 26, ""},
+               {"R_LARCH_TLS_DESC64_LO20", Const, 26, ""},
+               {"R_LARCH_TLS_DESC64_PC_HI12", Const, 26, ""},
+               {"R_LARCH_TLS_DESC64_PC_LO20", Const, 26, ""},
+               {"R_LARCH_TLS_DESC_CALL", Const, 26, ""},
+               {"R_LARCH_TLS_DESC_HI20", Const, 26, ""},
+               {"R_LARCH_TLS_DESC_LD", Const, 26, ""},
+               {"R_LARCH_TLS_DESC_LO12", Const, 26, ""},
+               {"R_LARCH_TLS_DESC_PCREL20_S2", Const, 26, ""},
+               {"R_LARCH_TLS_DESC_PC_HI20", Const, 26, ""},
+               {"R_LARCH_TLS_DESC_PC_LO12", Const, 26, ""},
                {"R_LARCH_TLS_DTPMOD32", Const, 19, ""},
                {"R_LARCH_TLS_DTPMOD64", Const, 19, ""},
                {"R_LARCH_TLS_DTPREL32", Const, 19, ""},
                {"R_LARCH_TLS_DTPREL64", Const, 19, ""},
                {"R_LARCH_TLS_GD_HI20", Const, 20, ""},
+               {"R_LARCH_TLS_GD_PCREL20_S2", Const, 26, ""},
                {"R_LARCH_TLS_GD_PC_HI20", Const, 20, ""},
                {"R_LARCH_TLS_IE64_HI12", Const, 20, ""},
                {"R_LARCH_TLS_IE64_LO20", Const, 20, ""},
@@ -3235,11 +3389,15 @@ var PackageSymbols = map[string][]Symbol{
                {"R_LARCH_TLS_IE_PC_HI20", Const, 20, ""},
                {"R_LARCH_TLS_IE_PC_LO12", Const, 20, ""},
                {"R_LARCH_TLS_LD_HI20", Const, 20, ""},
+               {"R_LARCH_TLS_LD_PCREL20_S2", Const, 26, ""},
                {"R_LARCH_TLS_LD_PC_HI20", Const, 20, ""},
                {"R_LARCH_TLS_LE64_HI12", Const, 20, ""},
                {"R_LARCH_TLS_LE64_LO20", Const, 20, ""},
+               {"R_LARCH_TLS_LE_ADD_R", Const, 26, ""},
                {"R_LARCH_TLS_LE_HI20", Const, 20, ""},
+               {"R_LARCH_TLS_LE_HI20_R", Const, 26, ""},
                {"R_LARCH_TLS_LE_LO12", Const, 20, ""},
+               {"R_LARCH_TLS_LE_LO12_R", Const, 26, ""},
                {"R_LARCH_TLS_TPREL32", Const, 19, ""},
                {"R_LARCH_TLS_TPREL64", Const, 19, ""},
                {"R_MIPS", Type, 6, ""},
@@ -3944,6 +4102,7 @@ var PackageSymbols = map[string][]Symbol{
                {"(FatArch).ImportedSymbols", Method, 3, ""},
                {"(FatArch).Section", Method, 3, ""},
                {"(FatArch).Segment", Method, 3, ""},
+               {"(Load).Raw", Method, 0, ""},
                {"(LoadBytes).Raw", Method, 0, ""},
                {"(LoadCmd).GoString", Method, 0, ""},
                {"(LoadCmd).String", Method, 0, ""},
@@ -4590,6 +4749,12 @@ var PackageSymbols = map[string][]Symbol{
                {"FS", Type, 16, ""},
        },
        "encoding": {
+               {"(BinaryAppender).AppendBinary", Method, 24, ""},
+               {"(BinaryMarshaler).MarshalBinary", Method, 2, ""},
+               {"(BinaryUnmarshaler).UnmarshalBinary", Method, 2, ""},
+               {"(TextAppender).AppendText", Method, 24, ""},
+               {"(TextMarshaler).MarshalText", Method, 2, ""},
+               {"(TextUnmarshaler).UnmarshalText", Method, 2, ""},
                {"BinaryAppender", Type, 24, ""},
                {"BinaryMarshaler", Type, 2, ""},
                {"BinaryUnmarshaler", Type, 2, ""},
@@ -4705,6 +4870,17 @@ var PackageSymbols = map[string][]Symbol{
                {"URLEncoding", Var, 0, ""},
        },
        "encoding/binary": {
+               {"(AppendByteOrder).AppendUint16", Method, 19, ""},
+               {"(AppendByteOrder).AppendUint32", Method, 19, ""},
+               {"(AppendByteOrder).AppendUint64", Method, 19, ""},
+               {"(AppendByteOrder).String", Method, 19, ""},
+               {"(ByteOrder).PutUint16", Method, 0, ""},
+               {"(ByteOrder).PutUint32", Method, 0, ""},
+               {"(ByteOrder).PutUint64", Method, 0, ""},
+               {"(ByteOrder).String", Method, 0, ""},
+               {"(ByteOrder).Uint16", Method, 0, ""},
+               {"(ByteOrder).Uint32", Method, 0, ""},
+               {"(ByteOrder).Uint64", Method, 0, ""},
                {"Append", Func, 23, "func(buf []byte, order ByteOrder, data any) ([]byte, error)"},
                {"AppendByteOrder", Type, 19, ""},
                {"AppendUvarint", Func, 19, "func(buf []byte, x uint64) []byte"},
@@ -4767,6 +4943,8 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Decoder).DecodeValue", Method, 0, ""},
                {"(*Encoder).Encode", Method, 0, ""},
                {"(*Encoder).EncodeValue", Method, 0, ""},
+               {"(GobDecoder).GobDecode", Method, 0, ""},
+               {"(GobEncoder).GobEncode", Method, 0, ""},
                {"CommonType", Type, 0, ""},
                {"CommonType.Id", Field, 0, ""},
                {"CommonType.Name", Field, 0, ""},
@@ -4819,10 +4997,12 @@ var PackageSymbols = map[string][]Symbol{
                {"(*UnsupportedTypeError).Error", Method, 0, ""},
                {"(*UnsupportedValueError).Error", Method, 0, ""},
                {"(Delim).String", Method, 5, ""},
+               {"(Marshaler).MarshalJSON", Method, 0, ""},
                {"(Number).Float64", Method, 1, ""},
                {"(Number).Int64", Method, 1, ""},
                {"(Number).String", Method, 1, ""},
                {"(RawMessage).MarshalJSON", Method, 8, ""},
+               {"(Unmarshaler).UnmarshalJSON", Method, 0, ""},
                {"Compact", Func, 0, "func(dst *bytes.Buffer, src []byte) error"},
                {"Decoder", Type, 0, ""},
                {"Delim", Type, 5, ""},
@@ -4894,10 +5074,15 @@ var PackageSymbols = map[string][]Symbol{
                {"(CharData).Copy", Method, 0, ""},
                {"(Comment).Copy", Method, 0, ""},
                {"(Directive).Copy", Method, 0, ""},
+               {"(Marshaler).MarshalXML", Method, 2, ""},
+               {"(MarshalerAttr).MarshalXMLAttr", Method, 2, ""},
                {"(ProcInst).Copy", Method, 0, ""},
                {"(StartElement).Copy", Method, 0, ""},
                {"(StartElement).End", Method, 2, ""},
+               {"(TokenReader).Token", Method, 10, ""},
                {"(UnmarshalError).Error", Method, 0, ""},
+               {"(Unmarshaler).UnmarshalXML", Method, 2, ""},
+               {"(UnmarshalerAttr).UnmarshalXMLAttr", Method, 2, ""},
                {"Attr", Type, 0, ""},
                {"Attr.Name", Field, 0, ""},
                {"Attr.Value", Field, 0, ""},
@@ -4984,6 +5169,7 @@ var PackageSymbols = map[string][]Symbol{
                {"(*String).Value", Method, 8, ""},
                {"(Func).String", Method, 0, ""},
                {"(Func).Value", Method, 8, ""},
+               {"(Var).String", Method, 0, ""},
                {"Do", Func, 0, "func(f func(KeyValue))"},
                {"Float", Type, 0, ""},
                {"Func", Type, 0, ""},
@@ -5039,6 +5225,11 @@ var PackageSymbols = map[string][]Symbol{
                {"(*FlagSet).Var", Method, 0, ""},
                {"(*FlagSet).Visit", Method, 0, ""},
                {"(*FlagSet).VisitAll", Method, 0, ""},
+               {"(Getter).Get", Method, 2, ""},
+               {"(Getter).Set", Method, 2, ""},
+               {"(Getter).String", Method, 2, ""},
+               {"(Value).Set", Method, 0, ""},
+               {"(Value).String", Method, 0, ""},
                {"Arg", Func, 0, "func(i int) string"},
                {"Args", Func, 0, "func() []string"},
                {"Bool", Func, 0, "func(name string, value bool, usage string) *bool"},
@@ -5090,6 +5281,20 @@ var PackageSymbols = map[string][]Symbol{
                {"VisitAll", Func, 0, "func(fn func(*Flag))"},
        },
        "fmt": {
+               {"(Formatter).Format", Method, 0, ""},
+               {"(GoStringer).GoString", Method, 0, ""},
+               {"(ScanState).Read", Method, 0, ""},
+               {"(ScanState).ReadRune", Method, 0, ""},
+               {"(ScanState).SkipSpace", Method, 0, ""},
+               {"(ScanState).Token", Method, 0, ""},
+               {"(ScanState).UnreadRune", Method, 0, ""},
+               {"(ScanState).Width", Method, 0, ""},
+               {"(Scanner).Scan", Method, 0, ""},
+               {"(State).Flag", Method, 0, ""},
+               {"(State).Precision", Method, 0, ""},
+               {"(State).Width", Method, 0, ""},
+               {"(State).Write", Method, 0, ""},
+               {"(Stringer).String", Method, 0, ""},
                {"Append", Func, 19, "func(b []byte, a ...any) []byte"},
                {"Appendf", Func, 19, "func(b []byte, format string, a ...any) []byte"},
                {"Appendln", Func, 19, "func(b []byte, a ...any) []byte"},
@@ -5248,7 +5453,18 @@ var PackageSymbols = map[string][]Symbol{
                {"(CommentMap).Filter", Method, 1, ""},
                {"(CommentMap).String", Method, 1, ""},
                {"(CommentMap).Update", Method, 1, ""},
+               {"(Decl).End", Method, 0, ""},
+               {"(Decl).Pos", Method, 0, ""},
+               {"(Expr).End", Method, 0, ""},
+               {"(Expr).Pos", Method, 0, ""},
+               {"(Node).End", Method, 0, ""},
+               {"(Node).Pos", Method, 0, ""},
                {"(ObjKind).String", Method, 0, ""},
+               {"(Spec).End", Method, 0, ""},
+               {"(Spec).Pos", Method, 0, ""},
+               {"(Stmt).End", Method, 0, ""},
+               {"(Stmt).Pos", Method, 0, ""},
+               {"(Visitor).Visit", Method, 0, ""},
                {"ArrayType", Type, 0, ""},
                {"ArrayType.Elt", Field, 0, ""},
                {"ArrayType.Lbrack", Field, 0, ""},
@@ -5271,6 +5487,7 @@ var PackageSymbols = map[string][]Symbol{
                {"BasicLit", Type, 0, ""},
                {"BasicLit.Kind", Field, 0, ""},
                {"BasicLit.Value", Field, 0, ""},
+               {"BasicLit.ValueEnd", Field, 26, ""},
                {"BasicLit.ValuePos", Field, 0, ""},
                {"BinaryExpr", Type, 0, ""},
                {"BinaryExpr.Op", Field, 0, ""},
@@ -5320,7 +5537,6 @@ var PackageSymbols = map[string][]Symbol{
                {"CompositeLit.Rbrace", Field, 0, ""},
                {"CompositeLit.Type", Field, 0, ""},
                {"Con", Const, 0, ""},
-               {"Decl", Type, 0, ""},
                {"DeclStmt", Type, 0, ""},
                {"DeclStmt.Decl", Field, 0, ""},
                {"DeferStmt", Type, 0, ""},
@@ -5341,7 +5557,6 @@ var PackageSymbols = map[string][]Symbol{
                {"EmptyStmt", Type, 0, ""},
                {"EmptyStmt.Implicit", Field, 5, ""},
                {"EmptyStmt.Semicolon", Field, 0, ""},
-               {"Expr", Type, 0, ""},
                {"ExprStmt", Type, 0, ""},
                {"ExprStmt.X", Field, 0, ""},
                {"Field", Type, 0, ""},
@@ -5525,11 +5740,9 @@ var PackageSymbols = map[string][]Symbol{
                {"SliceExpr.Slice3", Field, 2, ""},
                {"SliceExpr.X", Field, 0, ""},
                {"SortImports", Func, 0, "func(fset *token.FileSet, f *File)"},
-               {"Spec", Type, 0, ""},
                {"StarExpr", Type, 0, ""},
                {"StarExpr.Star", Field, 0, ""},
                {"StarExpr.X", Field, 0, ""},
-               {"Stmt", Type, 0, ""},
                {"StructType", Type, 0, ""},
                {"StructType.Fields", Field, 0, ""},
                {"StructType.Incomplete", Field, 0, ""},
@@ -5684,10 +5897,11 @@ var PackageSymbols = map[string][]Symbol{
                {"(*SyntaxError).Error", Method, 16, ""},
                {"(*TagExpr).Eval", Method, 16, ""},
                {"(*TagExpr).String", Method, 16, ""},
+               {"(Expr).Eval", Method, 16, ""},
+               {"(Expr).String", Method, 16, ""},
                {"AndExpr", Type, 16, ""},
                {"AndExpr.X", Field, 16, ""},
                {"AndExpr.Y", Field, 16, ""},
-               {"Expr", Type, 16, ""},
                {"GoVersion", Func, 21, "func(x Expr) string"},
                {"IsGoBuild", Func, 16, "func(line string) bool"},
                {"IsPlusBuild", Func, 16, "func(line string) bool"},
@@ -5706,6 +5920,9 @@ var PackageSymbols = map[string][]Symbol{
        },
        "go/constant": {
                {"(Kind).String", Method, 18, ""},
+               {"(Value).ExactString", Method, 6, ""},
+               {"(Value).Kind", Method, 5, ""},
+               {"(Value).String", Method, 5, ""},
                {"BinaryOp", Func, 5, "func(x_ Value, op token.Token, y_ Value) Value"},
                {"BitLen", Func, 5, "func(x Value) int"},
                {"Bool", Const, 5, ""},
@@ -5744,7 +5961,6 @@ var PackageSymbols = map[string][]Symbol{
                {"UnaryOp", Func, 5, "func(op token.Token, y Value, prec uint) Value"},
                {"Unknown", Const, 5, ""},
                {"Val", Func, 13, "func(x Value) any"},
-               {"Value", Type, 5, ""},
        },
        "go/doc": {
                {"(*Package).Filter", Method, 0, ""},
@@ -5828,7 +6044,6 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Printer).HTML", Method, 19, ""},
                {"(*Printer).Markdown", Method, 19, ""},
                {"(*Printer).Text", Method, 19, ""},
-               {"Block", Type, 19, ""},
                {"Code", Type, 19, ""},
                {"Code.Text", Field, 19, ""},
                {"DefaultLookupPackage", Func, 19, "func(name string) (importPath string, ok bool)"},
@@ -5873,7 +6088,6 @@ var PackageSymbols = map[string][]Symbol{
                {"Printer.TextCodePrefix", Field, 19, ""},
                {"Printer.TextPrefix", Field, 19, ""},
                {"Printer.TextWidth", Field, 19, ""},
-               {"Text", Type, 19, ""},
        },
        "go/format": {
                {"Node", Func, 1, "func(dst io.Writer, fset *token.FileSet, node any) error"},
@@ -5945,6 +6159,7 @@ var PackageSymbols = map[string][]Symbol{
                {"(*File).AddLineColumnInfo", Method, 11, ""},
                {"(*File).AddLineInfo", Method, 0, ""},
                {"(*File).Base", Method, 0, ""},
+               {"(*File).End", Method, 26, ""},
                {"(*File).Line", Method, 0, ""},
                {"(*File).LineCount", Method, 0, ""},
                {"(*File).LineStart", Method, 12, ""},
@@ -6307,6 +6522,22 @@ var PackageSymbols = map[string][]Symbol{
                {"(Checker).PkgNameOf", Method, 22, ""},
                {"(Checker).TypeOf", Method, 5, ""},
                {"(Error).Error", Method, 5, ""},
+               {"(Importer).Import", Method, 5, ""},
+               {"(ImporterFrom).Import", Method, 6, ""},
+               {"(ImporterFrom).ImportFrom", Method, 6, ""},
+               {"(Object).Exported", Method, 5, ""},
+               {"(Object).Id", Method, 5, ""},
+               {"(Object).Name", Method, 5, ""},
+               {"(Object).Parent", Method, 5, ""},
+               {"(Object).Pkg", Method, 5, ""},
+               {"(Object).Pos", Method, 5, ""},
+               {"(Object).String", Method, 5, ""},
+               {"(Object).Type", Method, 5, ""},
+               {"(Sizes).Alignof", Method, 5, ""},
+               {"(Sizes).Offsetsof", Method, 5, ""},
+               {"(Sizes).Sizeof", Method, 5, ""},
+               {"(Type).String", Method, 5, ""},
+               {"(Type).Underlying", Method, 5, ""},
                {"(TypeAndValue).Addressable", Method, 5, ""},
                {"(TypeAndValue).Assignable", Method, 5, ""},
                {"(TypeAndValue).HasOk", Method, 5, ""},
@@ -6445,7 +6676,6 @@ var PackageSymbols = map[string][]Symbol{
                {"NewUnion", Func, 18, "func(terms []*Term) *Union"},
                {"NewVar", Func, 5, "func(pos token.Pos, pkg *Package, name string, typ Type) *Var"},
                {"Nil", Type, 5, ""},
-               {"Object", Type, 5, ""},
                {"ObjectString", Func, 5, "func(obj Object, qf Qualifier) string"},
                {"Package", Type, 5, ""},
                {"PackageVar", Const, 25, ""},
@@ -6516,6 +6746,33 @@ var PackageSymbols = map[string][]Symbol{
                {"Lang", Func, 22, "func(x string) string"},
        },
        "hash": {
+               {"(Cloner).BlockSize", Method, 25, ""},
+               {"(Cloner).Clone", Method, 25, ""},
+               {"(Cloner).Reset", Method, 25, ""},
+               {"(Cloner).Size", Method, 25, ""},
+               {"(Cloner).Sum", Method, 25, ""},
+               {"(Cloner).Write", Method, 25, ""},
+               {"(Hash).BlockSize", Method, 0, ""},
+               {"(Hash).Reset", Method, 0, ""},
+               {"(Hash).Size", Method, 0, ""},
+               {"(Hash).Sum", Method, 0, ""},
+               {"(Hash).Write", Method, 0, ""},
+               {"(Hash32).BlockSize", Method, 0, ""},
+               {"(Hash32).Reset", Method, 0, ""},
+               {"(Hash32).Size", Method, 0, ""},
+               {"(Hash32).Sum", Method, 0, ""},
+               {"(Hash32).Sum32", Method, 0, ""},
+               {"(Hash32).Write", Method, 0, ""},
+               {"(Hash64).BlockSize", Method, 0, ""},
+               {"(Hash64).Reset", Method, 0, ""},
+               {"(Hash64).Size", Method, 0, ""},
+               {"(Hash64).Sum", Method, 0, ""},
+               {"(Hash64).Sum64", Method, 0, ""},
+               {"(Hash64).Write", Method, 0, ""},
+               {"(XOF).BlockSize", Method, 25, ""},
+               {"(XOF).Read", Method, 25, ""},
+               {"(XOF).Reset", Method, 25, ""},
+               {"(XOF).Write", Method, 25, ""},
                {"Cloner", Type, 25, ""},
                {"Hash", Type, 0, ""},
                {"Hash32", Type, 0, ""},
@@ -6781,6 +7038,13 @@ var PackageSymbols = map[string][]Symbol{
                {"(*YCbCr).SubImage", Method, 0, ""},
                {"(*YCbCr).YCbCrAt", Method, 4, ""},
                {"(*YCbCr).YOffset", Method, 0, ""},
+               {"(Image).At", Method, 0, ""},
+               {"(Image).Bounds", Method, 0, ""},
+               {"(Image).ColorModel", Method, 0, ""},
+               {"(PalettedImage).At", Method, 0, ""},
+               {"(PalettedImage).Bounds", Method, 0, ""},
+               {"(PalettedImage).ColorIndexAt", Method, 0, ""},
+               {"(PalettedImage).ColorModel", Method, 0, ""},
                {"(Point).Add", Method, 0, ""},
                {"(Point).Div", Method, 0, ""},
                {"(Point).Eq", Method, 0, ""},
@@ -6789,6 +7053,10 @@ var PackageSymbols = map[string][]Symbol{
                {"(Point).Mul", Method, 0, ""},
                {"(Point).String", Method, 0, ""},
                {"(Point).Sub", Method, 0, ""},
+               {"(RGBA64Image).At", Method, 17, ""},
+               {"(RGBA64Image).Bounds", Method, 17, ""},
+               {"(RGBA64Image).ColorModel", Method, 17, ""},
+               {"(RGBA64Image).RGBA64At", Method, 17, ""},
                {"(Rectangle).Add", Method, 0, ""},
                {"(Rectangle).At", Method, 5, ""},
                {"(Rectangle).Bounds", Method, 5, ""},
@@ -6913,8 +7181,10 @@ var PackageSymbols = map[string][]Symbol{
                {"(Alpha).RGBA", Method, 0, ""},
                {"(Alpha16).RGBA", Method, 0, ""},
                {"(CMYK).RGBA", Method, 5, ""},
+               {"(Color).RGBA", Method, 0, ""},
                {"(Gray).RGBA", Method, 0, ""},
                {"(Gray16).RGBA", Method, 0, ""},
+               {"(Model).Convert", Method, 0, ""},
                {"(NRGBA).RGBA", Method, 0, ""},
                {"(NRGBA64).RGBA", Method, 0, ""},
                {"(NYCbCrA).RGBA", Method, 6, ""},
@@ -6992,7 +7262,19 @@ var PackageSymbols = map[string][]Symbol{
                {"WebSafe", Var, 2, ""},
        },
        "image/draw": {
+               {"(Drawer).Draw", Method, 2, ""},
+               {"(Image).At", Method, 0, ""},
+               {"(Image).Bounds", Method, 0, ""},
+               {"(Image).ColorModel", Method, 0, ""},
+               {"(Image).Set", Method, 0, ""},
                {"(Op).Draw", Method, 2, ""},
+               {"(Quantizer).Quantize", Method, 2, ""},
+               {"(RGBA64Image).At", Method, 17, ""},
+               {"(RGBA64Image).Bounds", Method, 17, ""},
+               {"(RGBA64Image).ColorModel", Method, 17, ""},
+               {"(RGBA64Image).RGBA64At", Method, 17, ""},
+               {"(RGBA64Image).Set", Method, 17, ""},
+               {"(RGBA64Image).SetRGBA64", Method, 17, ""},
                {"Draw", Func, 0, "func(dst Image, r image.Rectangle, src image.Image, sp image.Point, op Op)"},
                {"DrawMask", Func, 0, "func(dst Image, r image.Rectangle, src image.Image, sp image.Point, mask image.Image, mp image.Point, op Op)"},
                {"Drawer", Type, 2, ""},
@@ -7027,6 +7309,8 @@ var PackageSymbols = map[string][]Symbol{
        },
        "image/jpeg": {
                {"(FormatError).Error", Method, 0, ""},
+               {"(Reader).Read", Method, 0, ""},
+               {"(Reader).ReadByte", Method, 0, ""},
                {"(UnsupportedError).Error", Method, 0, ""},
                {"Decode", Func, 0, "func(r io.Reader) (image.Image, error)"},
                {"DecodeConfig", Func, 0, "func(r io.Reader) (image.Config, error)"},
@@ -7040,6 +7324,8 @@ var PackageSymbols = map[string][]Symbol{
        },
        "image/png": {
                {"(*Encoder).Encode", Method, 4, ""},
+               {"(EncoderBufferPool).Get", Method, 9, ""},
+               {"(EncoderBufferPool).Put", Method, 9, ""},
                {"(FormatError).Error", Method, 0, ""},
                {"(UnsupportedError).Error", Method, 0, ""},
                {"BestCompression", Const, 4, ""},
@@ -7083,6 +7369,41 @@ var PackageSymbols = map[string][]Symbol{
                {"(*SectionReader).ReadAt", Method, 0, ""},
                {"(*SectionReader).Seek", Method, 0, ""},
                {"(*SectionReader).Size", Method, 0, ""},
+               {"(ByteReader).ReadByte", Method, 0, ""},
+               {"(ByteScanner).ReadByte", Method, 0, ""},
+               {"(ByteScanner).UnreadByte", Method, 0, ""},
+               {"(ByteWriter).WriteByte", Method, 1, ""},
+               {"(Closer).Close", Method, 0, ""},
+               {"(ReadCloser).Close", Method, 0, ""},
+               {"(ReadCloser).Read", Method, 0, ""},
+               {"(ReadSeekCloser).Close", Method, 16, ""},
+               {"(ReadSeekCloser).Read", Method, 16, ""},
+               {"(ReadSeekCloser).Seek", Method, 16, ""},
+               {"(ReadSeeker).Read", Method, 0, ""},
+               {"(ReadSeeker).Seek", Method, 0, ""},
+               {"(ReadWriteCloser).Close", Method, 0, ""},
+               {"(ReadWriteCloser).Read", Method, 0, ""},
+               {"(ReadWriteCloser).Write", Method, 0, ""},
+               {"(ReadWriteSeeker).Read", Method, 0, ""},
+               {"(ReadWriteSeeker).Seek", Method, 0, ""},
+               {"(ReadWriteSeeker).Write", Method, 0, ""},
+               {"(ReadWriter).Read", Method, 0, ""},
+               {"(ReadWriter).Write", Method, 0, ""},
+               {"(Reader).Read", Method, 0, ""},
+               {"(ReaderAt).ReadAt", Method, 0, ""},
+               {"(ReaderFrom).ReadFrom", Method, 0, ""},
+               {"(RuneReader).ReadRune", Method, 0, ""},
+               {"(RuneScanner).ReadRune", Method, 0, ""},
+               {"(RuneScanner).UnreadRune", Method, 0, ""},
+               {"(Seeker).Seek", Method, 0, ""},
+               {"(StringWriter).WriteString", Method, 12, ""},
+               {"(WriteCloser).Close", Method, 0, ""},
+               {"(WriteCloser).Write", Method, 0, ""},
+               {"(WriteSeeker).Seek", Method, 0, ""},
+               {"(WriteSeeker).Write", Method, 0, ""},
+               {"(Writer).Write", Method, 0, ""},
+               {"(WriterAt).WriteAt", Method, 0, ""},
+               {"(WriterTo).WriteTo", Method, 0, ""},
                {"ByteReader", Type, 0, ""},
                {"ByteScanner", Type, 0, ""},
                {"ByteWriter", Type, 1, ""},
@@ -7142,11 +7463,42 @@ var PackageSymbols = map[string][]Symbol{
                {"(*PathError).Error", Method, 16, ""},
                {"(*PathError).Timeout", Method, 16, ""},
                {"(*PathError).Unwrap", Method, 16, ""},
+               {"(DirEntry).Info", Method, 16, ""},
+               {"(DirEntry).IsDir", Method, 16, ""},
+               {"(DirEntry).Name", Method, 16, ""},
+               {"(DirEntry).Type", Method, 16, ""},
+               {"(FS).Open", Method, 16, ""},
+               {"(File).Close", Method, 16, ""},
+               {"(File).Read", Method, 16, ""},
+               {"(File).Stat", Method, 16, ""},
+               {"(FileInfo).IsDir", Method, 16, ""},
+               {"(FileInfo).ModTime", Method, 16, ""},
+               {"(FileInfo).Mode", Method, 16, ""},
+               {"(FileInfo).Name", Method, 16, ""},
+               {"(FileInfo).Size", Method, 16, ""},
+               {"(FileInfo).Sys", Method, 16, ""},
                {"(FileMode).IsDir", Method, 16, ""},
                {"(FileMode).IsRegular", Method, 16, ""},
                {"(FileMode).Perm", Method, 16, ""},
                {"(FileMode).String", Method, 16, ""},
                {"(FileMode).Type", Method, 16, ""},
+               {"(GlobFS).Glob", Method, 16, ""},
+               {"(GlobFS).Open", Method, 16, ""},
+               {"(ReadDirFS).Open", Method, 16, ""},
+               {"(ReadDirFS).ReadDir", Method, 16, ""},
+               {"(ReadDirFile).Close", Method, 16, ""},
+               {"(ReadDirFile).Read", Method, 16, ""},
+               {"(ReadDirFile).ReadDir", Method, 16, ""},
+               {"(ReadDirFile).Stat", Method, 16, ""},
+               {"(ReadFileFS).Open", Method, 16, ""},
+               {"(ReadFileFS).ReadFile", Method, 16, ""},
+               {"(ReadLinkFS).Lstat", Method, 25, ""},
+               {"(ReadLinkFS).Open", Method, 25, ""},
+               {"(ReadLinkFS).ReadLink", Method, 25, ""},
+               {"(StatFS).Open", Method, 16, ""},
+               {"(StatFS).Stat", Method, 16, ""},
+               {"(SubFS).Open", Method, 16, ""},
+               {"(SubFS).Sub", Method, 16, ""},
                {"DirEntry", Type, 16, ""},
                {"ErrClosed", Var, 16, ""},
                {"ErrExist", Var, 16, ""},
@@ -7299,12 +7651,18 @@ var PackageSymbols = map[string][]Symbol{
                {"(*TextHandler).WithGroup", Method, 21, ""},
                {"(Attr).Equal", Method, 21, ""},
                {"(Attr).String", Method, 21, ""},
+               {"(Handler).Enabled", Method, 21, ""},
+               {"(Handler).Handle", Method, 21, ""},
+               {"(Handler).WithAttrs", Method, 21, ""},
+               {"(Handler).WithGroup", Method, 21, ""},
                {"(Kind).String", Method, 21, ""},
                {"(Level).AppendText", Method, 24, ""},
                {"(Level).Level", Method, 21, ""},
                {"(Level).MarshalJSON", Method, 21, ""},
                {"(Level).MarshalText", Method, 21, ""},
                {"(Level).String", Method, 21, ""},
+               {"(Leveler).Level", Method, 21, ""},
+               {"(LogValuer).LogValue", Method, 21, ""},
                {"(Record).Attrs", Method, 21, ""},
                {"(Record).Clone", Method, 21, ""},
                {"(Record).NumAttrs", Method, 21, ""},
@@ -7833,6 +8191,11 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Rand).Uint32", Method, 0, ""},
                {"(*Rand).Uint64", Method, 8, ""},
                {"(*Zipf).Uint64", Method, 0, ""},
+               {"(Source).Int63", Method, 0, ""},
+               {"(Source).Seed", Method, 0, ""},
+               {"(Source64).Int63", Method, 8, ""},
+               {"(Source64).Seed", Method, 8, ""},
+               {"(Source64).Uint64", Method, 8, ""},
                {"ExpFloat64", Func, 0, "func() float64"},
                {"Float32", Func, 0, "func() float32"},
                {"Float64", Func, 0, "func() float64"},
@@ -7888,6 +8251,7 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Rand).Uint64N", Method, 22, ""},
                {"(*Rand).UintN", Method, 22, ""},
                {"(*Zipf).Uint64", Method, 22, ""},
+               {"(Source).Uint64", Method, 22, ""},
                {"ChaCha8", Type, 22, ""},
                {"ExpFloat64", Func, 22, "func() float64"},
                {"Float32", Func, 22, "func() float32"},
@@ -7951,6 +8315,10 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Writer).FormDataContentType", Method, 0, ""},
                {"(*Writer).SetBoundary", Method, 1, ""},
                {"(*Writer).WriteField", Method, 0, ""},
+               {"(File).Close", Method, 0, ""},
+               {"(File).Read", Method, 0, ""},
+               {"(File).ReadAt", Method, 0, ""},
+               {"(File).Seek", Method, 0, ""},
                {"ErrMessageTooLarge", Var, 9, ""},
                {"File", Type, 0, ""},
                {"FileContentDisposition", Func, 25, "func(fieldname string, filename string) string"},
@@ -8135,6 +8503,19 @@ var PackageSymbols = map[string][]Symbol{
                {"(*UnixListener).SetDeadline", Method, 0, ""},
                {"(*UnixListener).SetUnlinkOnClose", Method, 8, ""},
                {"(*UnixListener).SyscallConn", Method, 10, ""},
+               {"(Addr).Network", Method, 0, ""},
+               {"(Addr).String", Method, 0, ""},
+               {"(Conn).Close", Method, 0, ""},
+               {"(Conn).LocalAddr", Method, 0, ""},
+               {"(Conn).Read", Method, 0, ""},
+               {"(Conn).RemoteAddr", Method, 0, ""},
+               {"(Conn).SetDeadline", Method, 0, ""},
+               {"(Conn).SetReadDeadline", Method, 0, ""},
+               {"(Conn).SetWriteDeadline", Method, 0, ""},
+               {"(Conn).Write", Method, 0, ""},
+               {"(Error).Error", Method, 0, ""},
+               {"(Error).Temporary", Method, 0, ""},
+               {"(Error).Timeout", Method, 0, ""},
                {"(Flags).String", Method, 0, ""},
                {"(HardwareAddr).String", Method, 0, ""},
                {"(IP).AppendText", Method, 24, ""},
@@ -8158,6 +8539,16 @@ var PackageSymbols = map[string][]Symbol{
                {"(InvalidAddrError).Error", Method, 0, ""},
                {"(InvalidAddrError).Temporary", Method, 0, ""},
                {"(InvalidAddrError).Timeout", Method, 0, ""},
+               {"(Listener).Accept", Method, 0, ""},
+               {"(Listener).Addr", Method, 0, ""},
+               {"(Listener).Close", Method, 0, ""},
+               {"(PacketConn).Close", Method, 0, ""},
+               {"(PacketConn).LocalAddr", Method, 0, ""},
+               {"(PacketConn).ReadFrom", Method, 0, ""},
+               {"(PacketConn).SetDeadline", Method, 0, ""},
+               {"(PacketConn).SetReadDeadline", Method, 0, ""},
+               {"(PacketConn).SetWriteDeadline", Method, 0, ""},
+               {"(PacketConn).WriteTo", Method, 0, ""},
                {"(UnknownNetworkError).Error", Method, 0, ""},
                {"(UnknownNetworkError).Temporary", Method, 0, ""},
                {"(UnknownNetworkError).Timeout", Method, 0, ""},
@@ -8333,6 +8724,14 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Client).Head", Method, 0, ""},
                {"(*Client).Post", Method, 0, ""},
                {"(*Client).PostForm", Method, 0, ""},
+               {"(*ClientConn).Available", Method, 26, ""},
+               {"(*ClientConn).Close", Method, 26, ""},
+               {"(*ClientConn).Err", Method, 26, ""},
+               {"(*ClientConn).InFlight", Method, 26, ""},
+               {"(*ClientConn).Release", Method, 26, ""},
+               {"(*ClientConn).Reserve", Method, 26, ""},
+               {"(*ClientConn).RoundTrip", Method, 26, ""},
+               {"(*ClientConn).SetStateHook", Method, 26, ""},
                {"(*Cookie).String", Method, 0, ""},
                {"(*Cookie).Valid", Method, 18, ""},
                {"(*CrossOriginProtection).AddInsecureBypassPattern", Method, 25, ""},
@@ -8392,10 +8791,22 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Transport).CancelRequest", Method, 1, ""},
                {"(*Transport).Clone", Method, 13, ""},
                {"(*Transport).CloseIdleConnections", Method, 0, ""},
+               {"(*Transport).NewClientConn", Method, 26, ""},
                {"(*Transport).RegisterProtocol", Method, 0, ""},
                {"(*Transport).RoundTrip", Method, 0, ""},
+               {"(CloseNotifier).CloseNotify", Method, 1, ""},
                {"(ConnState).String", Method, 3, ""},
+               {"(CookieJar).Cookies", Method, 0, ""},
+               {"(CookieJar).SetCookies", Method, 0, ""},
                {"(Dir).Open", Method, 0, ""},
+               {"(File).Close", Method, 0, ""},
+               {"(File).Read", Method, 0, ""},
+               {"(File).Readdir", Method, 0, ""},
+               {"(File).Seek", Method, 0, ""},
+               {"(File).Stat", Method, 0, ""},
+               {"(FileSystem).Open", Method, 0, ""},
+               {"(Flusher).Flush", Method, 0, ""},
+               {"(Handler).ServeHTTP", Method, 0, ""},
                {"(HandlerFunc).ServeHTTP", Method, 0, ""},
                {"(Header).Add", Method, 0, ""},
                {"(Header).Clone", Method, 13, ""},
@@ -8405,10 +8816,16 @@ var PackageSymbols = map[string][]Symbol{
                {"(Header).Values", Method, 14, ""},
                {"(Header).Write", Method, 0, ""},
                {"(Header).WriteSubset", Method, 0, ""},
+               {"(Hijacker).Hijack", Method, 0, ""},
                {"(Protocols).HTTP1", Method, 24, ""},
                {"(Protocols).HTTP2", Method, 24, ""},
                {"(Protocols).String", Method, 24, ""},
                {"(Protocols).UnencryptedHTTP2", Method, 24, ""},
+               {"(Pusher).Push", Method, 8, ""},
+               {"(ResponseWriter).Header", Method, 0, ""},
+               {"(ResponseWriter).Write", Method, 0, ""},
+               {"(ResponseWriter).WriteHeader", Method, 0, ""},
+               {"(RoundTripper).RoundTrip", Method, 0, ""},
                {"AllowQuerySemicolons", Func, 17, "func(h Handler) Handler"},
                {"CanonicalHeaderKey", Func, 0, "func(s string) string"},
                {"Client", Type, 0, ""},
@@ -8416,6 +8833,7 @@ var PackageSymbols = map[string][]Symbol{
                {"Client.Jar", Field, 0, ""},
                {"Client.Timeout", Field, 3, ""},
                {"Client.Transport", Field, 0, ""},
+               {"ClientConn", Type, 26, ""},
                {"CloseNotifier", Type, 1, ""},
                {"ConnState", Type, 3, ""},
                {"Cookie", Type, 0, ""},
@@ -8726,6 +9144,8 @@ var PackageSymbols = map[string][]Symbol{
        "net/http/cookiejar": {
                {"(*Jar).Cookies", Method, 1, ""},
                {"(*Jar).SetCookies", Method, 1, ""},
+               {"(PublicSuffixList).PublicSuffix", Method, 1, ""},
+               {"(PublicSuffixList).String", Method, 1, ""},
                {"Jar", Type, 1, ""},
                {"New", Func, 1, "func(o *Options) (*Jar, error)"},
                {"Options", Type, 1, ""},
@@ -8819,6 +9239,8 @@ var PackageSymbols = map[string][]Symbol{
                {"(*ServerConn).Pending", Method, 0, ""},
                {"(*ServerConn).Read", Method, 0, ""},
                {"(*ServerConn).Write", Method, 0, ""},
+               {"(BufferPool).Get", Method, 6, ""},
+               {"(BufferPool).Put", Method, 6, ""},
                {"BufferPool", Type, 6, ""},
                {"ClientConn", Type, 0, ""},
                {"DumpRequest", Func, 0, "func(req *http.Request, body bool) ([]byte, error)"},
@@ -8972,6 +9394,14 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Server).ServeConn", Method, 0, ""},
                {"(*Server).ServeHTTP", Method, 0, ""},
                {"(*Server).ServeRequest", Method, 0, ""},
+               {"(ClientCodec).Close", Method, 0, ""},
+               {"(ClientCodec).ReadResponseBody", Method, 0, ""},
+               {"(ClientCodec).ReadResponseHeader", Method, 0, ""},
+               {"(ClientCodec).WriteRequest", Method, 0, ""},
+               {"(ServerCodec).Close", Method, 0, ""},
+               {"(ServerCodec).ReadRequestBody", Method, 0, ""},
+               {"(ServerCodec).ReadRequestHeader", Method, 0, ""},
+               {"(ServerCodec).WriteResponse", Method, 0, ""},
                {"(ServerError).Error", Method, 0, ""},
                {"Accept", Func, 0, "func(lis net.Listener)"},
                {"Call", Type, 0, ""},
@@ -9030,6 +9460,8 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Client).StartTLS", Method, 0, ""},
                {"(*Client).TLSConnectionState", Method, 5, ""},
                {"(*Client).Verify", Method, 0, ""},
+               {"(Auth).Next", Method, 0, ""},
+               {"(Auth).Start", Method, 0, ""},
                {"Auth", Type, 0, ""},
                {"CRAMMD5Auth", Func, 0, "func(username string, secret string) Auth"},
                {"Client", Type, 0, ""},
@@ -9241,10 +9673,18 @@ var PackageSymbols = map[string][]Symbol{
                {"(*SyscallError).Error", Method, 0, ""},
                {"(*SyscallError).Timeout", Method, 10, ""},
                {"(*SyscallError).Unwrap", Method, 13, ""},
+               {"(FileInfo).IsDir", Method, 0, ""},
+               {"(FileInfo).ModTime", Method, 0, ""},
+               {"(FileInfo).Mode", Method, 0, ""},
+               {"(FileInfo).Name", Method, 0, ""},
+               {"(FileInfo).Size", Method, 0, ""},
+               {"(FileInfo).Sys", Method, 0, ""},
                {"(FileMode).IsDir", Method, 0, ""},
                {"(FileMode).IsRegular", Method, 1, ""},
                {"(FileMode).Perm", Method, 0, ""},
                {"(FileMode).String", Method, 0, ""},
+               {"(Signal).Signal", Method, 0, ""},
+               {"(Signal).String", Method, 0, ""},
                {"Args", Var, 0, ""},
                {"Chdir", Func, 0, "func(dir string) error"},
                {"Chmod", Func, 0, "func(name string, mode FileMode) error"},
@@ -9521,6 +9961,45 @@ var PackageSymbols = map[string][]Symbol{
                {"(StructField).IsExported", Method, 17, ""},
                {"(StructTag).Get", Method, 0, ""},
                {"(StructTag).Lookup", Method, 7, ""},
+               {"(Type).Align", Method, 0, ""},
+               {"(Type).AssignableTo", Method, 0, ""},
+               {"(Type).Bits", Method, 0, ""},
+               {"(Type).CanSeq", Method, 23, ""},
+               {"(Type).CanSeq2", Method, 23, ""},
+               {"(Type).ChanDir", Method, 0, ""},
+               {"(Type).Comparable", Method, 4, ""},
+               {"(Type).ConvertibleTo", Method, 1, ""},
+               {"(Type).Elem", Method, 0, ""},
+               {"(Type).Field", Method, 0, ""},
+               {"(Type).FieldAlign", Method, 0, ""},
+               {"(Type).FieldByIndex", Method, 0, ""},
+               {"(Type).FieldByName", Method, 0, ""},
+               {"(Type).FieldByNameFunc", Method, 0, ""},
+               {"(Type).Fields", Method, 26, ""},
+               {"(Type).Implements", Method, 0, ""},
+               {"(Type).In", Method, 0, ""},
+               {"(Type).Ins", Method, 26, ""},
+               {"(Type).IsVariadic", Method, 0, ""},
+               {"(Type).Key", Method, 0, ""},
+               {"(Type).Kind", Method, 0, ""},
+               {"(Type).Len", Method, 0, ""},
+               {"(Type).Method", Method, 0, ""},
+               {"(Type).MethodByName", Method, 0, ""},
+               {"(Type).Methods", Method, 26, ""},
+               {"(Type).Name", Method, 0, ""},
+               {"(Type).NumField", Method, 0, ""},
+               {"(Type).NumIn", Method, 0, ""},
+               {"(Type).NumMethod", Method, 0, ""},
+               {"(Type).NumOut", Method, 0, ""},
+               {"(Type).Out", Method, 0, ""},
+               {"(Type).Outs", Method, 26, ""},
+               {"(Type).OverflowComplex", Method, 23, ""},
+               {"(Type).OverflowFloat", Method, 23, ""},
+               {"(Type).OverflowInt", Method, 23, ""},
+               {"(Type).OverflowUint", Method, 23, ""},
+               {"(Type).PkgPath", Method, 0, ""},
+               {"(Type).Size", Method, 0, ""},
+               {"(Type).String", Method, 0, ""},
                {"(Value).Addr", Method, 0, ""},
                {"(Value).Bool", Method, 0, ""},
                {"(Value).Bytes", Method, 0, ""},
@@ -9547,6 +10026,7 @@ var PackageSymbols = map[string][]Symbol{
                {"(Value).FieldByIndexErr", Method, 18, ""},
                {"(Value).FieldByName", Method, 0, ""},
                {"(Value).FieldByNameFunc", Method, 0, ""},
+               {"(Value).Fields", Method, 26, ""},
                {"(Value).Float", Method, 0, ""},
                {"(Value).Grow", Method, 20, ""},
                {"(Value).Index", Method, 0, ""},
@@ -9563,6 +10043,7 @@ var PackageSymbols = map[string][]Symbol{
                {"(Value).MapRange", Method, 12, ""},
                {"(Value).Method", Method, 0, ""},
                {"(Value).MethodByName", Method, 0, ""},
+               {"(Value).Methods", Method, 26, ""},
                {"(Value).NumField", Method, 0, ""},
                {"(Value).NumMethod", Method, 0, ""},
                {"(Value).OverflowComplex", Method, 0, ""},
@@ -9678,7 +10159,6 @@ var PackageSymbols = map[string][]Symbol{
                {"StructOf", Func, 7, "func(fields []StructField) Type"},
                {"StructTag", Type, 0, ""},
                {"Swapper", Func, 8, "func(slice any) func(i int, j int)"},
-               {"Type", Type, 0, ""},
                {"TypeAssert", Func, 25, "func[T any](v Value) (T, bool)"},
                {"TypeFor", Func, 22, "func[T any]() Type"},
                {"TypeOf", Func, 0, "func(i any) Type"},
@@ -9880,6 +10360,8 @@ var PackageSymbols = map[string][]Symbol{
                {"(*TypeAssertionError).Error", Method, 0, ""},
                {"(*TypeAssertionError).RuntimeError", Method, 0, ""},
                {"(Cleanup).Stop", Method, 24, ""},
+               {"(Error).Error", Method, 0, ""},
+               {"(Error).RuntimeError", Method, 0, ""},
                {"AddCleanup", Func, 24, "func[T, S any](ptr *T, cleanup func(S), arg S) Cleanup"},
                {"BlockProfile", Func, 1, "func(p []BlockProfileRecord) (n int, ok bool)"},
                {"BlockProfileRecord", Type, 1, ""},
@@ -10154,6 +10636,9 @@ var PackageSymbols = map[string][]Symbol{
                {"(IntSlice).Search", Method, 0, ""},
                {"(IntSlice).Sort", Method, 0, ""},
                {"(IntSlice).Swap", Method, 0, ""},
+               {"(Interface).Len", Method, 0, ""},
+               {"(Interface).Less", Method, 0, ""},
+               {"(Interface).Swap", Method, 0, ""},
                {"(StringSlice).Len", Method, 0, ""},
                {"(StringSlice).Less", Method, 0, ""},
                {"(StringSlice).Search", Method, 0, ""},
@@ -10345,6 +10830,8 @@ var PackageSymbols = map[string][]Symbol{
                {"(*WaitGroup).Done", Method, 0, ""},
                {"(*WaitGroup).Go", Method, 25, ""},
                {"(*WaitGroup).Wait", Method, 0, ""},
+               {"(Locker).Lock", Method, 0, ""},
+               {"(Locker).Unlock", Method, 0, ""},
                {"Cond", Type, 0, ""},
                {"Cond.L", Field, 0, ""},
                {"Locker", Type, 0, ""},
@@ -10486,10 +10973,14 @@ var PackageSymbols = map[string][]Symbol{
                {"(*Timeval).Nano", Method, 0, ""},
                {"(*Timeval).Nanoseconds", Method, 0, ""},
                {"(*Timeval).Unix", Method, 0, ""},
+               {"(Conn).SyscallConn", Method, 9, ""},
                {"(Errno).Error", Method, 0, ""},
                {"(Errno).Is", Method, 13, ""},
                {"(Errno).Temporary", Method, 0, ""},
                {"(Errno).Timeout", Method, 0, ""},
+               {"(RawConn).Control", Method, 9, ""},
+               {"(RawConn).Read", Method, 9, ""},
+               {"(RawConn).Write", Method, 9, ""},
                {"(Signal).Signal", Method, 0, ""},
                {"(Signal).String", Method, 0, ""},
                {"(Token).Close", Method, 0, ""},
@@ -14409,7 +14900,7 @@ var PackageSymbols = map[string][]Symbol{
                {"RouteMessage.Data", Field, 0, ""},
                {"RouteMessage.Header", Field, 0, ""},
                {"RouteRIB", Func, 0, ""},
-               {"RoutingMessage", Type, 0, ""},
+               {"RoutingMessage", Type, 14, ""},
                {"RtAttr", Type, 0, ""},
                {"RtAttr.Len", Field, 0, ""},
                {"RtAttr.Type", Field, 0, ""},
@@ -15895,7 +16386,6 @@ var PackageSymbols = map[string][]Symbol{
                {"SockFprog.Filter", Field, 0, ""},
                {"SockFprog.Len", Field, 0, ""},
                {"SockFprog.Pad_cgo_0", Field, 0, ""},
-               {"Sockaddr", Type, 0, ""},
                {"SockaddrDatalink", Type, 0, ""},
                {"SockaddrDatalink.Alen", Field, 0, ""},
                {"SockaddrDatalink.Data", Field, 0, ""},
@@ -16801,6 +17291,29 @@ var PackageSymbols = map[string][]Symbol{
                {"(BenchmarkResult).MemString", Method, 1, ""},
                {"(BenchmarkResult).NsPerOp", Method, 0, ""},
                {"(BenchmarkResult).String", Method, 0, ""},
+               {"(TB).ArtifactDir", Method, 26, ""},
+               {"(TB).Attr", Method, 25, ""},
+               {"(TB).Chdir", Method, 24, ""},
+               {"(TB).Cleanup", Method, 14, ""},
+               {"(TB).Context", Method, 24, ""},
+               {"(TB).Error", Method, 2, ""},
+               {"(TB).Errorf", Method, 2, ""},
+               {"(TB).Fail", Method, 2, ""},
+               {"(TB).FailNow", Method, 2, ""},
+               {"(TB).Failed", Method, 2, ""},
+               {"(TB).Fatal", Method, 2, ""},
+               {"(TB).Fatalf", Method, 2, ""},
+               {"(TB).Helper", Method, 9, ""},
+               {"(TB).Log", Method, 2, ""},
+               {"(TB).Logf", Method, 2, ""},
+               {"(TB).Name", Method, 8, ""},
+               {"(TB).Output", Method, 25, ""},
+               {"(TB).Setenv", Method, 17, ""},
+               {"(TB).Skip", Method, 2, ""},
+               {"(TB).SkipNow", Method, 2, ""},
+               {"(TB).Skipf", Method, 2, ""},
+               {"(TB).Skipped", Method, 2, ""},
+               {"(TB).TempDir", Method, 15, ""},
                {"AllocsPerRun", Func, 1, "func(runs int, f func()) (avg float64)"},
                {"B", Type, 0, ""},
                {"B.N", Field, 0, ""},
@@ -16851,7 +17364,6 @@ var PackageSymbols = map[string][]Symbol{
                {"RunTests", Func, 0, "func(matchString func(pat string, str string) (bool, error), tests []InternalTest) (ok bool)"},
                {"Short", Func, 0, "func() bool"},
                {"T", Type, 0, ""},
-               {"TB", Type, 2, ""},
                {"Testing", Func, 21, "func() bool"},
                {"Verbose", Func, 1, "func() bool"},
        },
@@ -16887,6 +17399,7 @@ var PackageSymbols = map[string][]Symbol{
        "testing/quick": {
                {"(*CheckEqualError).Error", Method, 0, ""},
                {"(*CheckError).Error", Method, 0, ""},
+               {"(Generator).Generate", Method, 0, ""},
                {"(SetupError).Error", Method, 0, ""},
                {"Check", Func, 0, "func(f any, config *Config) error"},
                {"CheckEqual", Func, 0, "func(f any, g any, config *Config) error"},
@@ -17093,6 +17606,10 @@ var PackageSymbols = map[string][]Symbol{
                {"(ListNode).Position", Method, 1, ""},
                {"(ListNode).Type", Method, 0, ""},
                {"(NilNode).Position", Method, 1, ""},
+               {"(Node).Copy", Method, 0, ""},
+               {"(Node).Position", Method, 1, ""},
+               {"(Node).String", Method, 0, ""},
+               {"(Node).Type", Method, 0, ""},
                {"(NodeType).Type", Method, 0, ""},
                {"(NumberNode).Position", Method, 1, ""},
                {"(NumberNode).Type", Method, 0, ""},
index e223e0f3405053f1939756b35ec5530d3eac7bd2..59a5de36a23de91661bcf5b4484a5c1f2f5f0be9 100644 (file)
@@ -39,7 +39,7 @@ const (
        Var                 // "EOF"
        Const               // "Pi"
        Field               // "Point.X"
-       Method              // "(*Buffer).Grow"
+       Method              // "(*Buffer).Grow" or "(Reader).Read"
 )
 
 func (kind Kind) String() string {
index a3c2d2017745795e414b6df035ad1ef0fefe4b2a..7810dcf8b12323792bc03b5de87d0cd32341a192 100644 (file)
@@ -73,7 +73,7 @@ golang.org/x/text/internal/tag
 golang.org/x/text/language
 golang.org/x/text/transform
 golang.org/x/text/unicode/norm
-# golang.org/x/tools v0.39.1-0.20251120214200-68724afed209
+# golang.org/x/tools v0.39.1-0.20251130212600-1ad6f3d02713
 ## explicit; go 1.24.0
 golang.org/x/tools/cmd/bisect
 golang.org/x/tools/cover