github.com/google/pprof v0.0.0-20250630185457-6e76a2b096b5
golang.org/x/arch v0.22.1-0.20251016010524-fea4a9ec4938
golang.org/x/build v0.0.0-20250806225920-b7c66c047964
- golang.org/x/mod v0.29.0
- golang.org/x/sync v0.17.0
- golang.org/x/sys v0.37.0
- golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8
+ golang.org/x/mod v0.30.1-0.20251114215501-3f03020ad526
+ golang.org/x/sync v0.18.0
+ golang.org/x/sys v0.38.0
+ golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54
golang.org/x/term v0.34.0
- golang.org/x/tools v0.38.1-0.20251015192825-7d9453ccc0f5
+ golang.org/x/tools v0.39.1-0.20251114194111-59ff18ce4883
)
require (
golang.org/x/arch v0.22.1-0.20251016010524-fea4a9ec4938/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A=
golang.org/x/build v0.0.0-20250806225920-b7c66c047964 h1:yRs1K51GKq7hsIO+YHJ8LsslrvwFceNPIv0tYjpcBd0=
golang.org/x/build v0.0.0-20250806225920-b7c66c047964/go.mod h1:i9Vx7+aOQUpYJRxSO+OpRStVBCVL/9ccI51xblWm5WY=
-golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
-golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
-golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
-golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
-golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
-golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
-golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8 h1:LvzTn0GQhWuvKH/kVRS3R3bVAsdQWI7hvfLHGgh9+lU=
-golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8/go.mod h1:Pi4ztBfryZoJEkyFTI5/Ocsu2jXyDr6iSdgJiYE/uwE=
+golang.org/x/mod v0.30.1-0.20251114215501-3f03020ad526 h1:LPpBM4CGUFMC47OqgAr2YIUxEUjH1Ur+D3KR/1LiuuQ=
+golang.org/x/mod v0.30.1-0.20251114215501-3f03020ad526/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
+golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
+golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
+golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
+golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
+golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54 h1:E2/AqCUMZGgd73TQkxUMcMla25GB9i/5HOdLr+uH7Vo=
+golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54/go.mod h1:hKdjCMrbv9skySur+Nek8Hd0uJ0GuxJIoIX2payrIdQ=
golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
-golang.org/x/tools v0.38.1-0.20251015192825-7d9453ccc0f5 h1:cz7f45KGWAtyIrz6bm45Gc+lw8beIxBSW3EQh4Bwbg4=
-golang.org/x/tools v0.38.1-0.20251015192825-7d9453ccc0f5/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs=
+golang.org/x/tools v0.39.1-0.20251114194111-59ff18ce4883 h1:aeO0AW8d+a+5+hNQx9f4J5egD89zftrY2x42KGQjLzI=
+golang.org/x/tools v0.39.1-0.20251114194111-59ff18ce4883/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
rsc.io/markdown v0.0.0-20240306144322-0bf8f97ee8ef h1:mqLYrXCXYEZOop9/Dbo6RPX11539nwiCNBb1icVPmw8=
rsc.io/markdown v0.0.0-20240306144322-0bf8f97ee8ef/go.mod h1:8xcPgWmwlZONN1D9bjxtHEjrUtSEa3fakVF8iaewYKQ=
}
// printf prints to the buffer.
-func (p *printer) printf(format string, args ...interface{}) {
+func (p *printer) printf(format string, args ...any) {
fmt.Fprintf(p, format, args...)
}
// line, the new line is added at the end of the block containing hint,
// extracting hint into a new block if it is not yet in one.
//
-// If the hint is non-nil buts its first token does not match,
+// If the hint is non-nil but its first token does not match,
// the new line is added after the block containing hint
// (or hint itself, if not in a block).
//
// Checked all punctuation. Must be identifier token.
if c := in.peekRune(); !isIdent(c) {
- in.Error(fmt.Sprintf("unexpected input character %#q", c))
+ in.Error(fmt.Sprintf("unexpected input character %#q", rune(c)))
}
// Scan over identifier.
Err: err,
})
}
- errorf := func(format string, args ...interface{}) {
+ errorf := func(format string, args ...any) {
wrapError(fmt.Errorf(format, args...))
}
Err: err,
}
}
- errorf := func(format string, args ...interface{}) *Error {
+ errorf := func(format string, args ...any) *Error {
return wrapError(fmt.Errorf(format, args...))
}
Err: err,
})
}
- errorf := func(format string, args ...interface{}) {
+ errorf := func(format string, args ...any) {
wrapError(fmt.Errorf(format, args...))
}
r.Syntax = f.Syntax.addLine(nil, "retract", "[", AutoQuote(vi.Low), ",", AutoQuote(vi.High), "]")
}
if rationale != "" {
- for _, line := range strings.Split(rationale, "\n") {
+ for line := range strings.SplitSeq(rationale, "\n") {
com := Comment{Token: "// " + line}
r.Syntax.Comment().Before = append(r.Syntax.Comment().Before, com)
}
// importPathOK reports whether r can appear in a package import path element.
//
-// Import paths are intermediate between module paths and file paths: we allow
+// Import paths are intermediate between module paths and file paths: we
// disallow characters that would be confusing or ambiguous as arguments to
// 'go get' (such as '@' and ' ' ), but allow certain characters that are
// otherwise-unambiguous on the command line and historically used for some
for globs != "" {
// Extract next non-empty glob in comma-separated list.
var glob string
- if i := strings.Index(globs, ","); i >= 0 {
- glob, globs = globs[:i], globs[i+1:]
+ if before, after, ok := strings.Cut(globs, ","); ok {
+ glob, globs = before, after
} else {
glob, globs = globs, ""
}
// Canonical returns the canonical formatting of the semantic version v.
// It fills in any missing .MINOR or .PATCH and discards build metadata.
-// Two semantic versions compare equal only if their canonical formattings
-// are identical strings.
+// Two semantic versions compare equal only if their canonical
+// formatting is identical.
// The canonical invalid semantic version is the empty string.
func Canonical(v string) string {
p, ok := parse(v)
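// A tiny illustration (not part of this change) of the behavior documented
// above, using the public golang.org/x/mod/semver API:
package main

import (
	"fmt"

	"golang.org/x/mod/semver"
)

func main() {
	fmt.Println(semver.Canonical("v1.2"))        // "v1.2.0": missing .PATCH is filled in
	fmt.Println(semver.Canonical("v1.2.3+meta")) // "v1.2.3": build metadata is discarded
	fmt.Println(semver.Canonical("1.2.3"))       // "": invalid (no leading "v"), so the empty string
}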
type cacheEntry struct {
done uint32
mu sync.Mutex
- result interface{}
+ result any
}
// Do calls the function f if and only if Do is being called for the first time with this key.
// No call to Do with a given key returns until the one call to f returns.
// Do returns the value returned by the one call to f.
-func (c *parCache) Do(key interface{}, f func() interface{}) interface{} {
+func (c *parCache) Do(key any, f func() any) any {
entryIface, ok := c.m.Load(key)
if !ok {
entryIface, _ = c.m.LoadOrStore(key, new(cacheEntry))
// Get returns the cached result associated with key.
// It returns nil if there is no such result.
// If the result for key is being computed, Get does not wait for the computation to finish.
-func (c *parCache) Get(key interface{}) interface{} {
+func (c *parCache) Get(key any) any {
entryIface, ok := c.m.Load(key)
if !ok {
return nil
data []byte
err error
}
- result := c.record.Do(file, func() interface{} {
+ result := c.record.Do(file, func() any {
// Try the on-disk cache, or else get from web.
writeCache := false
data, err := c.ops.ReadCache(file)
// (with or without /go.mod).
prefix := path + " " + vers + " "
var hashes []string
- for _, line := range strings.Split(string(result.data), "\n") {
+ for line := range strings.SplitSeq(string(result.data), "\n") {
if strings.HasPrefix(line, prefix) {
hashes = append(hashes, line)
}
err error
}
- result := c.tileCache.Do(tile, func() interface{} {
+ result := c.tileCache.Do(tile, func() any {
// Try the requested tile in on-disk cache.
data, err := c.ops.ReadCache(c.tileCacheKey(tile))
if err == nil {
// NewVerifier constructs a new [Verifier] from an encoded verifier key.
func NewVerifier(vkey string) (Verifier, error) {
- name, vkey := chop(vkey, "+")
- hash16, key64 := chop(vkey, "+")
+ name, vkey, _ := strings.Cut(vkey, "+")
+ hash16, key64, _ := strings.Cut(vkey, "+")
hash, err1 := strconv.ParseUint(hash16, 16, 32)
key, err2 := base64.StdEncoding.DecodeString(key64)
if len(hash16) != 8 || err1 != nil || err2 != nil || !isValidName(name) || len(key) == 0 {
// chop chops s at the first instance of sep, if any,
// and returns the text before and after sep.
// If sep is not present, chop returns s, "", false.
-func chop(s, sep string) (before, after string) {
- i := strings.Index(s, sep)
- if i < 0 {
- return s, ""
- }
- return s[:i], s[i+len(sep):]
+func chop(s, sep string) (before, after string, ok bool) {
+ return strings.Cut(s, sep)
}
// verifier is a trivial Verifier implementation.
// NewSigner constructs a new [Signer] from an encoded signer key.
func NewSigner(skey string) (Signer, error) {
- priv1, skey := chop(skey, "+")
- priv2, skey := chop(skey, "+")
- name, skey := chop(skey, "+")
- hash16, key64 := chop(skey, "+")
+ priv1, skey, _ := strings.Cut(skey, "+")
+ priv2, skey, _ := strings.Cut(skey, "+")
+ name, skey, _ := strings.Cut(skey, "+")
+ hash16, key64, _ := strings.Cut(skey, "+")
hash, err1 := strconv.ParseUint(hash16, 16, 32)
key, err2 := base64.StdEncoding.DecodeString(key64)
if priv1 != "PRIVATE" || priv2 != "KEY" || len(hash16) != 8 || err1 != nil || err2 != nil || !isValidName(name) || len(key) == 0 {
return nil, errMalformedNote
}
line = line[len(sigPrefix):]
- name, b64 := chop(string(line), " ")
+ name, b64, _ := chop(string(line), " ")
sig, err := base64.StdEncoding.DecodeString(b64)
if err != nil || !isValidName(name) || b64 == "" || len(sig) < 5 {
return nil, errMalformedNote
http.Error(w, "invalid module@version syntax", http.StatusBadRequest)
return
}
- i := strings.Index(mod, "@")
- escPath, escVers := mod[:i], mod[i+1:]
+ escPath, escVers, _ := strings.Cut(mod, "@")
path, err := module.UnescapePath(escPath)
if err != nil {
reportError(w, err)
defer s.mu.Unlock()
var list [][]byte
- for i := int64(0); i < n; i++ {
+ for i := range n {
if id+i >= int64(len(s.records)) {
return nil, fmt.Errorf("missing records")
}
// A future backwards-incompatible encoding would use a different
// first line (for example, "go.sum database tree v2").
func FormatTree(tree Tree) []byte {
- return []byte(fmt.Sprintf("go.sum database tree\n%d\n%s\n", tree.N, tree.Hash))
+ return fmt.Appendf(nil, "go.sum database tree\n%d\n%s\n", tree.N, tree.Hash)
}
var errMalformedTree = errors.New("malformed tree note")
if !isValidRecordText(text) {
return nil, errMalformedRecord
}
- msg = []byte(fmt.Sprintf("%d\n", id))
+ msg = fmt.Appendf(nil, "%d\n", id)
msg = append(msg, text...)
msg = append(msg, '\n')
return msg, nil
// and consumes a hash from an adjacent subtree.
m := int(bits.TrailingZeros64(uint64(n + 1)))
indexes := make([]int64, m)
- for i := 0; i < m; i++ {
+ for i := range m {
// We arrange indexes in sorted order.
// Note that n>>i is always odd.
indexes[m-1-i] = StoredHashIndex(i, n>>uint(i)-1)
}
// Build new hashes.
- for i := 0; i < m; i++ {
+ for i := range m {
h = NodeHash(old[m-1-i], h)
hashes = append(hashes, h)
}
func (fi dataFileInfo) Mode() os.FileMode { return 0644 }
func (fi dataFileInfo) ModTime() time.Time { return time.Time{} }
func (fi dataFileInfo) IsDir() bool { return false }
-func (fi dataFileInfo) Sys() interface{} { return nil }
+func (fi dataFileInfo) Sys() any { return nil }
// isVendoredPackage attempts to report whether the given filename is contained
// in a package whose import path contains (but does not end with) the component
// license that can be found in the LICENSE file.
// Package errgroup provides synchronization, error propagation, and Context
-// cancelation for groups of goroutines working on subtasks of a common task.
+// cancellation for groups of goroutines working on subtasks of a common task.
//
// [errgroup.Group] is related to [sync.WaitGroup] but adds handling of tasks
// returning errors.
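// A minimal sketch (not part of this change) of the pattern described above:
// the first subtask error cancels the group's Context and is returned by Wait.
package main

import (
	"context"
	"log"
	"net/http"

	"golang.org/x/sync/errgroup"
)

func main() {
	g, ctx := errgroup.WithContext(context.Background())
	for _, url := range []string{"https://go.dev", "https://pkg.go.dev"} {
		g.Go(func() error {
			req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
			if err != nil {
				return err
			}
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				return err
			}
			return resp.Body.Close()
		})
	}
	if err := g.Wait(); err != nil {
		log.Fatal(err)
	}
}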
#include <linux/cryptouser.h>
#include <linux/devlink.h>
#include <linux/dm-ioctl.h>
+#include <linux/elf.h>
#include <linux/errqueue.h>
#include <linux/ethtool_netlink.h>
#include <linux/falloc.h>
$2 ~ /^O[CNPFPL][A-Z]+[^_][A-Z]+$/ ||
$2 ~ /^(NL|CR|TAB|BS|VT|FF)DLY$/ ||
$2 ~ /^(NL|CR|TAB|BS|VT|FF)[0-9]$/ ||
+ $2 ~ /^(DT|EI|ELF|EV|NN|NT|PF|SHF|SHN|SHT|STB|STT|VER)_/ ||
$2 ~ /^O?XTABS$/ ||
$2 ~ /^TC[IO](ON|OFF)$/ ||
$2 ~ /^IN_/ ||
//sys Cachestat(fd uint, crange *CachestatRange, cstat *Cachestat_t, flags uint) (err error)
//sys Mseal(b []byte, flags uint) (err error)
+
+//sys setMemPolicy(mode int, mask *CPUSet, size int) (err error) = SYS_SET_MEMPOLICY
+
+func SetMemPolicy(mode int, mask *CPUSet) error {
+ return setMemPolicy(mode, mask, _CPU_SETSIZE)
+}
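// Illustrative sketch (not part of this change) of the new wrapper: bind the
// calling thread's future allocations to NUMA node 0. MPOL_BIND is added later
// in this change, and CPUSet serves here as the kernel nodemask.
package main

import (
	"log"

	"golang.org/x/sys/unix"
)

func main() {
	var nodes unix.CPUSet
	nodes.Zero()
	nodes.Set(0) // NUMA node 0
	if err := unix.SetMemPolicy(unix.MPOL_BIND, &nodes); err != nil {
		log.Fatalf("set_mempolicy: %v", err)
	}
}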
DM_VERSION_MAJOR = 0x4
DM_VERSION_MINOR = 0x32
DM_VERSION_PATCHLEVEL = 0x0
+ DT_ADDRRNGHI = 0x6ffffeff
+ DT_ADDRRNGLO = 0x6ffffe00
DT_BLK = 0x6
DT_CHR = 0x2
+ DT_DEBUG = 0x15
DT_DIR = 0x4
+ DT_ENCODING = 0x20
DT_FIFO = 0x1
+ DT_FINI = 0xd
+ DT_FLAGS_1 = 0x6ffffffb
+ DT_GNU_HASH = 0x6ffffef5
+ DT_HASH = 0x4
+ DT_HIOS = 0x6ffff000
+ DT_HIPROC = 0x7fffffff
+ DT_INIT = 0xc
+ DT_JMPREL = 0x17
DT_LNK = 0xa
+ DT_LOOS = 0x6000000d
+ DT_LOPROC = 0x70000000
+ DT_NEEDED = 0x1
+ DT_NULL = 0x0
+ DT_PLTGOT = 0x3
+ DT_PLTREL = 0x14
+ DT_PLTRELSZ = 0x2
DT_REG = 0x8
+ DT_REL = 0x11
+ DT_RELA = 0x7
+ DT_RELACOUNT = 0x6ffffff9
+ DT_RELAENT = 0x9
+ DT_RELASZ = 0x8
+ DT_RELCOUNT = 0x6ffffffa
+ DT_RELENT = 0x13
+ DT_RELSZ = 0x12
+ DT_RPATH = 0xf
DT_SOCK = 0xc
+ DT_SONAME = 0xe
+ DT_STRSZ = 0xa
+ DT_STRTAB = 0x5
+ DT_SYMBOLIC = 0x10
+ DT_SYMENT = 0xb
+ DT_SYMTAB = 0x6
+ DT_TEXTREL = 0x16
DT_UNKNOWN = 0x0
+ DT_VALRNGHI = 0x6ffffdff
+ DT_VALRNGLO = 0x6ffffd00
+ DT_VERDEF = 0x6ffffffc
+ DT_VERDEFNUM = 0x6ffffffd
+ DT_VERNEED = 0x6ffffffe
+ DT_VERNEEDNUM = 0x6fffffff
+ DT_VERSYM = 0x6ffffff0
DT_WHT = 0xe
ECHO = 0x8
ECRYPTFS_SUPER_MAGIC = 0xf15f
EFD_SEMAPHORE = 0x1
EFIVARFS_MAGIC = 0xde5e81e4
EFS_SUPER_MAGIC = 0x414a53
+ EI_CLASS = 0x4
+ EI_DATA = 0x5
+ EI_MAG0 = 0x0
+ EI_MAG1 = 0x1
+ EI_MAG2 = 0x2
+ EI_MAG3 = 0x3
+ EI_NIDENT = 0x10
+ EI_OSABI = 0x7
+ EI_PAD = 0x8
+ EI_VERSION = 0x6
+ ELFCLASS32 = 0x1
+ ELFCLASS64 = 0x2
+ ELFCLASSNONE = 0x0
+ ELFCLASSNUM = 0x3
+ ELFDATA2LSB = 0x1
+ ELFDATA2MSB = 0x2
+ ELFDATANONE = 0x0
+ ELFMAG = "\177ELF"
+ ELFMAG0 = 0x7f
+ ELFMAG1 = 'E'
+ ELFMAG2 = 'L'
+ ELFMAG3 = 'F'
+ ELFOSABI_LINUX = 0x3
+ ELFOSABI_NONE = 0x0
EM_386 = 0x3
EM_486 = 0x6
EM_68K = 0x4
ETH_P_WCCP = 0x883e
ETH_P_X25 = 0x805
ETH_P_XDSA = 0xf8
+ ET_CORE = 0x4
+ ET_DYN = 0x3
+ ET_EXEC = 0x2
+ ET_HIPROC = 0xffff
+ ET_LOPROC = 0xff00
+ ET_NONE = 0x0
+ ET_REL = 0x1
EV_ABS = 0x3
EV_CNT = 0x20
+ EV_CURRENT = 0x1
EV_FF = 0x15
EV_FF_STATUS = 0x17
EV_KEY = 0x1
EV_LED = 0x11
EV_MAX = 0x1f
EV_MSC = 0x4
+ EV_NONE = 0x0
+ EV_NUM = 0x2
EV_PWR = 0x16
EV_REL = 0x2
EV_REP = 0x14
NLM_F_REPLACE = 0x100
NLM_F_REQUEST = 0x1
NLM_F_ROOT = 0x100
+ NN_386_IOPERM = "LINUX"
+ NN_386_TLS = "LINUX"
+ NN_ARC_V2 = "LINUX"
+ NN_ARM_FPMR = "LINUX"
+ NN_ARM_GCS = "LINUX"
+ NN_ARM_HW_BREAK = "LINUX"
+ NN_ARM_HW_WATCH = "LINUX"
+ NN_ARM_PACA_KEYS = "LINUX"
+ NN_ARM_PACG_KEYS = "LINUX"
+ NN_ARM_PAC_ENABLED_KEYS = "LINUX"
+ NN_ARM_PAC_MASK = "LINUX"
+ NN_ARM_POE = "LINUX"
+ NN_ARM_SSVE = "LINUX"
+ NN_ARM_SVE = "LINUX"
+ NN_ARM_SYSTEM_CALL = "LINUX"
+ NN_ARM_TAGGED_ADDR_CTRL = "LINUX"
+ NN_ARM_TLS = "LINUX"
+ NN_ARM_VFP = "LINUX"
+ NN_ARM_ZA = "LINUX"
+ NN_ARM_ZT = "LINUX"
+ NN_AUXV = "CORE"
+ NN_FILE = "CORE"
+ NN_GNU_PROPERTY_TYPE_0 = "GNU"
+ NN_LOONGARCH_CPUCFG = "LINUX"
+ NN_LOONGARCH_CSR = "LINUX"
+ NN_LOONGARCH_HW_BREAK = "LINUX"
+ NN_LOONGARCH_HW_WATCH = "LINUX"
+ NN_LOONGARCH_LASX = "LINUX"
+ NN_LOONGARCH_LBT = "LINUX"
+ NN_LOONGARCH_LSX = "LINUX"
+ NN_MIPS_DSP = "LINUX"
+ NN_MIPS_FP_MODE = "LINUX"
+ NN_MIPS_MSA = "LINUX"
+ NN_PPC_DEXCR = "LINUX"
+ NN_PPC_DSCR = "LINUX"
+ NN_PPC_EBB = "LINUX"
+ NN_PPC_HASHKEYR = "LINUX"
+ NN_PPC_PKEY = "LINUX"
+ NN_PPC_PMU = "LINUX"
+ NN_PPC_PPR = "LINUX"
+ NN_PPC_SPE = "LINUX"
+ NN_PPC_TAR = "LINUX"
+ NN_PPC_TM_CDSCR = "LINUX"
+ NN_PPC_TM_CFPR = "LINUX"
+ NN_PPC_TM_CGPR = "LINUX"
+ NN_PPC_TM_CPPR = "LINUX"
+ NN_PPC_TM_CTAR = "LINUX"
+ NN_PPC_TM_CVMX = "LINUX"
+ NN_PPC_TM_CVSX = "LINUX"
+ NN_PPC_TM_SPR = "LINUX"
+ NN_PPC_VMX = "LINUX"
+ NN_PPC_VSX = "LINUX"
+ NN_PRFPREG = "CORE"
+ NN_PRPSINFO = "CORE"
+ NN_PRSTATUS = "CORE"
+ NN_PRXFPREG = "LINUX"
+ NN_RISCV_CSR = "LINUX"
+ NN_RISCV_TAGGED_ADDR_CTRL = "LINUX"
+ NN_RISCV_VECTOR = "LINUX"
+ NN_S390_CTRS = "LINUX"
+ NN_S390_GS_BC = "LINUX"
+ NN_S390_GS_CB = "LINUX"
+ NN_S390_HIGH_GPRS = "LINUX"
+ NN_S390_LAST_BREAK = "LINUX"
+ NN_S390_PREFIX = "LINUX"
+ NN_S390_PV_CPU_DATA = "LINUX"
+ NN_S390_RI_CB = "LINUX"
+ NN_S390_SYSTEM_CALL = "LINUX"
+ NN_S390_TDB = "LINUX"
+ NN_S390_TIMER = "LINUX"
+ NN_S390_TODCMP = "LINUX"
+ NN_S390_TODPREG = "LINUX"
+ NN_S390_VXRS_HIGH = "LINUX"
+ NN_S390_VXRS_LOW = "LINUX"
+ NN_SIGINFO = "CORE"
+ NN_TASKSTRUCT = "CORE"
+ NN_VMCOREDD = "LINUX"
+ NN_X86_SHSTK = "LINUX"
+ NN_X86_XSAVE_LAYOUT = "LINUX"
+ NN_X86_XSTATE = "LINUX"
NSFS_MAGIC = 0x6e736673
+ NT_386_IOPERM = 0x201
+ NT_386_TLS = 0x200
+ NT_ARC_V2 = 0x600
+ NT_ARM_FPMR = 0x40e
+ NT_ARM_GCS = 0x410
+ NT_ARM_HW_BREAK = 0x402
+ NT_ARM_HW_WATCH = 0x403
+ NT_ARM_PACA_KEYS = 0x407
+ NT_ARM_PACG_KEYS = 0x408
+ NT_ARM_PAC_ENABLED_KEYS = 0x40a
+ NT_ARM_PAC_MASK = 0x406
+ NT_ARM_POE = 0x40f
+ NT_ARM_SSVE = 0x40b
+ NT_ARM_SVE = 0x405
+ NT_ARM_SYSTEM_CALL = 0x404
+ NT_ARM_TAGGED_ADDR_CTRL = 0x409
+ NT_ARM_TLS = 0x401
+ NT_ARM_VFP = 0x400
+ NT_ARM_ZA = 0x40c
+ NT_ARM_ZT = 0x40d
+ NT_AUXV = 0x6
+ NT_FILE = 0x46494c45
+ NT_GNU_PROPERTY_TYPE_0 = 0x5
+ NT_LOONGARCH_CPUCFG = 0xa00
+ NT_LOONGARCH_CSR = 0xa01
+ NT_LOONGARCH_HW_BREAK = 0xa05
+ NT_LOONGARCH_HW_WATCH = 0xa06
+ NT_LOONGARCH_LASX = 0xa03
+ NT_LOONGARCH_LBT = 0xa04
+ NT_LOONGARCH_LSX = 0xa02
+ NT_MIPS_DSP = 0x800
+ NT_MIPS_FP_MODE = 0x801
+ NT_MIPS_MSA = 0x802
+ NT_PPC_DEXCR = 0x111
+ NT_PPC_DSCR = 0x105
+ NT_PPC_EBB = 0x106
+ NT_PPC_HASHKEYR = 0x112
+ NT_PPC_PKEY = 0x110
+ NT_PPC_PMU = 0x107
+ NT_PPC_PPR = 0x104
+ NT_PPC_SPE = 0x101
+ NT_PPC_TAR = 0x103
+ NT_PPC_TM_CDSCR = 0x10f
+ NT_PPC_TM_CFPR = 0x109
+ NT_PPC_TM_CGPR = 0x108
+ NT_PPC_TM_CPPR = 0x10e
+ NT_PPC_TM_CTAR = 0x10d
+ NT_PPC_TM_CVMX = 0x10a
+ NT_PPC_TM_CVSX = 0x10b
+ NT_PPC_TM_SPR = 0x10c
+ NT_PPC_VMX = 0x100
+ NT_PPC_VSX = 0x102
+ NT_PRFPREG = 0x2
+ NT_PRPSINFO = 0x3
+ NT_PRSTATUS = 0x1
+ NT_PRXFPREG = 0x46e62b7f
+ NT_RISCV_CSR = 0x900
+ NT_RISCV_TAGGED_ADDR_CTRL = 0x902
+ NT_RISCV_VECTOR = 0x901
+ NT_S390_CTRS = 0x304
+ NT_S390_GS_BC = 0x30c
+ NT_S390_GS_CB = 0x30b
+ NT_S390_HIGH_GPRS = 0x300
+ NT_S390_LAST_BREAK = 0x306
+ NT_S390_PREFIX = 0x305
+ NT_S390_PV_CPU_DATA = 0x30e
+ NT_S390_RI_CB = 0x30d
+ NT_S390_SYSTEM_CALL = 0x307
+ NT_S390_TDB = 0x308
+ NT_S390_TIMER = 0x301
+ NT_S390_TODCMP = 0x302
+ NT_S390_TODPREG = 0x303
+ NT_S390_VXRS_HIGH = 0x30a
+ NT_S390_VXRS_LOW = 0x309
+ NT_SIGINFO = 0x53494749
+ NT_TASKSTRUCT = 0x4
+ NT_VMCOREDD = 0x700
+ NT_X86_SHSTK = 0x204
+ NT_X86_XSAVE_LAYOUT = 0x205
+ NT_X86_XSTATE = 0x202
OCFS2_SUPER_MAGIC = 0x7461636f
OCRNL = 0x8
OFDEL = 0x80
PERF_RECORD_MISC_USER = 0x2
PERF_SAMPLE_BRANCH_PLM_ALL = 0x7
PERF_SAMPLE_WEIGHT_TYPE = 0x1004000
+ PF_ALG = 0x26
+ PF_APPLETALK = 0x5
+ PF_ASH = 0x12
+ PF_ATMPVC = 0x8
+ PF_ATMSVC = 0x14
+ PF_AX25 = 0x3
+ PF_BLUETOOTH = 0x1f
+ PF_BRIDGE = 0x7
+ PF_CAIF = 0x25
+ PF_CAN = 0x1d
+ PF_DECnet = 0xc
+ PF_ECONET = 0x13
+ PF_FILE = 0x1
+ PF_IB = 0x1b
+ PF_IEEE802154 = 0x24
+ PF_INET = 0x2
+ PF_INET6 = 0xa
+ PF_IPX = 0x4
+ PF_IRDA = 0x17
+ PF_ISDN = 0x22
+ PF_IUCV = 0x20
+ PF_KCM = 0x29
+ PF_KEY = 0xf
+ PF_LLC = 0x1a
+ PF_LOCAL = 0x1
+ PF_MAX = 0x2e
+ PF_MCTP = 0x2d
+ PF_MPLS = 0x1c
+ PF_NETBEUI = 0xd
+ PF_NETLINK = 0x10
+ PF_NETROM = 0x6
+ PF_NFC = 0x27
+ PF_PACKET = 0x11
+ PF_PHONET = 0x23
+ PF_PPPOX = 0x18
+ PF_QIPCRTR = 0x2a
+ PF_R = 0x4
+ PF_RDS = 0x15
+ PF_ROSE = 0xb
+ PF_ROUTE = 0x10
+ PF_RXRPC = 0x21
+ PF_SECURITY = 0xe
+ PF_SMC = 0x2b
+ PF_SNA = 0x16
+ PF_TIPC = 0x1e
+ PF_UNIX = 0x1
+ PF_UNSPEC = 0x0
+ PF_VSOCK = 0x28
+ PF_W = 0x2
+ PF_WANPIPE = 0x19
+ PF_X = 0x1
+ PF_X25 = 0x9
+ PF_XDP = 0x2c
PID_FS_MAGIC = 0x50494446
PIPEFS_MAGIC = 0x50495045
PPPIOCGNPMODE = 0xc008744c
PTRACE_SYSCALL_INFO_NONE = 0x0
PTRACE_SYSCALL_INFO_SECCOMP = 0x3
PTRACE_TRACEME = 0x0
+ PT_AARCH64_MEMTAG_MTE = 0x70000002
+ PT_DYNAMIC = 0x2
+ PT_GNU_EH_FRAME = 0x6474e550
+ PT_GNU_PROPERTY = 0x6474e553
+ PT_GNU_RELRO = 0x6474e552
+ PT_GNU_STACK = 0x6474e551
+ PT_HIOS = 0x6fffffff
+ PT_HIPROC = 0x7fffffff
+ PT_INTERP = 0x3
+ PT_LOAD = 0x1
+ PT_LOOS = 0x60000000
+ PT_LOPROC = 0x70000000
+ PT_NOTE = 0x4
+ PT_NULL = 0x0
+ PT_PHDR = 0x6
+ PT_SHLIB = 0x5
+ PT_TLS = 0x7
P_ALL = 0x0
P_PGID = 0x2
P_PID = 0x1
SEEK_MAX = 0x4
SEEK_SET = 0x0
SELINUX_MAGIC = 0xf97cff8c
+ SHF_ALLOC = 0x2
+ SHF_EXCLUDE = 0x8000000
+ SHF_EXECINSTR = 0x4
+ SHF_GROUP = 0x200
+ SHF_INFO_LINK = 0x40
+ SHF_LINK_ORDER = 0x80
+ SHF_MASKOS = 0xff00000
+ SHF_MASKPROC = 0xf0000000
+ SHF_MERGE = 0x10
+ SHF_ORDERED = 0x4000000
+ SHF_OS_NONCONFORMING = 0x100
+ SHF_RELA_LIVEPATCH = 0x100000
+ SHF_RO_AFTER_INIT = 0x200000
+ SHF_STRINGS = 0x20
+ SHF_TLS = 0x400
+ SHF_WRITE = 0x1
+ SHN_ABS = 0xfff1
+ SHN_COMMON = 0xfff2
+ SHN_HIPROC = 0xff1f
+ SHN_HIRESERVE = 0xffff
+ SHN_LIVEPATCH = 0xff20
+ SHN_LOPROC = 0xff00
+ SHN_LORESERVE = 0xff00
+ SHN_UNDEF = 0x0
+ SHT_DYNAMIC = 0x6
+ SHT_DYNSYM = 0xb
+ SHT_HASH = 0x5
+ SHT_HIPROC = 0x7fffffff
+ SHT_HIUSER = 0xffffffff
+ SHT_LOPROC = 0x70000000
+ SHT_LOUSER = 0x80000000
+ SHT_NOBITS = 0x8
+ SHT_NOTE = 0x7
+ SHT_NULL = 0x0
+ SHT_NUM = 0xc
+ SHT_PROGBITS = 0x1
+ SHT_REL = 0x9
+ SHT_RELA = 0x4
+ SHT_SHLIB = 0xa
+ SHT_STRTAB = 0x3
+ SHT_SYMTAB = 0x2
SHUT_RD = 0x0
SHUT_RDWR = 0x2
SHUT_WR = 0x1
STATX_UID = 0x8
STATX_WRITE_ATOMIC = 0x10000
STATX__RESERVED = 0x80000000
+ STB_GLOBAL = 0x1
+ STB_LOCAL = 0x0
+ STB_WEAK = 0x2
+ STT_COMMON = 0x5
+ STT_FILE = 0x4
+ STT_FUNC = 0x2
+ STT_NOTYPE = 0x0
+ STT_OBJECT = 0x1
+ STT_SECTION = 0x3
+ STT_TLS = 0x6
SYNC_FILE_RANGE_WAIT_AFTER = 0x4
SYNC_FILE_RANGE_WAIT_BEFORE = 0x1
SYNC_FILE_RANGE_WRITE = 0x2
UTIME_OMIT = 0x3ffffffe
V9FS_MAGIC = 0x1021997
VERASE = 0x2
+ VER_FLG_BASE = 0x1
+ VER_FLG_WEAK = 0x2
VINTR = 0x0
VKILL = 0x3
VLNEXT = 0xf
}
return
}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func setMemPolicy(mode int, mask *CPUSet, size int) (err error) {
+ _, _, e1 := Syscall(SYS_SET_MEMPOLICY, uintptr(mode), uintptr(unsafe.Pointer(mask)), uintptr(size))
+ if e1 != 0 {
+ err = errnoErr(e1)
+ }
+ return
+}
Flags uint32
}
+const SizeofNhmsg = 0x8
+
type NexthopGrp struct {
Id uint32
Weight uint8
Resvd2 uint16
}
+const SizeofNexthopGrp = 0x8
+
const (
NHA_UNSPEC = 0x0
NHA_ID = 0x1
}
const RTM_NEWNVLAN = 0x70
+
+const (
+ MPOL_BIND = 0x2
+ MPOL_DEFAULT = 0x0
+ MPOL_F_ADDR = 0x2
+ MPOL_F_MEMS_ALLOWED = 0x4
+ MPOL_F_MOF = 0x8
+ MPOL_F_MORON = 0x10
+ MPOL_F_NODE = 0x1
+ MPOL_F_NUMA_BALANCING = 0x2000
+ MPOL_F_RELATIVE_NODES = 0x4000
+ MPOL_F_SHARED = 0x1
+ MPOL_F_STATIC_NODES = 0x8000
+ MPOL_INTERLEAVE = 0x3
+ MPOL_LOCAL = 0x4
+ MPOL_MAX = 0x7
+ MPOL_MF_INTERNAL = 0x10
+ MPOL_MF_LAZY = 0x8
+ MPOL_MF_MOVE_ALL = 0x4
+ MPOL_MF_MOVE = 0x2
+ MPOL_MF_STRICT = 0x1
+ MPOL_MF_VALID = 0x7
+ MPOL_MODE_FLAGS = 0xe000
+ MPOL_PREFERRED = 0x1
+ MPOL_PREFERRED_MANY = 0x5
+ MPOL_WEIGHTED_INTERLEAVE = 0x6
+)
//sys MultiByteToWideChar(codePage uint32, dwFlags uint32, str *byte, nstr int32, wchar *uint16, nwchar int32) (nwrite int32, err error) = kernel32.MultiByteToWideChar
//sys getBestInterfaceEx(sockaddr unsafe.Pointer, pdwBestIfIndex *uint32) (errcode error) = iphlpapi.GetBestInterfaceEx
//sys GetIfEntry2Ex(level uint32, row *MibIfRow2) (errcode error) = iphlpapi.GetIfEntry2Ex
+//sys GetIpForwardEntry2(row *MibIpForwardRow2) (errcode error) = iphlpapi.GetIpForwardEntry2
+//sys GetIpForwardTable2(family uint16, table **MibIpForwardTable2) (errcode error) = iphlpapi.GetIpForwardTable2
//sys GetUnicastIpAddressEntry(row *MibUnicastIpAddressRow) (errcode error) = iphlpapi.GetUnicastIpAddressEntry
+//sys FreeMibTable(memory unsafe.Pointer) = iphlpapi.FreeMibTable
//sys NotifyIpInterfaceChange(family uint16, callback uintptr, callerContext unsafe.Pointer, initialNotification bool, notificationHandle *Handle) (errcode error) = iphlpapi.NotifyIpInterfaceChange
+//sys NotifyRouteChange2(family uint16, callback uintptr, callerContext unsafe.Pointer, initialNotification bool, notificationHandle *Handle) (errcode error) = iphlpapi.NotifyRouteChange2
//sys NotifyUnicastIpAddressChange(family uint16, callback uintptr, callerContext unsafe.Pointer, initialNotification bool, notificationHandle *Handle) (errcode error) = iphlpapi.NotifyUnicastIpAddressChange
//sys CancelMibChangeNotify2(notificationHandle Handle) (errcode error) = iphlpapi.CancelMibChangeNotify2
Scope_id uint32
}
+// RawSockaddrInet is a union that contains an IPv4 address, an IPv6 address, or an address family. See
+// https://learn.microsoft.com/en-us/windows/win32/api/ws2ipdef/ns-ws2ipdef-sockaddr_inet.
+//
+// A [*RawSockaddrInet] may be converted to a [*RawSockaddrInet4] or [*RawSockaddrInet6] using
+// unsafe, depending on the address family.
+type RawSockaddrInet struct {
+ Family uint16
+ Port uint16
+ Data [6]uint32
+}
+
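// Illustrative sketch (not part of this change) of interpreting the union by
// its Family field, as the comment above describes. AF_INET, AF_INET6,
// RawSockaddrInet4, and RawSockaddrInet6 already exist in x/sys/windows.
package main

import (
	"fmt"
	"unsafe"

	"golang.org/x/sys/windows"
)

func addrBytes(sa *windows.RawSockaddrInet) []byte {
	switch sa.Family {
	case windows.AF_INET:
		return (*windows.RawSockaddrInet4)(unsafe.Pointer(sa)).Addr[:]
	case windows.AF_INET6:
		return (*windows.RawSockaddrInet6)(unsafe.Pointer(sa)).Addr[:]
	}
	return nil
}

func main() {
	sa := &windows.RawSockaddrInet{Family: windows.AF_INET}
	fmt.Println(addrBytes(sa)) // [0 0 0 0]
}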
type RawSockaddr struct {
Family uint16
Data [14]int8
OutQLen uint64
}
+// IP_ADDRESS_PREFIX stores an IP address prefix. See
+// https://learn.microsoft.com/en-us/windows/win32/api/netioapi/ns-netioapi-ip_address_prefix.
+type IpAddressPrefix struct {
+ Prefix RawSockaddrInet
+ PrefixLength uint8
+}
+
+// NL_ROUTE_ORIGIN enumeration from nldef.h or
+// https://learn.microsoft.com/en-us/windows/win32/api/nldef/ne-nldef-nl_route_origin.
+const (
+ NlroManual = 0
+ NlroWellKnown = 1
+ NlroDHCP = 2
+ NlroRouterAdvertisement = 3
+ Nlro6to4 = 4
+)
+
+// NL_ROUTE_PROTOCOL enumeration from nldef.h or
+// https://learn.microsoft.com/en-us/windows/win32/api/nldef/ne-nldef-nl_route_protocol.
+const (
+ MIB_IPPROTO_OTHER = 1
+ MIB_IPPROTO_LOCAL = 2
+ MIB_IPPROTO_NETMGMT = 3
+ MIB_IPPROTO_ICMP = 4
+ MIB_IPPROTO_EGP = 5
+ MIB_IPPROTO_GGP = 6
+ MIB_IPPROTO_HELLO = 7
+ MIB_IPPROTO_RIP = 8
+ MIB_IPPROTO_IS_IS = 9
+ MIB_IPPROTO_ES_IS = 10
+ MIB_IPPROTO_CISCO = 11
+ MIB_IPPROTO_BBN = 12
+ MIB_IPPROTO_OSPF = 13
+ MIB_IPPROTO_BGP = 14
+ MIB_IPPROTO_IDPR = 15
+ MIB_IPPROTO_EIGRP = 16
+ MIB_IPPROTO_DVMRP = 17
+ MIB_IPPROTO_RPL = 18
+ MIB_IPPROTO_DHCP = 19
+ MIB_IPPROTO_NT_AUTOSTATIC = 10002
+ MIB_IPPROTO_NT_STATIC = 10006
+ MIB_IPPROTO_NT_STATIC_NON_DOD = 10007
+)
+
+// MIB_IPFORWARD_ROW2 stores information about an IP route entry. See
+// https://learn.microsoft.com/en-us/windows/win32/api/netioapi/ns-netioapi-mib_ipforward_row2.
+type MibIpForwardRow2 struct {
+ InterfaceLuid uint64
+ InterfaceIndex uint32
+ DestinationPrefix IpAddressPrefix
+ NextHop RawSockaddrInet
+ SitePrefixLength uint8
+ ValidLifetime uint32
+ PreferredLifetime uint32
+ Metric uint32
+ Protocol uint32
+ Loopback uint8
+ AutoconfigureAddress uint8
+ Publish uint8
+ Immortal uint8
+ Age uint32
+ Origin uint32
+}
+
+// MIB_IPFORWARD_TABLE2 contains a table of IP route entries. See
+// https://learn.microsoft.com/en-us/windows/win32/api/netioapi/ns-netioapi-mib_ipforward_table2.
+type MibIpForwardTable2 struct {
+ NumEntries uint32
+ Table [1]MibIpForwardRow2
+}
+
+// Rows returns the IP route entries in the table.
+func (t *MibIpForwardTable2) Rows() []MibIpForwardRow2 {
+ return unsafe.Slice(&t.Table[0], t.NumEntries)
+}
+
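// Illustrative usage sketch (not part of this change): dump the IPv4 routing
// table using the APIs added here, releasing the table with FreeMibTable as
// the Windows API requires.
package main

import (
	"fmt"
	"log"
	"unsafe"

	"golang.org/x/sys/windows"
)

func main() {
	var table *windows.MibIpForwardTable2
	if err := windows.GetIpForwardTable2(windows.AF_INET, &table); err != nil {
		log.Fatal(err)
	}
	defer windows.FreeMibTable(unsafe.Pointer(table))
	for _, row := range table.Rows() {
		fmt.Printf("if=%d metric=%d proto=%d\n", row.InterfaceIndex, row.Metric, row.Protocol)
	}
}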
// MIB_UNICASTIPADDRESS_ROW stores information about a unicast IP address. See
// https://learn.microsoft.com/en-us/windows/win32/api/netioapi/ns-netioapi-mib_unicastipaddress_row.
type MibUnicastIpAddressRow struct {
procDwmGetWindowAttribute = moddwmapi.NewProc("DwmGetWindowAttribute")
procDwmSetWindowAttribute = moddwmapi.NewProc("DwmSetWindowAttribute")
procCancelMibChangeNotify2 = modiphlpapi.NewProc("CancelMibChangeNotify2")
+ procFreeMibTable = modiphlpapi.NewProc("FreeMibTable")
procGetAdaptersAddresses = modiphlpapi.NewProc("GetAdaptersAddresses")
procGetAdaptersInfo = modiphlpapi.NewProc("GetAdaptersInfo")
procGetBestInterfaceEx = modiphlpapi.NewProc("GetBestInterfaceEx")
procGetIfEntry = modiphlpapi.NewProc("GetIfEntry")
procGetIfEntry2Ex = modiphlpapi.NewProc("GetIfEntry2Ex")
+ procGetIpForwardEntry2 = modiphlpapi.NewProc("GetIpForwardEntry2")
+ procGetIpForwardTable2 = modiphlpapi.NewProc("GetIpForwardTable2")
procGetUnicastIpAddressEntry = modiphlpapi.NewProc("GetUnicastIpAddressEntry")
procNotifyIpInterfaceChange = modiphlpapi.NewProc("NotifyIpInterfaceChange")
+ procNotifyRouteChange2 = modiphlpapi.NewProc("NotifyRouteChange2")
procNotifyUnicastIpAddressChange = modiphlpapi.NewProc("NotifyUnicastIpAddressChange")
procAddDllDirectory = modkernel32.NewProc("AddDllDirectory")
procAssignProcessToJobObject = modkernel32.NewProc("AssignProcessToJobObject")
return
}
+func FreeMibTable(memory unsafe.Pointer) {
+ syscall.SyscallN(procFreeMibTable.Addr(), uintptr(memory))
+ return
+}
+
func GetAdaptersAddresses(family uint32, flags uint32, reserved uintptr, adapterAddresses *IpAdapterAddresses, sizePointer *uint32) (errcode error) {
r0, _, _ := syscall.SyscallN(procGetAdaptersAddresses.Addr(), uintptr(family), uintptr(flags), uintptr(reserved), uintptr(unsafe.Pointer(adapterAddresses)), uintptr(unsafe.Pointer(sizePointer)))
if r0 != 0 {
return
}
+func GetIpForwardEntry2(row *MibIpForwardRow2) (errcode error) {
+ r0, _, _ := syscall.SyscallN(procGetIpForwardEntry2.Addr(), uintptr(unsafe.Pointer(row)))
+ if r0 != 0 {
+ errcode = syscall.Errno(r0)
+ }
+ return
+}
+
+func GetIpForwardTable2(family uint16, table **MibIpForwardTable2) (errcode error) {
+ r0, _, _ := syscall.SyscallN(procGetIpForwardTable2.Addr(), uintptr(family), uintptr(unsafe.Pointer(table)))
+ if r0 != 0 {
+ errcode = syscall.Errno(r0)
+ }
+ return
+}
+
func GetUnicastIpAddressEntry(row *MibUnicastIpAddressRow) (errcode error) {
r0, _, _ := syscall.SyscallN(procGetUnicastIpAddressEntry.Addr(), uintptr(unsafe.Pointer(row)))
if r0 != 0 {
return
}
+func NotifyRouteChange2(family uint16, callback uintptr, callerContext unsafe.Pointer, initialNotification bool, notificationHandle *Handle) (errcode error) {
+ var _p0 uint32
+ if initialNotification {
+ _p0 = 1
+ }
+ r0, _, _ := syscall.SyscallN(procNotifyRouteChange2.Addr(), uintptr(family), uintptr(callback), uintptr(callerContext), uintptr(_p0), uintptr(unsafe.Pointer(notificationHandle)))
+ if r0 != 0 {
+ errcode = syscall.Errno(r0)
+ }
+ return
+}
+
func NotifyUnicastIpAddressChange(family uint16, callback uintptr, callerContext unsafe.Pointer, initialNotification bool, notificationHandle *Handle) (errcode error) {
var _p0 uint32
if initialNotification {
--- /dev/null
+issuerepo: golang/go
URL string
// SuggestedFixes is an optional list of fixes to address the
- // problem described by the diagnostic. Each one represents
- // an alternative strategy; at most one may be applied.
+ // problem described by the diagnostic. Each one represents an
+ // alternative strategy, and should have a distinct and
+ // descriptive message; at most one may be applied.
//
// Fixes for different diagnostics should be treated as
// independent changes to the same baseline file state,
"encoding/json"
"flag"
"fmt"
- "go/token"
"io"
"log"
"os"
"strconv"
- "strings"
"golang.org/x/tools/go/analysis"
)
// flags common to all {single,multi,unit}checkers.
var (
- JSON = false // -json
- Context = -1 // -c=N: if N>0, display offending line plus N lines of context
- Fix bool // -fix
- diffFlag bool // -diff (changes [ApplyFixes] behavior)
+ JSON = false // -json
+ Context = -1 // -c=N: if N>0, display offending line plus N lines of context
+ Fix bool // -fix
+ Diff bool // -diff
)
// Parse creates a flag for each of the analyzer's flags,
flag.BoolVar(&JSON, "json", JSON, "emit JSON output")
flag.IntVar(&Context, "c", Context, `display offending line with this many lines of context`)
flag.BoolVar(&Fix, "fix", false, "apply all suggested fixes")
- flag.BoolVar(&diffFlag, "diff", false, "with -fix, don't update the files, but print a unified diff")
+ flag.BoolVar(&Diff, "diff", false, "with -fix, don't update the files, but print a unified diff")
// Add shims for legacy vet flags to enable existing
// scripts that run vet to continue to work.
"unusedfuncs": "unusedresult.funcs",
"unusedstringmethods": "unusedresult.stringmethods",
}
-
-// ---- output helpers common to all drivers ----
-//
-// These functions should not depend on global state (flags)!
-// Really they belong in a different package.
-
-// TODO(adonovan): don't accept an io.Writer if we don't report errors.
-// Either accept a bytes.Buffer (infallible), or return a []byte.
-
-// PrintPlain prints a diagnostic in plain text form.
-// If contextLines is nonnegative, it also prints the
-// offending line plus this many lines of context.
-func PrintPlain(out io.Writer, fset *token.FileSet, contextLines int, diag analysis.Diagnostic) {
- print := func(pos, end token.Pos, message string) {
- posn := fset.Position(pos)
- fmt.Fprintf(out, "%s: %s\n", posn, message)
-
- // show offending line plus N lines of context.
- if contextLines >= 0 {
- end := fset.Position(end)
- if !end.IsValid() {
- end = posn
- }
- // TODO(adonovan): highlight the portion of the line indicated
- // by pos...end using ASCII art, terminal colors, etc?
- data, _ := os.ReadFile(posn.Filename)
- lines := strings.Split(string(data), "\n")
- for i := posn.Line - contextLines; i <= end.Line+contextLines; i++ {
- if 1 <= i && i <= len(lines) {
- fmt.Fprintf(out, "%d\t%s\n", i, lines[i-1])
- }
- }
- }
- }
-
- print(diag.Pos, diag.End, diag.Message)
- for _, rel := range diag.Related {
- print(rel.Pos, rel.End, "\t"+rel.Message)
- }
-}
-
-// A JSONTree is a mapping from package ID to analysis name to result.
-// Each result is either a jsonError or a list of JSONDiagnostic.
-type JSONTree map[string]map[string]any
-
-// A TextEdit describes the replacement of a portion of a file.
-// Start and End are zero-based half-open indices into the original byte
-// sequence of the file, and New is the new text.
-type JSONTextEdit struct {
- Filename string `json:"filename"`
- Start int `json:"start"`
- End int `json:"end"`
- New string `json:"new"`
-}
-
-// A JSONSuggestedFix describes an edit that should be applied as a whole or not
-// at all. It might contain multiple TextEdits/text_edits if the SuggestedFix
-// consists of multiple non-contiguous edits.
-type JSONSuggestedFix struct {
- Message string `json:"message"`
- Edits []JSONTextEdit `json:"edits"`
-}
-
-// A JSONDiagnostic describes the JSON schema of an analysis.Diagnostic.
-//
-// TODO(matloob): include End position if present.
-type JSONDiagnostic struct {
- Category string `json:"category,omitempty"`
- Posn string `json:"posn"` // e.g. "file.go:line:column"
- Message string `json:"message"`
- SuggestedFixes []JSONSuggestedFix `json:"suggested_fixes,omitempty"`
- Related []JSONRelatedInformation `json:"related,omitempty"`
-}
-
-// A JSONRelated describes a secondary position and message related to
-// a primary diagnostic.
-//
-// TODO(adonovan): include End position if present.
-type JSONRelatedInformation struct {
- Posn string `json:"posn"` // e.g. "file.go:line:column"
- Message string `json:"message"`
-}
-
-// Add adds the result of analysis 'name' on package 'id'.
-// The result is either a list of diagnostics or an error.
-func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis.Diagnostic, err error) {
- var v any
- if err != nil {
- type jsonError struct {
- Err string `json:"error"`
- }
- v = jsonError{err.Error()}
- } else if len(diags) > 0 {
- diagnostics := make([]JSONDiagnostic, 0, len(diags))
- for _, f := range diags {
- var fixes []JSONSuggestedFix
- for _, fix := range f.SuggestedFixes {
- var edits []JSONTextEdit
- for _, edit := range fix.TextEdits {
- edits = append(edits, JSONTextEdit{
- Filename: fset.Position(edit.Pos).Filename,
- Start: fset.Position(edit.Pos).Offset,
- End: fset.Position(edit.End).Offset,
- New: string(edit.NewText),
- })
- }
- fixes = append(fixes, JSONSuggestedFix{
- Message: fix.Message,
- Edits: edits,
- })
- }
- var related []JSONRelatedInformation
- for _, r := range f.Related {
- related = append(related, JSONRelatedInformation{
- Posn: fset.Position(r.Pos).String(),
- Message: r.Message,
- })
- }
- jdiag := JSONDiagnostic{
- Category: f.Category,
- Posn: fset.Position(f.Pos).String(),
- Message: f.Message,
- SuggestedFixes: fixes,
- Related: related,
- }
- diagnostics = append(diagnostics, jdiag)
- }
- v = diagnostics
- }
- if v != nil {
- m, ok := tree[id]
- if !ok {
- m = make(map[string]any)
- tree[id] = m
- }
- m[name] = v
- }
-}
-
-func (tree JSONTree) Print(out io.Writer) error {
- data, err := json.MarshalIndent(tree, "", "\t")
- if err != nil {
- log.Panicf("internal error: JSON marshaling failed: %v", err)
- }
- _, err = fmt.Fprintf(out, "%s\n", data)
- return err
-}
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
)
//go:embed doc.go
var Analyzer = &analysis.Analyzer{
Name: "appends",
- Doc: analysisinternal.MustExtractDoc(doc, "appends"),
+ Doc: analyzerutil.MustExtractDoc(doc, "appends"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/appends",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
"strings"
"golang.org/x/tools/go/analysis"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
)
const Doc = "report mismatches between assembly files and Go declarations"
Files:
for _, fname := range sfiles {
- content, tf, err := analysisinternal.ReadFile(pass, fname)
+ content, tf, err := analyzerutil.ReadFile(pass, fname)
if err != nil {
return nil, err
}
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
var Analyzer = &analysis.Analyzer{
Name: "assign",
- Doc: analysisinternal.MustExtractDoc(doc, "assign"),
+ Doc: analyzerutil.MustExtractDoc(doc, "assign"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/assign",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Name: "atomic",
- Doc: analysisinternal.MustExtractDoc(doc, "atomic"),
+ Doc: analyzerutil.MustExtractDoc(doc, "atomic"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/atomic",
Requires: []*analysis.Analyzer{inspect.Analyzer},
RunDespiteErrors: true,
"unicode"
"golang.org/x/tools/go/analysis"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
)
const Doc = "check //go:build and // +build directives"
// We cannot use the Go parser, since this may not be a Go source file.
// Read the raw bytes instead.
- content, tf, err := analysisinternal.ReadFile(pass, filename)
+ content, tf, err := analyzerutil.ReadFile(pass, filename)
if err != nil {
return err
}
case *types.Array:
return typeOKForCgoCall(t.Elem(), m)
case *types.Struct:
- for i := 0; i < t.NumFields(); i++ {
- if !typeOKForCgoCall(t.Field(i).Type(), m) {
+ for field := range t.Fields() {
+ if !typeOKForCgoCall(field.Type(), m) {
return false
}
}
ttyp, ok := typ.Underlying().(*types.Tuple)
if ok {
- for i := 0; i < ttyp.Len(); i++ {
- subpath := lockPath(tpkg, ttyp.At(i).Type(), seen)
+ for v := range ttyp.Variables() {
+ subpath := lockPath(tpkg, v.Type(), seen)
if subpath != nil {
return append(subpath, typ.String())
}
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/analysis/passes/internal/ctrlflowinternal"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/cfg"
"golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/cfginternal"
+ "golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Doc: "build a control-flow graph",
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/ctrlflow",
Run: run,
- ResultType: reflect.TypeOf(new(CFGs)),
+ ResultType: reflect.TypeFor[*CFGs](),
FactTypes: []analysis.Fact{new(noReturn)},
Requires: []*analysis.Analyzer{inspect.Analyzer},
}
defs map[*ast.Ident]types.Object // from Pass.TypesInfo.Defs
funcDecls map[*types.Func]*declInfo
funcLits map[*ast.FuncLit]*litInfo
- pass *analysis.Pass // transient; nil after construction
+ noReturn map[*types.Func]bool // functions lacking a reachable return statement
+ pass *analysis.Pass // transient; nil after construction
+}
+
+// TODO(adonovan): add (*CFGs).NoReturn to public API.
+func (c *CFGs) isNoReturn(fn *types.Func) bool {
+ return c.noReturn[fn]
+}
+
+func init() {
+ // Expose the hidden method to callers in x/tools.
+ ctrlflowinternal.NoReturn = func(c any, fn *types.Func) bool {
+ return c.(*CFGs).isNoReturn(fn)
+ }
}
// CFGs has two maps: funcDecls for named functions and funcLits for
// *types.Func but not the other way.
type declInfo struct {
- decl *ast.FuncDecl
- cfg *cfg.CFG // iff decl.Body != nil
- started bool // to break cycles
- noReturn bool
+ decl *ast.FuncDecl
+ cfg *cfg.CFG // iff decl.Body != nil
+ started bool // to break cycles
}
type litInfo struct {
cfg *cfg.CFG
- noReturn bool
+ noReturn bool // (currently unused)
}
// FuncDecl returns the control-flow graph for a named function.
defs: pass.TypesInfo.Defs,
funcDecls: funcDecls,
funcLits: funcLits,
+ noReturn: make(map[*types.Func]bool),
pass: pass,
}
li := funcLits[lit]
if li.cfg == nil {
li.cfg = cfg.New(lit.Body, c.callMayReturn)
- if !hasReachableReturn(li.cfg) {
+ if cfginternal.IsNoReturn(li.cfg) {
li.noReturn = true
}
}
// The buildDecl call tree thus resembles the static call graph.
// We mark each node when we start working on it to break cycles.
- if !di.started { // break cycle
- di.started = true
+ if di.started {
+ return // break cycle
+ }
+ di.started = true
- if isIntrinsicNoReturn(fn) {
- di.noReturn = true
- }
- if di.decl.Body != nil {
- di.cfg = cfg.New(di.decl.Body, c.callMayReturn)
- if !hasReachableReturn(di.cfg) {
- di.noReturn = true
- }
- }
- if di.noReturn {
- c.pass.ExportObjectFact(fn, new(noReturn))
- }
+ noreturn := isIntrinsicNoReturn(fn)
- // debugging
- if false {
- log.Printf("CFG for %s:\n%s (noreturn=%t)\n", fn, di.cfg.Format(c.pass.Fset), di.noReturn)
+ if di.decl.Body != nil {
+ di.cfg = cfg.New(di.decl.Body, c.callMayReturn)
+ if cfginternal.IsNoReturn(di.cfg) {
+ noreturn = true
}
}
+ if noreturn {
+ c.pass.ExportObjectFact(fn, new(noReturn))
+ c.noReturn[fn] = true
+ }
+
+ // debugging
+ if false {
+ log.Printf("CFG for %s:\n%s (noreturn=%t)\n", fn, di.cfg.Format(c.pass.Fset), noreturn)
+ }
}
// callMayReturn reports whether the called function may return.
// Function or method declared in this package?
if di, ok := c.funcDecls[fn]; ok {
c.buildDecl(fn, di)
- return !di.noReturn
+ return !c.noReturn[fn]
}
// Not declared in this package.
// Is there a fact from another package?
- return !c.pass.ImportObjectFact(fn, new(noReturn))
+ if c.pass.ImportObjectFact(fn, new(noReturn)) {
+ c.noReturn[fn] = true
+ return false
+ }
+
+ return true
}
var panicBuiltin = types.Universe.Lookup("panic").(*types.Builtin)
-func hasReachableReturn(g *cfg.CFG) bool {
- for _, b := range g.Blocks {
- if b.Live && b.Return() != nil {
- return true
- }
- }
- return false
-}
-
// isIntrinsicNoReturn reports whether a function intrinsically never
// returns because it stops execution of the calling thread.
// It is the base case in the recursion.
func isIntrinsicNoReturn(fn *types.Func) bool {
// Add functions here as the need arises, but don't allocate memory.
- path, name := fn.Pkg().Path(), fn.Name()
- return path == "syscall" && (name == "Exit" || name == "ExitProcess" || name == "ExitThread") ||
- path == "runtime" && name == "Goexit"
+ return typesinternal.IsFunctionNamed(fn, "syscall", "Exit", "ExitProcess", "ExitThread") ||
+ typesinternal.IsFunctionNamed(fn, "runtime", "Goexit")
}
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Name: "defers",
Requires: []*analysis.Analyzer{inspect.Analyzer},
- Doc: analysisinternal.MustExtractDoc(doc, "defers"),
+ Doc: analyzerutil.MustExtractDoc(doc, "defers"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/defers",
Run: run,
}
"unicode/utf8"
"golang.org/x/tools/go/analysis"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
)
const Doc = `check Go toolchain directives such as //go:debug
func checkOtherFile(pass *analysis.Pass, filename string) error {
// We cannot use the Go parser, since it is not a Go source file.
// Read the raw bytes instead.
- content, tf, err := analysisinternal.ReadFile(pass, filename)
+ content, tf, err := analyzerutil.ReadFile(pass, filename)
if err != nil {
return err
}
"go/types"
"golang.org/x/tools/go/analysis"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
"unicode"
"golang.org/x/tools/go/analysis"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
)
const Doc = "report assembly that clobbers the frame pointer before saving it"
}
for _, fname := range sfiles {
- content, tf, err := analysisinternal.ReadFile(pass, fname)
+ content, tf, err := analyzerutil.ReadFile(pass, fname)
if err != nil {
return nil, err
}
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/types/typeutil"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/typeparams"
)
var Analyzer = &analysis.Analyzer{
Name: "ifaceassert",
- Doc: analysisinternal.MustExtractDoc(doc, "ifaceassert"),
+ Doc: analyzerutil.MustExtractDoc(doc, "ifaceassert"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/ifaceassert",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/diff"
+ "golang.org/x/tools/internal/moreiters"
"golang.org/x/tools/internal/packagepath"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/refactor/inline"
var Analyzer = &analysis.Analyzer{
Name: "inline",
- Doc: analysisinternal.MustExtractDoc(doc, "inline"),
+ Doc: analyzerutil.MustExtractDoc(doc, "inline"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/inline",
Run: run,
FactTypes: []analysis.Fact{
// inline inlines each static call to an inlinable function
// and each reference to an inlinable constant or type alias.
-//
-// TODO(adonovan): handle multiple diffs that each add the same import.
func (a *analyzer) inline() {
for cur := range a.root.Preorder((*ast.CallExpr)(nil), (*ast.Ident)(nil)) {
switch n := cur.Node().(type) {
return // nope
}
+ if a.withinTestOf(cur, fn) {
+ return // don't inline a function from within its own test
+ }
+
// Inline the call.
content, err := a.readFile(call)
if err != nil {
}
}
+// withinTestOf reports whether cur is within a dedicated test
+// function for the inlinable target function.
+// A call within its dedicated test should not be inlined.
+func (a *analyzer) withinTestOf(cur inspector.Cursor, target *types.Func) bool {
+ curFuncDecl, ok := moreiters.First(cur.Enclosing((*ast.FuncDecl)(nil)))
+ if !ok {
+ return false // not in a function
+ }
+ funcDecl := curFuncDecl.Node().(*ast.FuncDecl)
+ if funcDecl.Recv != nil {
+ return false // not a test func
+ }
+ if strings.TrimSuffix(a.pass.Pkg.Path(), "_test") != target.Pkg().Path() {
+ return false // different package
+ }
+ if !strings.HasSuffix(a.pass.Fset.File(funcDecl.Pos()).Name(), "_test.go") {
+ return false // not a test file
+ }
+
+	// Compute the expected SYMBOL portion of "TestSYMBOL_comment"
+ // for the target symbol.
+ symbol := target.Name()
+ if recv := target.Signature().Recv(); recv != nil {
+ _, named := typesinternal.ReceiverNamed(recv)
+ symbol = named.Obj().Name() + "_" + symbol
+ }
+
+ // TODO(adonovan): use a proper Test function parser.
+ fname := funcDecl.Name.Name
+	for _, pre := range []string{"Test", "Example", "Benchmark"} {
+ if fname == pre+symbol || strings.HasPrefix(fname, pre+symbol+"_") {
+ return true
+ }
+ }
+
+ return false
+}
+
// If tn is the TypeName of an inlinable alias, suggest inlining its use at cur.
func (a *analyzer) inlineAlias(tn *types.TypeName, curId inspector.Cursor) {
inalias, ok := a.inlinableAliases[tn]
visit(t.Key())
visit(t.Elem())
case *types.Struct:
- for i := range t.NumFields() {
- visit(t.Field(i).Type())
+ for field := range t.Fields() {
+ visit(field.Type())
}
case *types.Signature:
// Ignore the receiver: although it may be present, it has no meaning
visit(t.Params())
visit(t.Results())
case *types.Interface:
- for i := range t.NumEmbeddeds() {
- visit(t.EmbeddedType(i))
+ for etyp := range t.EmbeddedTypes() {
+ visit(etyp)
}
- for i := range t.NumExplicitMethods() {
- visit(t.ExplicitMethod(i).Type())
+ for method := range t.ExplicitMethods() {
+ visit(method.Type())
}
case *types.Tuple:
for v := range t.Variables() {
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/inspect",
Run: run,
RunDespiteErrors: true,
- ResultType: reflect.TypeOf(new(inspector.Inspector)),
+ ResultType: reflect.TypeFor[*inspector.Inspector](),
}
func run(pass *analysis.Pass) (any, error) {
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package ctrlflowinternal exposes internals of ctrlflow.
+// It cannot depend on ctrlflow itself, since ctrlflow imports this package.
+package ctrlflowinternal
+
+import "go/types"
+
+// NoReturn exposes the (*ctrlflow.CFGs).NoReturn method to the buildssa analyzer.
+//
+// You must link [golang.org/x/tools/go/analysis/passes/ctrlflow] into your
+// application; otherwise calling this function panics.
+var NoReturn = func(cfgs any, fn *types.Func) bool {
+ panic("x/tools/go/analysis/passes/ctrlflow is not linked into this application")
+}
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/versions"
)
var Analyzer = &analysis.Analyzer{
Name: "loopclosure",
- Doc: analysisinternal.MustExtractDoc(doc, "loopclosure"),
+ Doc: analyzerutil.MustExtractDoc(doc, "loopclosure"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/loopclosure",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
switch n := n.(type) {
case *ast.File:
// Only traverse the file if its goversion is strictly before go1.22.
- goversion := versions.FileVersion(pass.TypesInfo, n)
- return versions.Before(goversion, versions.Go1_22)
+ return !analyzerutil.FileUsesGoVersion(pass, n, versions.Go1_22)
+
case *ast.RangeStmt:
body = n.Body
addVar(n.Key)
if !ok {
continue
}
- expr := exprStmt.X
- if isMethodCall(info, expr, "testing", "T", "Parallel") {
- call, _ := expr.(*ast.CallExpr)
- if call == nil {
- continue
- }
+ call, ok := exprStmt.X.(*ast.CallExpr)
+ if !ok {
+ continue
+ }
+ if isMethodCall(info, call, "testing", "T", "Parallel") {
x, _ := call.Fun.(*ast.SelectorExpr)
if x == nil {
continue
}
}
-// isMethodCall reports whether expr is a method call of
-// <pkgPath>.<typeName>.<method>.
-func isMethodCall(info *types.Info, expr ast.Expr, pkgPath, typeName, method string) bool {
- call, ok := expr.(*ast.CallExpr)
- if !ok {
- return false
- }
-
- // Check that we are calling a method <method>
- f := typeutil.StaticCallee(info, call)
- if f == nil || f.Name() != method {
- return false
- }
- recv := f.Type().(*types.Signature).Recv()
- if recv == nil {
- return false
- }
-
- // Check that the receiver is a <pkgPath>.<typeName> or
- // *<pkgPath>.<typeName>.
- _, named := typesinternal.ReceiverNamed(recv)
- return typesinternal.IsTypeNamed(named, pkgPath, typeName)
+func isMethodCall(info *types.Info, call *ast.CallExpr, pkgPath, typeName, method string) bool {
+ return typesinternal.IsMethodNamed(typeutil.Callee(info, call), pkgPath, typeName, method)
}
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/cfg"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Name: "lostcancel",
- Doc: analysisinternal.MustExtractDoc(doc, "lostcancel"),
+ Doc: analyzerutil.MustExtractDoc(doc, "lostcancel"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/lostcancel",
Run: run,
Requires: []*analysis.Analyzer{
}
func tupleContains(tuple *types.Tuple, v *types.Var) bool {
- for i := 0; i < tuple.Len(); i++ {
- if tuple.At(i) == v {
+ for v0 := range tuple.Variables() {
+ if v0 == v {
return true
}
}
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
- "golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ "golang.org/x/tools/internal/versions"
)
var AnyAnalyzer = &analysis.Analyzer{
- Name: "any",
- Doc: analysisinternal.MustExtractDoc(doc, "any"),
- Requires: []*analysis.Analyzer{
- generated.Analyzer,
- inspect.Analyzer,
- },
- Run: runAny,
- URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#any",
+ Name: "any",
+ Doc: analyzerutil.MustExtractDoc(doc, "any"),
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: runAny,
+ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#any",
}
// The any pass replaces interface{} with go1.18's 'any'.
func runAny(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
-
- for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.18") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_18) {
for curIface := range curFile.Preorder((*ast.InterfaceType)(nil)) {
iface := curIface.Node().(*ast.InterfaceType)
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/moreiters"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
)
var BLoopAnalyzer = &analysis.Analyzer{
Name: "bloop",
- Doc: analysisinternal.MustExtractDoc(doc, "bloop"),
+ Doc: analyzerutil.MustExtractDoc(doc, "bloop"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
// for i := 0; i < b.N; i++ {} => for b.Loop() {}
// for range b.N {}
func bloop(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
if !typesinternal.Imports(pass.Pkg, "testing") {
return nil, nil
}
var (
- inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
- info = pass.TypesInfo
+ index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+ info = pass.TypesInfo
)
// edits computes the text edits for a matched for/range loop
(*ast.ForStmt)(nil),
(*ast.RangeStmt)(nil),
}
- for curFile := range filesUsing(inspect, info, "go1.24") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_24) {
for curLoop := range curFile.Preorder(loops...) {
switch n := curLoop.Node().(type) {
case *ast.ForStmt:
// 2. The only b.N loop in that benchmark function
// - b.Loop() can only be called once per benchmark execution
// - Multiple calls result in "B.Loop called with timer stopped" error
+// - Multiple loops may have complex interdependencies that are hard to analyze
func usesBenchmarkNOnce(c inspector.Cursor, info *types.Info) bool {
// Find the enclosing benchmark function
curFunc, ok := enclosingFunc(c)
return false
}
- // Count b.N references in this benchmark function
+ // Count all b.N references in this benchmark function (including nested functions)
bnRefCount := 0
- filter := []ast.Node{(*ast.SelectorExpr)(nil), (*ast.FuncLit)(nil)}
+ filter := []ast.Node{(*ast.SelectorExpr)(nil)}
curFunc.Inspect(filter, func(cur inspector.Cursor) bool {
- switch n := cur.Node().(type) {
- case *ast.FuncLit:
- return false // don't descend into nested function literals
- case *ast.SelectorExpr:
- if n.Sel.Name == "N" && typesinternal.IsPointerToNamed(info.TypeOf(n.X), "testing", "B") {
- bnRefCount++
- }
+ sel := cur.Node().(*ast.SelectorExpr)
+ if sel.Sel.Name == "N" &&
+ typesinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") {
+ bnRefCount++
}
return true
})
if assign, ok := loop.Init.(*ast.AssignStmt); ok &&
assign.Tok == token.DEFINE &&
len(assign.Rhs) == 1 &&
- isZeroIntLiteral(info, assign.Rhs[0]) &&
+ isZeroIntConst(info, assign.Rhs[0]) &&
is[*ast.IncDecStmt](loop.Post) &&
loop.Post.(*ast.IncDecStmt).Tok == token.INC &&
astutil.EqualSyntax(loop.Post.(*ast.IncDecStmt).X, assign.Lhs[0]) {
and suggests a fix to turn them into inlinable wrappers around
go1.26's built-in new(expr) function:
+ //go:fix inline
func varOf(x int) *int { return new(x) }
+(The directive comment causes the 'inline' analyzer to suggest
+that calls to such functions are inlined.)
+
In addition, this analyzer suggests a fix for each call
to one of the functions before it is transformed, so that
use(new(123))
-(Wrapper functions such as varOf are common when working with Go
+Wrapper functions such as varOf are common when working with Go
serialization packages such as for JSON or protobuf, where pointers
-are often used to express optionality.)
+are often used to express optionality.
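+
+For example, such a wrapper typically populates an optional pointer field
+(a minimal sketch; the type and field names are illustrative):
+
+ type Config struct {
+ Timeout *int `json:"timeout,omitempty"`
+ }
+
+ cfg := Config{Timeout: varOf(30)} // after the fix: new(30)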
# Analyzer omitzero
where x is one of various well-known types in the standard library.
+# Analyzer stringscut
+
+stringscut: replace strings.Index etc. with strings.Cut
+
+This analyzer replaces certain patterns of use of [strings.Index] and string slicing by [strings.Cut], added in go1.18.
+
+For example:
+
+ idx := strings.Index(s, substr)
+ if idx >= 0 {
+ return s[:idx]
+ }
+
+is replaced by:
+
+ before, _, ok := strings.Cut(s, substr)
+ if ok {
+ return before
+ }
+
+And:
+
+ idx := strings.Index(s, substr)
+ if idx >= 0 {
+ return
+ }
+
+is replaced by:
+
+ found := strings.Contains(s, substr)
+ if found {
+ return
+ }
+
+It also handles variants using [strings.IndexByte] instead of Index, or the bytes package instead of strings.
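+
+For example, a byte-based variant (a minimal sketch; the exact fix may differ):
+
+ i := strings.IndexByte(s, ',')
+ if i >= 0 {
+ return s[:i]
+ }
+
+is replaced by:
+
+ before, _, ok := strings.Cut(s, ",")
+ if ok {
+ return before
+ }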
+
+Fixes are offered only when there are no potential modifications of the idx, s, or substr expressions between their definition and use.
+
# Analyzer stringscutprefix
stringscutprefix: replace HasPrefix/TrimPrefix with CutPrefix
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/goplsexport"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
)
var errorsastypeAnalyzer = &analysis.Analyzer{
Name: "errorsastype",
- Doc: analysisinternal.MustExtractDoc(doc, "errorsastype"),
+ Doc: analyzerutil.MustExtractDoc(doc, "errorsastype"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#errorsastype",
- Requires: []*analysis.Analyzer{generated.Analyzer, typeindexanalyzer.Analyzer},
+ Requires: []*analysis.Analyzer{typeindexanalyzer.Analyzer},
Run: errorsastype,
}
//
// - if errors.As(err, myerr) && othercond { ... }
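//
// A minimal sketch of the rewrite, assuming go1.26's errors.AsType has the
// signature AsType[E error](err error) (E, bool); identifiers are illustrative:
//
//	var perr *fs.PathError
//	if errors.As(err, &perr) {
//		use(perr)
//	}
//
// becomes:
//
//	if perr, ok := errors.AsType[*fs.PathError](err); ok {
//		use(perr)
//	}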
func errorsastype(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
var (
index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
info = pass.TypesInfo
}
file := astutil.EnclosingFile(curDeclStmt)
- if !fileUses(info, file, "go1.26") {
+ if !analyzerutil.FileUsesGoVersion(pass, file, versions.Go1_26) {
continue // errors.AsType is too new
}
errtype := types.TypeString(v.Type(), qual)
// Choose a name for the "ok" variable.
+ // TODO(adonovan): this pattern also appears in stditerators,
+ // and is wanted elsewhere; factor.
okName := "ok"
if okVar := lookup(info, curCall, "ok"); okVar != nil {
// The name 'ok' is already declared, but
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
)
var FmtAppendfAnalyzer = &analysis.Analyzer{
Name: "fmtappendf",
- Doc: analysisinternal.MustExtractDoc(doc, "fmtappendf"),
+ Doc: analyzerutil.MustExtractDoc(doc, "fmtappendf"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
// The fmtappend function replaces []byte(fmt.Sprintf(...)) by
// fmt.Appendf(nil, ...), and similarly for Sprint, Sprintln.
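//
// For example (a minimal sketch; identifiers are illustrative):
//
//	w.Write([]byte(fmt.Sprintf("%d items", n)))
//
// becomes:
//
//	w.Write(fmt.Appendf(nil, "%d items", n))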
func fmtappendf(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
index := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
for _, fn := range []types.Object{
index.Object("fmt", "Sprintf"),
conv := curCall.Parent().Node().(*ast.CallExpr)
tv := pass.TypesInfo.Types[conv.Fun]
if tv.IsType() && types.Identical(tv.Type, byteSliceType) &&
- fileUses(pass.TypesInfo, astutil.EnclosingFile(curCall), "go1.19") {
+ analyzerutil.FileUsesGoVersion(pass, astutil.EnclosingFile(curCall), versions.Go1_19) {
// Have: []byte(fmt.SprintX(...))
// Find "Sprint" identifier.
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
- "golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/versions"
)
var ForVarAnalyzer = &analysis.Analyzer{
- Name: "forvar",
- Doc: analysisinternal.MustExtractDoc(doc, "forvar"),
- Requires: []*analysis.Analyzer{
- generated.Analyzer,
- inspect.Analyzer,
- },
- Run: forvar,
- URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#forvar",
+ Name: "forvar",
+ Doc: analyzerutil.MustExtractDoc(doc, "forvar"),
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: forvar,
+ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#forvar",
}
// forvar offers to fix unnecessary copying of a for variable
// where the two idents are the same,
// and the ident is defined (:=) as a variable in the for statement.
// (Note that this 'fix' does not work for three clause loops
// because the Go specification says "The variable used by each subsequent iteration
// is declared implicitly before executing the post statement and initialized to the
// value of the previous iteration's variable at that moment.")
+//
+// Variant: same thing in an IfStmt.Init, when the IfStmt is the sole
+// loop body statement:
+//
+// for _, x := range foo {
+// if x := x; cond { ... }
+// }
+//
+// (The restriction is necessary to avoid potential problems arising
+// from merging two distinct variables.)
+//
+// This analyzer is synergistic with stditerators,
+// which may create redundant "x := x" statements.
func forvar(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.22") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_22) {
for curLoop := range curFile.Preorder((*ast.RangeStmt)(nil)) {
loop := curLoop.Node().(*ast.RangeStmt)
if loop.Tok != token.DEFINE {
continue
}
- isLoopVarRedecl := func(assign *ast.AssignStmt) bool {
- for i, lhs := range assign.Lhs {
- if !(astutil.EqualSyntax(lhs, assign.Rhs[i]) &&
- (astutil.EqualSyntax(lhs, loop.Key) || astutil.EqualSyntax(lhs, loop.Value))) {
- return false
+ isLoopVarRedecl := func(stmt ast.Stmt) bool {
+ if assign, ok := stmt.(*ast.AssignStmt); ok &&
+ assign.Tok == token.DEFINE &&
+ len(assign.Lhs) == len(assign.Rhs) {
+
+ for i, lhs := range assign.Lhs {
+ if !(astutil.EqualSyntax(lhs, assign.Rhs[i]) &&
+ (astutil.EqualSyntax(lhs, loop.Key) ||
+ astutil.EqualSyntax(lhs, loop.Value))) {
+ return false
+ }
}
+ return true
}
- return true
+ return false
}
// Have: for k, v := range x { stmts }
//
// Delete the prefix of stmts that are
// of the form k := k; v := v; k, v := k, v; v, k := v, k.
for _, stmt := range loop.Body.List {
- if assign, ok := stmt.(*ast.AssignStmt); ok &&
- assign.Tok == token.DEFINE &&
- len(assign.Lhs) == len(assign.Rhs) &&
- isLoopVarRedecl(assign) {
-
- curStmt, _ := curLoop.FindNode(stmt)
- edits := refactor.DeleteStmt(pass.Fset.File(stmt.Pos()), curStmt)
- if len(edits) > 0 {
- pass.Report(analysis.Diagnostic{
- Pos: stmt.Pos(),
- End: stmt.End(),
- Message: "copying variable is unneeded",
- SuggestedFixes: []analysis.SuggestedFix{{
- Message: "Remove unneeded redeclaration",
- TextEdits: edits,
- }},
- })
- }
+ if isLoopVarRedecl(stmt) {
+ // { x := x; ... }
+ // ------
+ } else if ifstmt, ok := stmt.(*ast.IfStmt); ok &&
+ ifstmt.Init != nil &&
+ len(loop.Body.List) == 1 && // must be sole statement in loop body
+ isLoopVarRedecl(ifstmt.Init) {
+ // if x := x; cond {
+ // ------
+ stmt = ifstmt.Init
} else {
break // stop at first other statement
}
+
+ curStmt, _ := curLoop.FindNode(stmt)
+ edits := refactor.DeleteStmt(pass.Fset.File(stmt.Pos()), curStmt)
+ if len(edits) > 0 {
+ pass.Report(analysis.Diagnostic{
+ Pos: stmt.Pos(),
+ End: stmt.End(),
+ Message: "copying variable is unneeded",
+ SuggestedFixes: []analysis.SuggestedFix{{
+ Message: "Remove unneeded redeclaration",
+ TextEdits: edits,
+ }},
+ })
+ }
}
}
}
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typeparams"
"golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/versions"
)
var MapsLoopAnalyzer = &analysis.Analyzer{
- Name: "mapsloop",
- Doc: analysisinternal.MustExtractDoc(doc, "mapsloop"),
- Requires: []*analysis.Analyzer{
- generated.Analyzer,
- inspect.Analyzer,
- },
- Run: mapsloop,
- URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#mapsloop",
+ Name: "mapsloop",
+ Doc: analyzerutil.MustExtractDoc(doc, "mapsloop"),
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: mapsloop,
+ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#mapsloop",
}
// The mapsloop pass offers to simplify a loop of map insertions:
// m = make(M)
// m = M{}
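//
// For example (a minimal sketch; depending on how m is created, the fix
// may instead use maps.Insert, maps.Clone, or maps.Collect):
//
//	for k, v := range src {
//		m[k] = v
//	}
//
// becomes:
//
//	maps.Copy(m, src)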
func mapsloop(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
// Skip the analyzer in packages where its
// fixes would create an import cycle.
if within(pass, "maps", "bytes", "runtime") {
}
// Find all range loops around m[k] = v.
- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.23") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_23) {
file := curFile.Node().(*ast.File)
for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) {
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typeparams"
"golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
)
var MinMaxAnalyzer = &analysis.Analyzer{
Name: "minmax",
- Doc: analysisinternal.MustExtractDoc(doc, "minmax"),
+ Doc: analyzerutil.MustExtractDoc(doc, "minmax"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
// - "x := a" or "x = a" or "var x = a" in pattern 2
// - "x < b" or "a < b" in pattern 2
func minmax(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
// Check for user-defined min/max functions that can be removed
checkUserDefinedMinMax(pass)
// Find all "if a < b { lhs = rhs }" statements.
info := pass.TypesInfo
- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- for curFile := range filesUsing(inspect, info, "go1.21") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_21) {
astFile := curFile.Node().(*ast.File)
for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) {
ifStmt := curIfStmt.Node().(*ast.IfStmt)
"strings"
"golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/moreiters"
"golang.org/x/tools/internal/packagepath"
"golang.org/x/tools/internal/stdlib"
"golang.org/x/tools/internal/typesinternal"
- "golang.org/x/tools/internal/versions"
)
//go:embed doc.go
// SlicesDeleteAnalyzer, // not nil-preserving!
SlicesSortAnalyzer,
stditeratorsAnalyzer,
+ stringscutAnalyzer,
StringsCutPrefixAnalyzer,
StringsSeqAnalyzer,
StringsBuilderAnalyzer,
// -- helpers --
-// skipGenerated decorates pass.Report to suppress diagnostics in generated files.
-func skipGenerated(pass *analysis.Pass) {
- report := pass.Report
- pass.Report = func(diag analysis.Diagnostic) {
- generated := pass.ResultOf[generated.Analyzer].(*generated.Result)
- if generated.IsGenerated(diag.Pos) {
- return // skip
- }
- report(diag)
- }
-}
-
// formatExprs formats a comma-separated list of expressions.
func formatExprs(fset *token.FileSet, exprs []ast.Expr) string {
var buf strings.Builder
return buf.String()
}
-// isZeroIntLiteral reports whether e is an integer whose value is 0.
-func isZeroIntLiteral(info *types.Info, e ast.Expr) bool {
+// isZeroIntConst reports whether e is an integer whose value is 0.
+func isZeroIntConst(info *types.Info, e ast.Expr) bool {
return isIntLiteral(info, e, 0)
}
return info.Types[e].Value == constant.MakeInt64(n)
}
-// filesUsing returns a cursor for each *ast.File in the inspector
+// filesUsingGoVersion returns a cursor for each *ast.File in the inspector
// that uses at least the specified version of Go (e.g. "go1.24").
//
+// The pass's analyzer must require [inspect.Analyzer].
+//
// TODO(adonovan): opt: eliminate this function, instead following the
-// approach of [fmtappendf], which uses typeindex and [fileUses].
-// See "Tip" at [fileUses] for motivation.
-func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) iter.Seq[inspector.Cursor] {
+// approach of [fmtappendf], which uses typeindex and
+// [analyzerutil.FileUsesGoVersion]; see "Tip" documented at the
+// latter function for motivation.
+func filesUsingGoVersion(pass *analysis.Pass, version string) iter.Seq[inspector.Cursor] {
+ inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
return func(yield func(inspector.Cursor) bool) {
for curFile := range inspect.Root().Children() {
file := curFile.Node().(*ast.File)
- if !versions.Before(info.FileVersions[file], version) && !yield(curFile) {
+ if analyzerutil.FileUsesGoVersion(pass, file, version) && !yield(curFile) {
break
}
}
}
}
-// fileUses reports whether the specified file uses at least the
-// specified version of Go (e.g. "go1.24").
-//
-// Tip: we recommend using this check "late", just before calling
-// pass.Report, rather than "early" (when entering each ast.File, or
-// each candidate node of interest, during the traversal), because the
-// operation is not free, yet is not a highly selective filter: the
-// fraction of files that pass most version checks is high and
-// increases over time.
-func fileUses(info *types.Info, file *ast.File, version string) bool {
- return !versions.Before(info.FileVersions[file], version)
-}
-
// within reports whether the current pass is analyzing one of the
// specified standard packages or their dependencies.
func within(pass *analysis.Pass, pkgs ...string) bool {
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/versions"
)
var NewExprAnalyzer = &analysis.Analyzer{
Name: "newexpr",
- Doc: analysisinternal.MustExtractDoc(doc, "newexpr"),
+ Doc: analyzerutil.MustExtractDoc(doc, "newexpr"),
URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize#newexpr",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
// Check file version.
file := astutil.EnclosingFile(curFuncDecl)
- if !fileUses(info, file, "go1.26") {
+ if !analyzerutil.FileUsesGoVersion(pass, file, versions.Go1_26) {
continue // new(expr) not available in this file
}
}
}
- // Disabled until we resolve https://go.dev/issue/75726
- // (Go version skew between caller and callee in inliner.)
- // TODO(adonovan): fix and reenable.
+ // Add a //go:fix inline annotation, if not already present.
//
- // Also, restore these lines to our section of doc.go:
- // //go:fix inline
- // ...
- // (The directive comment causes the inline analyzer to suggest
- // that calls to such functions are inlined.)
- if false {
- // Add a //go:fix inline annotation, if not already present.
- // TODO(adonovan): use ast.ParseDirective when go1.26 is assured.
- if !strings.Contains(decl.Doc.Text(), "go:fix inline") {
- edits = append(edits, analysis.TextEdit{
- Pos: decl.Pos(),
- End: decl.Pos(),
- NewText: []byte("//go:fix inline\n"),
- })
- }
+ // The inliner will not inline a newer callee body into an
+ // older Go file; see https://go.dev/issue/75726.
+ //
+ // TODO(adonovan): use ast.ParseDirective when go1.26 is assured.
+ if !strings.Contains(decl.Doc.Text(), "go:fix inline") {
+ edits = append(edits, analysis.TextEdit{
+ Pos: decl.Pos(),
+ End: decl.Pos(),
+ NewText: []byte("//go:fix inline\n"),
+ })
}
if len(edits) > 0 {
// Check file version.
file := astutil.EnclosingFile(curCall)
- if !fileUses(info, file, "go1.26") {
+ if !analyzerutil.FileUsesGoVersion(pass, file, versions.Go1_26) {
continue // new(expr) not available in this file
}
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
- "golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/versions"
)
var OmitZeroAnalyzer = &analysis.Analyzer{
- Name: "omitzero",
- Doc: analysisinternal.MustExtractDoc(doc, "omitzero"),
- Requires: []*analysis.Analyzer{
- generated.Analyzer,
- inspect.Analyzer,
- },
- Run: omitzero,
- URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#omitzero",
+ Name: "omitzero",
+ Doc: analyzerutil.MustExtractDoc(doc, "omitzero"),
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: omitzero,
+ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#omitzero",
}
func checkOmitEmptyField(pass *analysis.Pass, info *types.Info, curField *ast.Field) {
// No omitempty in json tag
return
}
- omitEmptyPos, omitEmptyEnd, err := astutil.RangeInStringLiteral(curField.Tag, match[2], match[3])
+ omitEmpty, err := astutil.RangeInStringLiteral(curField.Tag, match[2], match[3])
if err != nil {
return
}
- removePos, removeEnd := omitEmptyPos, omitEmptyEnd
+ var remove analysis.Range = omitEmpty
jsonTag := reflect.StructTag(tagconv).Get("json")
if jsonTag == ",omitempty" {
// Remove the entire struct tag if json is the only package used
if match[1]-match[0] == len(tagconv) {
- removePos = curField.Tag.Pos()
- removeEnd = curField.Tag.End()
+ remove = curField.Tag
} else {
// Remove the json tag if omitempty is the only field
- removePos, err = astutil.PosInStringLiteral(curField.Tag, match[0])
- if err != nil {
- return
- }
- removeEnd, err = astutil.PosInStringLiteral(curField.Tag, match[1])
+ remove, err = astutil.RangeInStringLiteral(curField.Tag, match[0], match[1])
if err != nil {
return
}
Message: "Remove redundant omitempty tag",
TextEdits: []analysis.TextEdit{
{
- Pos: removePos,
- End: removeEnd,
+ Pos: remove.Pos(),
+ End: remove.End(),
},
},
},
Message: "Replace omitempty with omitzero (behavior change)",
TextEdits: []analysis.TextEdit{
{
- Pos: omitEmptyPos,
- End: omitEmptyEnd,
+ Pos: omitEmpty.Pos(),
+ End: omitEmpty.End(),
NewText: []byte(",omitzero"),
},
},
}
// The omitzero pass searches for instances of "omitempty" in a json field tag on a
// struct. Since "omitempty" does not have any effect when applied to a struct field,
// it suggests either deleting "omitempty" or replacing it with "omitzero", which
// correctly excludes structs from a json encoding.
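//
// For example (a minimal sketch; type and field names are illustrative):
//
//	type Response struct {
//		Meta Metadata `json:"meta,omitempty"` // no effect: Metadata is a struct
//	}
//
// becomes:
//
//	type Response struct {
//		Meta Metadata `json:"meta,omitzero"`
//	}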
func omitzero(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- info := pass.TypesInfo
- for curFile := range filesUsing(inspect, info, "go1.24") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_24) {
for curStruct := range curFile.Preorder((*ast.StructType)(nil)) {
for _, curField := range curStruct.Node().(*ast.StructType).Fields.List {
- checkOmitEmptyField(pass, info, curField)
+ checkOmitEmptyField(pass, pass.TypesInfo, curField)
}
}
}
"strings"
"golang.org/x/tools/go/analysis"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/goplsexport"
+ "golang.org/x/tools/internal/versions"
)
var plusBuildAnalyzer = &analysis.Analyzer{
Name: "plusbuild",
- Doc: analysisinternal.MustExtractDoc(doc, "plusbuild"),
+ Doc: analyzerutil.MustExtractDoc(doc, "plusbuild"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#plusbuild",
Run: plusbuild,
}
func plusbuild(pass *analysis.Pass) (any, error) {
check := func(f *ast.File) {
- if !fileUses(pass.TypesInfo, f, "go1.18") {
+ if !analyzerutil.FileUsesGoVersion(pass, f, versions.Go1_18) {
return
}
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
)
var RangeIntAnalyzer = &analysis.Analyzer{
Name: "rangeint",
- Doc: analysisinternal.MustExtractDoc(doc, "rangeint"),
+ Doc: analyzerutil.MustExtractDoc(doc, "rangeint"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
// - a constant; or
// - len(s), where s has the above properties.
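//
// For example (a minimal sketch):
//
//	for i := 0; i < n; i++ {
//		f(i)
//	}
//
// becomes:
//
//	for i := range n {
//		f(i)
//	}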
func rangeint(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
+ var (
+ info = pass.TypesInfo
+ typeindex = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+ )
- info := pass.TypesInfo
-
- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- typeindex := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
-
- for curFile := range filesUsing(inspect, info, "go1.22") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_22) {
nextLoop:
for curLoop := range curFile.Preorder((*ast.ForStmt)(nil)) {
loop := curLoop.Node().(*ast.ForStmt)
if init, ok := loop.Init.(*ast.AssignStmt); ok &&
isSimpleAssign(init) &&
is[*ast.Ident](init.Lhs[0]) &&
- isZeroIntLiteral(info, init.Rhs[0]) {
+ isZeroIntConst(info, init.Rhs[0]) {
// Have: for i = 0; ... (or i := 0)
index := init.Lhs[0].(*ast.Ident)
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
var ReflectTypeForAnalyzer = &analysis.Analyzer{
Name: "reflecttypefor",
- Doc: analysisinternal.MustExtractDoc(doc, "reflecttypefor"),
+ Doc: analyzerutil.MustExtractDoc(doc, "reflecttypefor"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
}
func reflecttypefor(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
var (
index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
info = pass.TypesInfo
}
file := astutil.EnclosingFile(curCall)
- if versions.Before(info.FileVersions[file], "go1.22") {
+ if !analyzerutil.FileUsesGoVersion(pass, file, versions.Go1_22) {
continue // TypeFor requires go1.22
}
tokFile := pass.Fset.File(file.Pos())
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
- "golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/versions"
)
// Warning: this analyzer is not safe to enable by default.
var AppendClippedAnalyzer = &analysis.Analyzer{
- Name: "appendclipped",
- Doc: analysisinternal.MustExtractDoc(doc, "appendclipped"),
- Requires: []*analysis.Analyzer{
- generated.Analyzer,
- inspect.Analyzer,
- },
- Run: appendclipped,
- URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#appendclipped",
+ Name: "appendclipped",
+ Doc: analyzerutil.MustExtractDoc(doc, "appendclipped"),
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: appendclipped,
+ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#appendclipped",
}
// The appendclipped pass offers to simplify a tower of append calls:
// The fix does not always preserve the nilness of the base slice when the
// addends (a, b, c) are all empty (see #73557).
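//
// For example (a minimal sketch; a tower with several addends may instead
// be replaced by slices.Concat):
//
//	s = append(b[:0:0], b...)
//
// becomes:
//
//	s = slices.Clone(b)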
func appendclipped(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
// Skip the analyzer in packages where its
// fixes would create an import cycle.
if within(pass, "slices", "bytes", "runtime") {
skip := make(map[*ast.CallExpr]bool)
// Visit calls of form append(x, y...).
- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- for curFile := range filesUsing(inspect, info, "go1.21") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_21) {
file := curFile.Node().(*ast.File)
for curCall := range curFile.Preorder((*ast.CallExpr)(nil)) {
// x[:0:0], x[:len(x):len(x)], x[:k:k]
if e.Slice3 && e.High != nil && e.Max != nil && astutil.EqualSyntax(e.High, e.Max) { // x[:k:k]
res = e
- empty = isZeroIntLiteral(info, e.High) // x[:0:0]
+ empty = isZeroIntConst(info, e.High) // x[:0:0]
if call, ok := e.High.(*ast.CallExpr); ok &&
typeutil.Callee(info, call) == builtinLen &&
astutil.EqualSyntax(call.Args[0], e.X) {
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typeparams"
"golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
)
var SlicesContainsAnalyzer = &analysis.Analyzer{
Name: "slicescontains",
- Doc: analysisinternal.MustExtractDoc(doc, "slicescontains"),
+ Doc: analyzerutil.MustExtractDoc(doc, "slicescontains"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
// TODO(adonovan): Add a check that needle/predicate expression from
// if-statement has no effects. Now the program behavior may change.
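//
// For example (a minimal sketch of the simplest case):
//
//	found := false
//	for _, v := range s {
//		if v == needle {
//			found = true
//			break
//		}
//	}
//
// becomes:
//
//	found := slices.Contains(s, needle)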
func slicescontains(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
// Skip the analyzer in packages where its
// fixes would create an import cycle.
if within(pass, "slices", "runtime") {
}
var (
- inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
- info = pass.TypesInfo
+ index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+ info = pass.TypesInfo
)
// check is called for each RangeStmt of this form:
// Special case:
// prev="lhs = false" body={ lhs = true; break }
- // => lhs = slices.Contains(...) (or negation)
+ // => lhs = slices.Contains(...) (or its negation)
if assign, ok := body.List[0].(*ast.AssignStmt); ok &&
len(body.List) == 2 &&
assign.Tok == token.ASSIGN &&
len(assign.Rhs) == 1 {
// Have: body={ lhs = rhs; break }
-
if prevAssign, ok := prevStmt.(*ast.AssignStmt); ok &&
len(prevAssign.Lhs) == 1 &&
len(prevAssign.Rhs) == 1 &&
astutil.EqualSyntax(prevAssign.Lhs[0], assign.Lhs[0]) &&
- is[*ast.Ident](assign.Rhs[0]) &&
- info.Uses[assign.Rhs[0].(*ast.Ident)] == builtinTrue {
+ isTrueOrFalse(info, assign.Rhs[0]) ==
+ -isTrueOrFalse(info, prevAssign.Rhs[0]) {
// Have:
// lhs = false
//
// TODO(adonovan):
// - support "var lhs bool = false" and variants.
- // - support negation.
- // Both these variants seem quite significant.
// - allow the break to be omitted.
+ neg := cond(isTrueOrFalse(info, assign.Rhs[0]) < 0, "!", "")
report([]analysis.TextEdit{
- // Replace "rhs" of previous assignment by slices.Contains(...)
+ // Replace "rhs" of previous assignment by [!]slices.Contains(...)
{
Pos: prevAssign.Rhs[0].Pos(),
End: prevAssign.Rhs[0].End(),
- NewText: []byte(contains),
+ NewText: []byte(neg + contains),
},
// Delete the loop and preceding space.
{
}
}
- for curFile := range filesUsing(inspect, info, "go1.21") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_21) {
file := curFile.Node().(*ast.File)
for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) {
// isReturnTrueOrFalse returns nonzero if stmt returns true (+1) or false (-1).
func isReturnTrueOrFalse(info *types.Info, stmt ast.Stmt) int {
if ret, ok := stmt.(*ast.ReturnStmt); ok && len(ret.Results) == 1 {
- if id, ok := ret.Results[0].(*ast.Ident); ok {
- switch info.Uses[id] {
- case builtinTrue:
- return +1
- case builtinFalse:
- return -1
- }
+ return isTrueOrFalse(info, ret.Results[0])
+ }
+ return 0
+}
+
+// isTrueOrFalse returns nonzero if expr is literally true (+1) or false (-1).
+func isTrueOrFalse(info *types.Info, expr ast.Expr) int {
+ if id, ok := expr.(*ast.Ident); ok {
+ switch info.Uses[id] {
+ case builtinTrue:
+ return +1
+ case builtinFalse:
+ return -1
}
}
return 0
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
- "golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/versions"
)
// Warning: this analyzer is not safe to enable by default (not nil-preserving).
var SlicesDeleteAnalyzer = &analysis.Analyzer{
- Name: "slicesdelete",
- Doc: analysisinternal.MustExtractDoc(doc, "slicesdelete"),
- Requires: []*analysis.Analyzer{
- generated.Analyzer,
- inspect.Analyzer,
- },
- Run: slicesdelete,
- URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicesdelete",
+ Name: "slicesdelete",
+ Doc: analyzerutil.MustExtractDoc(doc, "slicesdelete"),
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: slicesdelete,
+ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicesdelete",
}
// The slicesdelete pass attempts to replace instances of append(s[:i], s[i+k:]...)
// Other variations that will also have suggested replacements include:
// append(s[:i-1], s[i:]...) and append(s[:i+k1], s[i+k2:]) where k2 > k1.
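//
// For example (a minimal sketch):
//
//	s = append(s[:i], s[i+1:]...)
//
// becomes:
//
//	s = slices.Delete(s, i, i+1)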
func slicesdelete(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
// Skip the analyzer in packages where its
// fixes would create an import cycle.
if within(pass, "slices", "runtime") {
return nil, nil
}
- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
info := pass.TypesInfo
report := func(file *ast.File, call *ast.CallExpr, slice1, slice2 *ast.SliceExpr) {
insert := func(pos token.Pos, text string) analysis.TextEdit {
return types.Identical(types.Default(info.TypeOf(e)), builtinInt.Type())
}
isIntShadowed := func() bool {
- scope := pass.TypesInfo.Scopes[file].Innermost(call.Lparen)
+ scope := info.Scopes[file].Innermost(call.Lparen)
if _, obj := scope.LookupParent("int", call.Lparen); obj != builtinInt {
return true // int type is shadowed
}
}},
})
}
- for curFile := range filesUsing(inspect, info, "go1.21") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_21) {
file := curFile.Node().(*ast.File)
for curCall := range curFile.Preorder((*ast.CallExpr)(nil)) {
call := curCall.Node().(*ast.CallExpr)
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
)
// (Not to be confused with go/analysis/passes/sortslice.)
var SlicesSortAnalyzer = &analysis.Analyzer{
Name: "slicessort",
- Doc: analysisinternal.MustExtractDoc(doc, "slicessort"),
+ Doc: analyzerutil.MustExtractDoc(doc, "slicessort"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
// - sort.Sort(x) where x has a named slice type whose Less method is the natural order.
// -> slices.Sort(x)
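//
// For example (a minimal sketch):
//
//	sort.Slice(s, func(i, j int) bool { return s[i] < s[j] })
//
// becomes:
//
//	slices.Sort(s)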
func slicessort(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
// Skip the analyzer in packages where its
// fixes would create an import cycle.
if within(pass, "slices", "sort", "runtime") {
}
file := astutil.EnclosingFile(curCall)
if isIndex(compare.X, i) && isIndex(compare.Y, j) &&
- fileUses(info, file, "go1.21") {
+ analyzerutil.FileUsesGoVersion(pass, file, versions.Go1_21) {
// Have: sort.Slice(s, func(i, j int) bool { return s[i] < s[j] })
prefix, importEdits := refactor.AddImport(
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/goplsexport"
"golang.org/x/tools/internal/refactor"
var stditeratorsAnalyzer = &analysis.Analyzer{
Name: "stditerators",
- Doc: analysisinternal.MustExtractDoc(doc, "stditerators"),
+ Doc: analyzerutil.MustExtractDoc(doc, "stditerators"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
typeindexanalyzer.Analyzer,
},
Run: stditerators,
// iterator for that reason? We don't want to go fix to
// undo optimizations. Do we need a suppression mechanism?
func stditerators(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
var (
index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
info = pass.TypesInfo
//
// for ... { e := x.At(i); use(e) }
//
+ // or
+ //
+ // for ... { if e := x.At(i); cond { use(e) } }
+ //
// then chooseName prefers the name e and additionally
// returns the var's symbol. We'll transform this to:
//
// which leaves a redundant assignment that a
// subsequent 'forvar' pass will eliminate.
chooseName := func(curBody inspector.Cursor, x ast.Expr, i *types.Var) (string, *types.Var) {
- // Is body { elem := x.At(i); ... } ?
- body := curBody.Node().(*ast.BlockStmt)
- if len(body.List) > 0 {
- if assign, ok := body.List[0].(*ast.AssignStmt); ok &&
+
+ // isVarAssign reports whether stmt has the form v := x.At(i)
+ // and returns the variable if so.
+ isVarAssign := func(stmt ast.Stmt) *types.Var {
+ if assign, ok := stmt.(*ast.AssignStmt); ok &&
assign.Tok == token.DEFINE &&
len(assign.Lhs) == 1 &&
len(assign.Rhs) == 1 &&
astutil.EqualSyntax(ast.Unparen(call.Fun).(*ast.SelectorExpr).X, x) &&
is[*ast.Ident](call.Args[0]) &&
info.Uses[call.Args[0].(*ast.Ident)] == i {
- // Have: { elem := x.At(i); ... }
+ // Have: elem := x.At(i)
id := assign.Lhs[0].(*ast.Ident)
- return id.Name, info.Defs[id].(*types.Var)
+ return info.Defs[id].(*types.Var)
+ }
+ }
+ return nil
+ }
+
+ body := curBody.Node().(*ast.BlockStmt)
+ if len(body.List) > 0 {
+ // Is body { elem := x.At(i); ... } ?
+ if v := isVarAssign(body.List[0]); v != nil {
+ return v.Name(), v
+ }
+
+ // Or { if elem := x.At(i); cond { ... } } ?
+ if ifstmt, ok := body.List[0].(*ast.IfStmt); ok && ifstmt.Init != nil {
+ if v := isVarAssign(ifstmt.Init); v != nil {
+ return v.Name(), v
}
}
}
loop := curBody.Parent().Node()
- return refactor.FreshName(info.Scopes[loop], loop.Pos(), row.elemname), nil
+
+ // Choose a fresh name only if
+ // (a) the preferred name is already declared here, and
+ // (b) there are references to it from the loop body.
+ // TODO(adonovan): this pattern also appears in errorsastype,
+ // and is wanted elsewhere; factor.
+ name := row.elemname
+ if v := lookup(info, curBody, name); v != nil {
+ // is it free in body?
+ for curUse := range index.Uses(v) {
+ if curBody.Contains(curUse) {
+ name = refactor.FreshName(info.Scopes[loop], loop.Pos(), name)
+ break
+ }
+ }
+ }
+ return name, nil
}
// Process each call of x.Len().
}
// Have: for i := 0; i < x.Len(); i++ { ... }.
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- rng = analysisinternal.Range(loop.For, loop.Post.End())
+ rng = astutil.RangeOf(loop.For, loop.Post.End())
indexVar = v
curBody = curFor.ChildAt(edge.ForStmt_Body, -1)
elem, elemVar = chooseName(curBody, lenSel.X, indexVar)
// Have: for i := range x.Len() { ... }
// ~~~~~~~~~~~~~
- rng = analysisinternal.Range(loop.Range, loop.X.End())
+ rng = astutil.RangeOf(loop.Range, loop.X.End())
indexVar = info.Defs[id].(*types.Var)
curBody = curRange.ChildAt(edge.RangeStmt_Body, -1)
elem, elemVar = chooseName(curBody, lenSel.X, indexVar)
// may be somewhat expensive.)
if v, ok := methodGoVersion(row.pkgpath, row.typename, row.itermethod); !ok {
panic("no version found")
- } else if file := astutil.EnclosingFile(curLenCall); !fileUses(info, file, v.String()) {
+ } else if !analyzerutil.FileUsesGoVersion(pass, astutil.EnclosingFile(curLenCall), v.String()) {
continue nextCall
}
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
var StringsBuilderAnalyzer = &analysis.Analyzer{
Name: "stringsbuilder",
- Doc: analysisinternal.MustExtractDoc(doc, "stringsbuilder"),
+ Doc: analyzerutil.MustExtractDoc(doc, "stringsbuilder"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
// stringsbuilder replaces string += string in a loop by strings.Builder.
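//
// For example (a minimal sketch; the exact rewrite depends on how the
// string variable is declared and used):
//
//	var s string
//	for _, part := range parts {
//		s += part
//	}
//
// becomes:
//
//	var sb strings.Builder
//	for _, part := range parts {
//		sb.WriteString(part)
//	}
//	s := sb.String()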
func stringsbuilder(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
// Skip the analyzer in packages where its
// fixes would create an import cycle.
if within(pass, "strings", "runtime") {
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+ "fmt"
+ "go/ast"
+ "go/constant"
+ "go/token"
+ "go/types"
+ "iter"
+ "strconv"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/edge"
+ "golang.org/x/tools/go/ast/inspector"
+ "golang.org/x/tools/go/types/typeutil"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
+ "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/goplsexport"
+ "golang.org/x/tools/internal/refactor"
+ "golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
+)
+
+var stringscutAnalyzer = &analysis.Analyzer{
+ Name: "stringscut",
+ Doc: analyzerutil.MustExtractDoc(doc, "stringscut"),
+ Requires: []*analysis.Analyzer{
+ inspect.Analyzer,
+ typeindexanalyzer.Analyzer,
+ },
+ Run: stringscut,
+ URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize#stringscut",
+}
+
+func init() {
+ // Export to gopls until this is a published modernizer.
+ goplsexport.StringsCutModernizer = stringscutAnalyzer
+}
+
+// stringscut offers a fix to replace an occurrence of strings.Index{,Byte} with
+// strings.{Cut,Contains}, and similar fixes for functions in the bytes package.
+// Consider some candidate for replacement i := strings.Index(s, substr).
+// The following must hold for a replacement to occur:
+//
+// 1. All instances of i and s must be in one of these forms.
+// Binary expressions:
+// (a): establishing that i < 0: e.g.: i < 0, 0 > i, i == -1, -1 == i
+// (b): establishing that i > -1: e.g.: i >= 0, 0 <= i, i == 0, 0 == i
+//
+// Slice expressions:
+// (a): s[:i], s[0:i]
+// (b): s[i+len(substr):], s[len(substr)+i:], s[i+const:], s[k+i:] (where k = len(substr))
+//
+// 2. There can be no uses of s, substr, or i where they are
+// potentially modified (i.e. in assignments, or function calls with unknown side
+// effects).
+//
+// Then, the replacement involves the following substitutions:
+//
+// 1. Replace "i := strings.Index(s, substr)" with "before, after, ok := strings.Cut(s, substr)"
+//
+// 2. Replace instances of binary expressions (a) with !ok and binary expressions (b) with ok.
+//
+// 3. Replace slice expressions (a) with "before" and slice expressions (b) with "after".
+//
+// 4. The assignments to before, after, and ok may use the blank identifier "_" if they are unused.
+//
+// For example:
+//
+// i := strings.Index(s, substr)
+// if i >= 0 {
+// use(s[:i], s[i+len(substr):])
+// }
+//
+// Would become:
+//
+// before, after, ok := strings.Cut(s, substr)
+// if ok {
+// use(before, after)
+// }
+//
+// If the condition involving `i` establishes that i > -1, then we replace it with
+// `if ok`. Variants include i >= 0, i > 0, and i == 0.
+// If the condition is negated (e.g. establishes `i < 0`), we use `if !ok` instead.
+// If the slices of `s` match `s[:i]` or `s[i+len(substr):]` or their variants listed above,
+// then we replace them with before and after.
+//
+// When the index `i` is used only to check for the presence of the substring or byte slice,
+// the suggested fix uses Contains() instead of Cut.
+//
+// For example:
+//
+// i := strings.Index(s, substr)
+// if i >= 0 {
+// return
+// }
+//
+// Would become:
+//
+// found := strings.Contains(s, substr)
+// if found {
+// return
+// }
+func stringscut(pass *analysis.Pass) (any, error) {
+ var (
+ index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+ info = pass.TypesInfo
+
+ stringsIndex = index.Object("strings", "Index")
+ stringsIndexByte = index.Object("strings", "IndexByte")
+ bytesIndex = index.Object("bytes", "Index")
+ bytesIndexByte = index.Object("bytes", "IndexByte")
+ )
+
+ for _, obj := range []types.Object{
+ stringsIndex,
+ stringsIndexByte,
+ bytesIndex,
+ bytesIndexByte,
+ } {
+ // (obj may be nil)
+ nextcall:
+ for curCall := range index.Calls(obj) {
+ // Check file version.
+ if !analyzerutil.FileUsesGoVersion(pass, astutil.EnclosingFile(curCall), versions.Go1_18) {
+ continue // strings.Index not available in this file
+ }
+ indexCall := curCall.Node().(*ast.CallExpr) // the call to strings.Index, etc.
+ obj := typeutil.Callee(info, indexCall)
+ if obj == nil {
+ continue
+ }
+
+ var iIdent *ast.Ident // defining identifier of i var
+ switch ek, idx := curCall.ParentEdge(); ek {
+ case edge.ValueSpec_Values:
+ // Have: var i = strings.Index(...)
+ curName := curCall.Parent().ChildAt(edge.ValueSpec_Names, idx)
+ iIdent = curName.Node().(*ast.Ident)
+ case edge.AssignStmt_Rhs:
+ // Have: i := strings.Index(...)
+ // (Must be i's definition.)
+ curLhs := curCall.Parent().ChildAt(edge.AssignStmt_Lhs, idx)
+ iIdent, _ = curLhs.Node().(*ast.Ident) // may be nil
+ }
+
+ if iIdent == nil {
+ continue
+ }
+ // Inv: iIdent is i's definition. The following would be skipped: 'var i int; i = strings.Index(...)'
+ // Get uses of i.
+ iObj := info.ObjectOf(iIdent)
+ if iObj == nil {
+ continue
+ }
+
+ var (
+ s = indexCall.Args[0]
+ substr = indexCall.Args[1]
+ )
+
+ // Check that there are no statements that alter the value of s
+ // or substr after the call to Index().
+ if !indexArgValid(info, index, s, indexCall.Pos()) ||
+ !indexArgValid(info, index, substr, indexCall.Pos()) {
+ continue nextcall
+ }
+
+ // Next, examine all uses of i. If the only uses are of the
+ // forms mentioned above (e.g. i < 0, i >= 0, s[:i] and s[i +
+ // len(substr)]), then we can replace the call to Index()
+ // with a call to Cut() and use the returned ok, before,
+ // and after variables accordingly.
+ lessZero, greaterNegOne, beforeSlice, afterSlice := checkIdxUses(pass.TypesInfo, index.Uses(iObj), s, substr)
+
+ // Either there are no uses of before, after, or ok, or some use
+ // of i does not match our criteria - don't suggest a fix.
+ if lessZero == nil && greaterNegOne == nil && beforeSlice == nil && afterSlice == nil {
+ continue
+ }
+
+ // If the only uses are ok and !ok, don't suggest a Cut() fix - these should be using Contains()
+ isContains := (len(lessZero) > 0 || len(greaterNegOne) > 0) && len(beforeSlice) == 0 && len(afterSlice) == 0
+
+ scope := iObj.Parent()
+ var (
+ // TODO(adonovan): avoid FreshName when not needed; see errorsastype.
+ okVarName = refactor.FreshName(scope, iIdent.Pos(), "ok")
+ beforeVarName = refactor.FreshName(scope, iIdent.Pos(), "before")
+ afterVarName = refactor.FreshName(scope, iIdent.Pos(), "after")
+ foundVarName = refactor.FreshName(scope, iIdent.Pos(), "found") // for Contains()
+ )
+
+ // If there will be no uses of ok, before, or after, use the
+ // blank identifier instead.
+ if len(lessZero) == 0 && len(greaterNegOne) == 0 {
+ okVarName = "_"
+ }
+ if len(beforeSlice) == 0 {
+ beforeVarName = "_"
+ }
+ if len(afterSlice) == 0 {
+ afterVarName = "_"
+ }
+
+ var edits []analysis.TextEdit
+ replace := func(exprs []ast.Expr, new string) {
+ for _, expr := range exprs {
+ edits = append(edits, analysis.TextEdit{
+ Pos: expr.Pos(),
+ End: expr.End(),
+ NewText: []byte(new),
+ })
+ }
+ }
+ // Get the ident for the call to strings.Index, which could just be
+ // "Index" if the strings package is dot imported.
+ indexCallId := typesinternal.UsedIdent(info, indexCall.Fun)
+ replacedFunc := "Cut"
+ if isContains {
+ replacedFunc = "Contains"
+ replace(lessZero, "!"+foundVarName) // idx < 0 -> !found
+ replace(greaterNegOne, foundVarName) // idx > -1 -> found
+
+ // Replace the assignment with found, and replace the call to
+ // Index or IndexByte with a call to Contains.
+ // i := strings.Index (...)
+ // ----- --------
+ // found := strings.Contains(...)
+ edits = append(edits, analysis.TextEdit{
+ Pos: iIdent.Pos(),
+ End: iIdent.End(),
+ NewText: []byte(foundVarName),
+ }, analysis.TextEdit{
+ Pos: indexCallId.Pos(),
+ End: indexCallId.End(),
+ NewText: []byte("Contains"),
+ })
+ } else {
+ replace(lessZero, "!"+okVarName) // idx < 0 -> !ok
+ replace(greaterNegOne, okVarName) // idx > -1 -> ok
+ replace(beforeSlice, beforeVarName) // s[:idx] -> before
+ replace(afterSlice, afterVarName) // s[idx+k:] -> after
+
+ // Replace the assignment with before, after, ok, and replace
+ // the call to Index or IndexByte with a call to Cut.
+ // i := strings.Index(...)
+ // ----------------- -----
+ // before, after, ok := strings.Cut (...)
+ edits = append(edits, analysis.TextEdit{
+ Pos: iIdent.Pos(),
+ End: iIdent.End(),
+ NewText: fmt.Appendf(nil, "%s, %s, %s", beforeVarName, afterVarName, okVarName),
+ }, analysis.TextEdit{
+ Pos: indexCallId.Pos(),
+ End: indexCallId.End(),
+ NewText: []byte("Cut"),
+ })
+ }
+
+ // Calls to IndexByte have a byte as their second arg, which
+ // must be converted to a string or []byte to be a valid arg for Cut/Contains.
+ if obj.Name() == "IndexByte" {
+ switch obj.Pkg().Name() {
+ case "strings":
+ searchByteVal := info.Types[substr].Value
+ if searchByteVal == nil {
+ // substr is a variable, e.g. substr := byte('b')
+ // use string(substr)
+ edits = append(edits, []analysis.TextEdit{
+ {
+ Pos: substr.Pos(),
+ NewText: []byte("string("),
+ },
+ {
+ Pos: substr.End(),
+ NewText: []byte(")"),
+ },
+ }...)
+ } else {
+ // substr is a byte constant
+ val, _ := constant.Int64Val(searchByteVal) // inv: must be a valid byte
+ // strings.Cut/Contains requires a string, so convert byte literal to string literal; e.g. 'a' -> "a", 55 -> "7"
+ edits = append(edits, analysis.TextEdit{
+ Pos: substr.Pos(),
+ End: substr.End(),
+ NewText: strconv.AppendQuote(nil, string(byte(val))),
+ })
+ }
+ case "bytes":
+ // bytes.Cut/Contains requires a []byte, so wrap substr in a []byte{}
+ edits = append(edits, []analysis.TextEdit{
+ {
+ Pos: substr.Pos(),
+ NewText: []byte("[]byte{"),
+ },
+ {
+ Pos: substr.End(),
+ NewText: []byte("}"),
+ },
+ }...)
+ }
+ }
+ pass.Report(analysis.Diagnostic{
+ Pos: indexCall.Fun.Pos(),
+ End: indexCall.Fun.End(),
+ Message: fmt.Sprintf("%s.%s can be simplified using %s.%s",
+ obj.Pkg().Name(), obj.Name(), obj.Pkg().Name(), replacedFunc),
+ Category: "stringscut",
+ SuggestedFixes: []analysis.SuggestedFix{{
+ Message: fmt.Sprintf("Simplify %s.%s call using %s.%s", obj.Pkg().Name(), obj.Name(), obj.Pkg().Name(), replacedFunc),
+ TextEdits: edits,
+ }},
+ })
+ }
+ }
+
+ return nil, nil
+}
+
+// indexArgValid reports whether expr is a valid strings.Index(_, _) arg
+// for the transformation. An arg is valid iff it is:
+// - constant;
+// - a local variable with no modifying uses after the Index() call; or
+// - []byte(x) where x is also valid by this definition.
+// All other expressions are assumed not referentially transparent,
+// so we cannot be sure that all uses are safe to replace.
+func indexArgValid(info *types.Info, index *typeindex.Index, expr ast.Expr, afterPos token.Pos) bool {
+ tv := info.Types[expr]
+ if tv.Value != nil {
+ return true // constant
+ }
+ switch expr := expr.(type) {
+ case *ast.CallExpr:
+ // Accept only the conversion []byte(x), not other calls.
+ return info.Types[expr.Fun].IsType() && types.Identical(tv.Type, byteSliceType) &&
+ indexArgValid(info, index, expr.Args[0], afterPos) // check x in []byte(x)
+ case *ast.Ident:
+ sObj := info.Uses[expr]
+ sUses := index.Uses(sObj)
+ return !hasModifyingUses(info, sUses, afterPos)
+ default:
+ // For now, skip instances where s or substr are not
+ // identifers, basic lits, or call expressions of the form
+ // []byte(s).
+ // TODO(mkalil): Handle s and substr being expressions like ptr.field[i].
+ // From adonovan: We'd need to analyze s and substr to see
+ // whether they are referentially transparent, and if not,
+ // analyze all code between declaration and use and see if
+ // there are statements or expressions with potential side
+ // effects.
+ return false
+ }
+}
+
+// checkIdxUses inspects the uses of i to make sure they match criteria that
+// allow us to suggest a modernization. If all uses of i, s, and substr match
+// one of the following four valid forms, it returns a list of occurrences for
+// each form. If any use does not match one of the forms, it returns nil
+// for all values, since we should not offer a replacement.
+// 1. lessZero - a condition involving i establishing that i is negative (e.g. i < 0, 0 > i, i == -1, -1 == i)
+// 2. greaterNegOne - a condition involving i establishing that i is non-negative (e.g. i >= 0, 0 <= i, i == 0, 0 == i)
+// 3. beforeSlice - a slice of `s` that matches either s[:i] or s[0:i]
+// 4. afterSlice - a slice of `s` that matches one of: s[i+len(substr):], s[len(substr)+i:], s[i+const:], s[k+i:] (where k = len(substr))
+func checkIdxUses(info *types.Info, uses iter.Seq[inspector.Cursor], s, substr ast.Expr) (lessZero, greaterNegOne, beforeSlice, afterSlice []ast.Expr) {
+ use := func(cur inspector.Cursor) bool {
+ ek, _ := cur.ParentEdge()
+ n := cur.Parent().Node()
+ switch ek {
+ case edge.BinaryExpr_X, edge.BinaryExpr_Y:
+ check := n.(*ast.BinaryExpr)
+ switch checkIdxComparison(info, check) {
+ case -1:
+ lessZero = append(lessZero, check)
+ return true
+ case 1:
+ greaterNegOne = append(greaterNegOne, check)
+ return true
+ }
+ // Check does not establish that i < 0 or i > -1.
+ // Might be part of an outer slice expression like s[i + k]
+ // which requires a different check.
+ // Check that the thing being sliced is s and that the slice
+ // doesn't have a max index.
+ if slice, ok := cur.Parent().Parent().Node().(*ast.SliceExpr); ok &&
+ sameObject(info, s, slice.X) &&
+ slice.Max == nil {
+ if isBeforeSlice(info, ek, slice) {
+ beforeSlice = append(beforeSlice, slice)
+ return true
+ } else if isAfterSlice(info, ek, slice, substr) {
+ afterSlice = append(afterSlice, slice)
+ return true
+ }
+ }
+ case edge.SliceExpr_Low, edge.SliceExpr_High:
+ slice := n.(*ast.SliceExpr)
+ // Check that the thing being sliced is s and that the slice doesn't
+ // have a max index.
+ if sameObject(info, s, slice.X) && slice.Max == nil {
+ if isBeforeSlice(info, ek, slice) {
+ beforeSlice = append(beforeSlice, slice)
+ return true
+ } else if isAfterSlice(info, ek, slice, substr) {
+ afterSlice = append(afterSlice, slice)
+ return true
+ }
+ }
+ }
+ return false
+ }
+
+ for curIdent := range uses {
+ if !use(curIdent) {
+ return nil, nil, nil, nil
+ }
+ }
+ return lessZero, greaterNegOne, beforeSlice, afterSlice
+}
+
+// hasModifyingUses reports whether any of the uses involve a potential
+// modification. Uses involving assignments at or before afterPos are not
+// considered.
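+//
+// For example (hypothetical):
+//
+//	i := strings.Index(s, sep)
+//	s = "other" // modifying use: s is reassigned after the call
+//	f(&s)       // modifying use: the address of s is taken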
+func hasModifyingUses(info *types.Info, uses iter.Seq[inspector.Cursor], afterPos token.Pos) bool {
+ for curUse := range uses {
+ ek, _ := curUse.ParentEdge()
+ if ek == edge.AssignStmt_Lhs {
+ if curUse.Node().Pos() <= afterPos {
+ continue
+ }
+ assign := curUse.Parent().Node().(*ast.AssignStmt)
+ if sameObject(info, assign.Lhs[0], curUse.Node().(*ast.Ident)) {
+ // Modifying use because we are reassigning the value of the object.
+ return true
+ }
+ } else if ek == edge.UnaryExpr_X &&
+ curUse.Parent().Node().(*ast.UnaryExpr).Op == token.AND {
+ // Modifying use because we might be passing the object by reference (an explicit &).
+ // We can ignore the case where we have a method call on the expression (which
+ // has an implicit &) because we know the type of s and substr are strings
+ // which cannot have methods on them.
+ return true
+ }
+ }
+ return false
+}
+
+// checkIdxComparison reports whether the check establishes that i is negative
+// or non-negative. It returns -1 in the first case, 1 in the second, and 0 if
+// we can confirm neither condition. We assume that a check passed to
+// checkIdxComparison has i as one of its operands.
+func checkIdxComparison(info *types.Info, check *ast.BinaryExpr) int {
+ // Check establishes that i is negative.
+ // e.g.: i < 0, 0 > i, i == -1, -1 == i
+ if check.Op == token.LSS && (isNegativeConst(info, check.Y) || isZeroIntConst(info, check.Y)) || // i < (0 or neg)
+ check.Op == token.GTR && (isNegativeConst(info, check.X) || isZeroIntConst(info, check.X)) || // (0 or neg) > i
+ check.Op == token.LEQ && (isNegativeConst(info, check.Y)) || // i <= (neg)
+ check.Op == token.GEQ && (isNegativeConst(info, check.X)) || // (neg) >= i
+ check.Op == token.EQL &&
+ (isNegativeConst(info, check.X) || isNegativeConst(info, check.Y)) { // i == neg; neg == i
+ return -1
+ }
+ // Check establishes that i is non-negative.
+ // e.g.: i >= 0, 0 <= i, i == 0, 0 == i
+ if check.Op == token.GTR && (isNonNegativeConst(info, check.Y) || isIntLiteral(info, check.Y, -1)) || // i > (non-neg or -1)
+ check.Op == token.LSS && (isNonNegativeConst(info, check.X) || isIntLiteral(info, check.X, -1)) || // (non-neg or -1) < i
+ check.Op == token.GEQ && isNonNegativeConst(info, check.Y) || // i >= (non-neg)
+ check.Op == token.LEQ && isNonNegativeConst(info, check.X) || // (non-neg) <= i
+ check.Op == token.EQL &&
+ (isNonNegativeConst(info, check.X) || isNonNegativeConst(info, check.Y)) { // i == non-neg; non-neg == i
+ return 1
+ }
+ return 0
+}
+
+// isNegativeConst reports whether expr is a constant int with value < 0.
+func isNegativeConst(info *types.Info, expr ast.Expr) bool {
+ if tv, ok := info.Types[expr]; ok && tv.Value != nil && tv.Value.Kind() == constant.Int {
+ if v, ok := constant.Int64Val(tv.Value); ok {
+ return v < 0
+ }
+ }
+ return false
+}
+
+// isNonNegativeConst reports whether expr is a constant int with value >= 0.
+func isNonNegativeConst(info *types.Info, expr ast.Expr) bool {
+ if tv, ok := info.Types[expr]; ok && tv.Value != nil && tv.Value.Kind() == constant.Int {
+ if v, ok := constant.Int64Val(tv.Value); ok {
+ return v >= 0
+ }
+ }
+ return false
+}
+
+// isBeforeSlice reports whether the SliceExpr is of the form s[:i] or s[0:i].
+func isBeforeSlice(info *types.Info, ek edge.Kind, slice *ast.SliceExpr) bool {
+ return ek == edge.SliceExpr_High && (slice.Low == nil || isZeroIntConst(info, slice.Low))
+}
+
+// isAfterSlice reports whether the SliceExpr is of the form s[i+len(substr):]
+// or s[i+k:], where k is a constant equal to len(substr).
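+// For example (illustrative), given i := strings.Index(s, sep), both
+// s[i+len(sep):] and s[len(sep)+i:] qualify; given i := strings.Index(s, "ab"),
+// s[i+2:] also qualifies because 2 == len("ab").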
+func isAfterSlice(info *types.Info, ek edge.Kind, slice *ast.SliceExpr, substr ast.Expr) bool {
+ lowExpr, ok := slice.Low.(*ast.BinaryExpr)
+ if !ok || slice.High != nil {
+ return false
+ }
+ // Returns true if the expression is a call to len(substr).
+ isLenCall := func(expr ast.Expr) bool {
+ call, ok := expr.(*ast.CallExpr)
+ if !ok || len(call.Args) != 1 {
+ return false
+ }
+ return sameObject(info, substr, call.Args[0]) && typeutil.Callee(info, call) == builtinLen
+ }
+
+ // Handle len([]byte(substr))
+ if call, ok := substr.(*ast.CallExpr); ok {
+ tv := info.Types[call.Fun]
+ if tv.IsType() && types.Identical(tv.Type, byteSliceType) {
+ // Only one arg in []byte conversion.
+ substr = call.Args[0]
+ }
+ }
+ substrLen := -1
+ substrVal := info.Types[substr].Value
+ if substrVal != nil {
+ switch substrVal.Kind() {
+ case constant.String:
+ substrLen = len(constant.StringVal(substrVal))
+ case constant.Int:
+ // The constant is a byte literal, e.g. bytes.IndexByte(_, 'a'),
+ // or a numeric byte literal, e.g. bytes.IndexByte(_, 65).
+ substrLen = 1
+ }
+ }
+
+ switch ek {
+ case edge.BinaryExpr_X:
+ kVal := info.Types[lowExpr.Y].Value
+ if kVal == nil {
+ // i + len(substr)
+ return lowExpr.Op == token.ADD && isLenCall(lowExpr.Y)
+ } else {
+ // i + k
+ kInt, ok := constant.Int64Val(kVal)
+ return ok && substrLen == int(kInt)
+ }
+ case edge.BinaryExpr_Y:
+ kVal := info.Types[lowExpr.X].Value
+ if kVal == nil {
+ // len(substr) + i
+ return lowExpr.Op == token.ADD && isLenCall(lowExpr.X)
+ } else {
+ // k + i
+ kInt, ok := constant.Int64Val(kVal)
+ return ok && substrLen == int(kInt)
+ }
+ }
+ return false
+}
+
+// sameObject reports whether we know that the expressions resolve to the same object.
+func sameObject(info *types.Info, expr1, expr2 ast.Expr) bool {
+ if ident1, ok := expr1.(*ast.Ident); ok {
+ if ident2, ok := expr2.(*ast.Ident); ok {
+ uses1, ok1 := info.Uses[ident1]
+ uses2, ok2 := info.Uses[ident2]
+ return ok1 && ok2 && uses1 == uses2
+ }
+ }
+ return false
+}
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
- "golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
)
var StringsCutPrefixAnalyzer = &analysis.Analyzer{
Name: "stringscutprefix",
- Doc: analysisinternal.MustExtractDoc(doc, "stringscutprefix"),
+ Doc: analyzerutil.MustExtractDoc(doc, "stringscutprefix"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
// Variants:
// - bytes.HasPrefix/HasSuffix usage as pattern 1.
func stringscutprefix(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
var (
- inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
- info = pass.TypesInfo
+ index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+ info = pass.TypesInfo
stringsTrimPrefix = index.Object("strings", "TrimPrefix")
bytesTrimPrefix = index.Object("bytes", "TrimPrefix")
return nil, nil
}
- for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.20") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_20) {
for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) {
ifStmt := curIfStmt.Node().(*ast.IfStmt)
if astutil.EqualSyntax(lhs, bin.X) && astutil.EqualSyntax(call.Args[0], bin.Y) ||
(astutil.EqualSyntax(lhs, bin.Y) && astutil.EqualSyntax(call.Args[0], bin.X)) {
+ // TODO(adonovan): avoid FreshName when not needed; see errorsastype.
okVarName := refactor.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok")
// Have one of:
// if rest := TrimPrefix(s, prefix); rest != s { (ditto Suffix)
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
- "golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
)
var StringsSeqAnalyzer = &analysis.Analyzer{
Name: "stringsseq",
- Doc: analysisinternal.MustExtractDoc(doc, "stringsseq"),
+ Doc: analyzerutil.MustExtractDoc(doc, "stringsseq"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
// - bytes.SplitSeq
// - bytes.FieldsSeq
func stringsseq(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
var (
- inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
- info = pass.TypesInfo
+ index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
+ info = pass.TypesInfo
stringsSplit = index.Object("strings", "Split")
stringsFields = index.Object("strings", "Fields")
return nil, nil
}
- for curFile := range filesUsing(inspect, info, "go1.24") {
+ for curFile := range filesUsingGoVersion(pass, versions.Go1_24) {
for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) {
rng := curRange.Node().(*ast.RangeStmt)
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
)
var TestingContextAnalyzer = &analysis.Analyzer{
Name: "testingcontext",
- Doc: analysisinternal.MustExtractDoc(doc, "testingcontext"),
+ Doc: analyzerutil.MustExtractDoc(doc, "testingcontext"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
// - the call is within a test or subtest function
// - the relevant testing.{T,B,F} is named and not shadowed at the call
func testingContext(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
var (
index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
info = pass.TypesInfo
testObj = isTestFn(info, n)
}
}
- if testObj != nil && fileUses(info, astutil.EnclosingFile(cur), "go1.24") {
+ if testObj != nil && analyzerutil.FileUsesGoVersion(pass, astutil.EnclosingFile(cur), versions.Go1_24) {
// Have a test function. Check that we can resolve the relevant
// testing.{T,B,F} at the current position.
if _, obj := lhs[0].Parent().LookupParent(testObj.Name(), lhs[0].Pos()); obj == testObj {
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
- "golang.org/x/tools/internal/analysisinternal/generated"
- typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal/typeindex"
+ "golang.org/x/tools/internal/versions"
)
var WaitGroupAnalyzer = &analysis.Analyzer{
Name: "waitgroup",
- Doc: analysisinternal.MustExtractDoc(doc, "waitgroup"),
+ Doc: analyzerutil.MustExtractDoc(doc, "waitgroup"),
Requires: []*analysis.Analyzer{
- generated.Analyzer,
inspect.Analyzer,
typeindexanalyzer.Analyzer,
},
// other effects, or blocked, or if WaitGroup.Go propagated panics
// from child to parent goroutine, the argument would be different.)
func waitgroup(pass *analysis.Pass) (any, error) {
- skipGenerated(pass)
-
var (
index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
info = pass.TypesInfo
}
file := astutil.EnclosingFile(curAddCall)
- if !fileUses(info, file, "go1.25") {
+ if !analyzerutil.FileUsesGoVersion(pass, file, versions.Go1_25) {
continue
}
tokFile := pass.Fset.File(file.Pos())
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Name: "nilfunc",
- Doc: analysisinternal.MustExtractDoc(doc, "nilfunc"),
+ Doc: analyzerutil.MustExtractDoc(doc, "nilfunc"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/nilfunc",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/fmtstr"
"golang.org/x/tools/internal/typeparams"
var Analyzer = &analysis.Analyzer{
Name: "printf",
- Doc: analysisinternal.MustExtractDoc(doc, "printf"),
+ Doc: analyzerutil.MustExtractDoc(doc, "printf"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/printf",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
// breaking existing tests and CI scripts.
if idx == len(call.Args)-1 &&
fileVersion != "" && // fail open
- versions.AtLeast(fileVersion, "go1.24") {
+ versions.AtLeast(fileVersion, versions.Go1_24) {
pass.Report(analysis.Diagnostic{
Pos: formatArg.Pos(),
anyIndex = true
}
rng := opRange(formatArg, op)
- if !okPrintfArg(pass, call, rng, &maxArgIndex, firstArg, name, op) {
+ if !okPrintfArg(pass, fileVersion, call, rng, &maxArgIndex, firstArg, name, op) {
// One error per format is enough.
return
}
// such as the position of the %v substring of "...%v...".
func opRange(formatArg ast.Expr, op *fmtstr.Operation) analysis.Range {
if lit, ok := formatArg.(*ast.BasicLit); ok {
- start, end, err := astutil.RangeInStringLiteral(lit, op.Range.Start, op.Range.End)
+ rng, err := astutil.RangeInStringLiteral(lit, op.Range.Start, op.Range.End)
if err == nil {
- return analysisinternal.Range(start, end) // position of "%v"
+ return rng // position of "%v"
}
}
return formatArg // entire format string
const (
argBool printfArgType = 1 << iota
+ argByte
argInt
argRune
argString
{'o', sharpNumFlag, argInt | argPointer},
{'O', sharpNumFlag, argInt | argPointer},
{'p', "-#", argPointer},
- {'q', " -+.0#", argRune | argInt | argString},
+ {'q', " -+.0#", argRune | argInt | argString}, // note: when analyzing go1.26 code, argInt => argByte
{'s', " -+.0", argString},
{'t', "-", argBool},
{'T', "-", anyType},
// okPrintfArg compares the operation to the arguments actually present,
// reporting any discrepancies it can discern; maxArgIndex is updated to the highest argument index used.
// If the final argument is ellipsissed, there's little it can do for that.
-func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, rng analysis.Range, maxArgIndex *int, firstArg int, name string, operation *fmtstr.Operation) (ok bool) {
+func okPrintfArg(pass *analysis.Pass, fileVersion string, call *ast.CallExpr, rng analysis.Range, maxArgIndex *int, firstArg int, name string, operation *fmtstr.Operation) (ok bool) {
verb := operation.Verb.Verb
var v printVerb
found := false
}
}
+ // When analyzing go1.26 code, rune and byte are the only %q integers (#72850).
+ if verb == 'q' &&
+ fileVersion != "" && // fail open
+ versions.AtLeast(fileVersion, versions.Go1_26) {
+ v.typ = argRune | argByte | argString
+ }
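+ // For example (illustrative): 'a' and byte(65) still match %q here,
+ // but a plain int argument is reported as a mismatch.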
+
// Could verb's arg implement fmt.Formatter?
// Skip check for the %w verb, which requires an error.
formatter := false
case *types.Struct:
// report whether all the elements of the struct match the expected type. For
// instance, with "%d" all the elements must be printable with the "%d" format.
- for i := 0; i < typ.NumFields(); i++ {
- typf := typ.Field(i)
+ for typf := range typ.Fields() {
if !m.match(typf.Type(), false) {
return false
}
types.Bool:
return m.t&argBool != 0
+ case types.Byte:
+ return m.t&(argInt|argByte) != 0
+
+ case types.Rune, types.UntypedRune:
+ return m.t&(argInt|argRune) != 0
+
case types.UntypedInt,
types.Int,
types.Int8,
types.Int16,
- types.Int32,
+ // see case Rune for int32
types.Int64,
types.Uint,
- types.Uint8,
+ // see case Byte for uint8
types.Uint16,
types.Uint32,
types.Uint64,
case types.UnsafePointer:
return m.t&(argPointer|argInt) != 0
- case types.UntypedRune:
- return m.t&(argInt|argRune) != 0
-
case types.UntypedNil:
return false
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/typesinternal"
)
// Analyzer describes sigchanyzer analysis function detector.
var Analyzer = &analysis.Analyzer{
Name: "sigchanyzer",
- Doc: analysisinternal.MustExtractDoc(doc, "sigchanyzer"),
+ Doc: analyzerutil.MustExtractDoc(doc, "sigchanyzer"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/sigchanyzer",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Name: "slog",
- Doc: analysisinternal.MustExtractDoc(doc, "slog"),
+ Doc: analyzerutil.MustExtractDoc(doc, "slog"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/slog",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
// "slog.Logger.With" (instead of "(*log/slog.Logger).With")
func shortName(fn *types.Func) string {
var r string
- if recv := fn.Type().(*types.Signature).Recv(); recv != nil {
+ if recv := fn.Signature().Recv(); recv != nil {
if _, named := typesinternal.ReceiverNamed(recv); named != nil {
r = named.Obj().Name()
} else {
return 0, false
}
var recvName string // by default a slog package function
- if recv := fn.Type().(*types.Signature).Recv(); recv != nil {
+ if recv := fn.Signature().Recv(); recv != nil {
_, named := typesinternal.ReceiverNamed(recv)
if named == nil {
return 0, false // anon struct/interface
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
)
//go:embed doc.go
var Analyzer = &analysis.Analyzer{
Name: "stdmethods",
- Doc: analysisinternal.MustExtractDoc(doc, "stdmethods"),
+ Doc: analyzerutil.MustExtractDoc(doc, "stdmethods"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stdmethods",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
}
// Do the =s (if any) all match?
- if !matchParams(pass, expect.args, args, "=") || !matchParams(pass, expect.results, results, "=") {
+ if !matchParams(expect.args, args, "=") || !matchParams(expect.results, results, "=") {
return
}
// Everything must match.
- if !matchParams(pass, expect.args, args, "") || !matchParams(pass, expect.results, results, "") {
+ if !matchParams(expect.args, args, "") || !matchParams(expect.results, results, "") {
expectFmt := id.Name + "(" + argjoin(expect.args) + ")"
if len(expect.results) == 1 {
expectFmt += " " + argjoin(expect.results)
}
// Does each type in expect with the given prefix match the corresponding type in actual?
-func matchParams(pass *analysis.Pass, expect []string, actual *types.Tuple, prefix string) bool {
+func matchParams(expect []string, actual *types.Tuple, prefix string) bool {
for i, x := range expect {
if !strings.HasPrefix(x, prefix) {
continue
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typeparams"
"golang.org/x/tools/internal/typesinternal"
var Analyzer = &analysis.Analyzer{
Name: "stringintconv",
- Doc: analysisinternal.MustExtractDoc(doc, "stringintconv"),
+ Doc: analyzerutil.MustExtractDoc(doc, "stringintconv"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stringintconv",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Name: "testinggoroutine",
- Doc: analysisinternal.MustExtractDoc(doc, "testinggoroutine"),
+ Doc: analyzerutil.MustExtractDoc(doc, "testinggoroutine"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/testinggoroutine",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
// isMethodNamed returns true if f is a method defined
// in package with the path pkgPath with a name in names.
//
-// (Unlike [analysisinternal.IsMethodNamed], it ignores the receiver type name.)
+// (Unlike [analysis.IsMethodNamed], it ignores the receiver type name.)
func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool {
if f == nil {
return false
if f.Pkg() == nil || f.Pkg().Path() != pkgPath {
return false
}
- if f.Type().(*types.Signature).Recv() == nil {
+ if f.Signature().Recv() == nil {
return false
}
return slices.Contains(names, f.Name())
"unicode/utf8"
"golang.org/x/tools/go/analysis"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ "golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Name: "tests",
- Doc: analysisinternal.MustExtractDoc(doc, "tests"),
+ Doc: analyzerutil.MustExtractDoc(doc, "tests"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/tests",
Run: run,
}
if tparams := fn.Type.TypeParams; tparams != nil && len(tparams.List) > 0 {
// Note: cmd/go/internal/load also errors about TestXXX and BenchmarkXXX functions with type parameters.
// We have currently decided to also warn before compilation/package loading. This can help users in IDEs.
- pass.ReportRangef(analysisinternal.Range(tparams.Opening, tparams.Closing),
+ pass.ReportRangef(astutil.RangeOf(tparams.Opening, tparams.Closing),
"%s has type parameters: it will not be run by go test as a %sXXX function",
fn.Name.Name, prefix)
}
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Name: "timeformat",
- Doc: analysisinternal.MustExtractDoc(doc, "timeformat"),
+ Doc: analyzerutil.MustExtractDoc(doc, "timeformat"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/timeformat",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
func run(pass *analysis.Pass) (any, error) {
// Note: (time.Time).Format is a method and can be a typeutil.Callee
// without directly importing "time". So we cannot just skip this package
- // when !analysisinternal.Imports(pass.Pkg, "time").
+ // when !analysis.Imports(pass.Pkg, "time").
// TODO(taking): Consider using a prepass to collect typeutil.Callees.
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Name: "unmarshal",
- Doc: analysisinternal.MustExtractDoc(doc, "unmarshal"),
+ Doc: analyzerutil.MustExtractDoc(doc, "unmarshal"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unmarshal",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
// Note: (*"encoding/json".Decoder).Decode, (* "encoding/gob".Decoder).Decode
// and (* "encoding/xml".Decoder).Decode are methods and can be a typeutil.Callee
// without directly importing their packages. So we cannot just skip this package
- // when !analysisinternal.Imports(pass.Pkg, "encoding/...").
+ // when !analysis.Imports(pass.Pkg, "encoding/...").
// TODO(taking): Consider using a prepass to collect typeutil.Callees.
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
// Classify the callee (without allocating memory).
argidx := -1
- recv := fn.Type().(*types.Signature).Recv()
+ recv := fn.Signature().Recv()
if fn.Name() == "Unmarshal" && recv == nil {
// "encoding/json".Unmarshal
// "encoding/xml".Unmarshal
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/refactor"
)
var Analyzer = &analysis.Analyzer{
Name: "unreachable",
- Doc: analysisinternal.MustExtractDoc(doc, "unreachable"),
+ Doc: analyzerutil.MustExtractDoc(doc, "unreachable"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unreachable",
Requires: []*analysis.Analyzer{inspect.Analyzer},
RunDespiteErrors: true,
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Name: "unsafeptr",
- Doc: analysisinternal.MustExtractDoc(doc, "unsafeptr"),
+ Doc: analyzerutil.MustExtractDoc(doc, "unsafeptr"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unsafeptr",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
+ "golang.org/x/tools/internal/astutil"
)
//go:embed doc.go
var Analyzer = &analysis.Analyzer{
Name: "unusedresult",
- Doc: analysisinternal.MustExtractDoc(doc, "unusedresult"),
+ Doc: analyzerutil.MustExtractDoc(doc, "unusedresult"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedresult",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
if !ok {
return // e.g. var or builtin
}
- if sig := fn.Type().(*types.Signature); sig.Recv() != nil {
+ if sig := fn.Signature(); sig.Recv() != nil {
// method (e.g. foo.String())
if types.Identical(sig, sigNoArgsStringResult) {
if stringMethods[fn.Name()] {
- pass.ReportRangef(analysisinternal.Range(call.Pos(), call.Lparen),
+ pass.ReportRangef(astutil.RangeOf(call.Pos(), call.Lparen),
"result of (%s).%s call not used",
sig.Recv().Type(), fn.Name())
}
} else {
// package-level function (e.g. fmt.Errorf)
if pkgFuncs[[2]string{fn.Pkg().Path(), fn.Name()}] {
- pass.ReportRangef(analysisinternal.Range(call.Pos(), call.Lparen),
+ pass.ReportRangef(astutil.RangeOf(call.Pos(), call.Lparen),
"result of %s.%s call not used",
fn.Pkg().Path(), fn.Name())
}
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/analyzerutil"
"golang.org/x/tools/internal/typesinternal"
)
var Analyzer = &analysis.Analyzer{
Name: "waitgroup",
- Doc: analysisinternal.MustExtractDoc(doc, "waitgroup"),
+ Doc: analyzerutil.MustExtractDoc(doc, "waitgroup"),
URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/waitgroup",
Requires: []*analysis.Analyzer{inspect.Analyzer},
Run: run,
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/internal/analysisflags"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/internal/analysis/driverutil"
"golang.org/x/tools/internal/facts"
)
// but apply all fixes from the root actions.
// Convert results to form needed by ApplyFixes.
- fixActions := make([]analysisflags.FixAction, len(results))
+ fixActions := make([]driverutil.FixAction, len(results))
for i, res := range results {
- fixActions[i] = analysisflags.FixAction{
+ fixActions[i] = driverutil.FixAction{
Name: res.a.Name,
+ Pkg: res.pkg,
+ Files: res.files,
FileSet: fset,
- ReadFileFunc: os.ReadFile,
+ ReadFileFunc: os.ReadFile, // TODO(adonovan): respect overlays
Diagnostics: res.diagnostics,
}
}
- if err := analysisflags.ApplyFixes(fixActions, false); err != nil {
+ if err := driverutil.ApplyFixes(fixActions, analysisflags.Diff, false); err != nil {
// Fail when applying fixes failed.
log.Print(err)
exit = 1
if analysisflags.JSON {
// JSON output
- tree := make(analysisflags.JSONTree)
+ tree := make(driverutil.JSONTree)
for _, res := range results {
tree.Add(fset, id, res.a.Name, res.diagnostics, res.err)
}
}
for _, res := range results {
for _, diag := range res.diagnostics {
- analysisflags.PrintPlain(os.Stderr, fset, analysisflags.Context, diag)
+ driverutil.PrintPlain(os.Stderr, fset, analysisflags.Context, diag)
exit = 1
}
}
ResultOf: inputs,
Report: func(d analysis.Diagnostic) {
// Unitchecker doesn't apply fixes, but it does report them in the JSON output.
- if err := analysisinternal.ValidateFixes(fset, a, d.SuggestedFixes); err != nil {
+ if err := driverutil.ValidateFixes(fset, a, d.SuggestedFixes); err != nil {
// Since we have diagnostics, the exit code will be nonzero,
// so logging these errors is sufficient.
log.Println(err)
AllPackageFacts: func() []analysis.PackageFact { return facts.AllPackageFacts(factFilter) },
Module: module,
}
- pass.ReadFile = analysisinternal.CheckedReadFile(pass, os.ReadFile)
+ pass.ReadFile = driverutil.CheckedReadFile(pass, os.ReadFile)
t0 := time.Now()
act.result, act.err = a.Run(pass)
if act.err == nil { // resolve URLs on diagnostics.
for i := range act.diagnostics {
- if url, uerr := analysisflags.ResolveURL(a, act.diagnostics[i]); uerr == nil {
+ if url, uerr := driverutil.ResolveURL(a, act.diagnostics[i]); uerr == nil {
act.diagnostics[i].URL = url
} else {
act.err = uerr // keep the last error
results := make([]result, len(analyzers))
for i, a := range analyzers {
act := actions[a]
- results[i].a = a
- results[i].err = act.err
- results[i].diagnostics = act.diagnostics
+ results[i] = result{pkg, files, a, act.diagnostics, act.err}
}
data := facts.Encode()
}
type result struct {
+ pkg *types.Package
+ files []*ast.File
a *analysis.Analyzer
diagnostics []analysis.Diagnostic
err error
// This algorithm could be implemented using c.Inspect,
// but it is about 2.5x slower.
- best := int32(-1) // push index of latest (=innermost) node containing range
+ // best is the push-index of the latest (=innermost) node containing range.
+ // (Beware: latest is not always innermost because FuncDecl.{Name,Type} overlap.)
+ best := int32(-1)
for i, limit := c.indices(); i < limit; i++ {
ev := events[i]
if ev.index > i { // push?
continue
}
} else {
+ // Edge case: FuncDecl.Name and .Type overlap:
+ // Don't update best from Name to FuncDecl.Type.
+ //
+ // The condition can be read as:
+ // - n is FuncType
+ // - n.parent is FuncDecl
+ // - best is strictly beneath the FuncDecl
+ if ev.typ == 1<<nFuncType &&
+ events[ev.parent].typ == 1<<nFuncDecl &&
+ best > ev.parent {
+ continue
+ }
+
nodeEnd = n.End()
if n.Pos() > start {
break // disjoint, after; stop
)
type builder struct {
- cfg *CFG
+ blocks []*Block
mayReturn func(*ast.CallExpr) bool
current *Block
lblocks map[string]*lblock // labeled blocks
*ast.SendStmt,
*ast.IncDecStmt,
*ast.GoStmt,
- *ast.DeferStmt,
*ast.EmptyStmt,
*ast.AssignStmt:
// No effect on control flow.
b.add(s)
+ case *ast.DeferStmt:
+ b.add(s)
+ // Assume conservatively that this behaves like:
+ // defer func() { recover() }()
+ // so any subsequent panic may act like a return.
+ b.current.returns = true
+
case *ast.ExprStmt:
b.add(s)
if call, ok := s.X.(*ast.CallExpr); ok && !b.mayReturn(call) {
goto start // effectively: tailcall stmt(g, s.Stmt, label)
case *ast.ReturnStmt:
+ b.current.returns = true
b.add(s)
b.current = b.newBlock(KindUnreachable, s)
// It does not automatically become the current block.
// comment is an optional string for more readable debugging output.
func (b *builder) newBlock(kind BlockKind, stmt ast.Stmt) *Block {
- g := b.cfg
block := &Block{
- Index: int32(len(g.Blocks)),
+ Index: int32(len(b.blocks)),
Kind: kind,
Stmt: stmt,
}
block.Succs = block.succs2[:0]
- g.Blocks = append(g.Blocks, block)
+ b.blocks = append(b.blocks, block)
return block
}
"go/ast"
"go/format"
"go/token"
+
+ "golang.org/x/tools/internal/cfginternal"
)
// A CFG represents the control-flow graph of a single function.
//
// The entry point is Blocks[0]; there may be multiple return blocks.
type CFG struct {
- Blocks []*Block // block[0] is entry; order otherwise undefined
+ Blocks []*Block // block[0] is entry; order otherwise undefined
+ noreturn bool // function body lacks a reachable return statement
}
// A Block represents a basic block: a list of statements and
// an [ast.Expr], Succs[0] is the successor if the condition is true, and
// Succs[1] is the successor if the condition is false.
type Block struct {
- Nodes []ast.Node // statements, expressions, and ValueSpecs
- Succs []*Block // successor nodes in the graph
- Index int32 // index within CFG.Blocks
- Live bool // block is reachable from entry
- Kind BlockKind // block kind
- Stmt ast.Stmt // statement that gave rise to this block (see BlockKind for details)
+ Nodes []ast.Node // statements, expressions, and ValueSpecs
+ Succs []*Block // successor nodes in the graph
+ Index int32 // index within CFG.Blocks
+ Live bool // block is reachable from entry
+ returns bool // block contains return or defer (which may recover and return)
+ Kind BlockKind // block kind
+ Stmt ast.Stmt // statement that gave rise to this block (see BlockKind for details)
succs2 [2]*Block // underlying array for Succs
}
func New(body *ast.BlockStmt, mayReturn func(*ast.CallExpr) bool) *CFG {
b := builder{
mayReturn: mayReturn,
- cfg: new(CFG),
}
b.current = b.newBlock(KindBody, body)
b.stmt(body)
- // Compute liveness (reachability from entry point), breadth-first.
- q := make([]*Block, 0, len(b.cfg.Blocks))
- q = append(q, b.cfg.Blocks[0]) // entry point
+ // Compute liveness (reachability from entry point),
+ // breadth-first, marking Block.Live flags.
+ q := make([]*Block, 0, len(b.blocks))
+ q = append(q, b.blocks[0]) // entry point
for len(q) > 0 {
b := q[len(q)-1]
q = q[:len(q)-1]
// Does control fall off the end of the function's body?
// Make implicit return explicit.
if b.current != nil && b.current.Live {
+ b.current.returns = true
b.add(&ast.ReturnStmt{
Return: body.End() - 1,
})
}
- return b.cfg
+ // Is any return (or defer+recover) block reachable?
+ noreturn := true
+ for _, bl := range b.blocks {
+ if bl.Live && bl.returns {
+ noreturn = false
+ break
+ }
+ }
+
+ return &CFG{Blocks: b.blocks, noreturn: noreturn}
+}
+
+// isNoReturn reports whether the function has no reachable return.
+// TODO(adonovan): add (*CFG).NoReturn to public API.
+func isNoReturn(_cfg any) bool { return _cfg.(*CFG).noreturn }
+
+func init() {
+ cfginternal.IsNoReturn = isNoReturn // expose to ctrlflow analyzer
}
func (b *Block) String() string {
//
// When control falls off the end of the function, the ReturnStmt is synthetic
// and its [ast.Node.End] position may be beyond the end of the file.
+//
+// A function that contains no return statement (explicit or implied)
+// may yet return normally, and may even return a nonzero value. For example:
+//
+// func() (res any) {
+// defer func() { res = recover() }()
+// panic(123)
+// }
func (b *Block) Return() (ret *ast.ReturnStmt) {
if len(b.Nodes) > 0 {
ret, _ = b.Nodes[len(b.Nodes)-1].(*ast.ReturnStmt)
case *types.Func:
// A func, if not package-level, must be a method.
- if recv := obj.Type().(*types.Signature).Recv(); recv == nil {
+ if recv := obj.Signature().Recv(); recv == nil {
return "", fmt.Errorf("func is not a method: %v", obj)
}
return "", false
}
- _, named := typesinternal.ReceiverNamed(meth.Type().(*types.Signature).Recv())
+ _, named := typesinternal.ReceiverNamed(meth.Signature().Recv())
if named == nil {
return "", false
}
case *types.Named:
hash := h.hashTypeName(t.Obj())
targs := t.TypeArgs()
- for i := 0; i < targs.Len(); i++ {
- targ := targs.At(i)
+ for targ := range targs.Types() {
hash += 2 * h.hash(targ)
}
return hash
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package analyzerutil provides implementation helpers for analyzers.
+package analyzerutil
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package analysisinternal
+package analyzerutil
import (
"fmt"
//
// var Analyzer = &analysis.Analyzer{
// Name: "halting",
-// Doc: analysisinternal.MustExtractDoc(doc, "halting"),
+// Doc: analyzerutil.MustExtractDoc(doc, "halting"),
// ...
// }
func MustExtractDoc(content, name string) string {
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package analyzerutil
+
+// This file defines helpers for calling [analysis.Pass.ReadFile].
+
+import (
+ "go/token"
+ "os"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// ReadFile reads a file and adds it to the FileSet in pass
+// so that we can report errors against it using lineStart.
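+//
+// Illustrative usage:
+//
+//	content, tf, err := analyzerutil.ReadFile(pass, filename)
+//	if err != nil {
+//		return nil, err
+//	}
+//	pos := tf.LineStart(3) // a position on line 3, usable in diagnostics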
+func ReadFile(pass *analysis.Pass, filename string) ([]byte, *token.File, error) {
+ readFile := pass.ReadFile
+ if readFile == nil {
+ readFile = os.ReadFile
+ }
+ content, err := readFile(filename)
+ if err != nil {
+ return nil, nil, err
+ }
+ tf := pass.Fset.AddFile(filename, -1, len(content))
+ tf.SetLinesForContent(content)
+ return content, tf, nil
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package analyzerutil
+
+import (
+ "go/ast"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/internal/packagepath"
+ "golang.org/x/tools/internal/stdlib"
+ "golang.org/x/tools/internal/versions"
+)
+
+// FileUsesGoVersion reports whether the specified file may use features of the
+// specified version of Go (e.g. "go1.24").
+//
+// Tip: we recommend using this check "late", just before calling
+// pass.Report, rather than "early" (when entering each ast.File, or
+// each candidate node of interest, during the traversal), because the
+// operation is not free, yet is not a highly selective filter: the
+// fraction of files that pass most version checks is high and
+// increases over time.
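+//
+// Typical usage (illustrative):
+//
+//	if analyzerutil.FileUsesGoVersion(pass, file, versions.Go1_24) {
+//		pass.Report(diag)
+//	}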
+func FileUsesGoVersion(pass *analysis.Pass, file *ast.File, version string) (_res bool) {
+ fileVersion := pass.TypesInfo.FileVersions[file]
+
+ // Standard packages that are part of toolchain bootstrapping
+ // are not considered to use a version of Go later than the
+ // current bootstrap toolchain version.
+ // The bootstrap rule does not cover tests,
+ // and some tests (e.g. debug/elf/file_test.go) rely on this.
+ pkgpath := pass.Pkg.Path()
+ if packagepath.IsStdPackage(pkgpath) &&
+ stdlib.IsBootstrapPackage(pkgpath) && // (excludes "*_test" external test packages)
+ !strings.HasSuffix(pass.Fset.File(file.Pos()).Name(), "_test.go") { // (excludes all tests)
+ fileVersion = stdlib.BootstrapVersion.String() // package must bootstrap
+ }
+
+ return !versions.Before(fileVersion, version)
+}
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package analysisflags
+// Package driverutil defines implementation helper functions for
+// analysis drivers such as unitchecker, {single,multi}checker, and
+// analysistest.
+package driverutil
// This file defines the -fix logic common to unitchecker and
// {single,multi}checker.
import (
+ "bytes"
"fmt"
- "go/format"
+ "go/ast"
+ "go/parser"
+ "go/printer"
"go/token"
+ "go/types"
"log"
"maps"
"os"
"sort"
+ "strconv"
"golang.org/x/tools/go/analysis"
- "golang.org/x/tools/internal/analysisinternal"
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/internal/astutil/free"
"golang.org/x/tools/internal/diff"
)
// FixAction abstracts a checker action (running one analyzer on one
// package) for the purposes of applying its diagnostics' fixes.
type FixAction struct {
- Name string // e.g. "analyzer@package"
+ Name string // e.g. "analyzer@package"
+ Pkg *types.Package // (for import removal)
+ Files []*ast.File
FileSet *token.FileSet
- ReadFileFunc analysisinternal.ReadFileFunc
+ ReadFileFunc ReadFileFunc
Diagnostics []analysis.Diagnostic
}
// ApplyFixes attempts to apply the first suggested fix associated
// with each diagnostic reported by the specified actions.
-// All fixes must have been validated by [analysisinternal.ValidateFixes].
+// All fixes must have been validated by [ValidateFixes].
//
// Each fix is treated as an independent change; fixes are merged in
// an arbitrary deterministic order as if by a three-way diff tool
// composition of the two fixes is semantically correct. Coalescing
// identical edits is appropriate for imports, but not for, say,
// increments to a counter variable; the correct resolution in that
-// case might be to increment it twice. Or consider two fixes that
-// each delete the penultimate reference to an import or local
-// variable: each fix is sound individually, and they may be textually
-// distant from each other, but when both are applied, the program is
-// no longer valid because it has an unreferenced import or local
-// variable.
-// TODO(adonovan): investigate replacing the final "gofmt" step with a
-// formatter that applies the unused-import deletion logic of
-// "goimports".
+// case might be to increment it twice.
+//
+// Or consider two fixes that each delete the penultimate reference to
+// a local variable: each fix is sound individually, and they may be
+// textually distant from each other, but when both are applied, the
+// program is no longer valid because it has an unreferenced local
+// variable. (ApplyFixes solves the analogous problem for imports by
+// eliminating imports whose name is unreferenced in the remainder of
+// the fixed file.)
//
// Merging depends on both the order of fixes and the order of edits
// within them. For example, if three fixes add import "a" twice and
// applyFixes returns success if all fixes are valid, could be cleanly
// merged, and the corresponding files were successfully updated.
//
-// If the -diff flag was set, instead of updating the files it display the final
-// patch composed of all the cleanly merged fixes.
+// If printDiff (from the -diff flag) is set, instead of updating the
+// files it displays the final patch composed of all the cleanly merged
+// fixes.
//
// TODO(adonovan): handle file-system level aliases such as symbolic
// links using robustio.FileID.
-func ApplyFixes(actions []FixAction, verbose bool) error {
+func ApplyFixes(actions []FixAction, printDiff, verbose bool) error {
+ generated := make(map[*token.File]bool)
+
// Select fixes to apply.
//
// If there are several for a given Diagnostic, choose the first.
}
var fixes []*fixact
for _, act := range actions {
+ for _, file := range act.Files {
+ tokFile := act.FileSet.File(file.FileStart)
+ // Memoize, since there may be many actions
+ // for the same package (list of files).
+ if _, seen := generated[tokFile]; !seen {
+ generated[tokFile] = ast.IsGenerated(file)
+ }
+ }
+
for _, diag := range act.Diagnostics {
for i := range diag.SuggestedFixes {
fix := &diag.SuggestedFixes[i]
// packages are not disjoint, due to test variants, so this
// would not really address the issue.)
baselineContent := make(map[string][]byte)
- getBaseline := func(readFile analysisinternal.ReadFileFunc, filename string) ([]byte, error) {
+ getBaseline := func(readFile ReadFileFunc, filename string) ([]byte, error) {
content, ok := baselineContent[filename]
if !ok {
var err error
// Apply each fix, updating the current state
// only if the entire fix can be cleanly merged.
- accumulatedEdits := make(map[string][]diff.Edit)
- goodFixes := 0
+ var (
+ accumulatedEdits = make(map[string][]diff.Edit)
+ filePkgs = make(map[string]*types.Package) // maps each file to an arbitrary package that includes it
+
+ goodFixes = 0 // number of fixes cleanly applied
+ skippedFixes = 0 // number of fixes skipped (e.g. because they edit a generated file)
+ )
fixloop:
for _, fixact := range fixes {
+ // Skip a fix if any of its edits touch a generated file.
+ for _, edit := range fixact.fix.TextEdits {
+ file := fixact.act.FileSet.File(edit.Pos)
+ if generated[file] {
+ skippedFixes++
+ continue fixloop
+ }
+ }
+
// Convert analysis.TextEdits to diff.Edits, grouped by file.
// Precondition: a prior call to validateFix succeeded.
fileEdits := make(map[string][]diff.Edit)
for _, edit := range fixact.fix.TextEdits {
file := fixact.act.FileSet.File(edit.Pos)
+ filePkgs[file.Name()] = fixact.act.Pkg
+
baseline, err := getBaseline(fixact.act.ReadFileFunc, file.Name())
if err != nil {
log.Printf("skipping fix to file %s: %v", file.Name(), err)
log.Printf("%s: fix %s applied", fixact.act.Name, fixact.fix.Message)
}
}
- badFixes := len(fixes) - goodFixes
+ badFixes := len(fixes) - goodFixes - skippedFixes // number of fixes that could not be applied
// Show diff or update files to final state.
var files []string
}
// Attempt to format each file.
- if formatted, err := format.Source(final); err == nil {
+ if formatted, err := FormatSourceRemoveImports(filePkgs[file], final); err == nil {
final = formatted
}
- if diffFlag {
+ if printDiff {
// Since we formatted the file, we need to recompute the diff.
unified := diff.Unified(file+" (old)", file+" (new)", string(baseline), string(final))
// TODO(adonovan): abstract the I/O.
// These numbers are potentially misleading:
// The denominator includes duplicate conflicting fixes due to
// common files in packages "p" and "p [p.test]", which may
- // have been fixed fixed and won't appear in the re-run.
+ // have been fixed and won't appear in the re-run.
// TODO(adonovan): eliminate identical fixes as an initial
// filtering step.
//
// TODO(adonovan): should we log that n files were updated in case of total victory?
if badFixes > 0 || filesUpdated < totalFiles {
- if diffFlag {
- return fmt.Errorf("%d of %d fixes skipped (e.g. due to conflicts)", badFixes, len(fixes))
+ if printDiff {
+ return fmt.Errorf("%d of %s skipped (e.g. due to conflicts)",
+ badFixes,
+ plural(len(fixes), "fix", "fixes"))
} else {
- return fmt.Errorf("applied %d of %d fixes; %d files updated. (Re-run the command to apply more.)",
- goodFixes, len(fixes), filesUpdated)
+ return fmt.Errorf("applied %d of %s; %s updated. (Re-run the command to apply more.)",
+ goodFixes,
+ plural(len(fixes), "fix", "fixes"),
+ plural(filesUpdated, "file", "files"))
}
}
if verbose {
- log.Printf("applied %d fixes, updated %d files", len(fixes), filesUpdated)
+ if skippedFixes > 0 {
+ log.Printf("skipped %s that would edit generated files",
+ plural(skippedFixes, "fix", "fixes"))
+ }
+ log.Printf("applied %s, updated %s",
+ plural(len(fixes), "fix", "fixes"),
+ plural(filesUpdated, "file", "files"))
}
return nil
}
+
+// FormatSourceRemoveImports is a variant of [format.Source] that
+// removes imports that became redundant when fixes were applied.
+//
+// Import removal is necessarily heuristic since we do not have type
+// information for the fixed file and thus cannot accurately tell
+// whether k is among the free names of T{k: 0}, which requires
+// knowledge of whether T is a struct type.
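+//
+// For example (illustrative): if the applied fixes remove the last
+// reference to fmt in the file, the corresponding
+//
+//	import "fmt"
+//
+// declaration is deleted before the file is printed.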
+func FormatSourceRemoveImports(pkg *types.Package, src []byte) ([]byte, error) {
+ // This function was reduced from the "strict entire file"
+ // path through [format.Source].
+
+ fset := token.NewFileSet()
+ file, err := parser.ParseFile(fset, "fixed.go", src, parser.ParseComments|parser.SkipObjectResolution)
+ if err != nil {
+ return nil, err
+ }
+
+ ast.SortImports(fset, file)
+
+ removeUnneededImports(fset, pkg, file)
+
+ // printerNormalizeNumbers means to canonicalize number literal prefixes
+ // and exponents while printing. See https://golang.org/doc/go1.13#gofmt.
+ //
+ // This value is defined in go/printer specifically for go/format and cmd/gofmt.
+ const printerNormalizeNumbers = 1 << 30
+ cfg := &printer.Config{
+ Mode: printer.UseSpaces | printer.TabIndent | printerNormalizeNumbers,
+ Tabwidth: 8,
+ }
+ var buf bytes.Buffer
+ if err := cfg.Fprint(&buf, fset, file); err != nil {
+ return nil, err
+ }
+ return buf.Bytes(), nil
+}
+
+// removeUnneededImports removes import specs that are not referenced
+// within the fixed file. It uses [free.Names] to heuristically
+// approximate the set of imported names needed by the body of the
+// file based only on syntax.
+//
+// pkg provides type information about the unmodified package, in
+// particular the name that would implicitly be declared by a
+// non-renaming import of a given existing dependency.
+func removeUnneededImports(fset *token.FileSet, pkg *types.Package, file *ast.File) {
+ // Map each existing dependency to its default import name.
+ // (We'll need this to interpret non-renaming imports.)
+ packageNames := make(map[string]string)
+ for _, imp := range pkg.Imports() {
+ packageNames[imp.Path()] = imp.Name()
+ }
+
+ // Compute the set of free names of the file,
+ // ignoring its import decls.
+ freenames := make(map[string]bool)
+ for _, decl := range file.Decls {
+ if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT {
+ continue // skip import
+ }
+
+ // TODO(adonovan): we could do better than includeComplitIdents=false
+ // since we have type information about the unmodified package,
+ // which is a good source of heuristics.
+ const includeComplitIdents = false
+ maps.Copy(freenames, free.Names(decl, includeComplitIdents))
+ }
+
+ // Check whether each import's declared name is free (referenced) by the file.
+ var deletions []func()
+ for _, spec := range file.Imports {
+ path, err := strconv.Unquote(spec.Path.Value)
+ if err != nil {
+ continue // malformed import; ignore
+ }
+ explicit := "" // explicit PkgName, if any
+ if spec.Name != nil {
+ explicit = spec.Name.Name
+ }
+ name := explicit // effective PkgName
+ if name == "" {
+ // Non-renaming import: use package's default name.
+ name = packageNames[path]
+ }
+ switch name {
+ case "":
+ continue // assume it's a new import
+ case ".":
+ continue // dot imports are tricky
+ case "_":
+ continue // keep blank imports
+ }
+ if !freenames[name] {
+ // Import's effective name is not free in (not used by) the file.
+ // Enqueue it for deletion after the loop.
+ deletions = append(deletions, func() {
+ astutil.DeleteNamedImport(fset, file, explicit, path)
+ })
+ }
+ }
+
+ // Apply the deletions.
+ for _, del := range deletions {
+ del()
+ }
+}
+
+// plural returns "n nouns", selecting the plural form as appropriate.
+func plural(n int, singular, plural string) string {
+ if n == 1 {
+ return "1 " + singular
+ } else {
+ return fmt.Sprintf("%d %s", n, plural)
+ }
+}
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package driverutil
+
+// This file defines output helpers common to all drivers.
+
+import (
+ "encoding/json"
+ "fmt"
+ "go/token"
+ "io"
+ "log"
+ "os"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// TODO(adonovan): don't accept an io.Writer if we don't report errors.
+// Either accept a bytes.Buffer (infallible), or return a []byte.
+
+// PrintPlain prints a diagnostic in plain text form.
+// If contextLines is nonnegative, it also prints the
+// offending line plus this many lines of context.
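+//
+// Output has the form (illustrative, with contextLines = 0):
+//
+//	p.go:3:2: result of fmt.Sprintf call not used
+//	3	fmt.Sprintf("%d", x)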
+func PrintPlain(out io.Writer, fset *token.FileSet, contextLines int, diag analysis.Diagnostic) {
+ print := func(pos, end token.Pos, message string) {
+ posn := fset.Position(pos)
+ fmt.Fprintf(out, "%s: %s\n", posn, message)
+
+ // show offending line plus N lines of context.
+ if contextLines >= 0 {
+ end := fset.Position(end)
+ if !end.IsValid() {
+ end = posn
+ }
+ // TODO(adonovan): highlight the portion of the line indicated
+ // by pos...end using ASCII art, terminal colors, etc?
+ data, _ := os.ReadFile(posn.Filename)
+ lines := strings.Split(string(data), "\n")
+ for i := posn.Line - contextLines; i <= end.Line+contextLines; i++ {
+ if 1 <= i && i <= len(lines) {
+ fmt.Fprintf(out, "%d\t%s\n", i, lines[i-1])
+ }
+ }
+ }
+ }
+
+ print(diag.Pos, diag.End, diag.Message)
+ for _, rel := range diag.Related {
+ print(rel.Pos, rel.End, "\t"+rel.Message)
+ }
+}
+
+// A JSONTree is a mapping from package ID to analysis name to result.
+// Each result is either a jsonError or a list of JSONDiagnostic.
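+//
+// An encoded tree looks like (illustrative):
+//
+//	{
+//		"example.com/p": {
+//			"printf": [{"posn": "p.go:3:2", "message": "..."}]
+//		}
+//	}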
+type JSONTree map[string]map[string]any
+
+// A JSONTextEdit describes the replacement of a portion of a file.
+// Start and End are zero-based half-open indices into the original byte
+// sequence of the file, and New is the new text.
+type JSONTextEdit struct {
+ Filename string `json:"filename"`
+ Start int `json:"start"`
+ End int `json:"end"`
+ New string `json:"new"`
+}
+
+// A JSONSuggestedFix describes an edit that should be applied as a whole or not
+// at all. It might contain multiple TextEdits/text_edits if the SuggestedFix
+// consists of multiple non-contiguous edits.
+type JSONSuggestedFix struct {
+ Message string `json:"message"`
+ Edits []JSONTextEdit `json:"edits"`
+}
+
+// A JSONDiagnostic describes the JSON schema of an analysis.Diagnostic.
+//
+// TODO(matloob): include End position if present.
+type JSONDiagnostic struct {
+ Category string `json:"category,omitempty"`
+ Posn string `json:"posn"` // e.g. "file.go:line:column"
+ Message string `json:"message"`
+ SuggestedFixes []JSONSuggestedFix `json:"suggested_fixes,omitempty"`
+ Related []JSONRelatedInformation `json:"related,omitempty"`
+}
+
+// A JSONRelatedInformation describes a secondary position and message
+// related to a primary diagnostic.
+//
+// TODO(adonovan): include End position if present.
+type JSONRelatedInformation struct {
+ Posn string `json:"posn"` // e.g. "file.go:line:column"
+ Message string `json:"message"`
+}
+
+// Add adds the result of analysis 'name' on package 'id'.
+// The result is either a list of diagnostics or an error.
+func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis.Diagnostic, err error) {
+ var v any
+ if err != nil {
+ type jsonError struct {
+ Err string `json:"error"`
+ }
+ v = jsonError{err.Error()}
+ } else if len(diags) > 0 {
+ diagnostics := make([]JSONDiagnostic, 0, len(diags))
+ for _, f := range diags {
+ var fixes []JSONSuggestedFix
+ for _, fix := range f.SuggestedFixes {
+ var edits []JSONTextEdit
+ for _, edit := range fix.TextEdits {
+ edits = append(edits, JSONTextEdit{
+ Filename: fset.Position(edit.Pos).Filename,
+ Start: fset.Position(edit.Pos).Offset,
+ End: fset.Position(edit.End).Offset,
+ New: string(edit.NewText),
+ })
+ }
+ fixes = append(fixes, JSONSuggestedFix{
+ Message: fix.Message,
+ Edits: edits,
+ })
+ }
+ var related []JSONRelatedInformation
+ for _, r := range f.Related {
+ related = append(related, JSONRelatedInformation{
+ Posn: fset.Position(r.Pos).String(),
+ Message: r.Message,
+ })
+ }
+ jdiag := JSONDiagnostic{
+ Category: f.Category,
+ Posn: fset.Position(f.Pos).String(),
+ Message: f.Message,
+ SuggestedFixes: fixes,
+ Related: related,
+ }
+ diagnostics = append(diagnostics, jdiag)
+ }
+ v = diagnostics
+ }
+ if v != nil {
+ m, ok := tree[id]
+ if !ok {
+ m = make(map[string]any)
+ tree[id] = m
+ }
+ m[name] = v
+ }
+}
+
+func (tree JSONTree) Print(out io.Writer) error {
+ data, err := json.MarshalIndent(tree, "", "\t")
+ if err != nil {
+ log.Panicf("internal error: JSON marshaling failed: %v", err)
+ }
+ _, err = fmt.Fprintf(out, "%s\n", data)
+ return err
+}
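// Example (editor's sketch, not part of this change): accumulating
// per-package, per-analyzer results and emitting them as JSON; the
// results slice and its fields are hypothetical driver state.
//
//	tree := make(JSONTree)
//	for _, res := range results {
//		tree.Add(fset, res.pkgID, res.analyzer, res.diags, res.err)
//	}
//	if err := tree.Print(os.Stdout); err != nil {
//		log.Fatal(err)
//	}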
--- /dev/null
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package driverutil
+
+// This file defines helpers for implementing [analysis.Pass.ReadFile].
+
+import (
+ "fmt"
+ "slices"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// A ReadFileFunc is a function that returns the
+// contents of a file, such as [os.ReadFile].
+type ReadFileFunc = func(filename string) ([]byte, error)
+
+// CheckedReadFile returns a wrapper around a Pass.ReadFile
+// function that performs the appropriate checks.
+func CheckedReadFile(pass *analysis.Pass, readFile ReadFileFunc) ReadFileFunc {
+ return func(filename string) ([]byte, error) {
+ if err := CheckReadable(pass, filename); err != nil {
+ return nil, err
+ }
+ return readFile(filename)
+ }
+}
+
+// CheckReadable enforces the access policy defined by the ReadFile field of [analysis.Pass].
+func CheckReadable(pass *analysis.Pass, filename string) error {
+ if slices.Contains(pass.OtherFiles, filename) ||
+ slices.Contains(pass.IgnoredFiles, filename) {
+ return nil
+ }
+ for _, f := range pass.Files {
+ if pass.Fset.File(f.FileStart).Name() == filename {
+ return nil
+ }
+ }
+ return fmt.Errorf("Pass.ReadFile: %s is not among OtherFiles, IgnoredFiles, or names of Files", filename)
+}
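// Example (editor's sketch, not part of this change): a driver could
// install a checked reader on the pass it constructs, so that analyzers
// may read only files belonging to the package being analyzed:
//
//	pass.ReadFile = CheckedReadFile(pass, os.ReadFile)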
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package analysisflags
+package driverutil
import (
"fmt"
-// Copyright 2020 The Go Authors. All rights reserved.
+// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// Package analysisinternal provides helper functions for use in both
-// the analysis drivers in go/analysis and gopls, and in various
-// analyzers.
-//
-// TODO(adonovan): this is not ideal as it may lead to unnecessary
-// dependencies between drivers and analyzers. Split into analyzerlib
-// and driverlib?
-package analysisinternal
+package driverutil
+
+// This file defines the validation of SuggestedFixes.
import (
"cmp"
"fmt"
"go/token"
- "os"
"slices"
"golang.org/x/tools/go/analysis"
)
-// ReadFile reads a file and adds it to the FileSet in pass
-// so that we can report errors against it using lineStart.
-func ReadFile(pass *analysis.Pass, filename string) ([]byte, *token.File, error) {
- readFile := pass.ReadFile
- if readFile == nil {
- readFile = os.ReadFile
- }
- content, err := readFile(filename)
- if err != nil {
- return nil, nil, err
- }
- tf := pass.Fset.AddFile(filename, -1, len(content))
- tf.SetLinesForContent(content)
- return content, tf, nil
-}
-
-// A ReadFileFunc is a function that returns the
-// contents of a file, such as [os.ReadFile].
-type ReadFileFunc = func(filename string) ([]byte, error)
-
-// CheckedReadFile returns a wrapper around a Pass.ReadFile
-// function that performs the appropriate checks.
-func CheckedReadFile(pass *analysis.Pass, readFile ReadFileFunc) ReadFileFunc {
- return func(filename string) ([]byte, error) {
- if err := CheckReadable(pass, filename); err != nil {
- return nil, err
- }
- return readFile(filename)
- }
-}
-
-// CheckReadable enforces the access policy defined by the ReadFile field of [analysis.Pass].
-func CheckReadable(pass *analysis.Pass, filename string) error {
- if slices.Contains(pass.OtherFiles, filename) ||
- slices.Contains(pass.IgnoredFiles, filename) {
- return nil
- }
- for _, f := range pass.Files {
- if pass.Fset.File(f.FileStart).Name() == filename {
- return nil
- }
- }
- return fmt.Errorf("Pass.ReadFile: %s is not among OtherFiles, IgnoredFiles, or names of Files", filename)
-}
-
// ValidateFixes validates the set of fixes for a single diagnostic.
// Any error indicates a bug in the originating analyzer.
//
return nil
}
-
-// Range returns an [analysis.Range] for the specified start and end positions.
-func Range(pos, end token.Pos) analysis.Range {
- return tokenRange{pos, end}
-}
-
-// tokenRange is an implementation of the [analysis.Range] interface.
-type tokenRange struct{ StartPos, EndPos token.Pos }
-
-func (r tokenRange) Pos() token.Pos { return r.StartPos }
-func (r tokenRange) End() token.Pos { return r.EndPos }
var Analyzer = &analysis.Analyzer{
Name: "typeindex",
Doc: "indexes of type information for later passes",
- URL: "https://pkg.go.dev/golang.org/x/tools/internal/analysisinternal/typeindex",
+ URL: "https://pkg.go.dev/golang.org/x/tools/internal/analysis/typeindex",
Run: func(pass *analysis.Pass) (any, error) {
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
return typeindex.New(inspect, pass.Pkg, pass.TypesInfo), nil
},
RunDespiteErrors: true,
Requires: []*analysis.Analyzer{inspect.Analyzer},
- ResultType: reflect.TypeOf(new(typeindex.Index)),
+ ResultType: reflect.TypeFor[*typeindex.Index](),
}
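// Example (editor's sketch, not part of this change): a client analyzer
// requires this analyzer and retrieves the index from ResultOf. The
// import alias typeindexanalyzer for this analyzer's package is an
// assumption; typeindex is the package providing *typeindex.Index.
//
//	var client = &analysis.Analyzer{
//		Name:     "client",
//		Doc:      "...",
//		Requires: []*analysis.Analyzer{typeindexanalyzer.Analyzer},
//		Run: func(pass *analysis.Pass) (any, error) {
//			index := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
//			_ = index // query the index here
//			return nil, nil
//		},
//	}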
+++ /dev/null
-// Copyright 2025 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package generated defines an analyzer whose result makes it
-// convenient to skip diagnostics within generated files.
-package generated
-
-import (
- "go/ast"
- "go/token"
- "reflect"
-
- "golang.org/x/tools/go/analysis"
-)
-
-var Analyzer = &analysis.Analyzer{
- Name: "generated",
- Doc: "detect which Go files are generated",
- URL: "https://pkg.go.dev/golang.org/x/tools/internal/analysisinternal/generated",
- ResultType: reflect.TypeFor[*Result](),
- Run: func(pass *analysis.Pass) (any, error) {
- set := make(map[*token.File]bool)
- for _, file := range pass.Files {
- if ast.IsGenerated(file) {
- set[pass.Fset.File(file.FileStart)] = true
- }
- }
- return &Result{fset: pass.Fset, generatedFiles: set}, nil
- },
-}
-
-type Result struct {
- fset *token.FileSet
- generatedFiles map[*token.File]bool
-}
-
-// IsGenerated reports whether the position is within a generated file.
-func (r *Result) IsGenerated(pos token.Pos) bool {
- return r.generatedFiles[r.fset.File(pos)]
-}
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// Copied, with considerable changes, from go/parser/resolver.go
-// at af53bd2c03.
-
-package inline
+// Package free defines utilities for computing the free variables of
+// a syntax tree without type information. This is inherently
+// heuristic because of the T{f: x} ambiguity, in which f may or may
+// not be a lexical reference depending on whether T is a struct type.
+package free
import (
"go/ast"
"go/token"
)
-// freeishNames computes an approximation to the free names of the AST
-// at node n based solely on syntax, inserting values into the map.
+// Copied, with considerable changes, from go/parser/resolver.go
+// at af53bd2c03.
+
+// Names computes an approximation to the set of free names of the AST
+// at node n based solely on syntax.
//
// In the absence of composite literals, the set of free names is exact. Composite
// literals introduce an ambiguity that can only be resolved with type information:
// a struct type, so Names underapproximates: the resulting set
// may omit names that are free lexical references.
//
+// TODO(adonovan): includeComplitIdents is a crude hammer: the caller
+// may have partial or heuristic information about whether a given T
+// is a struct type. Replace includeComplitIdents with a hook to query
+// the caller.
+//
// The code is based on go/parser.resolveFile, but heavily simplified. Crucial
// differences are:
// - Instead of resolving names to their objects, this function merely records
// whether they are free.
// - Labels are ignored: they do not refer to values.
-// - This is never called on FuncDecls or ImportSpecs, so the function
-// panics if it sees one.
-func freeishNames(free map[string]bool, n ast.Node, includeComplitIdents bool) {
- v := &freeVisitor{free: free, includeComplitIdents: includeComplitIdents}
+// - This is never called on ImportSpecs, so the function panics if it sees one.
+func Names(n ast.Node, includeComplitIdents bool) map[string]bool {
+ v := &freeVisitor{
+ free: make(map[string]bool),
+ includeComplitIdents: includeComplitIdents,
+ }
// Begin with a scope, even though n might not be a form that establishes a scope.
// For example, n might be:
// x := ...
v.openScope()
ast.Walk(v, n)
v.closeScope()
- assert(v.scope == nil, "unbalanced scopes")
+ if v.scope != nil {
+ panic("unbalanced scopes")
+ }
+ return v.free
}
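// Example (editor's sketch, not part of this change): from a client of
// this package, the free names of the expression "f(x) + y" are f, x,
// and y.
//
//	expr, _ := parser.ParseExpr("f(x) + y")
//	names := free.Names(expr, true)
//	// names == map[string]bool{"f": true, "x": true, "y": true}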
// A freeVisitor holds state for a free-name analysis.
// Expressions.
case *ast.Ident:
- v.resolve(n)
+ v.use(n)
case *ast.FuncLit:
v.openScope()
defer v.closeScope()
- v.walkFuncType(n.Type)
+ v.walkFuncType(nil, n.Type)
v.walkBody(n.Body)
case *ast.SelectorExpr:
case *ast.FuncType:
v.openScope()
defer v.closeScope()
- v.walkFuncType(n)
+ v.walkFuncType(nil, n)
case *ast.CompositeLit:
v.walk(n.Type)
if v.includeComplitIdents {
// Over-approximate by treating both cases as potentially
// free names.
- v.resolve(ident)
+ v.use(ident)
} else {
// Under-approximate by ignoring potentially free names.
}
}
case *ast.LabeledStmt:
- // ignore labels
- // TODO(jba): consider labels?
+ // Ignore labels.
v.walk(n.Stmt)
case *ast.BranchStmt:
// Ignore labels.
- // TODO(jba): consider labels?
case *ast.BlockStmt:
v.openScope()
v.walkBody(n.Body)
case *ast.TypeSwitchStmt:
+ v.openScope()
+ defer v.closeScope()
if n.Init != nil {
- v.openScope()
- defer v.closeScope()
v.walk(n.Init)
}
- v.openScope()
- defer v.closeScope()
v.walk(n.Assign)
// We can use walkBody here because we don't track label scopes.
v.walkBody(n.Body)
for _, spec := range n.Specs {
spec := spec.(*ast.ValueSpec)
walkSlice(v, spec.Values)
- if spec.Type != nil {
- v.walk(spec.Type)
- }
+ v.walk(spec.Type)
v.declare(spec.Names...)
}
+
case token.TYPE:
for _, spec := range n.Specs {
spec := spec.(*ast.TypeSpec)
}
case *ast.FuncDecl:
- panic("encountered top-level function declaration in free analysis")
+ if n.Recv == nil && n.Name.Name != "init" { // package-level function
+ v.declare(n.Name)
+ }
+ v.openScope()
+ defer v.closeScope()
+ v.walkTypeParams(n.Type.TypeParams)
+ v.walkFuncType(n.Recv, n.Type)
+ v.walkBody(n.Body)
default:
return v
return nil
}
-func (r *freeVisitor) openScope() {
- r.scope = &scope{map[string]bool{}, r.scope}
+func (v *freeVisitor) openScope() {
+ v.scope = &scope{map[string]bool{}, v.scope}
}
-func (r *freeVisitor) closeScope() {
- r.scope = r.scope.outer
+func (v *freeVisitor) closeScope() {
+ v.scope = v.scope.outer
}
-func (r *freeVisitor) walk(n ast.Node) {
+func (v *freeVisitor) walk(n ast.Node) {
if n != nil {
- ast.Walk(r, n)
+ ast.Walk(v, n)
}
}
-// walkFuncType walks a function type. It is used for explicit
-// function types, like this:
-//
-// type RunFunc func(context.Context) error
-//
-// and function literals, like this:
-//
-// func(a, b int) int { return a + b}
-//
-// neither of which have type parameters.
-// Function declarations do involve type parameters, but we don't
-// handle them.
-func (r *freeVisitor) walkFuncType(typ *ast.FuncType) {
- // The order here doesn't really matter, because names in
- // a field list cannot appear in types.
- // (The situation is different for type parameters, for which
- // see [freeVisitor.walkTypeParams].)
- r.resolveFieldList(typ.Params)
- r.resolveFieldList(typ.Results)
- r.declareFieldList(typ.Params)
- r.declareFieldList(typ.Results)
+func (v *freeVisitor) walkFuncType(recv *ast.FieldList, typ *ast.FuncType) {
+ // First use field types...
+ v.walkRecvFieldType(recv)
+ v.walkFieldTypes(typ.Params)
+ v.walkFieldTypes(typ.Results)
+
+ // ...then declare field names.
+ v.declareFieldNames(recv)
+ v.declareFieldNames(typ.Params)
+ v.declareFieldNames(typ.Results)
+}
+
+// walkRecvFieldType walks a receiver field list. A receiver field is not
+// like a param or result field because "func (recv R[T]) method()" uses R
+// but declares T.
+func (v *freeVisitor) walkRecvFieldType(list *ast.FieldList) {
+ if list == nil {
+ return
+ }
+ for _, f := range list.List { // a valid receiver list has exactly one field
+ typ := f.Type
+ if ptr, ok := typ.(*ast.StarExpr); ok {
+ typ = ptr.X
+ }
+
+ // Analyze receiver type as Base[Index, ...]
+ var (
+ base ast.Expr
+ indices []ast.Expr
+ )
+ switch typ := typ.(type) {
+ case *ast.IndexExpr: // B[T]
+ base, indices = typ.X, []ast.Expr{typ.Index}
+ case *ast.IndexListExpr: // B[K, V]
+ base, indices = typ.X, typ.Indices
+ default: // B
+ base = typ
+ }
+ for _, expr := range indices {
+ if id, ok := expr.(*ast.Ident); ok {
+ v.declare(id)
+ }
+ }
+ v.walk(base)
+ }
}
// walkTypeParams is like walkFieldList, but declares type parameters eagerly so
// that they may be resolved in the constraint expressions held in the field
// Type.
-func (r *freeVisitor) walkTypeParams(list *ast.FieldList) {
- r.declareFieldList(list)
- r.resolveFieldList(list)
+func (v *freeVisitor) walkTypeParams(list *ast.FieldList) {
+ v.declareFieldNames(list)
+ v.walkFieldTypes(list) // constraints
}
-func (r *freeVisitor) walkBody(body *ast.BlockStmt) {
+func (v *freeVisitor) walkBody(body *ast.BlockStmt) {
if body == nil {
return
}
- walkSlice(r, body.List)
+ walkSlice(v, body.List)
}
-func (r *freeVisitor) walkFieldList(list *ast.FieldList) {
+func (v *freeVisitor) walkFieldList(list *ast.FieldList) {
if list == nil {
return
}
- r.resolveFieldList(list) // .Type may contain references
- r.declareFieldList(list) // .Names declares names
+ v.walkFieldTypes(list) // .Type may contain references
+ v.declareFieldNames(list) // .Names declares names
}
-func (r *freeVisitor) shortVarDecl(lhs []ast.Expr) {
+func (v *freeVisitor) shortVarDecl(lhs []ast.Expr) {
// Go spec: A short variable declaration may redeclare variables provided
// they were originally declared in the same block with the same type, and
// at least one of the non-blank variables is new.
// In a well-formed program each expr must be an identifier,
// but be forgiving.
if id, ok := x.(*ast.Ident); ok {
- r.declare(id)
+ v.declare(id)
}
}
}
}
}
-// resolveFieldList resolves the types of the fields in list.
-// The companion method declareFieldList declares the names of the fields.
-func (r *freeVisitor) resolveFieldList(list *ast.FieldList) {
- if list == nil {
- return
- }
- for _, f := range list.List {
- r.walk(f.Type)
+// walkFieldTypes walks the types of the fields in list.
+// The companion method declareFieldNames declares the names of the fields.
+func (v *freeVisitor) walkFieldTypes(list *ast.FieldList) {
+ if list != nil {
+ for _, f := range list.List {
+ v.walk(f.Type)
+ }
}
}
-// declareFieldList declares the names of the fields in list.
+// declareFieldNames declares the names of the fields in list.
// (Names in a FieldList always establish new bindings.)
// The companion method walkFieldTypes walks the types of the fields.
-func (r *freeVisitor) declareFieldList(list *ast.FieldList) {
- if list == nil {
- return
- }
- for _, f := range list.List {
- r.declare(f.Names...)
+func (v *freeVisitor) declareFieldNames(list *ast.FieldList) {
+ if list != nil {
+ for _, f := range list.List {
+ v.declare(f.Names...)
+ }
}
}
-// resolve marks ident as free if it is not in scope.
-// TODO(jba): rename: no resolution is happening.
-func (r *freeVisitor) resolve(ident *ast.Ident) {
- if s := ident.Name; s != "_" && !r.scope.defined(s) {
- r.free[s] = true
+// use marks ident as free if it is not in scope.
+func (v *freeVisitor) use(ident *ast.Ident) {
+ if s := ident.Name; s != "_" && !v.scope.defined(s) {
+ v.free[s] = true
}
}
// declare adds each non-blank ident to the current scope.
-func (r *freeVisitor) declare(idents ...*ast.Ident) {
+func (v *freeVisitor) declare(idents ...*ast.Ident) {
for _, id := range idents {
if id.Name != "_" {
- r.scope.names[id.Name] = true
+ v.scope.names[id.Name] = true
}
}
}
// RangeInStringLiteral calculates the positional range within a string literal
// corresponding to the specified start and end byte offsets within the logical string.
-func RangeInStringLiteral(lit *ast.BasicLit, start, end int) (token.Pos, token.Pos, error) {
+func RangeInStringLiteral(lit *ast.BasicLit, start, end int) (Range, error) {
startPos, err := PosInStringLiteral(lit, start)
if err != nil {
- return 0, 0, fmt.Errorf("start: %v", err)
+ return Range{}, fmt.Errorf("start: %v", err)
}
endPos, err := PosInStringLiteral(lit, end)
if err != nil {
- return 0, 0, fmt.Errorf("end: %v", err)
+ return Range{}, fmt.Errorf("end: %v", err)
}
- return startPos, endPos, nil
+ return Range{startPos, endPos}, nil
}
// PosInStringLiteral returns the position within a string literal
package astutil
import (
+ "fmt"
"go/ast"
"go/printer"
"go/token"
}
// NodeContains reports whether the Pos/End range of node n encloses
-// the given position pos.
+// the given range.
//
// It is inclusive of both end points, to allow hovering (etc) when
// the cursor is immediately after a node.
//
-// For unfortunate historical reasons, the Pos/End extent of an
-// ast.File runs from the start of its package declaration---excluding
-// copyright comments, build tags, and package documentation---to the
-// end of its last declaration, excluding any trailing comments. So,
-// as a special case, if n is an [ast.File], NodeContains uses
-// n.FileStart <= pos && pos <= n.FileEnd to report whether the
-// position lies anywhere within the file.
+// Like [NodeRange], it treats the range of an [ast.File] as the
+// file's complete extent.
//
// Precondition: n must not be nil.
-func NodeContains(n ast.Node, pos token.Pos) bool {
- var start, end token.Pos
- if file, ok := n.(*ast.File); ok {
- start, end = file.FileStart, file.FileEnd // entire file
- } else {
- start, end = n.Pos(), n.End()
- }
- return start <= pos && pos <= end
+func NodeContains(n ast.Node, rng Range) bool {
+ return NodeRange(n).Contains(rng)
+}
+
+// NodeContainsPos reports whether the Pos/End range of node n encloses
+// the given position pos.
+//
+// Like [NodeRange], it treats the range of an [ast.File] as the
+// file's complete extent.
+func NodeContainsPos(n ast.Node, pos token.Pos) bool {
+ return NodeRange(n).ContainsPos(pos)
}
// IsChildOf reports whether cur.ParentEdge is ek.
printer.Fprint(&buf, fset, n) // ignore errors
return buf.String()
}
+
+// -- Range --
+
+// Range is a Pos interval.
+// It implements [analysis.Range] and [ast.Node].
+type Range struct{ Start, EndPos token.Pos }
+
+// RangeOf constructs a Range.
+//
+// RangeOf exists to pacify the "unkeyed literal" (composites) vet
+// check. It would be nice if there were a way for a type to add
+// itself to the allowlist.
+func RangeOf(start, end token.Pos) Range { return Range{start, end} }
+
+// NodeRange returns the extent of node n as a Range.
+//
+// For unfortunate historical reasons, the Pos/End extent of an
+// ast.File runs from the start of its package declaration---excluding
+// copyright comments, build tags, and package documentation---to the
+// end of its last declaration, excluding any trailing comments. So,
+// as a special case, if n is an [ast.File], NodeRange returns the
+// range [n.FileStart, n.FileEnd], covering the complete extent of
+// the file.
+func NodeRange(n ast.Node) Range {
+ if file, ok := n.(*ast.File); ok {
+ return Range{file.FileStart, file.FileEnd} // entire file
+ }
+ return Range{n.Pos(), n.End()}
+}
+
+func (r Range) Pos() token.Pos { return r.Start }
+func (r Range) End() token.Pos { return r.EndPos }
+
+// ContainsPos reports whether the range (inclusive of both end points)
+// includes the specified position.
+func (r Range) ContainsPos(pos token.Pos) bool {
+ return r.Contains(RangeOf(pos, pos))
+}
+
+// Contains reports whether the range (inclusive of both end points)
+// includes the specified range.
+func (r Range) Contains(rng Range) bool {
+ return r.Start <= rng.Start && rng.EndPos <= r.EndPos
+}
+
+// IsValid reports whether the range is valid.
+func (r Range) IsValid() bool { return r.Start.IsValid() && r.Start <= r.EndPos }
+
+// --
+
+// Select returns the syntax nodes identified by a user's text
+// selection. It returns three cursors: the innermost node that wholly
+// encloses the selection, and the first and last nodes that are
+// wholly enclosed by the selection.
+//
+// For example, given this selection:
+//
+// { f(); g(); /* comment */ }
+// ~~~~~~~~~~~
+//
+// Select returns the enclosing BlockStmt, the f() CallExpr, and the g() CallExpr.
+//
+// Callers that require exactly one syntax tree (e.g. just f() or just
+// g()) should check that the returned start and end nodes are
+// identical.
+//
+// This function is intended to be called early in the handling of a
+// user's request, since it is tolerant of sloppy selection including
+// extraneous whitespace and comments. Use it in new code instead of
+// PathEnclosingInterval. When the exact extent of a node is known,
+// use [Cursor.FindByPos] instead.
+func Select(curFile inspector.Cursor, start, end token.Pos) (_enclosing, _start, _end inspector.Cursor, _ error) {
+ curEnclosing, ok := curFile.FindByPos(start, end)
+ if !ok {
+ return noCursor, noCursor, noCursor, fmt.Errorf("invalid selection")
+ }
+
+ // Find the first and last node wholly within the (start, end) range.
+ // We'll narrow the effective selection to them, to exclude whitespace.
+ // (This matches the functionality of PathEnclosingInterval.)
+ var curStart, curEnd inspector.Cursor
+ rng := RangeOf(start, end)
+ for cur := range curEnclosing.Preorder() {
+ if rng.Contains(NodeRange(cur.Node())) {
+ // The start node has the least Pos.
+ if !CursorValid(curStart) {
+ curStart = cur
+ }
+ // The end node has the greatest End.
+ // End positions do not change monotonically,
+ // so we must compute the max.
+ if !CursorValid(curEnd) ||
+ cur.Node().End() > curEnd.Node().End() {
+ curEnd = cur
+ }
+ }
+ }
+ if !CursorValid(curStart) {
+ return noCursor, noCursor, noCursor, fmt.Errorf("no syntax selected")
+ }
+ return curEnclosing, curStart, curEnd, nil
+}
+
+// CursorValid reports whether the cursor is valid.
+//
+// A valid cursor may nonetheless be the virtual root node,
+// cur.Inspector().Root(), which has no [Cursor.Node].
+//
+// TODO(adonovan): move to cursorutil package, and move that package into x/tools.
+// Ultimately, make this a method of Cursor. Needs a proposal.
+func CursorValid(cur inspector.Cursor) bool {
+ return cur.Inspector() != nil
+}
+
+var noCursor inspector.Cursor
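// Example (editor's sketch, not part of this change): resolving a user
// selection to syntax; curFile is assumed to be the Cursor for the
// *ast.File, and start/end the selection's token.Pos bounds.
//
//	curEnclosing, curStart, curEnd, err := Select(curFile, start, end)
//	if err != nil {
//		return err // no syntax selected
//	}
//	if curStart.Node() == curEnd.Node() {
//		// exactly one node is selected
//	}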
--- /dev/null
+// Copyright 2025 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package cfginternal exposes internals of go/cfg.
+// It cannot actually depend on symbols from go/cfg.
+package cfginternal
+
+// IsNoReturn exposes (*cfg.CFG).noReturn to the ctrlflow analyzer.
+// TODO(adonovan): add CFG.NoReturn to the public API.
+//
+// You must link [golang.org/x/tools/go/cfg] into your application for
+// this function to be usable; otherwise it panics.
+var IsNoReturn = func(cfg any) bool {
+ panic("golang.org/x/tools/go/cfg not linked into application")
+}
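// Example (editor's sketch, not part of this change): a client such as
// the ctrlflow analyzer builds a CFG and consults the hook; funcDecl and
// mayReturn are hypothetical, and the cfg and cfginternal imports are
// assumed.
//
//	g := cfg.New(funcDecl.Body, mayReturn) // *cfg.CFG
//	if cfginternal.IsNoReturn(g) {
//		// the function cannot return normally
//	}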
ReplStart, ReplEnd int // offset of replacement text in B
}
-// DiffStrings returns the differences between two strings.
-// It does not respect rune boundaries.
-func DiffStrings(a, b string) []Diff { return diff(stringSeqs{a, b}) }
-
// DiffBytes returns the differences between two byte sequences.
// It does not respect rune boundaries.
func DiffBytes(a, b []byte) []Diff { return diff(bytesSeqs{a, b}) }
case typesinternal.NamedOrAlias: // *types.{Named,Alias}
// Add the type arguments if this is an instance.
if targs := T.TypeArgs(); targs.Len() > 0 {
- for i := 0; i < targs.Len(); i++ {
- addType(targs.At(i))
+ for t := range targs.Types() {
+ addType(t)
}
}
// common aspects
addObj(T.Obj())
if tparams := T.TypeParams(); tparams.Len() > 0 {
- for i := 0; i < tparams.Len(); i++ {
- addType(tparams.At(i))
+ for tparam := range tparams.TypeParams() {
+ addType(tparam)
}
}
addType(aliases.Rhs(T))
case *types.Named:
addType(T.Underlying())
- for i := 0; i < T.NumMethods(); i++ {
- addObj(T.Method(i))
+ for method := range T.Methods() {
+ addObj(method)
}
}
}
addType(T.Params())
addType(T.Results())
if tparams := T.TypeParams(); tparams != nil {
- for i := 0; i < tparams.Len(); i++ {
- addType(tparams.At(i))
+ for tparam := range tparams.TypeParams() {
+ addType(tparam)
}
}
case *types.Struct:
- for i := 0; i < T.NumFields(); i++ {
- addObj(T.Field(i))
+ for field := range T.Fields() {
+ addObj(field)
}
case *types.Tuple:
- for i := 0; i < T.Len(); i++ {
- addObj(T.At(i))
+ for v := range T.Variables() {
+ addObj(v)
}
case *types.Interface:
- for i := 0; i < T.NumMethods(); i++ {
- addObj(T.Method(i))
+ for method := range T.Methods() {
+ addObj(method)
}
- for i := 0; i < T.NumEmbeddeds(); i++ {
- addType(T.EmbeddedType(i)) // walk Embedded for implicits
+ for etyp := range T.EmbeddedTypes() {
+ addType(etyp) // walk Embedded for implicits
}
case *types.Union:
- for i := 0; i < T.Len(); i++ {
- addType(T.Term(i).Type())
+ for term := range T.Terms() {
+ addType(term.Type())
}
case *types.TypeParam:
if !typs[T] {
ErrorsAsTypeModernizer *analysis.Analyzer // = modernize.errorsastypeAnalyzer
StdIteratorsModernizer *analysis.Analyzer // = modernize.stditeratorsAnalyzer
PlusBuildModernizer *analysis.Analyzer // = modernize.plusbuildAnalyzer
+ StringsCutModernizer *analysis.Analyzer // = modernize.stringscutAnalyzer
)
}
}
+// filterPos returns the leftmost comment position after start and the
+// rightmost comment end at or before end, and reports whether it found
+// either one.
+func filterPos(nds []*ast.Comment, start, end token.Pos) (token.Pos, token.Pos, bool) {
+ l, r := end, token.NoPos
+ ok := false
+ for _, n := range nds {
+ if n.Pos() > start && n.Pos() < l {
+ l = n.Pos()
+ ok = true
+ }
+ if n.End() <= end && n.End() > r {
+ r = n.End()
+ ok = true
+ }
+ }
+ return l, r, ok
+}
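// Worked example (editor's note, not part of this change): for two
// comments spanning [5,10) and [30,36),
//
//	filterPos(comments, 3, 40) // returns (5, 36, true)
//
// i.e. the leftmost comment start after 3 and the rightmost comment end
// at or before 40.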
+
// DeleteStmt returns the edits to remove the [ast.Stmt] identified by
-// curStmt, if it is contained within a BlockStmt, CaseClause,
-// CommClause, or is the STMT in switch STMT; ... {...}. It returns nil otherwise.
-func DeleteStmt(tokFile *token.File, curStmt inspector.Cursor) []analysis.TextEdit {
- stmt := curStmt.Node().(ast.Stmt)
- // if the stmt is on a line by itself delete the whole line
- // otherwise just delete the statement.
-
- // this logic would be a lot simpler with the file contents, and somewhat simpler
- // if the cursors included the comments.
-
- lineOf := tokFile.Line
- stmtStartLine, stmtEndLine := lineOf(stmt.Pos()), lineOf(stmt.End())
-
- var from, to token.Pos
- // bounds of adjacent syntax/comments on same line, if any
- limits := func(left, right token.Pos) {
+// curStmt if it recognizes the context. It returns nil otherwise.
+// TODO(pjw, adonovan): it should not return nil, it should return an error
+//
+// DeleteStmt is called with just the AST so it has trouble deciding if
+// a comment is associated with the statement to be deleted. For instance,
+//
+// for /*A*/ init()/*B*/;/*C*/cond()/*D*/;/*E*/post() /*F*/ { /*G*/ }
+//
+// comments B and C are indistinguishable, as are D and E. That is, as the
+// AST does not say where the semicolons are, B and C could go either
+// with the init() or the cond(), so cannot be removed safely. The same
+// is true for D, E, and the post(). (And there are other similar cases.)
+// But the other comments can be removed as they are unambiguously
+// associated with the statement being deleted. In particular,
+// it removes whole lines like
+//
+// stmt // comment
+func DeleteStmt(file *token.File, curStmt inspector.Cursor) []analysis.TextEdit {
+ // If the stmt is on a line (or range of lines) by itself, delete the whole
+ // thing, including comments. Except for the heads of switches, type
+ // switches, and for-statements, that's the usual case. Complexity arises
+ // when there are multiple statements, or adjacent comments, on the same line.
+
+ // In that case we remove only the comments that unambiguously belong to the
+ // statement: in me()/*A*/;b(), comment A cannot be removed, because the AST
+ // is indistinguishable from me();/*A*/b(),
+ // and the same is true for cases like switch me()/*A*/; x.(type) {
+
+ // This would be more precise with the file contents, or if the AST
+ // recorded the locations of semicolons.
+ var (
+ stmt = curStmt.Node().(ast.Stmt)
+ tokFile = file
+ lineOf = tokFile.Line
+ stmtStartLine = lineOf(stmt.Pos())
+ stmtEndLine = lineOf(stmt.End())
+
+ leftSyntax, rightSyntax token.Pos // pieces of parent node on stmt{Start,End}Line
+ leftComments, rightComments []*ast.Comment // comments before/after stmt on the same line
+ )
+
+ // remember the Pos that are on the same line as stmt
+ use := func(left, right token.Pos) {
if lineOf(left) == stmtStartLine {
- from = left
+ leftSyntax = left
}
if lineOf(right) == stmtEndLine {
- to = right
+ rightSyntax = right
}
}
- // TODO(pjw): there are other places a statement might be removed:
- // IfStmt = "if" [ SimpleStmt ";" ] Expression Block [ "else" ( IfStmt | Block ) ] .
- // (removing the blocks requires more rewriting than this routine would do)
- // CommCase = "case" ( SendStmt | RecvStmt ) | "default" .
- // (removing the stmt requires more rewriting, and it's unclear what the user means)
- switch parent := curStmt.Parent().Node().(type) {
- case *ast.SwitchStmt:
- limits(parent.Switch, parent.Body.Lbrace)
- case *ast.TypeSwitchStmt:
- limits(parent.Switch, parent.Body.Lbrace)
- if parent.Assign == stmt {
- return nil // don't let the user break the type switch
+
+ // find the comments, if any, on the same line
+Big:
+ for _, cg := range astutil.EnclosingFile(curStmt).Comments {
+ for _, co := range cg.List {
+ if lineOf(co.End()) < stmtStartLine {
+ continue
+ } else if lineOf(co.Pos()) > stmtEndLine {
+ break Big // no more are possible
+ }
+ if lineOf(co.End()) == stmtStartLine && co.End() <= stmt.Pos() {
+ // comment is before the statement
+ leftComments = append(leftComments, co)
+ } else if lineOf(co.Pos()) == stmtEndLine && co.Pos() >= stmt.End() {
+ // comment is after the statement
+ rightComments = append(rightComments, co)
+ }
}
+ }
+
+ // find any other syntax on the same line
+ var (
+ leftStmt, rightStmt token.Pos // end/start positions of sibling statements in a []Stmt list
+ inStmtList = false
+ curParent = curStmt.Parent()
+ )
+ switch parent := curParent.Node().(type) {
case *ast.BlockStmt:
- limits(parent.Lbrace, parent.Rbrace)
+ use(parent.Lbrace, parent.Rbrace)
+ inStmtList = true
+ case *ast.CaseClause:
+ use(parent.Colon, curStmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace)
+ inStmtList = true
case *ast.CommClause:
- limits(parent.Colon, curStmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace)
if parent.Comm == stmt {
return nil // maybe the user meant to remove the entire CommClause?
}
- case *ast.CaseClause:
- limits(parent.Colon, curStmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace)
+ use(parent.Colon, curStmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace)
+ inStmtList = true
case *ast.ForStmt:
- limits(parent.For, parent.Body.Lbrace)
-
+ use(parent.For, parent.Body.Lbrace)
+ // special handling: unlike a BlockStmt, the init;cond;post header is not a statement list
+ if parent.Init != nil && parent.Cond != nil && stmt == parent.Init && lineOf(parent.Cond.Pos()) == lineOf(stmt.End()) {
+ rightStmt = parent.Cond.Pos()
+ } else if parent.Post != nil && parent.Cond != nil && stmt == parent.Post && lineOf(parent.Cond.End()) == lineOf(stmt.Pos()) {
+ leftStmt = parent.Cond.End()
+ }
+ case *ast.IfStmt:
+ switch stmt {
+ case parent.Init:
+ use(parent.If, parent.Body.Lbrace)
+ case parent.Else:
+ // stmt is the {...} in "if cond {} else {...}" and removing
+ // it would require removing the 'else' keyword, but the ast
+ // does not contain its position.
+ return nil
+ }
+ case *ast.SwitchStmt:
+ use(parent.Switch, parent.Body.Lbrace)
+ case *ast.TypeSwitchStmt:
+ if stmt == parent.Assign {
+ return nil // don't remove .(type)
+ }
+ use(parent.Switch, parent.Body.Lbrace)
default:
return nil // not one of ours
}
- if prev, found := curStmt.PrevSibling(); found && lineOf(prev.Node().End()) == stmtStartLine {
- from = prev.Node().End() // preceding statement ends on same line
- }
- if next, found := curStmt.NextSibling(); found && lineOf(next.Node().Pos()) == stmtEndLine {
- to = next.Node().Pos() // following statement begins on same line
- }
- // and now for the comments
-Outer:
- for _, cg := range astutil.EnclosingFile(curStmt).Comments {
- for _, co := range cg.List {
- if lineOf(co.End()) < stmtStartLine {
- continue
- } else if lineOf(co.Pos()) > stmtEndLine {
- break Outer // no more are possible
- }
- if lineOf(co.End()) == stmtStartLine && co.End() < stmt.Pos() {
- if !from.IsValid() || co.End() > from {
- from = co.End()
- continue // maybe there are more
- }
- }
- if lineOf(co.Pos()) == stmtEndLine && co.Pos() > stmt.End() {
- if !to.IsValid() || co.Pos() < to {
- to = co.Pos()
- continue // maybe there are more
- }
+ if inStmtList {
+ // find the siblings, if any, on the same line
+ if prev, found := curStmt.PrevSibling(); found && lineOf(prev.Node().End()) == stmtStartLine {
+ if _, ok := prev.Node().(ast.Stmt); ok {
+ leftStmt = prev.Node().End() // preceding statement ends on same line
}
}
+ if next, found := curStmt.NextSibling(); found && lineOf(next.Node().Pos()) == stmtEndLine {
+ rightStmt = next.Node().Pos() // following statement begins on same line
+ }
}
- // if either from or to is valid, just remove the statement
- // otherwise remove the line
- edit := analysis.TextEdit{Pos: stmt.Pos(), End: stmt.End()}
- if from.IsValid() || to.IsValid() {
- // remove just the statement.
- // we can't tell if there is a ; or whitespace right after the statement
- // ideally we'd like to remove the former and leave the latter
- // (if gofmt has run, there likely won't be a ;)
- // In type switches we know there's a semicolon somewhere after the statement,
- // but the extra work for this special case is not worth it, as gofmt will fix it.
- return []analysis.TextEdit{edit}
- }
- // remove the whole line
- for lineOf(edit.Pos) == stmtStartLine {
- edit.Pos--
+
+ // compute the left and right limits of the edit
+ var leftEdit, rightEdit token.Pos
+ if leftStmt.IsValid() {
+ leftEdit = stmt.Pos() // can't remove preceding comments: a()/*A*/; me()
+ } else if leftSyntax.IsValid() {
+ // remove intervening leftComments
+ if a, _, ok := filterPos(leftComments, leftSyntax, stmt.Pos()); ok {
+ leftEdit = a
+ } else {
+ leftEdit = stmt.Pos()
+ }
+ } else { // remove whole line
+ for leftEdit = stmt.Pos(); lineOf(leftEdit) == stmtStartLine; leftEdit-- {
+ }
+ if leftEdit < stmt.Pos() {
+ leftEdit++ // beginning of line
+ }
}
- edit.Pos++ // get back tostmtStartLine
- for lineOf(edit.End) == stmtEndLine {
- edit.End++
+ if rightStmt.IsValid() {
+ rightEdit = stmt.End() // can't remove following comments
+ } else if rightSyntax.IsValid() {
+ // remove intervening rightComments
+ if _, b, ok := filterPos(rightComments, stmt.End(), rightSyntax); ok {
+ rightEdit = b
+ } else {
+ rightEdit = stmt.End()
+ }
+ } else { // remove whole line
+ fend := token.Pos(file.Base()) + token.Pos(file.Size())
+ for rightEdit = stmt.End(); fend >= rightEdit && lineOf(rightEdit) == stmtEndLine; rightEdit++ {
+ }
+ // don't remove the newline if other syntax remains earlier on the line
+ if leftSyntax.IsValid() || leftStmt.IsValid() {
+ rightEdit--
+ }
}
- return []analysis.TextEdit{edit}
+
+ return []analysis.TextEdit{{Pos: leftEdit, End: rightEdit}}
}
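// Example (editor's sketch, not part of this change): a fix producer
// might wrap the edits in a SuggestedFix; curStmt is assumed to be a
// Cursor positioned on the statement stmt to be removed.
//
//	if edits := DeleteStmt(pass.Fset.File(stmt.Pos()), curStmt); edits != nil {
//		pass.Report(analysis.Diagnostic{
//			Pos:     stmt.Pos(),
//			End:     stmt.End(),
//			Message: "statement is redundant",
//			SuggestedFixes: []analysis.SuggestedFix{{
//				Message:   "Delete statement",
//				TextEdits: edits,
//			}},
//		})
//	}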
// DeleteUnusedVars computes the edits required to delete the
// results of type analysis (does not reach go/types data structures)
PkgPath string // package path of declaring package
Name string // user-friendly name for error messages
+ GoVersion string // version of Go in effect in the callee's file
Unexported []string // names of free objects that are unexported
FreeRefs []freeRef // locations of references to free objects
FreeObjs []object // descriptions of free objects
return nil, fmt.Errorf("cannot inline function %s as it has no body", name)
}
+ // Record the file's Go version so that we don't
+ // inline newer code into a file using an older dialect.
+ //
+ // Using the file version is overly conservative.
+ // A more precise solution would be for the type checker to
+ // record which language features the callee actually needs;
+ // see https://go.dev/issue/75726.
+ //
+ // We don't have the ast.File handy, so instead of a
+ // lookup we must scan the entire FileVersions map.
+ var goVersion string
+ for file, v := range info.FileVersions {
+ if file.Pos() < decl.Pos() && decl.Pos() < file.End() {
+ goVersion = v
+ break
+ }
+ }
+
// Record the location of all free references in the FuncDecl.
// (Parameters are not free by this definition.)
var (
Content: content,
PkgPath: pkg.Path(),
Name: name,
+ GoVersion: goVersion,
Unexported: unexported,
FreeObjs: freeObjs,
FreeRefs: freeRefs,
if sig.Recv() != nil {
params = append(params, newParamInfo(sig.Recv(), false))
}
- for i := 0; i < sig.Params().Len(); i++ {
- params = append(params, newParamInfo(sig.Params().At(i), false))
+ for v := range sig.Params().Variables() {
+ params = append(params, newParamInfo(v, false))
}
- for i := 0; i < sig.Results().Len(); i++ {
- results = append(results, newParamInfo(sig.Results().At(i), true))
+ for v := range sig.Results().Variables() {
+ results = append(results, newParamInfo(v, true))
}
}
paramInfos := make(map[*types.TypeName]*paramInfo)
var params []*paramInfo
collect := func(tpl *types.TypeParamList) {
- for i := range tpl.Len() {
- typeName := tpl.At(i).Obj()
+ for tparam := range tpl.TypeParams() {
+ typeName := tparam.Obj()
info := ¶mInfo{Name: typeName.Name()}
params = append(params, info)
paramInfos[typeName] = info
return true, types.IsInterface(under.Elem()), false
case *types.Struct: // Struct{k: expr}
if id, _ := kv.Key.(*ast.Ident); id != nil {
- for fi := range under.NumFields() {
- field := under.Field(fi)
+ for field := range under.Fields() {
if info.Uses[id] == field {
return true, types.IsInterface(field.Type()), false
}
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/types/typeutil"
internalastutil "golang.org/x/tools/internal/astutil"
+ "golang.org/x/tools/internal/astutil/free"
"golang.org/x/tools/internal/packagepath"
"golang.org/x/tools/internal/typeparams"
"golang.org/x/tools/internal/typesinternal"
+ "golang.org/x/tools/internal/versions"
)
// A Caller describes the function call and its enclosing context.
// For simplicity we ignore existing dot imports, so that a qualified
// identifier (QI) in the callee is always represented by a QI in the caller,
// allowing us to treat a QI like a selection on a package name.
- is := &importState{
+ ist := &importState{
logf: logf,
caller: caller,
importMap: make(map[string][]string),
}
+ // Build an index of uses of each PkgName, to identify those used exactly once.
+ type pkgNameUse struct {
+ count int
+ id *ast.Ident // an arbitrary use
+ }
+ pkgNameUses := make(map[*types.PkgName]pkgNameUse)
+ for id, obj := range caller.Info.Uses {
+ if pkgname, ok := obj.(*types.PkgName); ok {
+ u := pkgNameUses[pkgname]
+ u.id = id
+ u.count++
+ pkgNameUses[pkgname] = u
+ }
+ }
+ // soleUse returns the ident that refers to pkgname, if there is exactly one.
+ soleUse := func(pkgname *types.PkgName) *ast.Ident {
+ u := pkgNameUses[pkgname]
+ if u.count == 1 {
+ return u.id
+ }
+ return nil
+ }
+
for _, imp := range caller.File.Imports {
if pkgName, ok := importedPkgName(caller.Info, imp); ok &&
pkgName.Name() != "." &&
// need this import. Doing so eagerly simplifies the resulting logic.
needed := true
sel, ok := ast.Unparen(caller.Call.Fun).(*ast.SelectorExpr)
- if ok && soleUse(caller.Info, pkgName) == sel.X {
+ if ok && soleUse(pkgName) == sel.X {
needed = false // no longer needed by caller
// Check to see if any of the inlined free objects need this package.
for _, obj := range callee.FreeObjs {
// return value holds these.
if needed {
path := pkgName.Imported().Path()
- is.importMap[path] = append(is.importMap[path], pkgName.Name())
+ ist.importMap[path] = append(ist.importMap[path], pkgName.Name())
} else {
- is.oldImports = append(is.oldImports, oldImport{pkgName: pkgName, spec: imp})
+ ist.oldImports = append(ist.oldImports, oldImport{pkgName: pkgName, spec: imp})
}
}
}
- return is
+ return ist
}
// importName finds an existing import name to use in a particular shadowing
// Since they are not relevant to removing unused imports, we instruct
// free.Names to omit composite-literal keys that are identifiers.
func trimNewImports(newImports []newImport, new ast.Node) []newImport {
- free := map[string]bool{}
const omitComplitIdents = false
- freeishNames(free, new, omitComplitIdents)
+ free := free.Names(new, omitComplitIdents)
var res []newImport
for _, ni := range newImports {
if free[ni.pkgName] {
callee.Name, callee.Unexported[0])
}
+ // Reject cross-file inlining if the callee requires a newer dialect of Go (#75726).
+ // (File versions default to types.Config.GoVersion, which is unset in many tests
+ // but should be populated by an analysis driver.)
+ callerGoVersion := caller.Info.FileVersions[caller.File]
+ if callerGoVersion != "" && callee.GoVersion != "" && versions.Before(callerGoVersion, callee.GoVersion) {
+ return nil, fmt.Errorf("cannot inline call to %s (declared using %s) into a file using %s",
+ callee.Name, callee.GoVersion, callerGoVersion)
+ }
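 // (Editor's note, illustrative: with callerGoVersion "go1.21" and
 // callee.GoVersion "go1.22", versions.Before reports true and the
 // inlining is rejected.)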
+
// -- analyze callee's free references in caller context --
// Compute syntax path enclosing Call, innermost first (Path[0]=Call),
pkg.Scope().Insert(types.NewTypeName(token.NoPos, pkg, typ.Name, types.Typ[typ.Kind]))
}
- // Declared constants and variables for for parameters.
+ // Declared constants and variables for parameters.
nconst := 0
for i, param := range params {
name := param.info.Name
// (caller syntax), so we can use type info.
// But Type is the untyped callee syntax,
// so we have to use a syntax-only algorithm.
- free := make(map[string]bool)
+ const includeComplitIdents = true
+ free := free.Names(spec.Type, includeComplitIdents)
for _, value := range spec.Values {
for name := range freeVars(caller.Info, value) {
free[name] = true
}
}
- const includeComplitIdents = true
- freeishNames(free, spec.Type, includeComplitIdents)
for name := range free {
if names[name] {
logf("binding decl would shadow free name %q", name)
assert(callIdx == -1, "malformed (duplicative) AST")
callIdx = i
for j, returnOperand := range returnOperands {
- freeishNames(freeNames, returnOperand, includeComplitIdents)
+ maps.Copy(freeNames, free.Names(returnOperand, includeComplitIdents))
rhs = append(rhs, returnOperand)
if resultInfo[j]&nonTrivialResult != 0 {
nonTrivial[i+j] = true
// We must clone before clearing positions, since e came from the caller.
expr = internalastutil.CloneNode(expr)
clearPositions(expr)
- freeishNames(freeNames, expr, includeComplitIdents)
+ maps.Copy(freeNames, free.Names(expr, includeComplitIdents))
rhs = append(rhs, expr)
}
}
return false
}
-// soleUse returns the ident that refers to obj, if there is exactly one.
-func soleUse(info *types.Info, obj types.Object) (sole *ast.Ident) {
- // This is not efficient, but it is called infrequently.
- for id, obj2 := range info.Uses {
- if obj2 == obj {
- if sole != nil {
- return nil // not unique
- }
- sole = id
- }
- }
- return sole
-}
-
type unit struct{} // for representing sets as maps
assert(info.Selections != nil, "types.Info.Selections is nil")
assert(info.Types != nil, "types.Info.Types is nil")
assert(info.Uses != nil, "types.Info.Uses is nil")
+ assert(info.FileVersions != nil, "types.Info.FileVersions is nil")
}
// intersects reports whether the maps' key sets intersect.
// FreshName returns the name of an identifier that is undefined
// at the specified position, based on the preferred name.
+//
+// TODO(adonovan): refine this to choose a fresh name only when there
+// would be a conflict with the existing declaration: it's fine to
+// redeclare a name in a narrower scope so long as there are no free
+// references to the outer name from within the narrower scope.
func FreshName(scope *types.Scope, pos token.Pos, preferred string) string {
newName := preferred
for i := 0; ; i++ {
}
var deps = [...]pkginfo{
- {"archive/tar", "\x03k\x03E;\x01\n\x01$\x01\x01\x02\x05\b\x02\x01\x02\x02\f"},
- {"archive/zip", "\x02\x04a\a\x03\x12\x021;\x01+\x05\x01\x0f\x03\x02\x0e\x04"},
- {"bufio", "\x03k\x83\x01D\x14"},
- {"bytes", "n*Y\x03\fG\x02\x02"},
+ {"archive/tar", "\x03n\x03E<\x01\n\x01$\x01\x01\x02\x05\b\x02\x01\x02\x02\f"},
+ {"archive/zip", "\x02\x04d\a\x03\x12\x021<\x01+\x05\x01\x0f\x03\x02\x0e\x04"},
+ {"bufio", "\x03n\x84\x01D\x14"},
+ {"bytes", "q*Z\x03\fG\x02\x02"},
{"cmp", ""},
- {"compress/bzip2", "\x02\x02\xed\x01A"},
- {"compress/flate", "\x02l\x03\x80\x01\f\x033\x01\x03"},
- {"compress/gzip", "\x02\x04a\a\x03\x14lT"},
- {"compress/lzw", "\x02l\x03\x80\x01"},
- {"compress/zlib", "\x02\x04a\a\x03\x12\x01m"},
- {"container/heap", "\xb3\x02"},
+ {"compress/bzip2", "\x02\x02\xf1\x01A"},
+ {"compress/flate", "\x02o\x03\x81\x01\f\x033\x01\x03"},
+ {"compress/gzip", "\x02\x04d\a\x03\x14mT"},
+ {"compress/lzw", "\x02o\x03\x81\x01"},
+ {"compress/zlib", "\x02\x04d\a\x03\x12\x01n"},
+ {"container/heap", "\xb7\x02"},
{"container/list", ""},
{"container/ring", ""},
- {"context", "n\\m\x01\r"},
- {"crypto", "\x83\x01nC"},
- {"crypto/aes", "\x10\n\a\x93\x02"},
- {"crypto/cipher", "\x03\x1e\x01\x01\x1e\x11\x1c+X"},
- {"crypto/des", "\x10\x13\x1e-+\x9b\x01\x03"},
- {"crypto/dsa", "A\x04)\x83\x01\r"},
- {"crypto/ecdh", "\x03\v\f\x0e\x04\x15\x04\r\x1c\x83\x01"},
- {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x0e\a\v\x05\x01\x04\f\x01\x1c\x83\x01\r\x05K\x01"},
- {"crypto/ed25519", "\x0e\x1c\x11\x06\n\a\x1c\x83\x01C"},
- {"crypto/elliptic", "0>\x83\x01\r9"},
- {"crypto/fips140", " \x05"},
- {"crypto/hkdf", "-\x13\x01-\x15"},
- {"crypto/hmac", "\x1a\x14\x12\x01\x111"},
- {"crypto/internal/boring", "\x0e\x02\rf"},
- {"crypto/internal/boring/bbig", "\x1a\xe4\x01M"},
- {"crypto/internal/boring/bcache", "\xb8\x02\x13"},
+ {"context", "q[o\x01\r"},
+ {"crypto", "\x86\x01oC"},
+ {"crypto/aes", "\x10\n\t\x95\x02"},
+ {"crypto/cipher", "\x03 \x01\x01\x1f\x11\x1c+Y"},
+ {"crypto/des", "\x10\x15\x1f-+\x9c\x01\x03"},
+ {"crypto/dsa", "D\x04)\x84\x01\r"},
+ {"crypto/ecdh", "\x03\v\f\x10\x04\x16\x04\r\x1c\x84\x01"},
+ {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x10\a\v\x06\x01\x04\f\x01\x1c\x84\x01\r\x05K\x01"},
+ {"crypto/ed25519", "\x0e\x1e\x11\a\n\a\x1c\x84\x01C"},
+ {"crypto/elliptic", "2?\x84\x01\r9"},
+ {"crypto/fips140", "\"\x05"},
+ {"crypto/hkdf", "/\x14\x01-\x15"},
+ {"crypto/hmac", "\x1a\x16\x13\x01\x111"},
+ {"crypto/internal/boring", "\x0e\x02\ri"},
+ {"crypto/internal/boring/bbig", "\x1a\xe8\x01M"},
+ {"crypto/internal/boring/bcache", "\xbc\x02\x13"},
{"crypto/internal/boring/sig", ""},
- {"crypto/internal/cryptotest", "\x03\r\n\x06$\x0e\x19\x06\x12\x12 \x04\a\t\x16\x01\x11\x11\x1b\x01\a\x05\b\x03\x05\v"},
- {"crypto/internal/entropy", "F"},
- {"crypto/internal/fips140", "?/\x15\xa7\x01\v\x16"},
- {"crypto/internal/fips140/aes", "\x03\x1d\x03\x02\x13\x05\x01\x01\x05*\x92\x014"},
- {"crypto/internal/fips140/aes/gcm", " \x01\x02\x02\x02\x11\x05\x01\x06*\x8f\x01"},
- {"crypto/internal/fips140/alias", "\xcb\x02"},
- {"crypto/internal/fips140/bigmod", "%\x18\x01\x06*\x92\x01"},
- {"crypto/internal/fips140/check", " \x0e\x06\t\x02\xb2\x01Z"},
- {"crypto/internal/fips140/check/checktest", "%\x85\x02!"},
- {"crypto/internal/fips140/drbg", "\x03\x1c\x01\x01\x04\x13\x05\b\x01(\x83\x01\x0f7"},
- {"crypto/internal/fips140/ecdh", "\x03\x1d\x05\x02\t\r1\x83\x01\x0f7"},
- {"crypto/internal/fips140/ecdsa", "\x03\x1d\x04\x01\x02\a\x02\x068\x15nF"},
- {"crypto/internal/fips140/ed25519", "\x03\x1d\x05\x02\x04\v8\xc6\x01\x03"},
- {"crypto/internal/fips140/edwards25519", "%\a\f\x051\x92\x017"},
- {"crypto/internal/fips140/edwards25519/field", "%\x13\x051\x92\x01"},
- {"crypto/internal/fips140/hkdf", "\x03\x1d\x05\t\x06:\x15"},
- {"crypto/internal/fips140/hmac", "\x03\x1d\x14\x01\x018\x15"},
- {"crypto/internal/fips140/mlkem", "\x03\x1d\x05\x02\x0e\x03\x051"},
- {"crypto/internal/fips140/nistec", "%\f\a\x051\x92\x01*\r\x14"},
- {"crypto/internal/fips140/nistec/fiat", "%\x136\x92\x01"},
- {"crypto/internal/fips140/pbkdf2", "\x03\x1d\x05\t\x06:\x15"},
- {"crypto/internal/fips140/rsa", "\x03\x1d\x04\x01\x02\r\x01\x01\x026\x15nF"},
- {"crypto/internal/fips140/sha256", "\x03\x1d\x1d\x01\x06*\x15}"},
- {"crypto/internal/fips140/sha3", "\x03\x1d\x18\x05\x010\x92\x01K"},
- {"crypto/internal/fips140/sha512", "\x03\x1d\x1d\x01\x06*\x15}"},
- {"crypto/internal/fips140/ssh", "%^"},
- {"crypto/internal/fips140/subtle", "#\x1a\xc3\x01"},
- {"crypto/internal/fips140/tls12", "\x03\x1d\x05\t\x06\x028\x15"},
- {"crypto/internal/fips140/tls13", "\x03\x1d\x05\b\a\t1\x15"},
- {"crypto/internal/fips140cache", "\xaa\x02\r&"},
+ {"crypto/internal/constanttime", ""},
+ {"crypto/internal/cryptotest", "\x03\r\n\b%\x0e\x19\x06\x12\x12 \x04\x06\t\x18\x01\x11\x11\x1b\x01\a\x05\b\x03\x05\v"},
+ {"crypto/internal/entropy", "I"},
+ {"crypto/internal/entropy/v1.0.0", "B/\x93\x018\x13"},
+ {"crypto/internal/fips140", "A0\xbd\x01\v\x16"},
+ {"crypto/internal/fips140/aes", "\x03\x1f\x03\x02\x13\x05\x01\x01\x06*\x93\x014"},
+ {"crypto/internal/fips140/aes/gcm", "\"\x01\x02\x02\x02\x11\x05\x01\a*\x90\x01"},
+ {"crypto/internal/fips140/alias", "\xcf\x02"},
+ {"crypto/internal/fips140/bigmod", "'\x18\x01\a*\x93\x01"},
+ {"crypto/internal/fips140/check", "\"\x0e\x06\t\x02\xb4\x01Z"},
+ {"crypto/internal/fips140/check/checktest", "'\x87\x02!"},
+ {"crypto/internal/fips140/drbg", "\x03\x1e\x01\x01\x04\x13\x05\t\x01(\x84\x01\x0f7\x01"},
+ {"crypto/internal/fips140/ecdh", "\x03\x1f\x05\x02\t\r2\x84\x01\x0f7"},
+ {"crypto/internal/fips140/ecdsa", "\x03\x1f\x04\x01\x02\a\x02\x069\x15oF"},
+ {"crypto/internal/fips140/ed25519", "\x03\x1f\x05\x02\x04\v9\xc7\x01\x03"},
+ {"crypto/internal/fips140/edwards25519", "\x1e\t\a\x112\x93\x017"},
+ {"crypto/internal/fips140/edwards25519/field", "'\x13\x052\x93\x01"},
+ {"crypto/internal/fips140/hkdf", "\x03\x1f\x05\t\x06;\x15"},
+ {"crypto/internal/fips140/hmac", "\x03\x1f\x14\x01\x019\x15"},
+ {"crypto/internal/fips140/mlkem", "\x03\x1f\x05\x02\x0e\x03\x052\xca\x01"},
+ {"crypto/internal/fips140/nistec", "\x1e\t\f\f2\x93\x01*\r\x14"},
+ {"crypto/internal/fips140/nistec/fiat", "'\x137\x93\x01"},
+ {"crypto/internal/fips140/pbkdf2", "\x03\x1f\x05\t\x06;\x15"},
+ {"crypto/internal/fips140/rsa", "\x03\x1b\x04\x04\x01\x02\r\x01\x01\x027\x15oF"},
+ {"crypto/internal/fips140/sha256", "\x03\x1f\x1d\x01\a*\x15~"},
+ {"crypto/internal/fips140/sha3", "\x03\x1f\x18\x05\x011\x93\x01K"},
+ {"crypto/internal/fips140/sha512", "\x03\x1f\x1d\x01\a*\x15~"},
+ {"crypto/internal/fips140/ssh", "'_"},
+ {"crypto/internal/fips140/subtle", "\x1e\a\x1a\xc5\x01"},
+ {"crypto/internal/fips140/tls12", "\x03\x1f\x05\t\x06\x029\x15"},
+ {"crypto/internal/fips140/tls13", "\x03\x1f\x05\b\a\t2\x15"},
+ {"crypto/internal/fips140cache", "\xae\x02\r&"},
{"crypto/internal/fips140deps", ""},
- {"crypto/internal/fips140deps/byteorder", "\x99\x01"},
- {"crypto/internal/fips140deps/cpu", "\xae\x01\a"},
- {"crypto/internal/fips140deps/godebug", "\xb6\x01"},
- {"crypto/internal/fips140hash", "5\x1b3\xc8\x01"},
- {"crypto/internal/fips140only", "'\r\x01\x01M3;"},
+ {"crypto/internal/fips140deps/byteorder", "\x9c\x01"},
+ {"crypto/internal/fips140deps/cpu", "\xb1\x01\a"},
+ {"crypto/internal/fips140deps/godebug", "\xb9\x01"},
+ {"crypto/internal/fips140deps/time", "\xc9\x02"},
+ {"crypto/internal/fips140hash", "7\x1c3\xc9\x01"},
+ {"crypto/internal/fips140only", ")\r\x01\x01N3<"},
{"crypto/internal/fips140test", ""},
- {"crypto/internal/hpke", "\x0e\x01\x01\x03\x053#+gM"},
- {"crypto/internal/impl", "\xb5\x02"},
- {"crypto/internal/randutil", "\xf1\x01\x12"},
- {"crypto/internal/sysrand", "nn! \r\r\x01\x01\f\x06"},
- {"crypto/internal/sysrand/internal/seccomp", "n"},
- {"crypto/md5", "\x0e3-\x15\x16g"},
- {"crypto/mlkem", "/"},
- {"crypto/pbkdf2", "2\x0e\x01-\x15"},
- {"crypto/rand", "\x1a\x06\a\x1a\x04\x01(\x83\x01\rM"},
- {"crypto/rc4", "#\x1e-\xc6\x01"},
- {"crypto/rsa", "\x0e\f\x01\t\x0f\r\x01\x04\x06\a\x1c\x03\x123;\f\x01"},
- {"crypto/sha1", "\x0e\f'\x03*\x15\x16\x15R"},
- {"crypto/sha256", "\x0e\f\x1aO"},
- {"crypto/sha3", "\x0e'N\xc8\x01"},
- {"crypto/sha512", "\x0e\f\x1cM"},
- {"crypto/subtle", "8\x9b\x01W"},
- {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x02\x01\x01\a\x01\r\n\x01\t\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x12\x16\x15\b;\x16\x16\r\b\x01\x01\x01\x02\x01\r\x06\x02\x01\x0f"},
- {"crypto/tls/internal/fips140tls", "\x17\xa1\x02"},
- {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x012\x05\x01\x01\x02\x05\x0e\x06\x02\x02\x03E\x038\x01\x02\b\x01\x01\x02\a\x10\x05\x01\x06\x02\x05\n\x01\x02\x0e\x02\x01\x01\x02\x03\x01"},
- {"crypto/x509/pkix", "d\x06\a\x8d\x01G"},
- {"database/sql", "\x03\nK\x16\x03\x80\x01\v\a\"\x05\b\x02\x03\x01\r\x02\x02\x02"},
- {"database/sql/driver", "\ra\x03\xb4\x01\x0f\x11"},
- {"debug/buildinfo", "\x03X\x02\x01\x01\b\a\x03e\x19\x02\x01+\x0f\x1f"},
- {"debug/dwarf", "\x03d\a\x03\x80\x011\x11\x01\x01"},
- {"debug/elf", "\x03\x06Q\r\a\x03e\x1a\x01,\x17\x01\x16"},
- {"debug/gosym", "\x03d\n\xc2\x01\x01\x01\x02"},
- {"debug/macho", "\x03\x06Q\r\ne\x1b,\x17\x01"},
- {"debug/pe", "\x03\x06Q\r\a\x03e\x1b,\x17\x01\x16"},
- {"debug/plan9obj", "g\a\x03e\x1b,"},
- {"embed", "n*@\x19\x01S"},
+ {"crypto/internal/hpke", "\x0e\x01\x01\x03\x056#+hM"},
+ {"crypto/internal/impl", "\xb9\x02"},
+ {"crypto/internal/randutil", "\xf5\x01\x12"},
+ {"crypto/internal/sysrand", "qo! \r\r\x01\x01\f\x06"},
+ {"crypto/internal/sysrand/internal/seccomp", "q"},
+ {"crypto/md5", "\x0e6-\x15\x16h"},
+ {"crypto/mlkem", "1"},
+ {"crypto/pbkdf2", "4\x0f\x01-\x15"},
+ {"crypto/rand", "\x1a\b\a\x1b\x04\x01(\x84\x01\rM"},
+ {"crypto/rc4", "%\x1f-\xc7\x01"},
+ {"crypto/rsa", "\x0e\f\x01\v\x0f\x0e\x01\x04\x06\a\x1c\x03\x123<\f\x01"},
+ {"crypto/sha1", "\x0e\f*\x03*\x15\x16\x15S"},
+ {"crypto/sha256", "\x0e\f\x1cP"},
+ {"crypto/sha3", "\x0e)O\xc9\x01"},
+ {"crypto/sha512", "\x0e\f\x1eN"},
+ {"crypto/subtle", "\x1e\x1c\x9c\x01X"},
+ {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x02\x01\x01\t\x01\r\n\x01\n\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x12\x16\x15\b<\x16\x16\r\b\x01\x01\x01\x02\x01\r\x06\x02\x01\x0f"},
+ {"crypto/tls/internal/fips140tls", "\x17\xa5\x02"},
+ {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x015\x05\x01\x01\x02\x05\x0e\x06\x02\x02\x03E\x039\x01\x02\b\x01\x01\x02\a\x10\x05\x01\x06\x02\x05\b\x02\x01\x02\x0e\x02\x01\x01\x02\x03\x01"},
+ {"crypto/x509/pkix", "g\x06\a\x8e\x01G"},
+ {"database/sql", "\x03\nN\x16\x03\x81\x01\v\a\"\x05\b\x02\x03\x01\r\x02\x02\x02"},
+ {"database/sql/driver", "\rd\x03\xb5\x01\x0f\x11"},
+ {"debug/buildinfo", "\x03[\x02\x01\x01\b\a\x03e\x1a\x02\x01+\x0f\x1f"},
+ {"debug/dwarf", "\x03g\a\x03\x81\x011\x11\x01\x01"},
+ {"debug/elf", "\x03\x06T\r\a\x03e\x1b\x01\f \x17\x01\x16"},
+ {"debug/gosym", "\x03g\n\xc3\x01\x01\x01\x02"},
+ {"debug/macho", "\x03\x06T\r\ne\x1c,\x17\x01"},
+ {"debug/pe", "\x03\x06T\r\a\x03e\x1c,\x17\x01\x16"},
+ {"debug/plan9obj", "j\a\x03e\x1c,"},
+ {"embed", "q*A\x19\x01S"},
{"embed/internal/embedtest", ""},
{"encoding", ""},
- {"encoding/ascii85", "\xf1\x01C"},
- {"encoding/asn1", "\x03k\x03\x8c\x01\x01'\r\x02\x01\x10\x03\x01"},
- {"encoding/base32", "\xf1\x01A\x02"},
- {"encoding/base64", "\x99\x01XA\x02"},
- {"encoding/binary", "n\x83\x01\f(\r\x05"},
- {"encoding/csv", "\x02\x01k\x03\x80\x01D\x12\x02"},
- {"encoding/gob", "\x02`\x05\a\x03e\x1b\v\x01\x03\x1d\b\x12\x01\x0f\x02"},
- {"encoding/hex", "n\x03\x80\x01A\x03"},
- {"encoding/json", "\x03\x01^\x04\b\x03\x80\x01\f(\r\x02\x01\x02\x10\x01\x01\x02"},
- {"encoding/pem", "\x03c\b\x83\x01A\x03"},
- {"encoding/xml", "\x02\x01_\f\x03\x80\x014\x05\n\x01\x02\x10\x02"},
- {"errors", "\xca\x01\x81\x01"},
- {"expvar", "kK?\b\v\x15\r\b\x02\x03\x01\x11"},
- {"flag", "b\f\x03\x80\x01,\b\x05\b\x02\x01\x10"},
- {"fmt", "nE>\f \b\r\x02\x03\x12"},
- {"go/ast", "\x03\x01m\x0e\x01q\x03)\b\r\x02\x01"},
- {"go/build", "\x02\x01k\x03\x01\x02\x02\a\x02\x01\x17\x1f\x04\x02\t\x19\x13\x01+\x01\x04\x01\a\b\x02\x01\x12\x02\x02"},
- {"go/build/constraint", "n\xc6\x01\x01\x12\x02"},
- {"go/constant", "q\x0f}\x01\x024\x01\x02\x12"},
- {"go/doc", "\x04m\x01\x05\t>31\x10\x02\x01\x12\x02"},
- {"go/doc/comment", "\x03n\xc1\x01\x01\x01\x01\x12\x02"},
- {"go/format", "\x03n\x01\v\x01\x02qD"},
- {"go/importer", "s\a\x01\x01\x04\x01p9"},
- {"go/internal/gccgoimporter", "\x02\x01X\x13\x03\x04\v\x01n\x02,\x01\x05\x11\x01\f\b"},
- {"go/internal/gcimporter", "\x02o\x0f\x010\x05\x0e-,\x15\x03\x02"},
- {"go/internal/srcimporter", "q\x01\x01\n\x03\x01p,\x01\x05\x12\x02\x14"},
- {"go/parser", "\x03k\x03\x01\x02\v\x01q\x01+\x06\x12"},
- {"go/printer", "q\x01\x02\x03\tq\f \x15\x02\x01\x02\v\x05\x02"},
- {"go/scanner", "\x03n\x0fq2\x10\x01\x13\x02"},
- {"go/token", "\x04m\x83\x01>\x02\x03\x01\x0f\x02"},
- {"go/types", "\x03\x01\x06d\x03\x01\x03\b\x03\x02\x15\x1f\x061\x04\x03\t \x06\a\b\x01\x01\x01\x02\x01\x0f\x02\x02"},
- {"go/version", "\xbb\x01z"},
- {"hash", "\xf1\x01"},
- {"hash/adler32", "n\x15\x16"},
- {"hash/crc32", "n\x15\x16\x15\x89\x01\x01\x13"},
- {"hash/crc64", "n\x15\x16\x9e\x01"},
- {"hash/fnv", "n\x15\x16g"},
- {"hash/maphash", "\x83\x01\x11!\x03\x93\x01"},
- {"html", "\xb5\x02\x02\x12"},
- {"html/template", "\x03h\x06\x18-;\x01\n!\x05\x01\x02\x03\f\x01\x02\f\x01\x03\x02"},
- {"image", "\x02l\x1ee\x0f4\x03\x01"},
+ {"encoding/ascii85", "\xf5\x01C"},
+ {"encoding/asn1", "\x03n\x03e(\x01'\r\x02\x01\x10\x03\x01"},
+ {"encoding/base32", "\xf5\x01A\x02"},
+ {"encoding/base64", "\x9c\x01YA\x02"},
+ {"encoding/binary", "q\x84\x01\f(\r\x05"},
+ {"encoding/csv", "\x02\x01n\x03\x81\x01D\x12\x02"},
+ {"encoding/gob", "\x02c\x05\a\x03e\x1c\v\x01\x03\x1d\b\x12\x01\x0f\x02"},
+ {"encoding/hex", "q\x03\x81\x01A\x03"},
+ {"encoding/json", "\x03\x01a\x04\b\x03\x81\x01\f(\r\x02\x01\x02\x10\x01\x01\x02"},
+ {"encoding/pem", "\x03f\b\x84\x01A\x03"},
+ {"encoding/xml", "\x02\x01b\f\x03\x81\x014\x05\n\x01\x02\x10\x02"},
+ {"errors", "\xcc\x01\x83\x01"},
+ {"expvar", "nK@\b\v\x15\r\b\x02\x03\x01\x11"},
+ {"flag", "e\f\x03\x81\x01,\b\x05\b\x02\x01\x10"},
+ {"fmt", "qE&\x19\f \b\r\x02\x03\x12"},
+ {"go/ast", "\x03\x01p\x0e\x01r\x03)\b\r\x02\x01\x12\x02"},
+ {"go/build", "\x02\x01n\x03\x01\x02\x02\a\x02\x01\x17\x1f\x04\x02\b\x1b\x13\x01+\x01\x04\x01\a\b\x02\x01\x12\x02\x02"},
+ {"go/build/constraint", "q\xc7\x01\x01\x12\x02"},
+ {"go/constant", "t\x0f~\x01\x024\x01\x02\x12"},
+ {"go/doc", "\x04p\x01\x05\t=51\x10\x02\x01\x12\x02"},
+ {"go/doc/comment", "\x03q\xc2\x01\x01\x01\x01\x12\x02"},
+ {"go/format", "\x03q\x01\v\x01\x02rD"},
+ {"go/importer", "v\a\x01\x01\x04\x01q9"},
+ {"go/internal/gccgoimporter", "\x02\x01[\x13\x03\x04\v\x01o\x02,\x01\x05\x11\x01\f\b"},
+ {"go/internal/gcimporter", "\x02r\x0f\x010\x05\r/,\x15\x03\x02"},
+ {"go/internal/srcimporter", "t\x01\x01\n\x03\x01q,\x01\x05\x12\x02\x14"},
+ {"go/parser", "\x03n\x03\x01\x02\v\x01r\x01+\x06\x12"},
+ {"go/printer", "t\x01\x02\x03\tr\f \x15\x02\x01\x02\v\x05\x02"},
+ {"go/scanner", "\x03q\x0fr2\x10\x01\x13\x02"},
+ {"go/token", "\x04p\x84\x01>\x02\x03\x01\x0f\x02"},
+ {"go/types", "\x03\x01\x06g\x03\x01\x03\b\x03\x024\x062\x04\x03\t \x06\a\b\x01\x01\x01\x02\x01\x0f\x02\x02"},
+ {"go/version", "\xbe\x01{"},
+ {"hash", "\xf5\x01"},
+ {"hash/adler32", "q\x15\x16"},
+ {"hash/crc32", "q\x15\x16\x15\x8a\x01\x01\x13"},
+ {"hash/crc64", "q\x15\x16\x9f\x01"},
+ {"hash/fnv", "q\x15\x16h"},
+ {"hash/maphash", "\x86\x01\x11<|"},
+ {"html", "\xb9\x02\x02\x12"},
+ {"html/template", "\x03k\x06\x18-<\x01\n!\x05\x01\x02\x03\f\x01\x02\f\x01\x03\x02"},
+ {"image", "\x02o\x1ef\x0f4\x03\x01"},
{"image/color", ""},
- {"image/color/palette", "\x8c\x01"},
- {"image/draw", "\x8b\x01\x01\x04"},
- {"image/gif", "\x02\x01\x05f\x03\x1a\x01\x01\x01\vX"},
- {"image/internal/imageutil", "\x8b\x01"},
- {"image/jpeg", "\x02l\x1d\x01\x04a"},
- {"image/png", "\x02\a^\n\x12\x02\x06\x01eC"},
- {"index/suffixarray", "\x03d\a\x83\x01\f+\n\x01"},
- {"internal/abi", "\xb5\x01\x96\x01"},
- {"internal/asan", "\xcb\x02"},
- {"internal/bisect", "\xaa\x02\r\x01"},
- {"internal/buildcfg", "qGe\x06\x02\x05\n\x01"},
- {"internal/bytealg", "\xae\x01\x9d\x01"},
+ {"image/color/palette", "\x8f\x01"},
+ {"image/draw", "\x8e\x01\x01\x04"},
+ {"image/gif", "\x02\x01\x05i\x03\x1a\x01\x01\x01\vY"},
+ {"image/internal/imageutil", "\x8e\x01"},
+ {"image/jpeg", "\x02o\x1d\x01\x04b"},
+ {"image/png", "\x02\aa\n\x12\x02\x06\x01fC"},
+ {"index/suffixarray", "\x03g\a\x84\x01\f+\n\x01"},
+ {"internal/abi", "\xb8\x01\x97\x01"},
+ {"internal/asan", "\xcf\x02"},
+ {"internal/bisect", "\xae\x02\r\x01"},
+ {"internal/buildcfg", "tGf\x06\x02\x05\n\x01"},
+ {"internal/bytealg", "\xb1\x01\x9e\x01"},
{"internal/byteorder", ""},
{"internal/cfg", ""},
- {"internal/cgrouptest", "q[Q\x06\x0f\x02\x01\x04\x01"},
- {"internal/chacha8rand", "\x99\x01\x15\a\x96\x01"},
+ {"internal/cgrouptest", "tZS\x06\x0f\x02\x01\x04\x01"},
+ {"internal/chacha8rand", "\x9c\x01\x15\a\x97\x01"},
{"internal/copyright", ""},
{"internal/coverage", ""},
{"internal/coverage/calloc", ""},
- {"internal/coverage/cfile", "k\x06\x16\x17\x01\x02\x01\x01\x01\x01\x01\x01\x01#\x02$,\x06\a\n\x01\x03\r\x06"},
- {"internal/coverage/cformat", "\x04m-\x04O\v6\x01\x02\r"},
- {"internal/coverage/cmerge", "q-_"},
- {"internal/coverage/decodecounter", "g\n-\v\x02F,\x17\x17"},
- {"internal/coverage/decodemeta", "\x02e\n\x16\x17\v\x02F,"},
- {"internal/coverage/encodecounter", "\x02e\n-\f\x01\x02D\v!\x15"},
- {"internal/coverage/encodemeta", "\x02\x01d\n\x12\x04\x17\r\x02D,."},
- {"internal/coverage/pods", "\x04m-\x7f\x06\x05\n\x02\x01"},
- {"internal/coverage/rtcov", "\xcb\x02"},
- {"internal/coverage/slicereader", "g\n\x80\x01Z"},
- {"internal/coverage/slicewriter", "q\x80\x01"},
- {"internal/coverage/stringtab", "q8\x04D"},
+ {"internal/coverage/cfile", "n\x06\x16\x17\x01\x02\x01\x01\x01\x01\x01\x01\x01\"\x02&,\x06\a\n\x01\x03\r\x06"},
+ {"internal/coverage/cformat", "\x04p-\x04P\v6\x01\x02\r"},
+ {"internal/coverage/cmerge", "t-`"},
+ {"internal/coverage/decodecounter", "j\n-\v\x02G,\x17\x17"},
+ {"internal/coverage/decodemeta", "\x02h\n\x16\x17\v\x02G,"},
+ {"internal/coverage/encodecounter", "\x02h\n-\f\x01\x02E\v!\x15"},
+ {"internal/coverage/encodemeta", "\x02\x01g\n\x12\x04\x17\r\x02E,."},
+ {"internal/coverage/pods", "\x04p-\x80\x01\x06\x05\n\x02\x01"},
+ {"internal/coverage/rtcov", "\xcf\x02"},
+ {"internal/coverage/slicereader", "j\n\x81\x01Z"},
+ {"internal/coverage/slicewriter", "t\x81\x01"},
+ {"internal/coverage/stringtab", "t8\x04E"},
{"internal/coverage/test", ""},
{"internal/coverage/uleb128", ""},
- {"internal/cpu", "\xcb\x02"},
- {"internal/dag", "\x04m\xc1\x01\x03"},
- {"internal/diff", "\x03n\xc2\x01\x02"},
- {"internal/exportdata", "\x02\x01k\x03\x02c\x1b,\x01\x05\x11\x01\x02"},
- {"internal/filepathlite", "n*@\x1a@"},
- {"internal/fmtsort", "\x04\xa1\x02\r"},
- {"internal/fuzz", "\x03\nB\x18\x04\x03\x03\x01\v\x036;\f\x03\x1d\x01\x05\x02\x05\n\x01\x02\x01\x01\f\x04\x02"},
+ {"internal/cpu", "\xcf\x02"},
+ {"internal/dag", "\x04p\xc2\x01\x03"},
+ {"internal/diff", "\x03q\xc3\x01\x02"},
+ {"internal/exportdata", "\x02\x01n\x03\x02c\x1c,\x01\x05\x11\x01\x02"},
+ {"internal/filepathlite", "q*A\x1a@"},
+ {"internal/fmtsort", "\x04\xa5\x02\r"},
+ {"internal/fuzz", "\x03\nE\x18\x04\x03\x03\x01\v\x036<\f\x03\x1d\x01\x05\x02\x05\n\x01\x02\x01\x01\f\x04\x02"},
{"internal/goarch", ""},
- {"internal/godebug", "\x96\x01!\x80\x01\x01\x13"},
+ {"internal/godebug", "\x99\x01!\x81\x01\x01\x13"},
{"internal/godebugs", ""},
{"internal/goexperiment", ""},
{"internal/goos", ""},
- {"internal/goroot", "\x9d\x02\x01\x05\x12\x02"},
+ {"internal/goroot", "\xa1\x02\x01\x05\x12\x02"},
{"internal/gover", "\x04"},
{"internal/goversion", ""},
- {"internal/itoa", ""},
- {"internal/lazyregexp", "\x9d\x02\v\r\x02"},
- {"internal/lazytemplate", "\xf1\x01,\x18\x02\f"},
- {"internal/msan", "\xcb\x02"},
+ {"internal/lazyregexp", "\xa1\x02\v\r\x02"},
+ {"internal/lazytemplate", "\xf5\x01,\x18\x02\f"},
+ {"internal/msan", "\xcf\x02"},
{"internal/nettrace", ""},
- {"internal/obscuretestdata", "f\x8b\x01,"},
- {"internal/oserror", "n"},
- {"internal/pkgbits", "\x03L\x18\a\x03\x04\vq\r\x1f\r\n\x01"},
+ {"internal/obscuretestdata", "i\x8c\x01,"},
+ {"internal/oserror", "q"},
+ {"internal/pkgbits", "\x03O\x18\a\x03\x04\vr\r\x1f\r\n\x01"},
{"internal/platform", ""},
- {"internal/poll", "nO\x1f\x159\r\x01\x01\f\x06"},
- {"internal/profile", "\x03\x04g\x03\x80\x017\v\x01\x01\x10"},
+ {"internal/poll", "qj\x05\x159\r\x01\x01\f\x06"},
+ {"internal/profile", "\x03\x04j\x03\x81\x017\n\x01\x01\x01\x10"},
{"internal/profilerecord", ""},
- {"internal/race", "\x94\x01\xb7\x01"},
- {"internal/reflectlite", "\x94\x01!9<!"},
- {"internal/runtime/atomic", "\xb5\x01\x96\x01"},
- {"internal/runtime/cgroup", "\x98\x01:\x02w"},
- {"internal/runtime/exithook", "\xcb\x01\x80\x01"},
- {"internal/runtime/gc", "\xb5\x01"},
- {"internal/runtime/maps", "\x94\x01\x01 \v\t\a\x03x"},
- {"internal/runtime/math", "\xb5\x01"},
+ {"internal/race", "\x97\x01\xb8\x01"},
+ {"internal/reflectlite", "\x97\x01!:<!"},
+ {"internal/runtime/atomic", "\xb8\x01\x97\x01"},
+ {"internal/runtime/cgroup", "\x9b\x01<\x04t"},
+ {"internal/runtime/exithook", "\xcd\x01\x82\x01"},
+ {"internal/runtime/gc", "\xb8\x01"},
+ {"internal/runtime/gc/internal/gen", "\n`\n\x17j\x04\v\x1d\b\x10\x02"},
+ {"internal/runtime/gc/scan", "\xb1\x01\a\x18\x06y"},
+ {"internal/runtime/maps", "\x97\x01\x01 \n\t\t\x02y"},
+ {"internal/runtime/math", "\xb8\x01"},
{"internal/runtime/startlinetest", ""},
- {"internal/runtime/strconv", "\xd0\x01"},
- {"internal/runtime/sys", "\xb5\x01\x04"},
- {"internal/runtime/syscall", "\xb5\x01\x96\x01"},
+ {"internal/runtime/sys", "\xb8\x01\x04"},
+ {"internal/runtime/syscall/linux", "\xb8\x01\x97\x01"},
{"internal/runtime/wasitest", ""},
- {"internal/saferio", "\xf1\x01Z"},
- {"internal/singleflight", "\xb7\x02"},
- {"internal/stringslite", "\x98\x01\xb3\x01"},
- {"internal/sync", "\x94\x01!\x14o\x13"},
- {"internal/synctest", "\x94\x01\xb7\x01"},
- {"internal/syscall/execenv", "\xb9\x02"},
- {"internal/syscall/unix", "\xaa\x02\x0e\x01\x12"},
- {"internal/sysinfo", "\x02\x01\xab\x01C,\x18\x02"},
+ {"internal/saferio", "\xf5\x01Z"},
+ {"internal/singleflight", "\xbb\x02"},
+ {"internal/strconv", "\x84\x02K"},
+ {"internal/stringslite", "\x9b\x01\xb4\x01"},
+ {"internal/sync", "\x97\x01!\x13q\x13"},
+ {"internal/synctest", "\x97\x01\xb8\x01"},
+ {"internal/syscall/execenv", "\xbd\x02"},
+ {"internal/syscall/unix", "\xae\x02\x0e\x01\x12"},
+ {"internal/sysinfo", "\x02\x01\xae\x01D,\x18\x02"},
{"internal/syslist", ""},
- {"internal/testenv", "\x03\na\x02\x01)\x1b\x10-+\x01\x05\a\n\x01\x02\x02\x01\v"},
- {"internal/testhash", "\x03\x80\x01n\x118\v"},
- {"internal/testlog", "\xb7\x02\x01\x13"},
- {"internal/testpty", "n\x03\xac\x01"},
- {"internal/trace", "\x02\x01\x01\x06]\a\x03t\x03\x03\x06\x03\t5\x01\x01\x01\x10\x06"},
- {"internal/trace/internal/testgen", "\x03d\nr\x03\x02\x03\x011\v\r\x10"},
- {"internal/trace/internal/tracev1", "\x03\x01c\a\x03z\x06\f5\x01"},
- {"internal/trace/raw", "\x02e\nw\x03\x06C\x01\x12"},
- {"internal/trace/testtrace", "\x02\x01k\x03r\x03\x05\x01\x057\n\x02\x01"},
+ {"internal/testenv", "\x03\nd\x02\x01)\x1b\x0f/+\x01\x05\a\n\x01\x02\x02\x01\v"},
+ {"internal/testhash", "\x03\x83\x01o\x118\v"},
+ {"internal/testlog", "\xbb\x02\x01\x13"},
+ {"internal/testpty", "q\x03\xad\x01"},
+ {"internal/trace", "\x02\x01\x01\x06`\a\x03u\x03\x03\x06\x03\t5\x01\x01\x01\x10\x06"},
+ {"internal/trace/internal/testgen", "\x03g\ns\x03\x02\x03\x011\v\r\x10"},
+ {"internal/trace/internal/tracev1", "\x03\x01f\a\x03{\x06\f5\x01"},
+ {"internal/trace/raw", "\x02h\nx\x03\x06C\x01\x12"},
+ {"internal/trace/testtrace", "\x02\x01n\x03o\x04\x03\x05\x01\x05,\v\x02\b\x02\x01\x05"},
{"internal/trace/tracev2", ""},
- {"internal/trace/traceviewer", "\x02^\v\x06\x19=\x1c\a\a\x04\b\v\x15\x01\x05\a\n\x01\x02\x0e"},
+ {"internal/trace/traceviewer", "\x02a\v\x06\x19<\x1e\a\a\x04\b\v\x15\x01\x05\a\n\x01\x02\x0e"},
{"internal/trace/traceviewer/format", ""},
- {"internal/trace/version", "qw\t"},
- {"internal/txtar", "\x03n\xac\x01\x18"},
- {"internal/types/errors", "\xb4\x02"},
- {"internal/unsafeheader", "\xcb\x02"},
- {"internal/xcoff", "Z\r\a\x03e\x1b,\x17\x01"},
- {"internal/zstd", "g\a\x03\x80\x01\x0f"},
- {"io", "n\xc9\x01"},
- {"io/fs", "n*+.1\x10\x13\x04"},
- {"io/ioutil", "\xf1\x01\x01+\x15\x03"},
- {"iter", "\xc9\x01a!"},
- {"log", "q\x80\x01\x05'\r\r\x01\r"},
+ {"internal/trace/version", "tx\t"},
+ {"internal/txtar", "\x03q\xad\x01\x18"},
+ {"internal/types/errors", "\xb8\x02"},
+ {"internal/unsafeheader", "\xcf\x02"},
+ {"internal/xcoff", "]\r\a\x03e\x1c,\x17\x01"},
+ {"internal/zstd", "j\a\x03\x81\x01\x0f"},
+ {"io", "q\xca\x01"},
+ {"io/fs", "q**01\x10\x13\x04"},
+ {"io/ioutil", "\xf5\x01\x01+\x15\x03"},
+ {"iter", "\xcb\x01c!"},
+ {"log", "t\x81\x01\x05'\r\r\x01\r"},
{"log/internal", ""},
- {"log/slog", "\x03\nU\t\x03\x03\x80\x01\x04\x01\x02\x02\x03(\x05\b\x02\x01\x02\x01\r\x02\x02\x02"},
+ {"log/slog", "\x03\nX\t\x03\x03\x81\x01\x04\x01\x02\x02\x03(\x05\b\x02\x01\x02\x01\r\x02\x02\x02"},
{"log/slog/internal", ""},
- {"log/slog/internal/benchmarks", "\ra\x03\x80\x01\x06\x03:\x11"},
- {"log/slog/internal/buffer", "\xb7\x02"},
- {"log/syslog", "n\x03\x84\x01\x12\x16\x18\x02\x0e"},
- {"maps", "\xf4\x01W"},
- {"math", "\xae\x01RK"},
- {"math/big", "\x03k\x03(\x15C\f\x03\x020\x02\x01\x02\x14"},
- {"math/big/internal/asmgen", "\x03\x01m\x8f\x012\x03"},
- {"math/bits", "\xcb\x02"},
- {"math/cmplx", "\xfd\x01\x03"},
- {"math/rand", "\xb6\x01G:\x01\x13"},
- {"math/rand/v2", "n+\x03a\x03K"},
- {"mime", "\x02\x01c\b\x03\x80\x01\v!\x15\x03\x02\x10\x02"},
- {"mime/multipart", "\x02\x01H#\x03E;\v\x01\a\x02\x15\x02\x06\x0f\x02\x01\x16"},
- {"mime/quotedprintable", "\x02\x01n\x80\x01"},
- {"net", "\x04\ta*\x1e\a\x04\x05\x11\x01\x04\x15\x01%\x06\r\b\x05\x01\x01\f\x06\a"},
- {"net/http", "\x02\x01\x04\x04\x02>\b\x13\x01\a\x03E;\x01\x03\a\x01\x03\x02\x02\x01\x02\x06\x02\x01\x01\n\x01\x01\x05\x01\x02\x05\b\x01\x01\x01\x02\x01\r\x02\x02\x02\b\x01\x01\x01"},
- {"net/http/cgi", "\x02Q\x1b\x03\x80\x01\x04\a\v\x01\x13\x01\x01\x01\x04\x01\x05\x02\b\x02\x01\x10\x0e"},
- {"net/http/cookiejar", "\x04j\x03\x96\x01\x01\b\f\x16\x03\x02\x0e\x04"},
- {"net/http/fcgi", "\x02\x01\nZ\a\x03\x80\x01\x16\x01\x01\x14\x18\x02\x0e"},
- {"net/http/httptest", "\x02\x01\nF\x02\x1b\x01\x80\x01\x04\x12\x01\n\t\x02\x17\x01\x02\x0e\x0e"},
- {"net/http/httptrace", "\rFnF\x14\n "},
- {"net/http/httputil", "\x02\x01\na\x03\x80\x01\x04\x0f\x03\x01\x05\x02\x01\v\x01\x19\x02\x0e\x0e"},
- {"net/http/internal", "\x02\x01k\x03\x80\x01"},
- {"net/http/internal/ascii", "\xb5\x02\x12"},
- {"net/http/internal/httpcommon", "\ra\x03\x9c\x01\x0e\x01\x17\x01\x01\x02\x1c\x02"},
- {"net/http/internal/testcert", "\xb5\x02"},
- {"net/http/pprof", "\x02\x01\nd\x18-\x11*\x04\x13\x14\x01\r\x04\x03\x01\x02\x01\x10"},
+ {"log/slog/internal/benchmarks", "\rd\x03\x81\x01\x06\x03:\x11"},
+ {"log/slog/internal/buffer", "\xbb\x02"},
+ {"log/syslog", "q\x03\x85\x01\x12\x16\x18\x02\x0e"},
+ {"maps", "\xf8\x01W"},
+ {"math", "\xb1\x01SK"},
+ {"math/big", "\x03n\x03(\x15D\f\x03\x020\x02\x01\x02\x14"},
+ {"math/big/internal/asmgen", "\x03\x01p\x90\x012\x03"},
+ {"math/bits", "\xcf\x02"},
+ {"math/cmplx", "\x81\x02\x03"},
+ {"math/rand", "\xb9\x01H:\x01\x13"},
+ {"math/rand/v2", "q+\x03b\x03K"},
+ {"mime", "\x02\x01f\b\x03\x81\x01\v!\x15\x03\x02\x10\x02"},
+ {"mime/multipart", "\x02\x01K#\x03E<\v\x01\a\x02\x15\x02\x06\x0f\x02\x01\x16"},
+ {"mime/quotedprintable", "\x02\x01q\x81\x01"},
+ {"net", "\x04\td*\x1e\n\x05\x12\x01\x01\x04\x15\x01%\x06\r\b\x05\x01\x01\f\x06\a"},
+ {"net/http", "\x02\x01\x03\x01\x04\x02A\b\x13\x01\a\x03E<\x01\x03\a\x01\x03\x02\x02\x01\x02\x06\x02\x01\x01\n\x01\x01\x05\x01\x02\x05\b\x01\x01\x01\x02\x01\r\x02\x02\x02\b\x01\x01\x01"},
+ {"net/http/cgi", "\x02T\x1b\x03\x81\x01\x04\a\v\x01\x13\x01\x01\x01\x04\x01\x05\x02\b\x02\x01\x10\x0e"},
+ {"net/http/cookiejar", "\x04m\x03\x97\x01\x01\b\f\x16\x03\x02\x0e\x04"},
+ {"net/http/fcgi", "\x02\x01\n]\a\x03\x81\x01\x16\x01\x01\x14\x18\x02\x0e"},
+ {"net/http/httptest", "\x02\x01\nI\x02\x1b\x01\x81\x01\x04\x12\x01\n\t\x02\x17\x01\x02\x0e\x0e"},
+ {"net/http/httptrace", "\rImH\x14\n "},
+ {"net/http/httputil", "\x02\x01\nd\x03\x81\x01\x04\x0f\x03\x01\x05\x02\x01\v\x01\x19\x02\x0e\x0e"},
+ {"net/http/internal", "\x02\x01n\x03\x81\x01"},
+ {"net/http/internal/ascii", "\xb9\x02\x12"},
+ {"net/http/internal/httpcommon", "\rd\x03\x9d\x01\x0e\x01\x17\x01\x01\x02\x1c\x02"},
+ {"net/http/internal/testcert", "\xb9\x02"},
+ {"net/http/pprof", "\x02\x01\ng\x18-\x02\x0e,\x04\x13\x14\x01\r\x04\x03\x01\x02\x01\x10"},
{"net/internal/cgotest", ""},
- {"net/internal/socktest", "q\xc6\x01\x02"},
- {"net/mail", "\x02l\x03\x80\x01\x04\x0f\x03\x14\x1a\x02\x0e\x04"},
- {"net/netip", "\x04j*\x01$@\x034\x16"},
- {"net/rpc", "\x02g\x05\x03\x0f\ng\x04\x12\x01\x1d\r\x03\x02"},
- {"net/rpc/jsonrpc", "k\x03\x03\x80\x01\x16\x11\x1f"},
- {"net/smtp", "\x19/\v\x13\b\x03\x80\x01\x16\x14\x1a"},
- {"net/textproto", "\x02\x01k\x03\x80\x01\f\n-\x01\x02\x14"},
- {"net/url", "n\x03\x8b\x01&\x10\x02\x01\x16"},
- {"os", "n*\x01\x19\x03\b\t\x12\x03\x01\x05\x10\x018\b\x05\x01\x01\f\x06"},
- {"os/exec", "\x03\naH%\x01\x15\x01+\x06\a\n\x01\x04\f"},
- {"os/exec/internal/fdtest", "\xb9\x02"},
- {"os/signal", "\r\x90\x02\x15\x05\x02"},
- {"os/user", "\x02\x01k\x03\x80\x01,\r\n\x01\x02"},
- {"path", "n*\xb1\x01"},
- {"path/filepath", "n*\x1a@+\r\b\x03\x04\x10"},
- {"plugin", "n"},
- {"reflect", "n&\x04\x1d\b\f\x06\x04\x1b\x06\t-\n\x03\x10\x02\x02"},
+ {"net/internal/socktest", "t\xc7\x01\x02"},
+ {"net/mail", "\x02o\x03\x81\x01\x04\x0f\x03\x14\x1a\x02\x0e\x04"},
+ {"net/netip", "\x04m*\x01e\x034\x16"},
+ {"net/rpc", "\x02j\x05\x03\x0f\nh\x04\x12\x01\x1d\r\x03\x02"},
+ {"net/rpc/jsonrpc", "n\x03\x03\x81\x01\x16\x11\x1f"},
+ {"net/smtp", "\x192\v\x13\b\x03\x81\x01\x16\x14\x1a"},
+ {"net/textproto", "\x02\x01n\x03\x81\x01\f\n-\x01\x02\x14"},
+ {"net/url", "q\x03\xa7\x01\v\x10\x02\x01\x16"},
+ {"os", "q*\x01\x19\x03\x10\x13\x01\x03\x01\x05\x10\x018\b\x05\x01\x01\f\x06"},
+ {"os/exec", "\x03\ndH&\x01\x15\x01+\x06\a\n\x01\x04\f"},
+ {"os/exec/internal/fdtest", "\xbd\x02"},
+ {"os/signal", "\r\x94\x02\x15\x05\x02"},
+ {"os/user", "\x02\x01n\x03\x81\x01,\r\n\x01\x02"},
+ {"path", "q*\xb2\x01"},
+ {"path/filepath", "q*\x1aA+\r\b\x03\x04\x10"},
+ {"plugin", "q"},
+ {"reflect", "q&\x04\x1d\x13\b\x03\x05\x17\x06\t-\n\x03\x10\x02\x02"},
{"reflect/internal/example1", ""},
{"reflect/internal/example2", ""},
- {"regexp", "\x03\xee\x018\t\x02\x01\x02\x10\x02"},
- {"regexp/syntax", "\xb2\x02\x01\x01\x01\x02\x10\x02"},
- {"runtime", "\x94\x01\x04\x01\x03\f\x06\a\x02\x01\x01\x0f\x03\x01\x01\x01\x01\x01\x02\x01\x01\x04\x10c"},
- {"runtime/coverage", "\xa0\x01Q"},
- {"runtime/debug", "qUW\r\b\x02\x01\x10\x06"},
- {"runtime/metrics", "\xb7\x01F-!"},
- {"runtime/pprof", "\x02\x01\x01\x03\x06Z\a\x03#4)\f \r\b\x01\x01\x01\x02\x02\t\x03\x06"},
- {"runtime/race", "\xb0\x02"},
+ {"regexp", "\x03\xf2\x018\t\x02\x01\x02\x10\x02"},
+ {"regexp/syntax", "\xb6\x02\x01\x01\x01\x02\x10\x02"},
+ {"runtime", "\x97\x01\x04\x01\x03\f\x06\a\x02\x01\x01\x0e\x03\x01\x01\x01\x02\x01\x01\x02\x01\x04\x01\x10c"},
+ {"runtime/coverage", "\xa3\x01R"},
+ {"runtime/debug", "tTY\r\b\x02\x01\x10\x06"},
+ {"runtime/metrics", "\xba\x01G-!"},
+ {"runtime/pprof", "\x02\x01\x01\x03\x06]\a\x03#$\x0f+\f \r\b\x01\x01\x01\x02\x02\t\x03\x06"},
+ {"runtime/race", "\xb4\x02"},
{"runtime/race/internal/amd64v1", ""},
- {"runtime/trace", "\ra\x03w\t9\b\x05\x01\r\x06"},
- {"slices", "\x04\xf0\x01\fK"},
- {"sort", "\xca\x0162"},
- {"strconv", "n*@%\x03I"},
- {"strings", "n&\x04@\x19\x03\f7\x10\x02\x02"},
+ {"runtime/trace", "\rd\x03x\t9\b\x05\x01\r\x06"},
+ {"slices", "\x04\xf4\x01\fK"},
+ {"sort", "\xcc\x0182"},
+ {"strconv", "q*@\x01q"},
+ {"strings", "q&\x04A\x19\x03\f7\x10\x02\x02"},
{"structs", ""},
- {"sync", "\xc9\x01\x10\x01P\x0e\x13"},
- {"sync/atomic", "\xcb\x02"},
- {"syscall", "n'\x03\x01\x1c\b\x03\x03\x06\vV\b\x05\x01\x13"},
- {"testing", "\x03\na\x02\x01X\x14\x14\f\x05\x1b\x06\x02\x05\x02\x05\x01\x02\x01\x02\x01\r\x02\x02\x02"},
- {"testing/fstest", "n\x03\x80\x01\x01\n&\x10\x03\b\b"},
- {"testing/internal/testdeps", "\x02\v\xa7\x01-\x10,\x03\x05\x03\x06\a\x02\x0e"},
- {"testing/iotest", "\x03k\x03\x80\x01\x04"},
- {"testing/quick", "p\x01\x8c\x01\x05#\x10\x10"},
- {"testing/slogtest", "\ra\x03\x86\x01.\x05\x10\v"},
- {"testing/synctest", "\xda\x01`\x11"},
- {"text/scanner", "\x03n\x80\x01,*\x02"},
- {"text/tabwriter", "q\x80\x01X"},
- {"text/template", "n\x03B>\x01\n \x01\x05\x01\x02\x05\v\x02\r\x03\x02"},
- {"text/template/parse", "\x03n\xb9\x01\n\x01\x12\x02"},
- {"time", "n*\x1e\"(*\r\x02\x12"},
- {"time/tzdata", "n\xcb\x01\x12"},
+ {"sync", "\xcb\x01\x12\x01P\x0e\x13"},
+ {"sync/atomic", "\xcf\x02"},
+ {"syscall", "q'\x03\x01\x1c\n\x03\x06\f\x04S\b\x05\x01\x13"},
+ {"testing", "\x03\nd\x02\x01W\x16\x14\f\x05\x1b\x06\x02\x05\x02\x05\x01\x02\x01\x02\x01\r\x02\x04"},
+ {"testing/fstest", "q\x03\x81\x01\x01\n&\x10\x03\b\b"},
+ {"testing/internal/testdeps", "\x02\v\xaa\x01.\x10,\x03\x05\x03\x06\a\x02\x0e"},
+ {"testing/iotest", "\x03n\x03\x81\x01\x04"},
+ {"testing/quick", "s\x01\x8d\x01\x05#\x10\x10"},
+ {"testing/slogtest", "\rd\x03\x87\x01.\x05\x10\v"},
+ {"testing/synctest", "\xde\x01`\x11"},
+ {"text/scanner", "\x03q\x81\x01,*\x02"},
+ {"text/tabwriter", "t\x81\x01X"},
+ {"text/template", "q\x03B?\x01\n \x01\x05\x01\x02\x05\v\x02\r\x03\x02"},
+ {"text/template/parse", "\x03q\xba\x01\n\x01\x12\x02"},
+ {"time", "q*\x1e#(*\r\x02\x12"},
+ {"time/tzdata", "q\xcc\x01\x12"},
{"unicode", ""},
{"unicode/utf16", ""},
{"unicode/utf8", ""},
- {"unique", "\x94\x01!#\x01Q\r\x01\x13\x12"},
+ {"unique", "\x97\x01!$\x01Q\r\x01\x13\x12"},
{"unsafe", ""},
- {"vendor/golang.org/x/crypto/chacha20", "\x10W\a\x92\x01*&"},
- {"vendor/golang.org/x/crypto/chacha20poly1305", "\x10W\a\xde\x01\x04\x01\a"},
- {"vendor/golang.org/x/crypto/cryptobyte", "d\n\x03\x8d\x01' \n"},
+ {"vendor/golang.org/x/crypto/chacha20", "\x10Z\a\x93\x01*&"},
+ {"vendor/golang.org/x/crypto/chacha20poly1305", "\x10Z\a\xdf\x01\x04\x01\a"},
+ {"vendor/golang.org/x/crypto/cryptobyte", "g\n\x03\x8e\x01' \n"},
{"vendor/golang.org/x/crypto/cryptobyte/asn1", ""},
- {"vendor/golang.org/x/crypto/internal/alias", "\xcb\x02"},
- {"vendor/golang.org/x/crypto/internal/poly1305", "R\x15\x99\x01"},
- {"vendor/golang.org/x/net/dns/dnsmessage", "n"},
- {"vendor/golang.org/x/net/http/httpguts", "\x87\x02\x14\x1a\x14\r"},
- {"vendor/golang.org/x/net/http/httpproxy", "n\x03\x96\x01\x10\x05\x01\x18\x14\r"},
- {"vendor/golang.org/x/net/http2/hpack", "\x03k\x03\x80\x01F"},
- {"vendor/golang.org/x/net/idna", "q\x8c\x018\x14\x10\x02\x01"},
- {"vendor/golang.org/x/net/nettest", "\x03d\a\x03\x80\x01\x11\x05\x16\x01\f\n\x01\x02\x02\x01\v"},
- {"vendor/golang.org/x/sys/cpu", "\x9d\x02\r\n\x01\x16"},
- {"vendor/golang.org/x/text/secure/bidirule", "n\xdb\x01\x11\x01"},
- {"vendor/golang.org/x/text/transform", "\x03k\x83\x01X"},
- {"vendor/golang.org/x/text/unicode/bidi", "\x03\bf\x84\x01>\x16"},
- {"vendor/golang.org/x/text/unicode/norm", "g\n\x80\x01F\x12\x11"},
- {"weak", "\x94\x01\x96\x01!"},
+ {"vendor/golang.org/x/crypto/internal/alias", "\xcf\x02"},
+ {"vendor/golang.org/x/crypto/internal/poly1305", "U\x15\x9a\x01"},
+ {"vendor/golang.org/x/net/dns/dnsmessage", "q"},
+ {"vendor/golang.org/x/net/http/httpguts", "\x8b\x02\x14\x1a\x14\r"},
+ {"vendor/golang.org/x/net/http/httpproxy", "q\x03\x97\x01\x10\x05\x01\x18\x14\r"},
+ {"vendor/golang.org/x/net/http2/hpack", "\x03n\x03\x81\x01F"},
+ {"vendor/golang.org/x/net/idna", "t\x8d\x018\x14\x10\x02\x01"},
+ {"vendor/golang.org/x/net/nettest", "\x03g\a\x03\x81\x01\x11\x05\x16\x01\f\n\x01\x02\x02\x01\v"},
+ {"vendor/golang.org/x/sys/cpu", "\xa1\x02\r\n\x01\x16"},
+ {"vendor/golang.org/x/text/secure/bidirule", "q\xdc\x01\x11\x01"},
+ {"vendor/golang.org/x/text/transform", "\x03n\x84\x01X"},
+ {"vendor/golang.org/x/text/unicode/bidi", "\x03\bi\x85\x01>\x16"},
+ {"vendor/golang.org/x/text/unicode/norm", "j\n\x81\x01F\x12\x11"},
+ {"weak", "\x97\x01\x97\x01!"},
}
+
+// bootstrap is the set of bootstrap packages extracted from cmd/dist.
+var bootstrap = map[string]bool{
+ "cmp": true,
+ "cmd/asm": true,
+ "cmd/asm/internal/arch": true,
+ "cmd/asm/internal/asm": true,
+ "cmd/asm/internal/flags": true,
+ "cmd/asm/internal/lex": true,
+ "cmd/cgo": true,
+ "cmd/compile": true,
+ "cmd/compile/internal/abi": true,
+ "cmd/compile/internal/abt": true,
+ "cmd/compile/internal/amd64": true,
+ "cmd/compile/internal/arm": true,
+ "cmd/compile/internal/arm64": true,
+ "cmd/compile/internal/base": true,
+ "cmd/compile/internal/bitvec": true,
+ "cmd/compile/internal/compare": true,
+ "cmd/compile/internal/coverage": true,
+ "cmd/compile/internal/deadlocals": true,
+ "cmd/compile/internal/devirtualize": true,
+ "cmd/compile/internal/dwarfgen": true,
+ "cmd/compile/internal/escape": true,
+ "cmd/compile/internal/gc": true,
+ "cmd/compile/internal/importer": true,
+ "cmd/compile/internal/inline": true,
+ "cmd/compile/internal/inline/inlheur": true,
+ "cmd/compile/internal/inline/interleaved": true,
+ "cmd/compile/internal/ir": true,
+ "cmd/compile/internal/liveness": true,
+ "cmd/compile/internal/logopt": true,
+ "cmd/compile/internal/loong64": true,
+ "cmd/compile/internal/loopvar": true,
+ "cmd/compile/internal/mips": true,
+ "cmd/compile/internal/mips64": true,
+ "cmd/compile/internal/noder": true,
+ "cmd/compile/internal/objw": true,
+ "cmd/compile/internal/pgoir": true,
+ "cmd/compile/internal/pkginit": true,
+ "cmd/compile/internal/ppc64": true,
+ "cmd/compile/internal/rangefunc": true,
+ "cmd/compile/internal/reflectdata": true,
+ "cmd/compile/internal/riscv64": true,
+ "cmd/compile/internal/rttype": true,
+ "cmd/compile/internal/s390x": true,
+ "cmd/compile/internal/ssa": true,
+ "cmd/compile/internal/ssagen": true,
+ "cmd/compile/internal/staticdata": true,
+ "cmd/compile/internal/staticinit": true,
+ "cmd/compile/internal/syntax": true,
+ "cmd/compile/internal/test": true,
+ "cmd/compile/internal/typebits": true,
+ "cmd/compile/internal/typecheck": true,
+ "cmd/compile/internal/types": true,
+ "cmd/compile/internal/types2": true,
+ "cmd/compile/internal/walk": true,
+ "cmd/compile/internal/wasm": true,
+ "cmd/compile/internal/x86": true,
+ "cmd/internal/archive": true,
+ "cmd/internal/bio": true,
+ "cmd/internal/codesign": true,
+ "cmd/internal/dwarf": true,
+ "cmd/internal/edit": true,
+ "cmd/internal/gcprog": true,
+ "cmd/internal/goobj": true,
+ "cmd/internal/hash": true,
+ "cmd/internal/macho": true,
+ "cmd/internal/obj": true,
+ "cmd/internal/obj/arm": true,
+ "cmd/internal/obj/arm64": true,
+ "cmd/internal/obj/loong64": true,
+ "cmd/internal/obj/mips": true,
+ "cmd/internal/obj/ppc64": true,
+ "cmd/internal/obj/riscv": true,
+ "cmd/internal/obj/s390x": true,
+ "cmd/internal/obj/wasm": true,
+ "cmd/internal/obj/x86": true,
+ "cmd/internal/objabi": true,
+ "cmd/internal/par": true,
+ "cmd/internal/pgo": true,
+ "cmd/internal/pkgpath": true,
+ "cmd/internal/quoted": true,
+ "cmd/internal/src": true,
+ "cmd/internal/sys": true,
+ "cmd/internal/telemetry": true,
+ "cmd/internal/telemetry/counter": true,
+ "cmd/link": true,
+ "cmd/link/internal/amd64": true,
+ "cmd/link/internal/arm": true,
+ "cmd/link/internal/arm64": true,
+ "cmd/link/internal/benchmark": true,
+ "cmd/link/internal/dwtest": true,
+ "cmd/link/internal/ld": true,
+ "cmd/link/internal/loadelf": true,
+ "cmd/link/internal/loader": true,
+ "cmd/link/internal/loadmacho": true,
+ "cmd/link/internal/loadpe": true,
+ "cmd/link/internal/loadxcoff": true,
+ "cmd/link/internal/loong64": true,
+ "cmd/link/internal/mips": true,
+ "cmd/link/internal/mips64": true,
+ "cmd/link/internal/ppc64": true,
+ "cmd/link/internal/riscv64": true,
+ "cmd/link/internal/s390x": true,
+ "cmd/link/internal/sym": true,
+ "cmd/link/internal/wasm": true,
+ "cmd/link/internal/x86": true,
+ "compress/flate": true,
+ "compress/zlib": true,
+ "container/heap": true,
+ "debug/dwarf": true,
+ "debug/elf": true,
+ "debug/macho": true,
+ "debug/pe": true,
+ "go/build/constraint": true,
+ "go/constant": true,
+ "go/version": true,
+ "internal/abi": true,
+ "internal/coverage": true,
+ "cmd/internal/cov/covcmd": true,
+ "internal/bisect": true,
+ "internal/buildcfg": true,
+ "internal/exportdata": true,
+ "internal/goarch": true,
+ "internal/godebugs": true,
+ "internal/goexperiment": true,
+ "internal/goroot": true,
+ "internal/gover": true,
+ "internal/goversion": true,
+ "internal/lazyregexp": true,
+ "internal/pkgbits": true,
+ "internal/platform": true,
+ "internal/profile": true,
+ "internal/race": true,
+ "internal/runtime/gc": true,
+ "internal/saferio": true,
+ "internal/syscall/unix": true,
+ "internal/types/errors": true,
+ "internal/unsafeheader": true,
+ "internal/xcoff": true,
+ "internal/zstd": true,
+ "math/bits": true,
+ "sort": true,
+}
+
+// BootstrapVersion is the minor version of Go used during toolchain
+// bootstrapping. Packages for which [IsBootstrapPackage] reports true
+// must not use features of Go newer than this version.
+const BootstrapVersion = Version(24) // go1.24.6
return strings.Compare(p.name, n)
})
}
+
+// IsBootstrapPackage reports whether pkg is one of the low-level
+// packages in the Go distribution that must compile with the older
+// language version specified by [BootstrapVersion] during toolchain
+// bootstrapping; see golang.org/s/go15bootstrap.
+func IsBootstrapPackage(pkg string) bool {
+ return bootstrap[pkg]
+}
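
// A minimal usage sketch (illustrative only; fixAllowed and required are
// hypothetical names, not part of this package): deciding whether a change
// that needs language version required may be applied to package pkg.
func fixAllowed(pkg string, required Version) bool {
	if IsBootstrapPackage(pkg) {
		// Bootstrap packages must keep compiling with the bootstrap
		// toolchain, so they are capped at BootstrapVersion.
		return required <= BootstrapVersion
	}
	return true
}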
{"(*Buffer).Grow", Method, 1, ""},
{"(*Buffer).Len", Method, 0, ""},
{"(*Buffer).Next", Method, 0, ""},
+ {"(*Buffer).Peek", Method, 26, ""},
{"(*Buffer).Read", Method, 0, ""},
{"(*Buffer).ReadByte", Method, 0, ""},
{"(*Buffer).ReadBytes", Method, 0, ""},
{"ResultNoRows", Var, 0, ""},
{"Rows", Type, 0, ""},
{"RowsAffected", Type, 0, ""},
+ {"RowsColumnScanner", Type, 26, ""},
{"RowsColumnTypeDatabaseTypeName", Type, 8, ""},
{"RowsColumnTypeLength", Type, 8, ""},
{"RowsColumnTypeNullable", Type, 8, ""},
},
"errors": {
{"As", Func, 13, "func(err error, target any) bool"},
+ {"AsType", Func, 26, "func[E error](err error) (E, bool)"},
{"ErrUnsupported", Var, 21, ""},
{"Is", Func, 13, "func(err error, target error) bool"},
{"Join", Func, 20, "func(errs ...error) error"},
{"Append", Func, 19, "func(b []byte, a ...any) []byte"},
{"Appendf", Func, 19, "func(b []byte, format string, a ...any) []byte"},
{"Appendln", Func, 19, "func(b []byte, a ...any) []byte"},
- {"Errorf", Func, 0, "func(format string, a ...any) error"},
+ {"Errorf", Func, 0, "func(format string, a ...any) (err error)"},
{"FormatString", Func, 20, "func(state State, verb rune) string"},
{"Formatter", Type, 0, ""},
{"Fprint", Func, 0, "func(w io.Writer, a ...any) (n int, err error)"},
{"(*DeclStmt).Pos", Method, 0, ""},
{"(*DeferStmt).End", Method, 0, ""},
{"(*DeferStmt).Pos", Method, 0, ""},
+ {"(*Directive).End", Method, 26, ""},
+ {"(*Directive).ParseArgs", Method, 26, ""},
+ {"(*Directive).Pos", Method, 26, ""},
{"(*Ellipsis).End", Method, 0, ""},
{"(*Ellipsis).Pos", Method, 0, ""},
{"(*EmptyStmt).End", Method, 0, ""},
{"DeferStmt", Type, 0, ""},
{"DeferStmt.Call", Field, 0, ""},
{"DeferStmt.Defer", Field, 0, ""},
+ {"Directive", Type, 26, ""},
+ {"Directive.Args", Field, 26, ""},
+ {"Directive.ArgsPos", Field, 26, ""},
+ {"Directive.Name", Field, 26, ""},
+ {"Directive.Slash", Field, 26, ""},
+ {"Directive.Tool", Field, 26, ""},
+ {"DirectiveArg", Type, 26, ""},
+ {"DirectiveArg.Arg", Field, 26, ""},
+ {"DirectiveArg.Pos", Field, 26, ""},
{"Ellipsis", Type, 0, ""},
{"Ellipsis.Ellipsis", Field, 0, ""},
{"Ellipsis.Elt", Field, 0, ""},
{"ParenExpr.Lparen", Field, 0, ""},
{"ParenExpr.Rparen", Field, 0, ""},
{"ParenExpr.X", Field, 0, ""},
+ {"ParseDirective", Func, 26, "func(pos token.Pos, c string) (Directive, bool)"},
{"Pkg", Const, 0, ""},
{"Preorder", Func, 23, "func(root Node) iter.Seq[Node]"},
{"PreorderStack", Func, 25, "func(root Node, stack []Node, f func(n Node, stack []Node) bool)"},
{"(*Logger).WarnContext", Method, 21, ""},
{"(*Logger).With", Method, 21, ""},
{"(*Logger).WithGroup", Method, 21, ""},
+ {"(*MultiHandler).Enabled", Method, 26, ""},
+ {"(*MultiHandler).Handle", Method, 26, ""},
+ {"(*MultiHandler).WithAttrs", Method, 26, ""},
+ {"(*MultiHandler).WithGroup", Method, 26, ""},
{"(*Record).Add", Method, 21, ""},
{"(*Record).AddAttrs", Method, 21, ""},
{"(*TextHandler).Enabled", Method, 21, ""},
{"LogValuer", Type, 21, ""},
{"Logger", Type, 21, ""},
{"MessageKey", Const, 21, ""},
+ {"MultiHandler", Type, 26, ""},
{"New", Func, 21, "func(h Handler) *Logger"},
{"NewJSONHandler", Func, 21, "func(w io.Writer, opts *HandlerOptions) *JSONHandler"},
{"NewLogLogger", Func, 21, "func(h Handler, level Level) *log.Logger"},
+ {"NewMultiHandler", Func, 26, "func(handlers ...Handler) *MultiHandler"},
{"NewRecord", Func, 21, "func(t time.Time, level Level, msg string, pc uintptr) Record"},
{"NewTextHandler", Func, 21, "func(w io.Writer, opts *HandlerOptions) *TextHandler"},
{"Record", Type, 21, ""},
{"MinInt64", Const, 0, ""},
{"MinInt8", Const, 0, ""},
{"Mod", Func, 0, "func(x float64, y float64) float64"},
- {"Modf", Func, 0, "func(f float64) (int float64, frac float64)"},
+ {"Modf", Func, 0, "func(f float64) (integer float64, fractional float64)"},
{"NaN", Func, 0, "func() float64"},
{"Nextafter", Func, 0, "func(x float64, y float64) (r float64)"},
{"Nextafter32", Func, 4, "func(x float32, y float32) (r float32)"},
{"(*DNSError).Unwrap", Method, 23, ""},
{"(*Dialer).Dial", Method, 1, ""},
{"(*Dialer).DialContext", Method, 7, ""},
+ {"(*Dialer).DialIP", Method, 26, ""},
+ {"(*Dialer).DialTCP", Method, 26, ""},
+ {"(*Dialer).DialUDP", Method, 26, ""},
+ {"(*Dialer).DialUnix", Method, 26, ""},
{"(*Dialer).MultipathTCP", Method, 21, ""},
{"(*Dialer).SetMultipathTCP", Method, 21, ""},
{"(*IP).UnmarshalText", Method, 2, ""},
{"HTTP2Config.PermitProhibitedCipherSuites", Field, 24, ""},
{"HTTP2Config.PingTimeout", Field, 24, ""},
{"HTTP2Config.SendPingTimeout", Field, 24, ""},
+ {"HTTP2Config.StrictMaxConcurrentRequests", Field, 26, ""},
{"HTTP2Config.WriteByteTimeout", Field, 24, ""},
{"Handle", Func, 0, "func(pattern string, handler Handler)"},
{"HandleFunc", Func, 0, "func(pattern string, handler func(ResponseWriter, *Request))"},
{"(Prefix).AppendText", Method, 24, ""},
{"(Prefix).AppendTo", Method, 18, ""},
{"(Prefix).Bits", Method, 18, ""},
+ {"(Prefix).Compare", Method, 26, ""},
{"(Prefix).Contains", Method, 18, ""},
{"(Prefix).IsSingleIP", Method, 18, ""},
{"(Prefix).IsValid", Method, 18, ""},
{"(*Process).Release", Method, 0, ""},
{"(*Process).Signal", Method, 0, ""},
{"(*Process).Wait", Method, 0, ""},
+ {"(*Process).WithHandle", Method, 26, ""},
{"(*ProcessState).ExitCode", Method, 12, ""},
{"(*ProcessState).Exited", Method, 0, ""},
{"(*ProcessState).Pid", Method, 0, ""},
{"ErrExist", Var, 0, ""},
{"ErrInvalid", Var, 0, ""},
{"ErrNoDeadline", Var, 10, ""},
+ {"ErrNoHandle", Var, 26, ""},
{"ErrNotExist", Var, 0, ""},
{"ErrPermission", Var, 0, ""},
{"ErrProcessDone", Var, 16, ""},
{"ListSeparator", Const, 0, ""},
{"Localize", Func, 23, "func(path string) (string, error)"},
{"Match", Func, 0, "func(pattern string, name string) (matched bool, err error)"},
- {"Rel", Func, 0, "func(basepath string, targpath string) (string, error)"},
+ {"Rel", Func, 0, "func(basePath string, targPath string) (string, error)"},
{"Separator", Const, 0, ""},
{"SkipAll", Var, 20, ""},
{"SkipDir", Var, 0, ""},
{"PanicNilError", Type, 21, ""},
{"Pinner", Type, 21, ""},
{"ReadMemStats", Func, 0, "func(m *MemStats)"},
- {"ReadTrace", Func, 5, "func() []byte"},
+ {"ReadTrace", Func, 5, "func() (buf []byte)"},
{"SetBlockProfileRate", Func, 1, "func(rate int)"},
{"SetCPUProfileRate", Func, 0, "func(hz int)"},
{"SetCgoTraceback", Func, 7, "func(version int, traceback unsafe.Pointer, context unsafe.Pointer, symbolizer unsafe.Pointer)"},
{"ValueOf", Func, 0, ""},
},
"testing": {
+ {"(*B).ArtifactDir", Method, 26, ""},
{"(*B).Attr", Method, 25, ""},
{"(*B).Chdir", Method, 24, ""},
{"(*B).Cleanup", Method, 14, ""},
{"(*B).StopTimer", Method, 0, ""},
{"(*B).TempDir", Method, 15, ""},
{"(*F).Add", Method, 18, ""},
+ {"(*F).ArtifactDir", Method, 26, ""},
{"(*F).Attr", Method, 25, ""},
{"(*F).Chdir", Method, 24, ""},
{"(*F).Cleanup", Method, 18, ""},
{"(*F).TempDir", Method, 18, ""},
{"(*M).Run", Method, 4, ""},
{"(*PB).Next", Method, 3, ""},
+ {"(*T).ArtifactDir", Method, 26, ""},
{"(*T).Attr", Method, 25, ""},
{"(*T).Chdir", Method, 24, ""},
{"(*T).Cleanup", Method, 14, ""},
// The term set of an interface is the intersection of the term sets of its
// embedded types.
tset.terms = allTermlist
- for i := 0; i < u.NumEmbeddeds(); i++ {
- embedded := u.EmbeddedType(i)
+ for embedded := range u.EmbeddedTypes() {
if _, ok := embedded.Underlying().(*types.TypeParam); ok {
return nil, fmt.Errorf("invalid embedded type %T", embedded)
}
case *types.Union:
// The term set of a union is the union of term sets of its terms.
tset.terms = nil
- for i := 0; i < u.Len(); i++ {
- t := u.Term(i)
+ for t := range u.Terms() {
var terms termlist
switch t.Type().Underlying().(type) {
case *types.Interface:
// Recursion over signatures of each method.
tmset := msets.MethodSet(T)
- for i := 0; i < tmset.Len(); i++ {
- sig := tmset.At(i).Type().(*types.Signature)
+ for method := range tmset.Methods() {
+ sig := method.Type().(*types.Signature)
// It is tempting to call visit(sig, false)
// but, as noted in golang.org/cl/65450043,
// the Signature.Recv field is ignored by
return ok &&
IsPackageLevel(obj) &&
f.Pkg().Path() == pkgPath &&
- f.Type().(*types.Signature).Recv() == nil &&
+ f.Signature().Recv() == nil &&
slices.Contains(names, f.Name())
}
// which is important for the performance of syntax matching.
func IsMethodNamed(obj types.Object, pkgPath string, typeName string, names ...string) bool {
if fn, ok := obj.(*types.Func); ok {
- if recv := fn.Type().(*types.Signature).Recv(); recv != nil {
+ if recv := fn.Signature().Recv(); recv != nil {
_, T := ReceiverNamed(recv)
return T != nil &&
IsTypeNamed(T, pkgPath, typeName) &&
// TODO(adonovan): this function ignores the effect of shadowing. It
// should accept a [token.Pos] and a [types.Info] and compute only the
// set of imports that are not shadowed at that point, analogous to
-// [analysisinternal.AddImport]. It could also compute (as a side
+// [analysis.AddImport]. It could also compute (as a side
// effect) the set of additional imports required to ensure that there
// is an accessible import for each necessary package, making it
// converge even more closely with AddImport.
case *types.Signature:
var params []*ast.Field
- for i := 0; i < t.Params().Len(); i++ {
+ for v := range t.Params().Variables() {
params = append(params, &ast.Field{
- Type: TypeExpr(t.Params().At(i).Type(), qual),
+ Type: TypeExpr(v.Type(), qual),
Names: []*ast.Ident{
{
- Name: t.Params().At(i).Name(),
+ Name: v.Name(),
},
},
})
last.Type = &ast.Ellipsis{Elt: last.Type.(*ast.ArrayType).Elt}
}
var returns []*ast.Field
- for i := 0; i < t.Results().Len(); i++ {
+ for v := range t.Results().Variables() {
returns = append(returns, &ast.Field{
- Type: TypeExpr(t.Results().At(i).Type(), qual),
+ Type: TypeExpr(v.Type(), qual),
})
}
return &ast.FuncType{
if hasTypeArgs, ok := t.(interface{ TypeArgs() *types.TypeList }); ok {
if typeArgs := hasTypeArgs.TypeArgs(); typeArgs != nil && typeArgs.Len() > 0 {
var indices []ast.Expr
- for i := range typeArgs.Len() {
- indices = append(indices, TypeExpr(typeArgs.At(i), qual))
+ for t0 := range typeArgs.Types() {
+ indices = append(indices, TypeExpr(t0, qual))
}
expr = &ast.IndexListExpr{
X: expr,
// This file contains predicates for working with file versions to
// decide when a tool should consider a language feature enabled.
-// GoVersions that features in x/tools can be gated to.
+// Named constants for Go versions, to avoid misspellings.
const (
Go1_18 = "go1.18"
Go1_19 = "go1.19"
Go1_20 = "go1.20"
Go1_21 = "go1.21"
Go1_22 = "go1.22"
+ Go1_23 = "go1.23"
+ Go1_24 = "go1.24"
+ Go1_25 = "go1.25"
+ Go1_26 = "go1.26"
)
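
// A minimal usage sketch of these constants (assumes this package's AtLeast
// helper and a *types.Info whose FileVersions map is populated; illustrative
// only):
//
//	fileVersion := info.FileVersions[file]
//	if AtLeast(fileVersion, Go1_26) {
//		// This file may use go1.26 language features.
//	}
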
// Future is an invalid unknown Go version sometime in the future.
# golang.org/x/build v0.0.0-20250806225920-b7c66c047964
## explicit; go 1.23.0
golang.org/x/build/relnote
-# golang.org/x/mod v0.29.0
+# golang.org/x/mod v0.30.1-0.20251114215501-3f03020ad526
## explicit; go 1.24.0
golang.org/x/mod/internal/lazyregexp
golang.org/x/mod/modfile
golang.org/x/mod/sumdb/note
golang.org/x/mod/sumdb/tlog
golang.org/x/mod/zip
-# golang.org/x/sync v0.17.0
+# golang.org/x/sync v0.18.0
## explicit; go 1.24.0
golang.org/x/sync/errgroup
golang.org/x/sync/semaphore
-# golang.org/x/sys v0.37.0
+# golang.org/x/sys v0.38.0
## explicit; go 1.24.0
golang.org/x/sys/plan9
golang.org/x/sys/unix
golang.org/x/sys/windows
-# golang.org/x/telemetry v0.0.0-20251008203120-078029d740a8
+# golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54
## explicit; go 1.24.0
golang.org/x/telemetry
golang.org/x/telemetry/counter
golang.org/x/text/language
golang.org/x/text/transform
golang.org/x/text/unicode/norm
-# golang.org/x/tools v0.38.1-0.20251015192825-7d9453ccc0f5
+# golang.org/x/tools v0.39.1-0.20251114194111-59ff18ce4883
## explicit; go 1.24.0
golang.org/x/tools/cmd/bisect
golang.org/x/tools/cover
golang.org/x/tools/go/analysis/passes/ifaceassert
golang.org/x/tools/go/analysis/passes/inline
golang.org/x/tools/go/analysis/passes/inspect
+golang.org/x/tools/go/analysis/passes/internal/ctrlflowinternal
golang.org/x/tools/go/analysis/passes/internal/gofixdirective
golang.org/x/tools/go/analysis/passes/loopclosure
golang.org/x/tools/go/analysis/passes/lostcancel
golang.org/x/tools/go/types/objectpath
golang.org/x/tools/go/types/typeutil
golang.org/x/tools/internal/aliases
-golang.org/x/tools/internal/analysisinternal
-golang.org/x/tools/internal/analysisinternal/generated
-golang.org/x/tools/internal/analysisinternal/typeindex
+golang.org/x/tools/internal/analysis/analyzerutil
+golang.org/x/tools/internal/analysis/driverutil
+golang.org/x/tools/internal/analysis/typeindex
golang.org/x/tools/internal/astutil
+golang.org/x/tools/internal/astutil/free
golang.org/x/tools/internal/bisect
+golang.org/x/tools/internal/cfginternal
golang.org/x/tools/internal/diff
golang.org/x/tools/internal/diff/lcs
golang.org/x/tools/internal/facts
)
require (
- golang.org/x/sys v0.37.0 // indirect
+ golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.30.0 // indirect
)
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4=
golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210=
-golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
-golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
+golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
+golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
HasSHA2 bool // SHA2 hardware implementation
HasCRC32 bool // CRC32 hardware implementation
HasATOMICS bool // Atomic memory operation instruction set
+ HasHPDS bool // Hierarchical permission disables in translation tables
+ HasLOR bool // Limited ordering regions
+ HasPAN bool // Privileged access never
HasFPHP bool // Half precision floating-point instruction set
HasASIMDHP bool // Advanced SIMD half precision instruction set
HasCPUID bool // CPUID identification scheme registers
func readARM64Registers() {
Initialized = true
- parseARM64SystemRegisters(getisar0(), getisar1(), getpfr0())
+ parseARM64SystemRegisters(getisar0(), getisar1(), getmmfr1(), getpfr0())
}
-func parseARM64SystemRegisters(isar0, isar1, pfr0 uint64) {
+func parseARM64SystemRegisters(isar0, isar1, mmfr1, pfr0 uint64) {
// ID_AA64ISAR0_EL1
switch extractBits(isar0, 4, 7) {
case 1:
ARM64.HasI8MM = true
}
+ // ID_AA64MMFR1_EL1
+ switch extractBits(mmfr1, 12, 15) {
+ case 1, 2:
+ ARM64.HasHPDS = true
+ }
+
+ switch extractBits(mmfr1, 16, 19) {
+ case 1:
+ ARM64.HasLOR = true
+ }
+
+ switch extractBits(mmfr1, 20, 23) {
+ case 1, 2, 3:
+ ARM64.HasPAN = true
+ }
+
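	// Field layout relied on above, per the Arm architecture reference for
	// ID_AA64MMFR1_EL1 (summarized here for illustration): HPDS occupies
	// bits [15:12], LOR bits [19:16], and PAN bits [23:20]; the values
	// matched are the currently defined revisions (e.g. PAN, PAN2, PAN3 for
	// bits [23:20]). Worked example: with mmfr1 = 0x0000000000300000,
	// extractBits(mmfr1, 20, 23) == 3, so HasPAN is set.
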
// ID_AA64PFR0_EL1
switch extractBits(pfr0, 16, 19) {
case 0:
// func getisar0() uint64
TEXT ·getisar0(SB),NOSPLIT,$0-8
// get Instruction Set Attributes 0 into x0
- // mrs x0, ID_AA64ISAR0_EL1 = d5380600
- WORD $0xd5380600
+ MRS ID_AA64ISAR0_EL1, R0
MOVD R0, ret+0(FP)
RET
// func getisar1() uint64
TEXT ·getisar1(SB),NOSPLIT,$0-8
// get Instruction Set Attributes 1 into x0
- // mrs x0, ID_AA64ISAR1_EL1 = d5380620
- WORD $0xd5380620
+ MRS ID_AA64ISAR1_EL1, R0
+ MOVD R0, ret+0(FP)
+ RET
+
+// func getmmfr1() uint64
+TEXT ·getmmfr1(SB),NOSPLIT,$0-8
+ // get Memory Model Feature Register 1 into x0
+ MRS ID_AA64MMFR1_EL1, R0
MOVD R0, ret+0(FP)
RET
// func getpfr0() uint64
TEXT ·getpfr0(SB),NOSPLIT,$0-8
// get Processor Feature Register 0 into x0
- // mrs x0, ID_AA64PFR0_EL1 = d5380400
- WORD $0xd5380400
+ MRS ID_AA64PFR0_EL1, R0
MOVD R0, ret+0(FP)
RET
// func getzfr0() uint64
TEXT ·getzfr0(SB),NOSPLIT,$0-8
// get SVE Feature Register 0 into x0
- // mrs x0, ID_AA64ZFR0_EL1 = d5380480
- WORD $0xd5380480
+ MRS ID_AA64ZFR0_EL1, R0
MOVD R0, ret+0(FP)
RET
func getisar0() uint64
func getisar1() uint64
+func getmmfr1() uint64
func getpfr0() uint64
func getzfr0() uint64
func getisar0() uint64 { return 0 }
func getisar1() uint64 { return 0 }
+func getmmfr1() uint64 { return 0 }
func getpfr0() uint64 { return 0 }
setMinimalFeatures()
return
}
- parseARM64SystemRegisters(cpuid.aa64isar0, cpuid.aa64isar1, cpuid.aa64pfr0)
+ parseARM64SystemRegisters(cpuid.aa64isar0, cpuid.aa64isar1, cpuid.aa64mmfr1, cpuid.aa64pfr0)
Initialized = true
}
if !ok {
return
}
- parseARM64SystemRegisters(isar0, isar1, 0)
+ parseARM64SystemRegisters(isar0, isar1, 0, 0)
Initialized = true
}
golang.org/x/net/idna
golang.org/x/net/lif
golang.org/x/net/nettest
-# golang.org/x/sys v0.37.0
+# golang.org/x/sys v0.38.0
## explicit; go 1.24.0
golang.org/x/sys/cpu
# golang.org/x/text v0.30.0