]> Cypherpunks repositories - gostls13.git/commitdiff
cmd/vendor: add x/tools/go/analysis/cmd/vet-lite + deps
authorAlan Donovan <adonovan@google.com>
Mon, 5 Nov 2018 20:51:35 +0000 (15:51 -0500)
committerAlan Donovan <adonovan@google.com>
Tue, 6 Nov 2018 18:23:38 +0000 (18:23 +0000)
This change adds the vet-lite command (the future cmd/vet) and all its
dependencies from x/tools, but not its tests and their dependencies.
It was created with these commands:

  $ (cd $GOPATH/src/golang.org/x/tools && git checkout c76e1ad)
  $ cd GOROOT/src/cmd
  $ govendor add $(go list -deps golang.org/x/tools/go/analysis/cmd/vet-lite | grep golang.org/x/tools)
  $ rm -fr $(find vendor/golang.org/x/tools/ -name testdata)
  $ rm $(find vendor/golang.org/x/tools/ -name \*_test.go)

I feel sure I am holding govendor wrong. Please advise.

A followup CL will make cmd/vet behave like vet-lite, initially just
for users that opt in, and soon after for all users, at which point
cmd/vet will be replaced in its entirety by a copy of vet-lite's small
main.go.

In the meantime, anyone can try the new tool using these commands:

 $ go build cmd/vendor/golang.org/x/tools/go/analysis/cmd/vet-lite
 $ export GOVETTOOL=$(which vet-lite)
 $ go vet your/project/...

Change-Id: Iea168111a32ce62f82f9fb706385ca0f368bc869
Reviewed-on: https://go-review.googlesource.com/c/147444
Reviewed-by: Russ Cox <rsc@golang.org>
52 files changed:
src/cmd/vendor/golang.org/x/tools/LICENSE [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/PATENTS [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/analysis.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/cmd/vet-lite/main.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/doc.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/internal/analysisflags/flags.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/internal/facts/facts.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/internal/facts/imports.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/internal/unitchecker/unitchecker.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/shift/dead.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/analysis/validate.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/ast/astutil/imports.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/ast/astutil/util.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/ast/inspector/inspector.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/ast/inspector/typeof.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/cfg/builder.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/cfg/cfg.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/types/typeutil/callee.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/types/typeutil/imports.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/types/typeutil/map.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go [new file with mode: 0644]
src/cmd/vendor/golang.org/x/tools/go/types/typeutil/ui.go [new file with mode: 0644]
src/cmd/vendor/vendor.json

diff --git a/src/cmd/vendor/golang.org/x/tools/LICENSE b/src/cmd/vendor/golang.org/x/tools/LICENSE
new file mode 100644 (file)
index 0000000..6a66aea
--- /dev/null
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+   * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+   * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/cmd/vendor/golang.org/x/tools/PATENTS b/src/cmd/vendor/golang.org/x/tools/PATENTS
new file mode 100644 (file)
index 0000000..7330990
--- /dev/null
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go.  This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation.  If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/analysis.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/analysis.go
new file mode 100644 (file)
index 0000000..21baa02
--- /dev/null
@@ -0,0 +1,192 @@
+package analysis
+
+import (
+       "flag"
+       "fmt"
+       "go/ast"
+       "go/token"
+       "go/types"
+       "reflect"
+)
+
+// An Analyzer describes an analysis function and its options.
+type Analyzer struct {
+       // The Name of the analyzer must be a valid Go identifier
+       // as it may appear in command-line flags, URLs, and so on.
+       Name string
+
+       // Doc is the documentation for the analyzer.
+       // The part before the first "\n\n" is the title
+       // (no capital or period, max ~60 letters).
+       Doc string
+
+       // Flags defines any flags accepted by the analyzer.
+       // The manner in which these flags are exposed to the user
+       // depends on the driver which runs the analyzer.
+       Flags flag.FlagSet
+
+       // Run applies the analyzer to a package.
+       // It returns an error if the analyzer failed.
+       //
+       // On success, the Run function may return a result
+       // computed by the Analyzer; its type must match ResultType.
+       // The driver makes this result available as an input to
+       // another Analyzer that depends directly on this one (see
+       // Requires) when it analyzes the same package.
+       //
+       // To pass analysis results between packages (and thus
+       // potentially between address spaces), use Facts, which are
+       // serializable.
+       Run func(*Pass) (interface{}, error)
+
+       // RunDespiteErrors allows the driver to invoke
+       // the Run method of this analyzer even on a
+       // package that contains parse or type errors.
+       RunDespiteErrors bool
+
+       // Requires is a set of analyzers that must run successfully
+       // before this one on a given package. This analyzer may inspect
+       // the outputs produced by each analyzer in Requires.
+       // The graph over analyzers implied by Requires edges must be acyclic.
+       //
+       // Requires establishes a "horizontal" dependency between
+       // analysis passes (different analyzers, same package).
+       Requires []*Analyzer
+
+       // ResultType is the type of the optional result of the Run function.
+       ResultType reflect.Type
+
+       // FactTypes indicates that this analyzer imports and exports
+       // Facts of the specified concrete types.
+       // An analyzer that uses facts may assume that its import
+       // dependencies have been similarly analyzed before it runs.
+       // Facts must be pointers.
+       //
+       // FactTypes establishes a "vertical" dependency between
+       // analysis passes (same analyzer, different packages).
+       FactTypes []Fact
+}
+
+func (a *Analyzer) String() string { return a.Name }
+
+// A Pass provides information to the Run function that
+// applies a specific analyzer to a single Go package.
+//
+// It forms the interface between the analysis logic and the driver
+// program, and has both input and an output components.
+//
+// As in a compiler, one pass may depend on the result computed by another.
+//
+// The Run function should not call any of the Pass functions concurrently.
+type Pass struct {
+       Analyzer *Analyzer // the identity of the current analyzer
+
+       // syntax and type information
+       Fset       *token.FileSet // file position information
+       Files      []*ast.File    // the abstract syntax tree of each file
+       OtherFiles []string       // names of non-Go files of this package
+       Pkg        *types.Package // type information about the package
+       TypesInfo  *types.Info    // type information about the syntax trees
+
+       // Report reports a Diagnostic, a finding about a specific location
+       // in the analyzed source code such as a potential mistake.
+       // It may be called by the Run function.
+       Report func(Diagnostic)
+
+       // ResultOf provides the inputs to this analysis pass, which are
+       // the corresponding results of its prerequisite analyzers.
+       // The map keys are the elements of Analysis.Required,
+       // and the type of each corresponding value is the required
+       // analysis's ResultType.
+       ResultOf map[*Analyzer]interface{}
+
+       // -- facts --
+
+       // ImportObjectFact retrieves a fact associated with obj.
+       // Given a value ptr of type *T, where *T satisfies Fact,
+       // ImportObjectFact copies the value to *ptr.
+       //
+       // ImportObjectFact panics if called after the pass is complete.
+       // ImportObjectFact is not concurrency-safe.
+       ImportObjectFact func(obj types.Object, fact Fact) bool
+
+       // ImportPackageFact retrieves a fact associated with package pkg,
+       // which must be this package or one of its dependencies.
+       // See comments for ImportObjectFact.
+       ImportPackageFact func(pkg *types.Package, fact Fact) bool
+
+       // ExportObjectFact associates a fact of type *T with the obj,
+       // replacing any previous fact of that type.
+       //
+       // ExportObjectFact panics if it is called after the pass is
+       // complete, or if obj does not belong to the package being analyzed.
+       // ExportObjectFact is not concurrency-safe.
+       ExportObjectFact func(obj types.Object, fact Fact)
+
+       // ExportPackageFact associates a fact with the current package.
+       // See comments for ExportObjectFact.
+       ExportPackageFact func(fact Fact)
+
+       /* Further fields may be added in future. */
+       // For example, suggested or applied refactorings.
+}
+
+// Reportf is a helper function that reports a Diagnostic using the
+// specified position and formatted error message.
+func (pass *Pass) Reportf(pos token.Pos, format string, args ...interface{}) {
+       msg := fmt.Sprintf(format, args...)
+       pass.Report(Diagnostic{Pos: pos, Message: msg})
+}
+
+func (pass *Pass) String() string {
+       return fmt.Sprintf("%s@%s", pass.Analyzer.Name, pass.Pkg.Path())
+}
+
+// A Fact is an intermediate fact produced during analysis.
+//
+// Each fact is associated with a named declaration (a types.Object) or
+// with a package as a whole. A single object or package may have
+// multiple associated facts, but only one of any particular fact type.
+//
+// A Fact represents a predicate such as "never returns", but does not
+// represent the subject of the predicate such as "function F" or "package P".
+//
+// Facts may be produced in one analysis pass and consumed by another
+// analysis pass even if these are in different address spaces.
+// If package P imports Q, all facts about Q produced during
+// analysis of that package will be available during later analysis of P.
+// Facts are analogous to type export data in a build system:
+// just as export data enables separate compilation of several passes,
+// facts enable "separate analysis".
+//
+// Each pass (a, p) starts with the set of facts produced by the
+// same analyzer a applied to the packages directly imported by p.
+// The analysis may add facts to the set, and they may be exported in turn.
+// An analysis's Run function may retrieve facts by calling
+// Pass.Import{Object,Package}Fact and update them using
+// Pass.Export{Object,Package}Fact.
+//
+// A fact is logically private to its Analysis. To pass values
+// between different analyzers, use the results mechanism;
+// see Analyzer.Requires, Analyzer.ResultType, and Pass.ResultOf.
+//
+// A Fact type must be a pointer.
+// Facts are encoded and decoded using encoding/gob.
+// A Fact may implement the GobEncoder/GobDecoder interfaces
+// to customize its encoding. Fact encoding should not fail.
+//
+// A Fact should not be modified once exported.
+type Fact interface {
+       AFact() // dummy method to avoid type errors
+}
+
+// A Diagnostic is a message associated with a source location.
+//
+// An Analyzer may return a variety of diagnostics; the optional Category,
+// which should be a constant, may be used to classify them.
+// It is primarily intended to make it easy to look up documentation.
+type Diagnostic struct {
+       Pos      token.Pos
+       Category string // optional
+       Message  string
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/cmd/vet-lite/main.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/cmd/vet-lite/main.go
new file mode 100644 (file)
index 0000000..ae66a7d
--- /dev/null
@@ -0,0 +1,83 @@
+// The vet-lite command is a driver for static checkers conforming to
+// the golang.org/x/tools/go/analysis API. It must be run by go vet:
+//
+//   $ GOVETTOOL=$(which vet-lite) go vet
+//
+// For a checker also capable of running standalone, use multichecker.
+package main
+
+import (
+       "flag"
+       "log"
+       "strings"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/internal/analysisflags"
+       "golang.org/x/tools/go/analysis/internal/unitchecker"
+
+       "golang.org/x/tools/go/analysis/passes/asmdecl"
+       "golang.org/x/tools/go/analysis/passes/assign"
+       "golang.org/x/tools/go/analysis/passes/atomic"
+       "golang.org/x/tools/go/analysis/passes/bools"
+       "golang.org/x/tools/go/analysis/passes/buildtag"
+       "golang.org/x/tools/go/analysis/passes/cgocall"
+       "golang.org/x/tools/go/analysis/passes/composite"
+       "golang.org/x/tools/go/analysis/passes/copylock"
+       "golang.org/x/tools/go/analysis/passes/httpresponse"
+       "golang.org/x/tools/go/analysis/passes/loopclosure"
+       "golang.org/x/tools/go/analysis/passes/lostcancel"
+       "golang.org/x/tools/go/analysis/passes/nilfunc"
+       "golang.org/x/tools/go/analysis/passes/pkgfact"
+       "golang.org/x/tools/go/analysis/passes/printf"
+       "golang.org/x/tools/go/analysis/passes/shift"
+       "golang.org/x/tools/go/analysis/passes/stdmethods"
+       "golang.org/x/tools/go/analysis/passes/structtag"
+       "golang.org/x/tools/go/analysis/passes/tests"
+       "golang.org/x/tools/go/analysis/passes/unreachable"
+       "golang.org/x/tools/go/analysis/passes/unsafeptr"
+       "golang.org/x/tools/go/analysis/passes/unusedresult"
+)
+
+var analyzers = []*analysis.Analyzer{
+       // For now, just the traditional vet suite:
+       asmdecl.Analyzer,
+       assign.Analyzer,
+       atomic.Analyzer,
+       bools.Analyzer,
+       buildtag.Analyzer,
+       cgocall.Analyzer,
+       composite.Analyzer,
+       copylock.Analyzer,
+       httpresponse.Analyzer,
+       loopclosure.Analyzer,
+       lostcancel.Analyzer,
+       nilfunc.Analyzer,
+       pkgfact.Analyzer,
+       printf.Analyzer,
+       // shadow.Analyzer, // experimental; not enabled by default
+       shift.Analyzer,
+       stdmethods.Analyzer,
+       structtag.Analyzer,
+       tests.Analyzer,
+       unreachable.Analyzer,
+       unsafeptr.Analyzer,
+       unusedresult.Analyzer,
+}
+
+func main() {
+       log.SetFlags(0)
+       log.SetPrefix("vet: ")
+
+       if err := analysis.Validate(analyzers); err != nil {
+               log.Fatal(err)
+       }
+
+       analyzers = analysisflags.Parse(analyzers, true)
+
+       args := flag.Args()
+       if len(args) != 1 || !strings.HasSuffix(args[0], ".cfg") {
+               log.Fatalf("invalid command: want .cfg file (this reduced version of vet is intended to be run only by the 'go vet' command)")
+       }
+
+       unitchecker.Main(args[0], analyzers)
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/doc.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/doc.go
new file mode 100644 (file)
index 0000000..4223ab8
--- /dev/null
@@ -0,0 +1,328 @@
+/*
+
+The analysis package defines the interface between a modular static
+analysis and an analysis driver program.
+
+
+THIS INTERFACE IS EXPERIMENTAL AND SUBJECT TO CHANGE.
+We aim to finalize it by November 2018.
+
+Background
+
+A static analysis is a function that inspects a package of Go code and
+reports a set of diagnostics (typically mistakes in the code), and
+perhaps produces other results as well, such as suggested refactorings
+or other facts. An analysis that reports mistakes is informally called a
+"checker". For example, the printf checker reports mistakes in
+fmt.Printf format strings.
+
+A "modular" analysis is one that inspects one package at a time but can
+save information from a lower-level package and use it when inspecting a
+higher-level package, analogous to separate compilation in a toolchain.
+The printf checker is modular: when it discovers that a function such as
+log.Fatalf delegates to fmt.Printf, it records this fact, and checks
+calls to that function too, including calls made from another package.
+
+By implementing a common interface, checkers from a variety of sources
+can be easily selected, incorporated, and reused in a wide range of
+driver programs including command-line tools (such as vet), text editors and
+IDEs, build and test systems (such as go build, Bazel, or Buck), test
+frameworks, code review tools, code-base indexers (such as SourceGraph),
+documentation viewers (such as godoc), batch pipelines for large code
+bases, and so on.
+
+
+Analyzer
+
+The primary type in the API is Analyzer. An Analyzer statically
+describes an analysis function: its name, documentation, flags,
+relationship to other analyzers, and of course, its logic.
+
+To define an analysis, a user declares a (logically constant) variable
+of type Analyzer. Here is a typical example from one of the analyzers in
+the go/analysis/passes/ subdirectory:
+
+       package unusedresult
+
+       var Analyzer = &analysis.Analyzer{
+               Name:   "unusedresult",
+               Doc:    "check for unused results of calls to some functions",
+               Run:    run,
+               ...
+       }
+
+       func run(pass *analysis.Pass) (interface{}, error) {
+               ...
+       }
+
+
+An analysis driver is a program such as vet that runs a set of
+analyses and prints the diagnostics that they report.
+The driver program must import the list of Analyzers it needs.
+Typically each Analyzer resides in a separate package.
+To add a new Analyzer to an existing driver, add another item to the list:
+
+       import ( "unusedresult"; "nilness"; "printf" )
+
+       var analyses = []*analysis.Analyzer{
+               unusedresult.Analyzer,
+               nilness.Analyzer,
+               printf.Analyzer,
+       }
+
+A driver may use the name, flags, and documentation to provide on-line
+help that describes the analyses its performs.
+The vet command, shown below, is an example of a driver that runs
+multiple analyzers. It is based on the multichecker package
+(see the "Standalone commands" section for details).
+
+       $ go build golang.org/x/tools/cmd/vet
+       $ ./vet help
+       vet is a tool for static analysis of Go programs.
+
+       Usage: vet [-flag] [package]
+
+       Registered analyzers:
+
+           asmdecl      report mismatches between assembly files and Go declarations
+           assign       check for useless assignments
+           atomic       check for common mistakes using the sync/atomic package
+           ...
+           unusedresult check for unused results of calls to some functions
+
+       $ ./vet help unusedresult
+       unusedresult: check for unused results of calls to some functions
+
+       Analyzer flags:
+
+         -unusedresult.funcs value
+               comma-separated list of functions whose results must be used (default Error,String)
+         -unusedresult.stringmethods value
+               comma-separated list of names of methods of type func() string whose results must be used
+
+       Some functions like fmt.Errorf return a result and have no side effects,
+       so it is always a mistake to discard the result. This analyzer reports
+       calls to certain functions in which the result of the call is ignored.
+
+       The set of functions may be controlled using flags.
+
+The Analyzer type has more fields besides those shown above:
+
+       type Analyzer struct {
+               Name                    string
+               Doc                     string
+               Flags                   flag.FlagSet
+               Run                     func(*Pass) (interface{}, error)
+               RunDespiteErrors        bool
+               ResultType              reflect.Type
+               Requires                []*Analyzer
+               FactTypes               []Fact
+       }
+
+The Flags field declares a set of named (global) flag variables that
+control analysis behavior. Unlike vet, analysis flags are not declared
+directly in the command line FlagSet; it is up to the driver to set the
+flag variables. A driver for a single analysis, a, might expose its flag
+f directly on the command line as -f, whereas a driver for multiple
+analyses might prefix the flag name by the analysis name (-a.f) to avoid
+ambiguity. An IDE might expose the flags through a graphical interface,
+and a batch pipeline might configure them from a config file.
+See the "findcall" analyzer for an example of flags in action.
+
+The RunDespiteErrors flag indicates whether the analysis is equipped to
+handle ill-typed code. If not, the driver will skip the analysis if
+there were parse or type errors.
+The optional ResultType field specifies the type of the result value
+computed by this analysis and made available to other analyses.
+The Requires field specifies a list of analyses upon which
+this one depends and whose results it may access, and it constrains the
+order in which a driver may run analyses.
+The FactTypes field is discussed in the section on Modularity.
+The analysis package provides a Validate function to perform basic
+sanity checks on an Analyzer, such as that its Requires graph is
+acyclic, its fact and result types are unique, and so on.
+
+Finally, the Run field contains a function to be called by the driver to
+execute the analysis on a single package. The driver passes it an
+instance of the Pass type.
+
+
+Pass
+
+A Pass describes a single unit of work: the application of a particular
+Analyzer to a particular package of Go code.
+The Pass provides information to the Analyzer's Run function about the
+package being analyzed, and provides operations to the Run function for
+reporting diagnostics and other information back to the driver.
+
+       type Pass struct {
+               Fset            *token.FileSet
+               Files           []*ast.File
+               OtherFiles      []string
+               Pkg             *types.Package
+               TypesInfo       *types.Info
+               ResultOf        map[*Analyzer]interface{}
+               Report          func(Diagnostic)
+               ...
+       }
+
+The Fset, Files, Pkg, and TypesInfo fields provide the syntax trees,
+type information, and source positions for a single package of Go code.
+
+The OtherFiles field provides the names, but not the contents, of non-Go
+files such as assembly that are part of this package. See the "asmdecl"
+or "buildtags" analyzers for examples of loading non-Go files and report
+diagnostics against them.
+
+The ResultOf field provides the results computed by the analyzers
+required by this one, as expressed in its Analyzer.Requires field. The
+driver runs the required analyzers first and makes their results
+available in this map. Each Analyzer must return a value of the type
+described in its Analyzer.ResultType field.
+For example, the "ctrlflow" analyzer returns a *ctrlflow.CFGs, which
+provides a control-flow graph for each function in the package (see
+golang.org/x/tools/go/cfg); the "inspect" analyzer returns a value that
+enables other Analyzers to traverse the syntax trees of the package more
+efficiently; and the "buildssa" analyzer constructs an SSA-form
+intermediate representation.
+Each of these Analyzers extends the capabilities of later Analyzers
+without adding a dependency to the core API, so an analysis tool pays
+only for the extensions it needs.
+
+The Report function emits a diagnostic, a message associated with a
+source position. For most analyses, diagnostics are their primary
+result.
+For convenience, Pass provides a helper method, Reportf, to report a new
+diagnostic by formatting a string.
+Diagnostic is defined as:
+
+       type Diagnostic struct {
+               Pos      token.Pos
+               Category string // optional
+               Message  string
+       }
+
+The optional Category field is a short identifier that classifies the
+kind of message when an analysis produces several kinds of diagnostic.
+
+Most Analyzers inspect typed Go syntax trees, but a few, such as asmdecl
+and buildtag, inspect the raw text of Go source files or even non-Go
+files such as assembly. To report a diagnostic against a line of a
+raw text file, use the following sequence:
+
+       content, err := ioutil.ReadFile(filename)
+       if err != nil { ... }
+       tf := fset.AddFile(filename, -1, len(content))
+       tf.SetLinesForContent(content)
+       ...
+       pass.Reportf(tf.LineStart(line), "oops")
+
+
+Modular analysis with Facts
+
+To improve efficiency and scalability, large programs are routinely
+built using separate compilation: units of the program are compiled
+separately, and recompiled only when one of their dependencies changes;
+independent modules may be compiled in parallel. The same technique may
+be applied to static analyses, for the same benefits. Such analyses are
+described as "modular".
+
+A compiler’s type checker is an example of a modular static analysis.
+Many other checkers we would like to apply to Go programs can be
+understood as alternative or non-standard type systems. For example,
+vet's printf checker infers whether a function has the "printf wrapper"
+type, and it applies stricter checks to calls of such functions. In
+addition, it records which functions are printf wrappers for use by
+later analysis units to identify other printf wrappers by induction.
+A result such as “f is a printf wrapper” that is not interesting by
+itself but serves as a stepping stone to an interesting result (such as
+a diagnostic) is called a "fact".
+
+The analysis API allows an analysis to define new types of facts, to
+associate facts of these types with objects (named entities) declared
+within the current package, or with the package as a whole, and to query
+for an existing fact of a given type associated with an object or
+package.
+
+An Analyzer that uses facts must declare their types:
+
+       var Analyzer = &analysis.Analyzer{
+               Name:       "printf",
+               FactTypes: []reflect.Type{reflect.TypeOf(new(isWrapper))},
+               ...
+       }
+
+       type isWrapper struct{} // => *types.Func f “is a printf wrapper”
+
+A driver program ensures that facts for a pass’s dependencies are
+generated before analyzing the pass and are responsible for propagating
+facts between from one pass to another, possibly across address spaces.
+Consequently, Facts must be serializable. The API requires that drivers
+use the gob encoding, an efficient, robust, self-describing binary
+protocol. A fact type may implement the GobEncoder/GobDecoder interfaces
+if the default encoding is unsuitable. Facts should be stateless.
+
+The Pass type has functions to import and export facts,
+associated either with an object or with a package:
+
+       type Pass struct {
+               ...
+               ExportObjectFact func(types.Object, Fact)
+               ImportObjectFact func(types.Object, Fact) bool
+
+               ExportPackageFact func(fact Fact)
+               ImportPackageFact func(*types.Package, Fact) bool
+       }
+
+An Analyzer may only export facts associated with the current package or
+its objects, though it may import facts from any package or object that
+is an import dependency of the current package.
+
+Conceptually, ExportObjectFact(obj, fact) inserts fact into a hidden map keyed by
+the pair (obj, TypeOf(fact)), and the ImportObjectFact function
+retrieves the entry from this map and copies its value into the variable
+pointed to by fact. This scheme assumes that the concrete type of fact
+is a pointer; this assumption is checked by the Validate function.
+See the "printf" analyzer for an example of object facts in action.
+
+
+Testing an Analyzer
+
+The analysistest subpackage provides utilities for testing an Analyzer.
+In a few lines of code, it is possible to run an analyzer on a package
+of testdata files and check that it reported all the expected
+diagnostics and facts (and no more). Expectations are expressed using
+"// want ..." comments in the input code.
+
+
+Standalone commands
+
+Analyzers are provided in the form of packages that a driver program is
+expected to import. The vet command imports a set of several analyses,
+but users may wish to define their own analysis commands that perform
+additional checks. To simplify the task of creating an analysis command,
+either for a single analyzer or for a whole suite, we provide the
+singlechecker and multichecker subpackages.
+
+The singlechecker package provides the main function for a command that
+runs one analysis. By convention, each analyzer such as
+go/passes/findcall should be accompanied by a singlechecker-based
+command such as go/analysis/passes/findcall/cmd/findcall, defined in its
+entirety as:
+
+       package main
+
+       import (
+               "golang.org/x/tools/go/analysis/passes/findcall"
+               "golang.org/x/tools/go/analysis/singlechecker"
+       )
+
+       func main() { singlechecker.Main(findcall.Analyzer) }
+
+A tool that provides multiple analyzers can use multichecker in a
+similar way, giving it the list of Analyzers.
+
+
+
+*/
+package analysis
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/internal/analysisflags/flags.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/internal/analysisflags/flags.go
new file mode 100644 (file)
index 0000000..d6c13f2
--- /dev/null
@@ -0,0 +1,223 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package analysisflags defines helpers for processing flags of
+// analysis driver tools.
+package analysisflags
+
+import (
+       "crypto/sha256"
+       "encoding/json"
+       "flag"
+       "fmt"
+       "io"
+       "log"
+       "os"
+       "strconv"
+
+       "golang.org/x/tools/go/analysis"
+)
+
+// Parse creates a flag for each of the analyzer's flags,
+// including (in multi mode) an --analysis.enable flag,
+// parses the flags, then filters and returns the list of
+// analyzers enabled by flags.
+func Parse(analyzers []*analysis.Analyzer, multi bool) []*analysis.Analyzer {
+       // Connect each analysis flag to the command line as -analysis.flag.
+       type analysisFlag struct {
+               Name  string
+               Bool  bool
+               Usage string
+       }
+       var analysisFlags []analysisFlag
+
+       enabled := make(map[*analysis.Analyzer]*triState)
+       for _, a := range analyzers {
+               var prefix string
+
+               // Add -analysis.enable flag.
+               if multi {
+                       prefix = a.Name + "."
+
+                       enable := new(triState)
+                       enableName := prefix + "enable"
+                       enableUsage := "enable " + a.Name + " analysis"
+                       flag.Var(enable, enableName, enableUsage)
+                       enabled[a] = enable
+                       analysisFlags = append(analysisFlags, analysisFlag{enableName, true, enableUsage})
+               }
+
+               a.Flags.VisitAll(func(f *flag.Flag) {
+                       if !multi && flag.Lookup(f.Name) != nil {
+                               log.Printf("%s flag -%s would conflict with driver; skipping", a.Name, f.Name)
+                               return
+                       }
+
+                       name := prefix + f.Name
+                       flag.Var(f.Value, name, f.Usage)
+
+                       var isBool bool
+                       if b, ok := f.Value.(interface{ IsBoolFlag() bool }); ok {
+                               isBool = b.IsBoolFlag()
+                       }
+                       analysisFlags = append(analysisFlags, analysisFlag{name, isBool, f.Usage})
+               })
+       }
+
+       // standard flags: -flags, -V.
+       printflags := flag.Bool("flags", false, "print analyzer flags in JSON")
+       addVersionFlag()
+
+       flag.Parse() // (ExitOnError)
+
+       // -flags: print flags so that go vet knows which ones are legitimate.
+       if *printflags {
+               data, err := json.MarshalIndent(analysisFlags, "", "\t")
+               if err != nil {
+                       log.Fatal(err)
+               }
+               os.Stdout.Write(data)
+               os.Exit(0)
+       }
+
+       // If any --foo.enable flag is true,  run only those analyzers. Otherwise,
+       // if any --foo.enable flag is false, run all but those analyzers.
+       if multi {
+               var hasTrue, hasFalse bool
+               for _, ts := range enabled {
+                       switch *ts {
+                       case setTrue:
+                               hasTrue = true
+                       case setFalse:
+                               hasFalse = true
+                       }
+               }
+
+               var keep []*analysis.Analyzer
+               if hasTrue {
+                       for _, a := range analyzers {
+                               if *enabled[a] == setTrue {
+                                       keep = append(keep, a)
+                               }
+                       }
+                       analyzers = keep
+               } else if hasFalse {
+                       for _, a := range analyzers {
+                               if *enabled[a] != setFalse {
+                                       keep = append(keep, a)
+                               }
+                       }
+                       analyzers = keep
+               }
+       }
+
+       return analyzers
+}
+
+// addVersionFlag registers a -V flag that, if set,
+// prints the executable version and exits 0.
+//
+// It is a variable not a function to permit easy
+// overriding in the copy vendored in $GOROOT/src/cmd/vet:
+//
+// func init() { addVersionFlag = objabi.AddVersionFlag }
+var addVersionFlag = func() {
+       flag.Var(versionFlag{}, "V", "print version and exit")
+}
+
+// versionFlag minimally complies with the -V protocol required by "go vet".
+type versionFlag struct{}
+
+func (versionFlag) IsBoolFlag() bool { return true }
+func (versionFlag) Get() interface{} { return nil }
+func (versionFlag) String() string   { return "" }
+func (versionFlag) Set(s string) error {
+       if s != "full" {
+               log.Fatalf("unsupported flag value: -V=%s", s)
+       }
+
+       // This replicates the miminal subset of
+       // cmd/internal/objabi.AddVersionFlag, which is private to the
+       // go tool yet forms part of our command-line interface.
+       // TODO(adonovan): clarify the contract.
+
+       // Print the tool version so the build system can track changes.
+       // Formats:
+       //   $progname version devel ... buildID=...
+       //   $progname version go1.9.1
+       progname := os.Args[0]
+       f, err := os.Open(progname)
+       if err != nil {
+               log.Fatal(err)
+       }
+       h := sha256.New()
+       if _, err := io.Copy(h, f); err != nil {
+               log.Fatal(err)
+       }
+       f.Close()
+       fmt.Printf("%s version devel comments-go-here buildID=%02x\n",
+               progname, string(h.Sum(nil)))
+       os.Exit(0)
+       return nil
+}
+
+// A triState is a boolean that knows whether
+// it has been set to either true or false.
+// It is used to identify whether a flag appears;
+// the standard boolean flag cannot
+// distinguish missing from unset.
+// It also satisfies flag.Value.
+type triState int
+
+const (
+       unset triState = iota
+       setTrue
+       setFalse
+)
+
+func triStateFlag(name string, value triState, usage string) *triState {
+       flag.Var(&value, name, usage)
+       return &value
+}
+
+// triState implements flag.Value, flag.Getter, and flag.boolFlag.
+// They work like boolean flags: we can say vet -printf as well as vet -printf=true
+func (ts *triState) Get() interface{} {
+       return *ts == setTrue
+}
+
+func (ts triState) isTrue() bool {
+       return ts == setTrue
+}
+
+func (ts *triState) Set(value string) error {
+       b, err := strconv.ParseBool(value)
+       if err != nil {
+               // This error message looks poor but package "flag" adds
+               // "invalid boolean value %q for -foo.enable: %s"
+               return fmt.Errorf("want true or false")
+       }
+       if b {
+               *ts = setTrue
+       } else {
+               *ts = setFalse
+       }
+       return nil
+}
+
+func (ts *triState) String() string {
+       switch *ts {
+       case unset:
+               return "true"
+       case setTrue:
+               return "true"
+       case setFalse:
+               return "false"
+       }
+       panic("not reached")
+}
+
+func (ts triState) IsBoolFlag() bool {
+       return true
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/internal/facts/facts.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/internal/facts/facts.go
new file mode 100644 (file)
index 0000000..468f148
--- /dev/null
@@ -0,0 +1,299 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package facts defines a serializable set of analysis.Fact.
+//
+// It provides a partial implementation of the Fact-related parts of the
+// analysis.Pass interface for use in analysis drivers such as "go vet"
+// and other build systems.
+//
+// The serial format is unspecified and may change, so the same version
+// of this package must be used for reading and writing serialized facts.
+//
+// The handling of facts in the analysis system parallels the handling
+// of type information in the compiler: during compilation of package P,
+// the compiler emits an export data file that describes the type of
+// every object (named thing) defined in package P, plus every object
+// indirectly reachable from one of those objects. Thus the downstream
+// compiler of package Q need only load one export data file per direct
+// import of Q, and it will learn everything about the API of package P
+// and everything it needs to know about the API of P's dependencies.
+//
+// Similarly, analysis of package P emits a fact set containing facts
+// about all objects exported from P, plus additional facts about only
+// those objects of P's dependencies that are reachable from the API of
+// package P; the downstream analysis of Q need only load one fact set
+// per direct import of Q.
+//
+// The notion of "exportedness" that matters here is that of the
+// compiler. According to the language spec, a method pkg.T.f is
+// unexported simply because its name starts with lowercase. But the
+// compiler must nonethless export f so that downstream compilations can
+// accurately ascertain whether pkg.T implements an interface pkg.I
+// defined as interface{f()}. Exported thus means "described in export
+// data".
+//
+package facts
+
+import (
+       "bytes"
+       "encoding/gob"
+       "fmt"
+       "go/types"
+       "io/ioutil"
+       "log"
+       "reflect"
+       "sort"
+       "sync"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/types/objectpath"
+)
+
+const debug = false
+
+// A Set is a set of analysis.Facts.
+//
+// Decode creates a Set of facts by reading from the imports of a given
+// package, and Encode writes out the set. Between these operation,
+// the Import and Export methods will query and update the set.
+//
+// All of Set's methods except String are safe to call concurrently.
+type Set struct {
+       pkg *types.Package
+       mu  sync.Mutex
+       m   map[key]analysis.Fact
+}
+
+type key struct {
+       pkg *types.Package
+       obj types.Object // (object facts only)
+       t   reflect.Type
+}
+
+// ImportObjectFact implements analysis.Pass.ImportObjectFact.
+func (s *Set) ImportObjectFact(obj types.Object, ptr analysis.Fact) bool {
+       if obj == nil {
+               panic("nil object")
+       }
+       key := key{pkg: obj.Pkg(), obj: obj, t: reflect.TypeOf(ptr)}
+       s.mu.Lock()
+       defer s.mu.Unlock()
+       if v, ok := s.m[key]; ok {
+               reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
+               return true
+       }
+       return false
+}
+
+// ExportObjectFact implements analysis.Pass.ExportObjectFact.
+func (s *Set) ExportObjectFact(obj types.Object, fact analysis.Fact) {
+       if obj.Pkg() != s.pkg {
+               log.Panicf("in package %s: ExportObjectFact(%s, %T): can't set fact on object belonging another package",
+                       s.pkg, obj, fact)
+       }
+       key := key{pkg: obj.Pkg(), obj: obj, t: reflect.TypeOf(fact)}
+       s.mu.Lock()
+       s.m[key] = fact // clobber any existing entry
+       s.mu.Unlock()
+}
+
+// ImportPackageFact implements analysis.Pass.ImportPackageFact.
+func (s *Set) ImportPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
+       if pkg == nil {
+               panic("nil package")
+       }
+       key := key{pkg: pkg, t: reflect.TypeOf(ptr)}
+       s.mu.Lock()
+       defer s.mu.Unlock()
+       if v, ok := s.m[key]; ok {
+               reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
+               return true
+       }
+       return false
+}
+
+// ExportPackageFact implements analysis.Pass.ExportPackageFact.
+func (s *Set) ExportPackageFact(fact analysis.Fact) {
+       key := key{pkg: s.pkg, t: reflect.TypeOf(fact)}
+       s.mu.Lock()
+       s.m[key] = fact // clobber any existing entry
+       s.mu.Unlock()
+}
+
+// gobFact is the Gob declaration of a serialized fact.
+type gobFact struct {
+       PkgPath string          // path of package
+       Object  objectpath.Path // optional path of object relative to package itself
+       Fact    analysis.Fact   // type and value of user-defined Fact
+}
+
+// Decode decodes all the facts relevant to the analysis of package pkg.
+// The read function reads serialized fact data from an external source
+// for one of of pkg's direct imports. The empty file is a valid
+// encoding of an empty fact set.
+//
+// It is the caller's responsibility to call gob.Register on all
+// necessary fact types.
+func Decode(pkg *types.Package, read func(packagePath string) ([]byte, error)) (*Set, error) {
+       // Compute the import map for this package.
+       // See the package doc comment.
+       packages := importMap(pkg.Imports())
+
+       // Read facts from imported packages.
+       // Facts may describe indirectly imported packages, or their objects.
+       m := make(map[key]analysis.Fact) // one big bucket
+       for _, imp := range pkg.Imports() {
+               logf := func(format string, args ...interface{}) {
+                       if debug {
+                               prefix := fmt.Sprintf("in %s, importing %s: ",
+                                       pkg.Path(), imp.Path())
+                               log.Print(prefix, fmt.Sprintf(format, args...))
+                       }
+               }
+
+               // Read the gob-encoded facts.
+               data, err := read(imp.Path())
+               if err != nil {
+                       return nil, fmt.Errorf("in %s, can't import facts for package %q: %v",
+                               pkg.Path(), imp.Path(), err)
+               }
+               if len(data) == 0 {
+                       continue // no facts
+               }
+               var gobFacts []gobFact
+               if err := gob.NewDecoder(bytes.NewReader(data)).Decode(&gobFacts); err != nil {
+                       return nil, fmt.Errorf("decoding facts for %q: %v", imp.Path(), err)
+               }
+               if debug {
+                       logf("decoded %d facts: %v", len(gobFacts), gobFacts)
+               }
+
+               // Parse each one into a key and a Fact.
+               for _, f := range gobFacts {
+                       factPkg := packages[f.PkgPath]
+                       if factPkg == nil {
+                               // Fact relates to a dependency that was
+                               // unused in this translation unit. Skip.
+                               logf("no package %q; discarding %v", f.PkgPath, f.Fact)
+                               continue
+                       }
+                       key := key{pkg: factPkg, t: reflect.TypeOf(f.Fact)}
+                       if f.Object != "" {
+                               // object fact
+                               obj, err := objectpath.Object(factPkg, f.Object)
+                               if err != nil {
+                                       // (most likely due to unexported object)
+                                       // TODO(adonovan): audit for other possibilities.
+                                       logf("no object for path: %v; discarding %s", err, f.Fact)
+                                       continue
+                               }
+                               key.obj = obj
+                               logf("read %T fact %s for %v", f.Fact, f.Fact, key.obj)
+                       } else {
+                               // package fact
+                               logf("read %T fact %s for %v", f.Fact, f.Fact, factPkg)
+                       }
+                       m[key] = f.Fact
+               }
+       }
+
+       return &Set{pkg: pkg, m: m}, nil
+}
+
+// Encode encodes a set of facts to a memory buffer.
+//
+// It may fail if one of the Facts could not be gob-encoded, but this is
+// a sign of a bug in an Analyzer.
+func (s *Set) Encode() []byte {
+
+       // TODO(adonovan): opt: use a more efficient encoding
+       // that avoids repeating PkgPath for each fact.
+
+       // Gather all facts, including those from imported packages.
+       var gobFacts []gobFact
+
+       s.mu.Lock()
+       for k, fact := range s.m {
+               if debug {
+                       log.Printf("%v => %s\n", k, fact)
+               }
+               var object objectpath.Path
+               if k.obj != nil {
+                       path, err := objectpath.For(k.obj)
+                       if err != nil {
+                               if debug {
+                                       log.Printf("discarding fact %s about %s\n", fact, k.obj)
+                               }
+                               continue // object not accessible from package API; discard fact
+                       }
+                       object = path
+               }
+               gobFacts = append(gobFacts, gobFact{
+                       PkgPath: k.pkg.Path(),
+                       Object:  object,
+                       Fact:    fact,
+               })
+       }
+       s.mu.Unlock()
+
+       // Sort facts by (package, object, type) for determinism.
+       sort.Slice(gobFacts, func(i, j int) bool {
+               x, y := gobFacts[i], gobFacts[j]
+               if x.PkgPath != y.PkgPath {
+                       return x.PkgPath < y.PkgPath
+               }
+               if x.Object != y.Object {
+                       return x.Object < y.Object
+               }
+               tx := reflect.TypeOf(x.Fact)
+               ty := reflect.TypeOf(y.Fact)
+               if tx != ty {
+                       return tx.String() < ty.String()
+               }
+               return false // equal
+       })
+
+       var buf bytes.Buffer
+       if len(gobFacts) > 0 {
+               if err := gob.NewEncoder(&buf).Encode(gobFacts); err != nil {
+                       // Fact encoding should never fail. Identify the culprit.
+                       for _, gf := range gobFacts {
+                               if err := gob.NewEncoder(ioutil.Discard).Encode(gf); err != nil {
+                                       fact := gf.Fact
+                                       pkgpath := reflect.TypeOf(fact).Elem().PkgPath()
+                                       log.Panicf("internal error: gob encoding of analysis fact %s failed: %v; please report a bug against fact %T in package %q",
+                                               fact, err, fact, pkgpath)
+                               }
+                       }
+               }
+       }
+
+       if debug {
+               log.Printf("package %q: encode %d facts, %d bytes\n",
+                       s.pkg.Path(), len(gobFacts), buf.Len())
+       }
+
+       return buf.Bytes()
+}
+
+// String is provided only for debugging, and must not be called
+// concurrent with any Import/Export method.
+func (s *Set) String() string {
+       var buf bytes.Buffer
+       buf.WriteString("{")
+       for k, f := range s.m {
+               if buf.Len() > 1 {
+                       buf.WriteString(", ")
+               }
+               if k.obj != nil {
+                       buf.WriteString(k.obj.String())
+               } else {
+                       buf.WriteString(k.pkg.Path())
+               }
+               fmt.Fprintf(&buf, ": %v", f)
+       }
+       buf.WriteString("}")
+       return buf.String()
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/internal/facts/imports.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/internal/facts/imports.go
new file mode 100644 (file)
index 0000000..34740f4
--- /dev/null
@@ -0,0 +1,88 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package facts
+
+import "go/types"
+
+// importMap computes the import map for a package by traversing the
+// entire exported API each of its imports.
+//
+// This is a workaround for the fact that we cannot access the map used
+// internally by the types.Importer returned by go/importer. The entries
+// in this map are the packages and objects that may be relevant to the
+// current analysis unit.
+//
+// Packages in the map that are only indirectly imported may be
+// incomplete (!pkg.Complete()).
+//
+func importMap(imports []*types.Package) map[string]*types.Package {
+       objects := make(map[types.Object]bool)
+       packages := make(map[string]*types.Package)
+
+       var addObj func(obj types.Object) bool
+       var addType func(T types.Type)
+
+       addObj = func(obj types.Object) bool {
+               if !objects[obj] {
+                       objects[obj] = true
+                       addType(obj.Type())
+                       if pkg := obj.Pkg(); pkg != nil {
+                               packages[pkg.Path()] = pkg
+                       }
+                       return true
+               }
+               return false
+       }
+
+       addType = func(T types.Type) {
+               switch T := T.(type) {
+               case *types.Basic:
+                       // nop
+               case *types.Named:
+                       if addObj(T.Obj()) {
+                               for i := 0; i < T.NumMethods(); i++ {
+                                       addObj(T.Method(i))
+                               }
+                       }
+               case *types.Pointer:
+                       addType(T.Elem())
+               case *types.Slice:
+                       addType(T.Elem())
+               case *types.Array:
+                       addType(T.Elem())
+               case *types.Chan:
+                       addType(T.Elem())
+               case *types.Map:
+                       addType(T.Key())
+                       addType(T.Elem())
+               case *types.Signature:
+                       addType(T.Params())
+                       addType(T.Results())
+               case *types.Struct:
+                       for i := 0; i < T.NumFields(); i++ {
+                               addObj(T.Field(i))
+                       }
+               case *types.Tuple:
+                       for i := 0; i < T.Len(); i++ {
+                               addObj(T.At(i))
+                       }
+               case *types.Interface:
+                       for i := 0; i < T.NumMethods(); i++ {
+                               addObj(T.Method(i))
+                       }
+               }
+       }
+
+       for _, imp := range imports {
+               packages[imp.Path()] = imp
+
+               scope := imp.Scope()
+               for _, name := range scope.Names() {
+                       addObj(scope.Lookup(name))
+               }
+       }
+
+       return packages
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/internal/unitchecker/unitchecker.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/internal/unitchecker/unitchecker.go
new file mode 100644 (file)
index 0000000..b67c943
--- /dev/null
@@ -0,0 +1,306 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// The unitchecker package defines the main function for an analysis
+// driver that analyzes a single compilation unit during a build.
+// It is invoked by a build system such as "go vet":
+//
+//   $ GOVETTOOL=$(which vet) go vet
+//
+// It supports the following command-line protocol:
+//
+//      -V=full         describe executable               (to the build tool)
+//      -flags          describe flags                    (to the build tool)
+//      foo.cfg         description of compilation unit (from the build tool)
+//
+// This package does not depend on go/packages.
+// If you need a standalone tool, use multichecker,
+// which supports this mode but can also load packages
+// from source using go/packages.
+package unitchecker
+
+// TODO(adonovan):
+// - with gccgo, go build does not build standard library,
+//   so we will not get to analyze it. Yet we must in order
+//   to create base facts for, say, the fmt package for the
+//   printf checker.
+// - support JSON output, factored with multichecker.
+
+import (
+       "encoding/gob"
+       "encoding/json"
+       "fmt"
+       "go/ast"
+       "go/build"
+       "go/importer"
+       "go/parser"
+       "go/token"
+       "go/types"
+       "io"
+       "io/ioutil"
+       "log"
+       "os"
+       "sort"
+       "strings"
+       "sync"
+       "time"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/internal/facts"
+)
+
+// A Config describes a compilation unit to be analyzed.
+// It is provided to the tool in a JSON-encoded file
+// whose name ends with ".cfg".
+type Config struct {
+       Compiler                  string
+       Dir                       string
+       ImportPath                string
+       GoFiles                   []string
+       OtherFiles                []string // TODO(adonovan): make go vet populate this (github.com/golang/go/issues/27665)
+       ImportMap                 map[string]string
+       PackageFile               map[string]string
+       Standard                  map[string]bool
+       PackageVetx               map[string]string
+       VetxOnly                  bool
+       VetxOutput                string
+       SucceedOnTypecheckFailure bool
+}
+
+// Main reads the *.cfg file, runs the analysis,
+// and calls os.Exit with an appropriate error code.
+func Main(configFile string, analyzers []*analysis.Analyzer) {
+       cfg, err := readConfig(configFile)
+       if err != nil {
+               log.Fatal(err)
+       }
+
+       fset := token.NewFileSet()
+       diags, err := run(fset, cfg, analyzers)
+       if err != nil {
+               log.Fatal(err)
+       }
+
+       if len(diags) > 0 {
+               for _, diag := range diags {
+                       fmt.Fprintf(os.Stderr, "%s: %s\n", fset.Position(diag.Pos), diag.Message)
+               }
+               os.Exit(1)
+       }
+
+       os.Exit(0)
+}
+
+func readConfig(filename string) (*Config, error) {
+       data, err := ioutil.ReadFile(filename)
+       if err != nil {
+               return nil, err
+       }
+       cfg := new(Config)
+       if err := json.Unmarshal(data, cfg); err != nil {
+               return nil, fmt.Errorf("cannot decode JSON config file %s: %v", filename, err)
+       }
+       if len(cfg.GoFiles) == 0 {
+               // The go command disallows packages with no files.
+               // The only exception is unsafe, but the go command
+               // doesn't call vet on it.
+               return nil, fmt.Errorf("package has no files: %s", cfg.ImportPath)
+       }
+       return cfg, nil
+}
+
+func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]analysis.Diagnostic, error) {
+       // Load, parse, typecheck.
+       var files []*ast.File
+       for _, name := range cfg.GoFiles {
+               f, err := parser.ParseFile(fset, name, nil, parser.ParseComments)
+               if err != nil {
+                       if cfg.SucceedOnTypecheckFailure {
+                               // Silently succeed; let the compiler
+                               // report parse errors.
+                               err = nil
+                       }
+                       return nil, err
+               }
+               files = append(files, f)
+       }
+       compilerImporter := importer.For(cfg.Compiler, func(path string) (io.ReadCloser, error) {
+               // path is a resolved package path, not an import path.
+               file, ok := cfg.PackageFile[path]
+               if !ok {
+                       if cfg.Compiler == "gccgo" && cfg.Standard[path] {
+                               return nil, nil // fall back to default gccgo lookup
+                       }
+                       return nil, fmt.Errorf("no package file for %q", path)
+               }
+               return os.Open(file)
+       })
+       importer := importerFunc(func(importPath string) (*types.Package, error) {
+               path, ok := cfg.ImportMap[importPath] // resolve vendoring, etc
+               if !ok {
+                       return nil, fmt.Errorf("can't resolve import %q", path)
+               }
+               return compilerImporter.Import(path)
+       })
+       tc := &types.Config{
+               Importer: importer,
+               Sizes:    types.SizesFor("gc", build.Default.GOARCH), // assume gccgo ≡ gc?
+       }
+       info := &types.Info{
+               Types:      make(map[ast.Expr]types.TypeAndValue),
+               Defs:       make(map[*ast.Ident]types.Object),
+               Uses:       make(map[*ast.Ident]types.Object),
+               Implicits:  make(map[ast.Node]types.Object),
+               Scopes:     make(map[ast.Node]*types.Scope),
+               Selections: make(map[*ast.SelectorExpr]*types.Selection),
+       }
+       pkg, err := tc.Check(cfg.ImportPath, fset, files, info)
+       if err != nil {
+               if cfg.SucceedOnTypecheckFailure {
+                       // Silently succeed; let the compiler
+                       // report type errors.
+                       err = nil
+               }
+               return nil, err
+       }
+
+       // Register fact types with gob.
+       // In VetxOnly mode, analyzers are only for their facts,
+       // so we can skip any analysis that neither produces facts
+       // nor depends on any analysis that produces facts.
+       // Also build a map to hold working state and result.
+       type action struct {
+               once        sync.Once
+               result      interface{}
+               err         error
+               usesFacts   bool // (transitively uses)
+               diagnostics []analysis.Diagnostic
+       }
+       actions := make(map[*analysis.Analyzer]*action)
+       var registerFacts func(a *analysis.Analyzer) bool
+       registerFacts = func(a *analysis.Analyzer) bool {
+               act, ok := actions[a]
+               if !ok {
+                       act = new(action)
+                       var usesFacts bool
+                       for _, f := range a.FactTypes {
+                               usesFacts = true
+                               gob.Register(f)
+                       }
+                       for _, req := range a.Requires {
+                               if registerFacts(req) {
+                                       usesFacts = true
+                               }
+                       }
+                       act.usesFacts = usesFacts
+                       actions[a] = act
+               }
+               return act.usesFacts
+       }
+       var filtered []*analysis.Analyzer
+       for _, a := range analyzers {
+               if registerFacts(a) || !cfg.VetxOnly {
+                       filtered = append(filtered, a)
+               }
+       }
+       analyzers = filtered
+
+       // Read facts from imported packages.
+       read := func(path string) ([]byte, error) {
+               if vetx, ok := cfg.PackageVetx[path]; ok {
+                       return ioutil.ReadFile(vetx)
+               }
+               return nil, nil // no .vetx file, no facts
+       }
+       facts, err := facts.Decode(pkg, read)
+       if err != nil {
+               return nil, err
+       }
+
+       // In parallel, execute the DAG of analyzers.
+       var exec func(a *analysis.Analyzer) *action
+       var execAll func(analyzers []*analysis.Analyzer)
+       exec = func(a *analysis.Analyzer) *action {
+               act := actions[a]
+               act.once.Do(func() {
+                       execAll(a.Requires) // prefetch dependencies in parallel
+
+                       // The inputs to this analysis are the
+                       // results of its prerequisites.
+                       inputs := make(map[*analysis.Analyzer]interface{})
+                       var failed []string
+                       for _, req := range a.Requires {
+                               reqact := exec(req)
+                               if reqact.err != nil {
+                                       failed = append(failed, req.String())
+                                       continue
+                               }
+                               inputs[req] = reqact.result
+                       }
+
+                       // Report an error if any dependency failed.
+                       if failed != nil {
+                               sort.Strings(failed)
+                               act.err = fmt.Errorf("failed prerequisites: %s", strings.Join(failed, ", "))
+                               return
+                       }
+
+                       pass := &analysis.Pass{
+                               Analyzer:          a,
+                               Fset:              fset,
+                               Files:             files,
+                               OtherFiles:        cfg.OtherFiles,
+                               Pkg:               pkg,
+                               TypesInfo:         info,
+                               ResultOf:          inputs,
+                               Report:            func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
+                               ImportObjectFact:  facts.ImportObjectFact,
+                               ExportObjectFact:  facts.ExportObjectFact,
+                               ImportPackageFact: facts.ImportPackageFact,
+                               ExportPackageFact: facts.ExportPackageFact,
+                       }
+
+                       t0 := time.Now()
+                       act.result, act.err = a.Run(pass)
+                       if false {
+                               log.Printf("analysis %s = %s", pass, time.Since(t0))
+                       }
+               })
+               return act
+       }
+       execAll = func(analyzers []*analysis.Analyzer) {
+               var wg sync.WaitGroup
+               for _, a := range analyzers {
+                       wg.Add(1)
+                       go func(a *analysis.Analyzer) {
+                               _ = exec(a)
+                               wg.Done()
+                       }(a)
+               }
+               wg.Wait()
+       }
+
+       execAll(analyzers)
+
+       // Return diagnostics from root analyzers.
+       var diags []analysis.Diagnostic
+       for _, a := range analyzers {
+               act := actions[a]
+               if act.err != nil {
+                       return nil, act.err // some analysis failed
+               }
+               diags = append(diags, act.diagnostics...)
+       }
+
+       data := facts.Encode()
+       if err := ioutil.WriteFile(cfg.VetxOutput, data, 0666); err != nil {
+               return nil, fmt.Errorf("failed to write analysis facts: %v", err)
+       }
+
+       return diags, nil
+}
+
+type importerFunc func(path string) (*types.Package, error)
+
+func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) }
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
new file mode 100644 (file)
index 0000000..11dfbf6
--- /dev/null
@@ -0,0 +1,759 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package asmdecl defines an Analyzer that reports mismatches between
+// assembly files and Go declarations.
+package asmdecl
+
+import (
+       "bytes"
+       "fmt"
+       "go/ast"
+       "go/build"
+       "go/token"
+       "go/types"
+       "log"
+       "regexp"
+       "strconv"
+       "strings"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+)
+
+var Analyzer = &analysis.Analyzer{
+       Name: "asmdecl",
+       Doc:  "report mismatches between assembly files and Go declarations",
+       Run:  run,
+}
+
+// 'kind' is a kind of assembly variable.
+// The kinds 1, 2, 4, 8 stand for values of that size.
+type asmKind int
+
+// These special kinds are not valid sizes.
+const (
+       asmString asmKind = 100 + iota
+       asmSlice
+       asmArray
+       asmInterface
+       asmEmptyInterface
+       asmStruct
+       asmComplex
+)
+
+// An asmArch describes assembly parameters for an architecture
+type asmArch struct {
+       name      string
+       bigEndian bool
+       stack     string
+       lr        bool
+       // calculated during initialization
+       sizes    types.Sizes
+       intSize  int
+       ptrSize  int
+       maxAlign int
+}
+
+// An asmFunc describes the expected variables for a function on a given architecture.
+type asmFunc struct {
+       arch        *asmArch
+       size        int // size of all arguments
+       vars        map[string]*asmVar
+       varByOffset map[int]*asmVar
+}
+
+// An asmVar describes a single assembly variable.
+type asmVar struct {
+       name  string
+       kind  asmKind
+       typ   string
+       off   int
+       size  int
+       inner []*asmVar
+}
+
+var (
+       asmArch386      = asmArch{name: "386", bigEndian: false, stack: "SP", lr: false}
+       asmArchArm      = asmArch{name: "arm", bigEndian: false, stack: "R13", lr: true}
+       asmArchArm64    = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true}
+       asmArchAmd64    = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false}
+       asmArchAmd64p32 = asmArch{name: "amd64p32", bigEndian: false, stack: "SP", lr: false}
+       asmArchMips     = asmArch{name: "mips", bigEndian: true, stack: "R29", lr: true}
+       asmArchMipsLE   = asmArch{name: "mipsle", bigEndian: false, stack: "R29", lr: true}
+       asmArchMips64   = asmArch{name: "mips64", bigEndian: true, stack: "R29", lr: true}
+       asmArchMips64LE = asmArch{name: "mips64le", bigEndian: false, stack: "R29", lr: true}
+       asmArchPpc64    = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true}
+       asmArchPpc64LE  = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true}
+       asmArchS390X    = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true}
+       asmArchWasm     = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false}
+
+       arches = []*asmArch{
+               &asmArch386,
+               &asmArchArm,
+               &asmArchArm64,
+               &asmArchAmd64,
+               &asmArchAmd64p32,
+               &asmArchMips,
+               &asmArchMipsLE,
+               &asmArchMips64,
+               &asmArchMips64LE,
+               &asmArchPpc64,
+               &asmArchPpc64LE,
+               &asmArchS390X,
+               &asmArchWasm,
+       }
+)
+
+func init() {
+       for _, arch := range arches {
+               arch.sizes = types.SizesFor("gc", arch.name)
+               if arch.sizes == nil {
+                       // TODO(adonovan): fix: now that asmdecl is not in the standard
+                       // library we cannot assume types.SizesFor is consistent with arches.
+                       // For now, assume 64-bit norms and print a warning.
+                       // But this warning should really be deferred until we attempt to use
+                       // arch, which is very unlikely.
+                       arch.sizes = types.SizesFor("gc", "amd64")
+                       log.Printf("unknown architecture %s", arch.name)
+               }
+               arch.intSize = int(arch.sizes.Sizeof(types.Typ[types.Int]))
+               arch.ptrSize = int(arch.sizes.Sizeof(types.Typ[types.UnsafePointer]))
+               arch.maxAlign = int(arch.sizes.Alignof(types.Typ[types.Int64]))
+       }
+}
+
+var (
+       re           = regexp.MustCompile
+       asmPlusBuild = re(`//\s+\+build\s+([^\n]+)`)
+       asmTEXT      = re(`\bTEXT\b(.*)·([^\(]+)\(SB\)(?:\s*,\s*([0-9A-Z|+()]+))?(?:\s*,\s*\$(-?[0-9]+)(?:-([0-9]+))?)?`)
+       asmDATA      = re(`\b(DATA|GLOBL)\b`)
+       asmNamedFP   = re(`([a-zA-Z0-9_\xFF-\x{10FFFF}]+)(?:\+([0-9]+))\(FP\)`)
+       asmUnnamedFP = re(`[^+\-0-9](([0-9]+)\(FP\))`)
+       asmSP        = re(`[^+\-0-9](([0-9]+)\(([A-Z0-9]+)\))`)
+       asmOpcode    = re(`^\s*(?:[A-Z0-9a-z_]+:)?\s*([A-Z]+)\s*([^,]*)(?:,\s*(.*))?`)
+       ppc64Suff    = re(`([BHWD])(ZU|Z|U|BR)?$`)
+)
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       // No work if no assembly files.
+       var sfiles []string
+       for _, fname := range pass.OtherFiles {
+               if strings.HasSuffix(fname, ".s") {
+                       sfiles = append(sfiles, fname)
+               }
+       }
+       if sfiles == nil {
+               return nil, nil
+       }
+
+       // Gather declarations. knownFunc[name][arch] is func description.
+       knownFunc := make(map[string]map[string]*asmFunc)
+
+       for _, f := range pass.Files {
+               for _, decl := range f.Decls {
+                       if decl, ok := decl.(*ast.FuncDecl); ok && decl.Body == nil {
+                               knownFunc[decl.Name.Name] = asmParseDecl(pass, decl)
+                       }
+               }
+       }
+
+Files:
+       for _, fname := range sfiles {
+               content, tf, err := analysisutil.ReadFile(pass.Fset, fname)
+               if err != nil {
+                       return nil, err
+               }
+
+               // Determine architecture from file name if possible.
+               var arch string
+               var archDef *asmArch
+               for _, a := range arches {
+                       if strings.HasSuffix(fname, "_"+a.name+".s") {
+                               arch = a.name
+                               archDef = a
+                               break
+                       }
+               }
+
+               lines := strings.SplitAfter(string(content), "\n")
+               var (
+                       fn                 *asmFunc
+                       fnName             string
+                       localSize, argSize int
+                       wroteSP            bool
+                       haveRetArg         bool
+                       retLine            []int
+               )
+
+               flushRet := func() {
+                       if fn != nil && fn.vars["ret"] != nil && !haveRetArg && len(retLine) > 0 {
+                               v := fn.vars["ret"]
+                               for _, line := range retLine {
+                                       pass.Reportf(analysisutil.LineStart(tf, line), "[%s] %s: RET without writing to %d-byte ret+%d(FP)", arch, fnName, v.size, v.off)
+                               }
+                       }
+                       retLine = nil
+               }
+               for lineno, line := range lines {
+                       lineno++
+
+                       badf := func(format string, args ...interface{}) {
+                               pass.Reportf(analysisutil.LineStart(tf, lineno), "[%s] %s: %s", arch, fnName, fmt.Sprintf(format, args...))
+                       }
+
+                       if arch == "" {
+                               // Determine architecture from +build line if possible.
+                               if m := asmPlusBuild.FindStringSubmatch(line); m != nil {
+                                       // There can be multiple architectures in a single +build line,
+                                       // so accumulate them all and then prefer the one that
+                                       // matches build.Default.GOARCH.
+                                       var archCandidates []*asmArch
+                                       for _, fld := range strings.Fields(m[1]) {
+                                               for _, a := range arches {
+                                                       if a.name == fld {
+                                                               archCandidates = append(archCandidates, a)
+                                                       }
+                                               }
+                                       }
+                                       for _, a := range archCandidates {
+                                               if a.name == build.Default.GOARCH {
+                                                       archCandidates = []*asmArch{a}
+                                                       break
+                                               }
+                                       }
+                                       if len(archCandidates) > 0 {
+                                               arch = archCandidates[0].name
+                                               archDef = archCandidates[0]
+                                       }
+                               }
+                       }
+
+                       if m := asmTEXT.FindStringSubmatch(line); m != nil {
+                               flushRet()
+                               if arch == "" {
+                                       // Arch not specified by filename or build tags.
+                                       // Fall back to build.Default.GOARCH.
+                                       for _, a := range arches {
+                                               if a.name == build.Default.GOARCH {
+                                                       arch = a.name
+                                                       archDef = a
+                                                       break
+                                               }
+                                       }
+                                       if arch == "" {
+                                               badf("%s: cannot determine architecture for assembly file")
+                                               continue Files
+                                       }
+                               }
+                               fnName = m[2]
+                               if pkgName := strings.TrimSpace(m[1]); pkgName != "" {
+                                       pathParts := strings.Split(pkgName, "∕")
+                                       pkgName = pathParts[len(pathParts)-1]
+                                       if pkgName != pass.Pkg.Path() {
+                                               badf("[%s] cannot check cross-package assembly function: %s is in package %s", arch, fnName, pkgName)
+                                               fn = nil
+                                               fnName = ""
+                                               continue
+                                       }
+                               }
+                               flag := m[3]
+                               fn = knownFunc[fnName][arch]
+                               if fn != nil {
+                                       size, _ := strconv.Atoi(m[5])
+                                       if size != fn.size && (flag != "7" && !strings.Contains(flag, "NOSPLIT") || size != 0) {
+                                               badf("wrong argument size %d; expected $...-%d", size, fn.size)
+                                       }
+                               }
+                               localSize, _ = strconv.Atoi(m[4])
+                               localSize += archDef.intSize
+                               if archDef.lr && !strings.Contains(flag, "NOFRAME") {
+                                       // Account for caller's saved LR
+                                       localSize += archDef.intSize
+                               }
+                               argSize, _ = strconv.Atoi(m[5])
+                               if fn == nil && !strings.Contains(fnName, "<>") {
+                                       badf("function %s missing Go declaration", fnName)
+                               }
+                               wroteSP = false
+                               haveRetArg = false
+                               continue
+                       } else if strings.Contains(line, "TEXT") && strings.Contains(line, "SB") {
+                               // function, but not visible from Go (didn't match asmTEXT), so stop checking
+                               flushRet()
+                               fn = nil
+                               fnName = ""
+                               continue
+                       }
+
+                       if strings.Contains(line, "RET") {
+                               retLine = append(retLine, lineno)
+                       }
+
+                       if fnName == "" {
+                               continue
+                       }
+
+                       if asmDATA.FindStringSubmatch(line) != nil {
+                               fn = nil
+                       }
+
+                       if archDef == nil {
+                               continue
+                       }
+
+                       if strings.Contains(line, ", "+archDef.stack) || strings.Contains(line, ",\t"+archDef.stack) {
+                               wroteSP = true
+                               continue
+                       }
+
+                       for _, m := range asmSP.FindAllStringSubmatch(line, -1) {
+                               if m[3] != archDef.stack || wroteSP {
+                                       continue
+                               }
+                               off := 0
+                               if m[1] != "" {
+                                       off, _ = strconv.Atoi(m[2])
+                               }
+                               if off >= localSize {
+                                       if fn != nil {
+                                               v := fn.varByOffset[off-localSize]
+                                               if v != nil {
+                                                       badf("%s should be %s+%d(FP)", m[1], v.name, off-localSize)
+                                                       continue
+                                               }
+                                       }
+                                       if off >= localSize+argSize {
+                                               badf("use of %s points beyond argument frame", m[1])
+                                               continue
+                                       }
+                                       badf("use of %s to access argument frame", m[1])
+                               }
+                       }
+
+                       if fn == nil {
+                               continue
+                       }
+
+                       for _, m := range asmUnnamedFP.FindAllStringSubmatch(line, -1) {
+                               off, _ := strconv.Atoi(m[2])
+                               v := fn.varByOffset[off]
+                               if v != nil {
+                                       badf("use of unnamed argument %s; offset %d is %s+%d(FP)", m[1], off, v.name, v.off)
+                               } else {
+                                       badf("use of unnamed argument %s", m[1])
+                               }
+                       }
+
+                       for _, m := range asmNamedFP.FindAllStringSubmatch(line, -1) {
+                               name := m[1]
+                               off := 0
+                               if m[2] != "" {
+                                       off, _ = strconv.Atoi(m[2])
+                               }
+                               if name == "ret" || strings.HasPrefix(name, "ret_") {
+                                       haveRetArg = true
+                               }
+                               v := fn.vars[name]
+                               if v == nil {
+                                       // Allow argframe+0(FP).
+                                       if name == "argframe" && off == 0 {
+                                               continue
+                                       }
+                                       v = fn.varByOffset[off]
+                                       if v != nil {
+                                               badf("unknown variable %s; offset %d is %s+%d(FP)", name, off, v.name, v.off)
+                                       } else {
+                                               badf("unknown variable %s", name)
+                                       }
+                                       continue
+                               }
+                               asmCheckVar(badf, fn, line, m[0], off, v)
+                       }
+               }
+               flushRet()
+       }
+       return nil, nil
+}
+
+func asmKindForType(t types.Type, size int) asmKind {
+       switch t := t.Underlying().(type) {
+       case *types.Basic:
+               switch t.Kind() {
+               case types.String:
+                       return asmString
+               case types.Complex64, types.Complex128:
+                       return asmComplex
+               }
+               return asmKind(size)
+       case *types.Pointer, *types.Chan, *types.Map, *types.Signature:
+               return asmKind(size)
+       case *types.Struct:
+               return asmStruct
+       case *types.Interface:
+               if t.Empty() {
+                       return asmEmptyInterface
+               }
+               return asmInterface
+       case *types.Array:
+               return asmArray
+       case *types.Slice:
+               return asmSlice
+       }
+       panic("unreachable")
+}
+
+// A component is an assembly-addressable component of a composite type,
+// or a composite type itself.
+type component struct {
+       size   int
+       offset int
+       kind   asmKind
+       typ    string
+       suffix string // Such as _base for string base, _0_lo for lo half of first element of [1]uint64 on 32 bit machine.
+       outer  string // The suffix for immediately containing composite type.
+}
+
+func newComponent(suffix string, kind asmKind, typ string, offset, size int, outer string) component {
+       return component{suffix: suffix, kind: kind, typ: typ, offset: offset, size: size, outer: outer}
+}
+
+// componentsOfType generates a list of components of type t.
+// For example, given string, the components are the string itself, the base, and the length.
+func componentsOfType(arch *asmArch, t types.Type) []component {
+       return appendComponentsRecursive(arch, t, nil, "", 0)
+}
+
+// appendComponentsRecursive implements componentsOfType.
+// Recursion is required to correct handle structs and arrays,
+// which can contain arbitrary other types.
+func appendComponentsRecursive(arch *asmArch, t types.Type, cc []component, suffix string, off int) []component {
+       s := t.String()
+       size := int(arch.sizes.Sizeof(t))
+       kind := asmKindForType(t, size)
+       cc = append(cc, newComponent(suffix, kind, s, off, size, suffix))
+
+       switch kind {
+       case 8:
+               if arch.ptrSize == 4 {
+                       w1, w2 := "lo", "hi"
+                       if arch.bigEndian {
+                               w1, w2 = w2, w1
+                       }
+                       cc = append(cc, newComponent(suffix+"_"+w1, 4, "half "+s, off, 4, suffix))
+                       cc = append(cc, newComponent(suffix+"_"+w2, 4, "half "+s, off+4, 4, suffix))
+               }
+
+       case asmEmptyInterface:
+               cc = append(cc, newComponent(suffix+"_type", asmKind(arch.ptrSize), "interface type", off, arch.ptrSize, suffix))
+               cc = append(cc, newComponent(suffix+"_data", asmKind(arch.ptrSize), "interface data", off+arch.ptrSize, arch.ptrSize, suffix))
+
+       case asmInterface:
+               cc = append(cc, newComponent(suffix+"_itable", asmKind(arch.ptrSize), "interface itable", off, arch.ptrSize, suffix))
+               cc = append(cc, newComponent(suffix+"_data", asmKind(arch.ptrSize), "interface data", off+arch.ptrSize, arch.ptrSize, suffix))
+
+       case asmSlice:
+               cc = append(cc, newComponent(suffix+"_base", asmKind(arch.ptrSize), "slice base", off, arch.ptrSize, suffix))
+               cc = append(cc, newComponent(suffix+"_len", asmKind(arch.intSize), "slice len", off+arch.ptrSize, arch.intSize, suffix))
+               cc = append(cc, newComponent(suffix+"_cap", asmKind(arch.intSize), "slice cap", off+arch.ptrSize+arch.intSize, arch.intSize, suffix))
+
+       case asmString:
+               cc = append(cc, newComponent(suffix+"_base", asmKind(arch.ptrSize), "string base", off, arch.ptrSize, suffix))
+               cc = append(cc, newComponent(suffix+"_len", asmKind(arch.intSize), "string len", off+arch.ptrSize, arch.intSize, suffix))
+
+       case asmComplex:
+               fsize := size / 2
+               cc = append(cc, newComponent(suffix+"_real", asmKind(fsize), fmt.Sprintf("real(complex%d)", size*8), off, fsize, suffix))
+               cc = append(cc, newComponent(suffix+"_imag", asmKind(fsize), fmt.Sprintf("imag(complex%d)", size*8), off+fsize, fsize, suffix))
+
+       case asmStruct:
+               tu := t.Underlying().(*types.Struct)
+               fields := make([]*types.Var, tu.NumFields())
+               for i := 0; i < tu.NumFields(); i++ {
+                       fields[i] = tu.Field(i)
+               }
+               offsets := arch.sizes.Offsetsof(fields)
+               for i, f := range fields {
+                       cc = appendComponentsRecursive(arch, f.Type(), cc, suffix+"_"+f.Name(), off+int(offsets[i]))
+               }
+
+       case asmArray:
+               tu := t.Underlying().(*types.Array)
+               elem := tu.Elem()
+               // Calculate offset of each element array.
+               fields := []*types.Var{
+                       types.NewVar(token.NoPos, nil, "fake0", elem),
+                       types.NewVar(token.NoPos, nil, "fake1", elem),
+               }
+               offsets := arch.sizes.Offsetsof(fields)
+               elemoff := int(offsets[1])
+               for i := 0; i < int(tu.Len()); i++ {
+                       cc = appendComponentsRecursive(arch, elem, cc, suffix+"_"+strconv.Itoa(i), i*elemoff)
+               }
+       }
+
+       return cc
+}
+
+// asmParseDecl parses a function decl for expected assembly variables.
+func asmParseDecl(pass *analysis.Pass, decl *ast.FuncDecl) map[string]*asmFunc {
+       var (
+               arch   *asmArch
+               fn     *asmFunc
+               offset int
+       )
+
+       // addParams adds asmVars for each of the parameters in list.
+       // isret indicates whether the list are the arguments or the return values.
+       // TODO(adonovan): simplify by passing (*types.Signature).{Params,Results}
+       // instead of list.
+       addParams := func(list []*ast.Field, isret bool) {
+               argnum := 0
+               for _, fld := range list {
+                       t := pass.TypesInfo.Types[fld.Type].Type
+
+                       // Work around github.com/golang/go/issues/28277.
+                       if t == nil {
+                               if ell, ok := fld.Type.(*ast.Ellipsis); ok {
+                                       t = types.NewSlice(pass.TypesInfo.Types[ell.Elt].Type)
+                               }
+                       }
+
+                       align := int(arch.sizes.Alignof(t))
+                       size := int(arch.sizes.Sizeof(t))
+                       offset += -offset & (align - 1)
+                       cc := componentsOfType(arch, t)
+
+                       // names is the list of names with this type.
+                       names := fld.Names
+                       if len(names) == 0 {
+                               // Anonymous args will be called arg, arg1, arg2, ...
+                               // Similarly so for return values: ret, ret1, ret2, ...
+                               name := "arg"
+                               if isret {
+                                       name = "ret"
+                               }
+                               if argnum > 0 {
+                                       name += strconv.Itoa(argnum)
+                               }
+                               names = []*ast.Ident{ast.NewIdent(name)}
+                       }
+                       argnum += len(names)
+
+                       // Create variable for each name.
+                       for _, id := range names {
+                               name := id.Name
+                               for _, c := range cc {
+                                       outer := name + c.outer
+                                       v := asmVar{
+                                               name: name + c.suffix,
+                                               kind: c.kind,
+                                               typ:  c.typ,
+                                               off:  offset + c.offset,
+                                               size: c.size,
+                                       }
+                                       if vo := fn.vars[outer]; vo != nil {
+                                               vo.inner = append(vo.inner, &v)
+                                       }
+                                       fn.vars[v.name] = &v
+                                       for i := 0; i < v.size; i++ {
+                                               fn.varByOffset[v.off+i] = &v
+                                       }
+                               }
+                               offset += size
+                       }
+               }
+       }
+
+       m := make(map[string]*asmFunc)
+       for _, arch = range arches {
+               fn = &asmFunc{
+                       arch:        arch,
+                       vars:        make(map[string]*asmVar),
+                       varByOffset: make(map[int]*asmVar),
+               }
+               offset = 0
+               addParams(decl.Type.Params.List, false)
+               if decl.Type.Results != nil && len(decl.Type.Results.List) > 0 {
+                       offset += -offset & (arch.maxAlign - 1)
+                       addParams(decl.Type.Results.List, true)
+               }
+               fn.size = offset
+               m[arch.name] = fn
+       }
+
+       return m
+}
+
+// asmCheckVar checks a single variable reference.
+func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr string, off int, v *asmVar) {
+       m := asmOpcode.FindStringSubmatch(line)
+       if m == nil {
+               if !strings.HasPrefix(strings.TrimSpace(line), "//") {
+                       badf("cannot find assembly opcode")
+               }
+               return
+       }
+
+       // Determine operand sizes from instruction.
+       // Typically the suffix suffices, but there are exceptions.
+       var src, dst, kind asmKind
+       op := m[1]
+       switch fn.arch.name + "." + op {
+       case "386.FMOVLP":
+               src, dst = 8, 4
+       case "arm.MOVD":
+               src = 8
+       case "arm.MOVW":
+               src = 4
+       case "arm.MOVH", "arm.MOVHU":
+               src = 2
+       case "arm.MOVB", "arm.MOVBU":
+               src = 1
+       // LEA* opcodes don't really read the second arg.
+       // They just take the address of it.
+       case "386.LEAL":
+               dst = 4
+       case "amd64.LEAQ":
+               dst = 8
+       case "amd64p32.LEAL":
+               dst = 4
+       default:
+               switch fn.arch.name {
+               case "386", "amd64":
+                       if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "D") || strings.HasSuffix(op, "DP")) {
+                               // FMOVDP, FXCHD, etc
+                               src = 8
+                               break
+                       }
+                       if strings.HasPrefix(op, "P") && strings.HasSuffix(op, "RD") {
+                               // PINSRD, PEXTRD, etc
+                               src = 4
+                               break
+                       }
+                       if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "F") || strings.HasSuffix(op, "FP")) {
+                               // FMOVFP, FXCHF, etc
+                               src = 4
+                               break
+                       }
+                       if strings.HasSuffix(op, "SD") {
+                               // MOVSD, SQRTSD, etc
+                               src = 8
+                               break
+                       }
+                       if strings.HasSuffix(op, "SS") {
+                               // MOVSS, SQRTSS, etc
+                               src = 4
+                               break
+                       }
+                       if strings.HasPrefix(op, "SET") {
+                               // SETEQ, etc
+                               src = 1
+                               break
+                       }
+                       switch op[len(op)-1] {
+                       case 'B':
+                               src = 1
+                       case 'W':
+                               src = 2
+                       case 'L':
+                               src = 4
+                       case 'D', 'Q':
+                               src = 8
+                       }
+               case "ppc64", "ppc64le":
+                       // Strip standard suffixes to reveal size letter.
+                       m := ppc64Suff.FindStringSubmatch(op)
+                       if m != nil {
+                               switch m[1][0] {
+                               case 'B':
+                                       src = 1
+                               case 'H':
+                                       src = 2
+                               case 'W':
+                                       src = 4
+                               case 'D':
+                                       src = 8
+                               }
+                       }
+               case "mips", "mipsle", "mips64", "mips64le":
+                       switch op {
+                       case "MOVB", "MOVBU":
+                               src = 1
+                       case "MOVH", "MOVHU":
+                               src = 2
+                       case "MOVW", "MOVWU", "MOVF":
+                               src = 4
+                       case "MOVV", "MOVD":
+                               src = 8
+                       }
+               case "s390x":
+                       switch op {
+                       case "MOVB", "MOVBZ":
+                               src = 1
+                       case "MOVH", "MOVHZ":
+                               src = 2
+                       case "MOVW", "MOVWZ", "FMOVS":
+                               src = 4
+                       case "MOVD", "FMOVD":
+                               src = 8
+                       }
+               }
+       }
+       if dst == 0 {
+               dst = src
+       }
+
+       // Determine whether the match we're holding
+       // is the first or second argument.
+       if strings.Index(line, expr) > strings.Index(line, ",") {
+               kind = dst
+       } else {
+               kind = src
+       }
+
+       vk := v.kind
+       vs := v.size
+       vt := v.typ
+       switch vk {
+       case asmInterface, asmEmptyInterface, asmString, asmSlice:
+               // allow reference to first word (pointer)
+               vk = v.inner[0].kind
+               vs = v.inner[0].size
+               vt = v.inner[0].typ
+       }
+
+       if off != v.off {
+               var inner bytes.Buffer
+               for i, vi := range v.inner {
+                       if len(v.inner) > 1 {
+                               fmt.Fprintf(&inner, ",")
+                       }
+                       fmt.Fprintf(&inner, " ")
+                       if i == len(v.inner)-1 {
+                               fmt.Fprintf(&inner, "or ")
+                       }
+                       fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off)
+               }
+               badf("invalid offset %s; expected %s+%d(FP)%s", expr, v.name, v.off, inner.String())
+               return
+       }
+       if kind != 0 && kind != vk {
+               var inner bytes.Buffer
+               if len(v.inner) > 0 {
+                       fmt.Fprintf(&inner, " containing")
+                       for i, vi := range v.inner {
+                               if i > 0 && len(v.inner) > 2 {
+                                       fmt.Fprintf(&inner, ",")
+                               }
+                               fmt.Fprintf(&inner, " ")
+                               if i > 0 && i == len(v.inner)-1 {
+                                       fmt.Fprintf(&inner, "and ")
+                               }
+                               fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off)
+                       }
+               }
+               badf("invalid %s of %s; %s is %d-byte value%s", op, expr, vt, vs, inner.String())
+       }
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go
new file mode 100644 (file)
index 0000000..4dff290
--- /dev/null
@@ -0,0 +1,68 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package assign defines an Analyzer that detects useless assignments.
+package assign
+
+// TODO(adonovan): check also for assignments to struct fields inside
+// methods that are on T instead of *T.
+
+import (
+       "go/ast"
+       "go/token"
+       "reflect"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+const Doc = `check for useless assignments
+
+This checker reports assignments of the form x = x or a[i] = a[i].
+These are almost always useless, and even when they aren't they are
+usually a mistake.`
+
+var Analyzer = &analysis.Analyzer{
+       Name:     "assign",
+       Doc:      Doc,
+       Requires: []*analysis.Analyzer{inspect.Analyzer},
+       Run:      run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.AssignStmt)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               stmt := n.(*ast.AssignStmt)
+               if stmt.Tok != token.ASSIGN {
+                       return // ignore :=
+               }
+               if len(stmt.Lhs) != len(stmt.Rhs) {
+                       // If LHS and RHS have different cardinality, they can't be the same.
+                       return
+               }
+               for i, lhs := range stmt.Lhs {
+                       rhs := stmt.Rhs[i]
+                       if analysisutil.HasSideEffects(pass.TypesInfo, lhs) ||
+                               analysisutil.HasSideEffects(pass.TypesInfo, rhs) {
+                               continue // expressions may not be equal
+                       }
+                       if reflect.TypeOf(lhs) != reflect.TypeOf(rhs) {
+                               continue // short-circuit the heavy-weight gofmt check
+                       }
+                       le := analysisutil.Format(pass.Fset, lhs)
+                       re := analysisutil.Format(pass.Fset, rhs)
+                       if le == re {
+                               pass.Reportf(stmt.Pos(), "self-assignment of %s to %s", re, le)
+                       }
+               }
+       })
+
+       return nil, nil
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go
new file mode 100644 (file)
index 0000000..45243d6
--- /dev/null
@@ -0,0 +1,96 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package atomic defines an Analyzer that checks for common mistakes
+// using the sync/atomic package.
+package atomic
+
+import (
+       "go/ast"
+       "go/token"
+       "go/types"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+const Doc = `check for common mistakes using the sync/atomic package
+
+The atomic checker looks for assignment statements of the form:
+
+       x = atomic.AddUint64(&x, 1)
+
+which are not atomic.`
+
+var Analyzer = &analysis.Analyzer{
+       Name:             "atomic",
+       Doc:              Doc,
+       Requires:         []*analysis.Analyzer{inspect.Analyzer},
+       RunDespiteErrors: true,
+       Run:              run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.AssignStmt)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(node ast.Node) {
+               n := node.(*ast.AssignStmt)
+               if len(n.Lhs) != len(n.Rhs) {
+                       return
+               }
+               if len(n.Lhs) == 1 && n.Tok == token.DEFINE {
+                       return
+               }
+
+               for i, right := range n.Rhs {
+                       call, ok := right.(*ast.CallExpr)
+                       if !ok {
+                               continue
+                       }
+                       sel, ok := call.Fun.(*ast.SelectorExpr)
+                       if !ok {
+                               continue
+                       }
+                       pkgIdent, _ := sel.X.(*ast.Ident)
+                       pkgName, ok := pass.TypesInfo.Uses[pkgIdent].(*types.PkgName)
+                       if !ok || pkgName.Imported().Path() != "sync/atomic" {
+                               continue
+                       }
+
+                       switch sel.Sel.Name {
+                       case "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr":
+                               checkAtomicAddAssignment(pass, n.Lhs[i], call)
+                       }
+               }
+       })
+       return nil, nil
+}
+
+// checkAtomicAddAssignment walks the atomic.Add* method calls checking
+// for assigning the return value to the same variable being used in the
+// operation
+func checkAtomicAddAssignment(pass *analysis.Pass, left ast.Expr, call *ast.CallExpr) {
+       if len(call.Args) != 2 {
+               return
+       }
+       arg := call.Args[0]
+       broken := false
+
+       gofmt := func(e ast.Expr) string { return analysisutil.Format(pass.Fset, e) }
+
+       if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND {
+               broken = gofmt(left) == gofmt(uarg.X)
+       } else if star, ok := left.(*ast.StarExpr); ok {
+               broken = gofmt(star.X) == gofmt(arg)
+       }
+
+       if broken {
+               pass.Reportf(left.Pos(), "direct assignment to atomic value")
+       }
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go
new file mode 100644 (file)
index 0000000..0e6f269
--- /dev/null
@@ -0,0 +1,214 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package bools defines an Analyzer that detects common mistakes
+// involving boolean operators.
+package bools
+
+import (
+       "go/ast"
+       "go/token"
+       "go/types"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+var Analyzer = &analysis.Analyzer{
+       Name:     "bools",
+       Doc:      "check for common mistakes involving boolean operators",
+       Requires: []*analysis.Analyzer{inspect.Analyzer},
+       Run:      run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.BinaryExpr)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               e := n.(*ast.BinaryExpr)
+
+               var op boolOp
+               switch e.Op {
+               case token.LOR:
+                       op = or
+               case token.LAND:
+                       op = and
+               default:
+                       return
+               }
+
+               // TODO(adonovan): this reports n(n-1)/2 errors for an
+               // expression e||...||e of depth n. Fix.
+               // See https://github.com/golang/go/issues/28086.
+               comm := op.commutativeSets(pass.TypesInfo, e)
+               for _, exprs := range comm {
+                       op.checkRedundant(pass, exprs)
+                       op.checkSuspect(pass, exprs)
+               }
+       })
+       return nil, nil
+}
+
+type boolOp struct {
+       name  string
+       tok   token.Token // token corresponding to this operator
+       badEq token.Token // token corresponding to the equality test that should not be used with this operator
+}
+
+var (
+       or  = boolOp{"or", token.LOR, token.NEQ}
+       and = boolOp{"and", token.LAND, token.EQL}
+)
+
+// commutativeSets returns all side effect free sets of
+// expressions in e that are connected by op.
+// For example, given 'a || b || f() || c || d' with the or op,
+// commutativeSets returns {{b, a}, {d, c}}.
+func (op boolOp) commutativeSets(info *types.Info, e *ast.BinaryExpr) [][]ast.Expr {
+       exprs := op.split(e)
+
+       // Partition the slice of expressions into commutative sets.
+       i := 0
+       var sets [][]ast.Expr
+       for j := 0; j <= len(exprs); j++ {
+               if j == len(exprs) || hasSideEffects(info, exprs[j]) {
+                       if i < j {
+                               sets = append(sets, exprs[i:j])
+                       }
+                       i = j + 1
+               }
+       }
+
+       return sets
+}
+
+// checkRedundant checks for expressions of the form
+//   e && e
+//   e || e
+// Exprs must contain only side effect free expressions.
+func (op boolOp) checkRedundant(pass *analysis.Pass, exprs []ast.Expr) {
+       seen := make(map[string]bool)
+       for _, e := range exprs {
+               efmt := analysisutil.Format(pass.Fset, e)
+               if seen[efmt] {
+                       pass.Reportf(e.Pos(), "redundant %s: %s %s %s", op.name, efmt, op.tok, efmt)
+               } else {
+                       seen[efmt] = true
+               }
+       }
+}
+
+// checkSuspect checks for expressions of the form
+//   x != c1 || x != c2
+//   x == c1 && x == c2
+// where c1 and c2 are constant expressions.
+// If c1 and c2 are the same then it's redundant;
+// if c1 and c2 are different then it's always true or always false.
+// Exprs must contain only side effect free expressions.
+func (op boolOp) checkSuspect(pass *analysis.Pass, exprs []ast.Expr) {
+       // seen maps from expressions 'x' to equality expressions 'x != c'.
+       seen := make(map[string]string)
+
+       for _, e := range exprs {
+               bin, ok := e.(*ast.BinaryExpr)
+               if !ok || bin.Op != op.badEq {
+                       continue
+               }
+
+               // In order to avoid false positives, restrict to cases
+               // in which one of the operands is constant. We're then
+               // interested in the other operand.
+               // In the rare case in which both operands are constant
+               // (e.g. runtime.GOOS and "windows"), we'll only catch
+               // mistakes if the LHS is repeated, which is how most
+               // code is written.
+               var x ast.Expr
+               switch {
+               case pass.TypesInfo.Types[bin.Y].Value != nil:
+                       x = bin.X
+               case pass.TypesInfo.Types[bin.X].Value != nil:
+                       x = bin.Y
+               default:
+                       continue
+               }
+
+               // e is of the form 'x != c' or 'x == c'.
+               xfmt := analysisutil.Format(pass.Fset, x)
+               efmt := analysisutil.Format(pass.Fset, e)
+               if prev, found := seen[xfmt]; found {
+                       // checkRedundant handles the case in which efmt == prev.
+                       if efmt != prev {
+                               pass.Reportf(e.Pos(), "suspect %s: %s %s %s", op.name, efmt, op.tok, prev)
+                       }
+               } else {
+                       seen[xfmt] = efmt
+               }
+       }
+}
+
+// hasSideEffects reports whether evaluation of e has side effects.
+func hasSideEffects(info *types.Info, e ast.Expr) bool {
+       safe := true
+       ast.Inspect(e, func(node ast.Node) bool {
+               switch n := node.(type) {
+               case *ast.CallExpr:
+                       typVal := info.Types[n.Fun]
+                       switch {
+                       case typVal.IsType():
+                               // Type conversion, which is safe.
+                       case typVal.IsBuiltin():
+                               // Builtin func, conservatively assumed to not
+                               // be safe for now.
+                               safe = false
+                               return false
+                       default:
+                               // A non-builtin func or method call.
+                               // Conservatively assume that all of them have
+                               // side effects for now.
+                               safe = false
+                               return false
+                       }
+               case *ast.UnaryExpr:
+                       if n.Op == token.ARROW {
+                               safe = false
+                               return false
+                       }
+               }
+               return true
+       })
+       return !safe
+}
+
+// split returns a slice of all subexpressions in e that are connected by op.
+// For example, given 'a || (b || c) || d' with the or op,
+// split returns []{d, c, b, a}.
+func (op boolOp) split(e ast.Expr) (exprs []ast.Expr) {
+       for {
+               e = unparen(e)
+               if b, ok := e.(*ast.BinaryExpr); ok && b.Op == op.tok {
+                       exprs = append(exprs, op.split(b.Y)...)
+                       e = b.X
+               } else {
+                       exprs = append(exprs, e)
+                       break
+               }
+       }
+       return
+}
+
+// unparen returns e with any enclosing parentheses stripped.
+func unparen(e ast.Expr) ast.Expr {
+       for {
+               p, ok := e.(*ast.ParenExpr)
+               if !ok {
+                       return e
+               }
+               e = p.X
+       }
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go
new file mode 100644 (file)
index 0000000..5a441e6
--- /dev/null
@@ -0,0 +1,159 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package buildtag defines an Analyzer that checks build tags.
+package buildtag
+
+import (
+       "bytes"
+       "fmt"
+       "go/ast"
+       "strings"
+       "unicode"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+)
+
+var Analyzer = &analysis.Analyzer{
+       Name: "buildtag",
+       Doc:  "check that +build tags are well-formed and correctly located",
+       Run:  runBuildTag,
+}
+
+func runBuildTag(pass *analysis.Pass) (interface{}, error) {
+       for _, f := range pass.Files {
+               checkGoFile(pass, f)
+       }
+       for _, name := range pass.OtherFiles {
+               if err := checkOtherFile(pass, name); err != nil {
+                       return nil, err
+               }
+       }
+       return nil, nil
+}
+
+func checkGoFile(pass *analysis.Pass, f *ast.File) {
+       pastCutoff := false
+       for _, group := range f.Comments {
+               // A +build comment is ignored after or adjoining the package declaration.
+               if group.End()+1 >= f.Package {
+                       pastCutoff = true
+               }
+
+               // "+build" is ignored within or after a /*...*/ comment.
+               if !strings.HasPrefix(group.List[0].Text, "//") {
+                       pastCutoff = true
+                       continue
+               }
+
+               // Check each line of a //-comment.
+               for _, c := range group.List {
+                       if !strings.Contains(c.Text, "+build") {
+                               continue
+                       }
+                       if err := checkLine(c.Text, pastCutoff); err != nil {
+                               pass.Reportf(c.Pos(), "%s", err)
+                       }
+               }
+       }
+}
+
+func checkOtherFile(pass *analysis.Pass, filename string) error {
+       content, tf, err := analysisutil.ReadFile(pass.Fset, filename)
+       if err != nil {
+               return err
+       }
+
+       // We must look at the raw lines, as build tags may appear in non-Go
+       // files such as assembly files.
+       lines := bytes.SplitAfter(content, nl)
+
+       // Determine cutpoint where +build comments are no longer valid.
+       // They are valid in leading // comments in the file followed by
+       // a blank line.
+       //
+       // This must be done as a separate pass because of the
+       // requirement that the comment be followed by a blank line.
+       var cutoff int
+       for i, line := range lines {
+               line = bytes.TrimSpace(line)
+               if !bytes.HasPrefix(line, slashSlash) {
+                       if len(line) > 0 {
+                               break
+                       }
+                       cutoff = i
+               }
+       }
+
+       for i, line := range lines {
+               line = bytes.TrimSpace(line)
+               if !bytes.HasPrefix(line, slashSlash) {
+                       continue
+               }
+               if !bytes.Contains(line, []byte("+build")) {
+                       continue
+               }
+               if err := checkLine(string(line), i >= cutoff); err != nil {
+                       pass.Reportf(analysisutil.LineStart(tf, i+1), "%s", err)
+                       continue
+               }
+       }
+       return nil
+}
+
+// checkLine checks a line that starts with "//" and contains "+build".
+func checkLine(line string, pastCutoff bool) error {
+       line = strings.TrimPrefix(line, "//")
+       line = strings.TrimSpace(line)
+
+       if strings.HasPrefix(line, "+build") {
+               fields := strings.Fields(line)
+               if fields[0] != "+build" {
+                       // Comment is something like +buildasdf not +build.
+                       return fmt.Errorf("possible malformed +build comment")
+               }
+               if pastCutoff {
+                       return fmt.Errorf("+build comment must appear before package clause and be followed by a blank line")
+               }
+               if err := checkArguments(fields); err != nil {
+                       return err
+               }
+       } else {
+               // Comment with +build but not at beginning.
+               if !pastCutoff {
+                       return fmt.Errorf("possible malformed +build comment")
+               }
+       }
+       return nil
+}
+
+func checkArguments(fields []string) error {
+       // The original version of this checker in vet could examine
+       // files with malformed build tags that would cause the file to
+       // be always ignored by "go build". However, drivers for the new
+       // analysis API will analyze only the files selected to form a
+       // package, so these checks will never fire.
+       // TODO(adonovan): rethink this.
+
+       for _, arg := range fields[1:] {
+               for _, elem := range strings.Split(arg, ",") {
+                       if strings.HasPrefix(elem, "!!") {
+                               return fmt.Errorf("invalid double negative in build constraint: %s", arg)
+                       }
+                       elem = strings.TrimPrefix(elem, "!")
+                       for _, c := range elem {
+                               if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' {
+                                       return fmt.Errorf("invalid non-alphanumeric build constraint: %s", arg)
+                               }
+                       }
+               }
+       }
+       return nil
+}
+
+var (
+       nl         = []byte("\n")
+       slashSlash = []byte("//")
+)
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go
new file mode 100644 (file)
index 0000000..7eb24a4
--- /dev/null
@@ -0,0 +1,226 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package cgocall defines an Analyzer that detects some violations of
+// the cgo pointer passing rules.
+package cgocall
+
+import (
+       "fmt"
+       "go/ast"
+       "go/token"
+       "go/types"
+       "log"
+       "strings"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+const Doc = `detect some violations of the cgo pointer passing rules
+
+Check for invalid cgo pointer passing.
+This looks for code that uses cgo to call C code passing values
+whose types are almost always invalid according to the cgo pointer
+sharing rules.
+Specifically, it warns about attempts to pass a Go chan, map, func,
+or slice to C, either directly, or via a pointer, array, or struct.`
+
+var Analyzer = &analysis.Analyzer{
+       Name:             "cgocall",
+       Doc:              Doc,
+       Requires:         []*analysis.Analyzer{inspect.Analyzer},
+       RunDespiteErrors: true,
+       Run:              run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.CallExpr)(nil),
+       }
+       inspect.WithStack(nodeFilter, func(n ast.Node, push bool, stack []ast.Node) bool {
+               if !push {
+                       return true
+               }
+               call, name := findCall(pass.Fset, stack)
+               if call == nil {
+                       return true // not a call we need to check
+               }
+
+               // A call to C.CBytes passes a pointer but is always safe.
+               if name == "CBytes" {
+                       return true
+               }
+
+               if false {
+                       fmt.Printf("%s: inner call to C.%s\n", pass.Fset.Position(n.Pos()), name)
+                       fmt.Printf("%s: outer call to C.%s\n", pass.Fset.Position(call.Lparen), name)
+               }
+
+               for _, arg := range call.Args {
+                       if !typeOKForCgoCall(cgoBaseType(pass.TypesInfo, arg), make(map[types.Type]bool)) {
+                               pass.Reportf(arg.Pos(), "possibly passing Go type with embedded pointer to C")
+                               break
+                       }
+
+                       // Check for passing the address of a bad type.
+                       if conv, ok := arg.(*ast.CallExpr); ok && len(conv.Args) == 1 &&
+                               isUnsafePointer(pass.TypesInfo, conv.Fun) {
+                               arg = conv.Args[0]
+                       }
+                       if u, ok := arg.(*ast.UnaryExpr); ok && u.Op == token.AND {
+                               if !typeOKForCgoCall(cgoBaseType(pass.TypesInfo, u.X), make(map[types.Type]bool)) {
+                                       pass.Reportf(arg.Pos(), "possibly passing Go type with embedded pointer to C")
+                                       break
+                               }
+                       }
+               }
+               return true
+       })
+       return nil, nil
+}
+
+// findCall returns the CallExpr that we need to check, which may not be
+// the same as the one we're currently visiting, due to code generation.
+// It also returns the name of the function, such as "f" for C.f(...).
+//
+// This checker was initially written in vet to inpect unprocessed cgo
+// source files using partial type information. However, Analyzers in
+// the new analysis API are presented with the type-checked, processed
+// Go ASTs resulting from cgo processing files, so we must choose
+// between:
+//
+// a) locating the cgo file (e.g. from //line directives)
+//    and working with that, or
+// b) working with the file generated by cgo.
+//
+// We cannot use (a) because it does not provide type information, which
+// the analyzer needs, and it is infeasible for the analyzer to run the
+// type checker on this file. Thus we choose (b), which is fragile,
+// because the checker may need to change each time the cgo processor
+// changes.
+//
+// Consider a cgo source file containing this header:
+//
+//      /* void f(void *x, *y); */
+//      import "C"
+//
+// The cgo tool expands a call such as:
+//
+//      C.f(x, y)
+//
+// to this:
+//
+// 1   func(param0, param1 unsafe.Pointer) {
+// 2           ... various checks on params ...
+// 3           (_Cfunc_f)(param0, param1)
+// 4   }(x, y)
+//
+// We first locate the _Cfunc_f call on line 3, then
+// walk up the stack of enclosing nodes until we find
+// the call on line 4.
+//
+func findCall(fset *token.FileSet, stack []ast.Node) (*ast.CallExpr, string) {
+       last := len(stack) - 1
+       call := stack[last].(*ast.CallExpr)
+       if id, ok := analysisutil.Unparen(call.Fun).(*ast.Ident); ok {
+               if name := strings.TrimPrefix(id.Name, "_Cfunc_"); name != id.Name {
+                       // Find the outer call with the arguments (x, y) we want to check.
+                       for i := last - 1; i >= 0; i-- {
+                               if outer, ok := stack[i].(*ast.CallExpr); ok {
+                                       return outer, name
+                               }
+                       }
+                       // This shouldn't happen.
+                       // Perhaps the code generator has changed?
+                       log.Printf("%s: can't find outer call for C.%s(...)",
+                               fset.Position(call.Lparen), name)
+               }
+       }
+       return nil, ""
+}
+
+// cgoBaseType tries to look through type conversions involving
+// unsafe.Pointer to find the real type. It converts:
+//   unsafe.Pointer(x) => x
+//   *(*unsafe.Pointer)(unsafe.Pointer(&x)) => x
+func cgoBaseType(info *types.Info, arg ast.Expr) types.Type {
+       switch arg := arg.(type) {
+       case *ast.CallExpr:
+               if len(arg.Args) == 1 && isUnsafePointer(info, arg.Fun) {
+                       return cgoBaseType(info, arg.Args[0])
+               }
+       case *ast.StarExpr:
+               call, ok := arg.X.(*ast.CallExpr)
+               if !ok || len(call.Args) != 1 {
+                       break
+               }
+               // Here arg is *f(v).
+               t := info.Types[call.Fun].Type
+               if t == nil {
+                       break
+               }
+               ptr, ok := t.Underlying().(*types.Pointer)
+               if !ok {
+                       break
+               }
+               // Here arg is *(*p)(v)
+               elem, ok := ptr.Elem().Underlying().(*types.Basic)
+               if !ok || elem.Kind() != types.UnsafePointer {
+                       break
+               }
+               // Here arg is *(*unsafe.Pointer)(v)
+               call, ok = call.Args[0].(*ast.CallExpr)
+               if !ok || len(call.Args) != 1 {
+                       break
+               }
+               // Here arg is *(*unsafe.Pointer)(f(v))
+               if !isUnsafePointer(info, call.Fun) {
+                       break
+               }
+               // Here arg is *(*unsafe.Pointer)(unsafe.Pointer(v))
+               u, ok := call.Args[0].(*ast.UnaryExpr)
+               if !ok || u.Op != token.AND {
+                       break
+               }
+               // Here arg is *(*unsafe.Pointer)(unsafe.Pointer(&v))
+               return cgoBaseType(info, u.X)
+       }
+
+       return info.Types[arg].Type
+}
+
+// typeOKForCgoCall reports whether the type of arg is OK to pass to a
+// C function using cgo. This is not true for Go types with embedded
+// pointers. m is used to avoid infinite recursion on recursive types.
+func typeOKForCgoCall(t types.Type, m map[types.Type]bool) bool {
+       if t == nil || m[t] {
+               return true
+       }
+       m[t] = true
+       switch t := t.Underlying().(type) {
+       case *types.Chan, *types.Map, *types.Signature, *types.Slice:
+               return false
+       case *types.Pointer:
+               return typeOKForCgoCall(t.Elem(), m)
+       case *types.Array:
+               return typeOKForCgoCall(t.Elem(), m)
+       case *types.Struct:
+               for i := 0; i < t.NumFields(); i++ {
+                       if !typeOKForCgoCall(t.Field(i).Type(), m) {
+                               return false
+                       }
+               }
+       }
+       return true
+}
+
+func isUnsafePointer(info *types.Info, e ast.Expr) bool {
+       t := info.Types[e].Type
+       return t != nil && t.Underlying() == types.Typ[types.UnsafePointer]
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go
new file mode 100644 (file)
index 0000000..b7cfe8a
--- /dev/null
@@ -0,0 +1,108 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package composite defines an Analyzer that checks for unkeyed
+// composite literals.
+package composite
+
+import (
+       "go/ast"
+       "go/types"
+       "strings"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+const Doc = `checked for unkeyed composite literals
+
+This analyzer reports a diagnostic for composite literals of struct
+types imported from another package that do not use the field-keyed
+syntax. Such literals are fragile because the addition of a new field
+(even if unexported) to the struct will cause compilation to fail.`
+
+var Analyzer = &analysis.Analyzer{
+       Name:             "composites",
+       Doc:              Doc,
+       Requires:         []*analysis.Analyzer{inspect.Analyzer},
+       RunDespiteErrors: true,
+       Run:              run,
+}
+
+var whitelist = true
+
+func init() {
+       Analyzer.Flags.BoolVar(&whitelist, "whitelist", whitelist, "use composite white list; for testing only")
+}
+
+// runUnkeyedLiteral checks if a composite literal is a struct literal with
+// unkeyed fields.
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.CompositeLit)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               cl := n.(*ast.CompositeLit)
+
+               typ := pass.TypesInfo.Types[cl].Type
+               if typ == nil {
+                       // cannot determine composite literals' type, skip it
+                       return
+               }
+               typeName := typ.String()
+               if whitelist && unkeyedLiteral[typeName] {
+                       // skip whitelisted types
+                       return
+               }
+               under := typ.Underlying()
+               for {
+                       ptr, ok := under.(*types.Pointer)
+                       if !ok {
+                               break
+                       }
+                       under = ptr.Elem().Underlying()
+               }
+               if _, ok := under.(*types.Struct); !ok {
+                       // skip non-struct composite literals
+                       return
+               }
+               if isLocalType(pass, typ) {
+                       // allow unkeyed locally defined composite literal
+                       return
+               }
+
+               // check if the CompositeLit contains an unkeyed field
+               allKeyValue := true
+               for _, e := range cl.Elts {
+                       if _, ok := e.(*ast.KeyValueExpr); !ok {
+                               allKeyValue = false
+                               break
+                       }
+               }
+               if allKeyValue {
+                       // all the composite literal fields are keyed
+                       return
+               }
+
+               pass.Reportf(cl.Pos(), "%s composite literal uses unkeyed fields", typeName)
+       })
+       return nil, nil
+}
+
+func isLocalType(pass *analysis.Pass, typ types.Type) bool {
+       switch x := typ.(type) {
+       case *types.Struct:
+               // struct literals are local types
+               return true
+       case *types.Pointer:
+               return isLocalType(pass, x.Elem())
+       case *types.Named:
+               // names in package foo are local to foo_test too
+               return strings.TrimSuffix(x.Obj().Pkg().Path(), "_test") == strings.TrimSuffix(pass.Pkg.Path(), "_test")
+       }
+       return false
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go
new file mode 100644 (file)
index 0000000..ab609f2
--- /dev/null
@@ -0,0 +1,33 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package composite
+
+// unkeyedLiteral is a white list of types in the standard packages
+// that are used with unkeyed literals we deem to be acceptable.
+var unkeyedLiteral = map[string]bool{
+       // These image and image/color struct types are frozen. We will never add fields to them.
+       "image/color.Alpha16": true,
+       "image/color.Alpha":   true,
+       "image/color.CMYK":    true,
+       "image/color.Gray16":  true,
+       "image/color.Gray":    true,
+       "image/color.NRGBA64": true,
+       "image/color.NRGBA":   true,
+       "image/color.NYCbCrA": true,
+       "image/color.RGBA64":  true,
+       "image/color.RGBA":    true,
+       "image/color.YCbCr":   true,
+       "image.Point":         true,
+       "image.Rectangle":     true,
+       "image.Uniform":       true,
+
+       "unicode.Range16": true,
+
+       // These three structs are used in generated test main files,
+       // but the generator can be trusted.
+       "testing.InternalBenchmark": true,
+       "testing.InternalExample":   true,
+       "testing.InternalTest":      true,
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go
new file mode 100644 (file)
index 0000000..067aed5
--- /dev/null
@@ -0,0 +1,300 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package copylock defines an Analyzer that checks for locks
+// erroneously passed by value.
+package copylock
+
+import (
+       "bytes"
+       "fmt"
+       "go/ast"
+       "go/token"
+       "go/types"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+const Doc = `check for locks erroneously passed by value
+
+Inadvertently copying a value containing a lock, such as sync.Mutex or
+sync.WaitGroup, may cause both copies to malfunction. Generally such
+values should be referred to through a pointer.`
+
+var Analyzer = &analysis.Analyzer{
+       Name:             "copylocks",
+       Doc:              Doc,
+       Requires:         []*analysis.Analyzer{inspect.Analyzer},
+       RunDespiteErrors: true,
+       Run:              run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.AssignStmt)(nil),
+               (*ast.CallExpr)(nil),
+               (*ast.CompositeLit)(nil),
+               (*ast.FuncDecl)(nil),
+               (*ast.FuncLit)(nil),
+               (*ast.GenDecl)(nil),
+               (*ast.RangeStmt)(nil),
+               (*ast.ReturnStmt)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(node ast.Node) {
+               switch node := node.(type) {
+               case *ast.RangeStmt:
+                       checkCopyLocksRange(pass, node)
+               case *ast.FuncDecl:
+                       checkCopyLocksFunc(pass, node.Name.Name, node.Recv, node.Type)
+               case *ast.FuncLit:
+                       checkCopyLocksFunc(pass, "func", nil, node.Type)
+               case *ast.CallExpr:
+                       checkCopyLocksCallExpr(pass, node)
+               case *ast.AssignStmt:
+                       checkCopyLocksAssign(pass, node)
+               case *ast.GenDecl:
+                       checkCopyLocksGenDecl(pass, node)
+               case *ast.CompositeLit:
+                       checkCopyLocksCompositeLit(pass, node)
+               case *ast.ReturnStmt:
+                       checkCopyLocksReturnStmt(pass, node)
+               }
+       })
+       return nil, nil
+}
+
+// checkCopyLocksAssign checks whether an assignment
+// copies a lock.
+func checkCopyLocksAssign(pass *analysis.Pass, as *ast.AssignStmt) {
+       for i, x := range as.Rhs {
+               if path := lockPathRhs(pass, x); path != nil {
+                       pass.Reportf(x.Pos(), "assignment copies lock value to %v: %v", analysisutil.Format(pass.Fset, as.Lhs[i]), path)
+               }
+       }
+}
+
+// checkCopyLocksGenDecl checks whether lock is copied
+// in variable declaration.
+func checkCopyLocksGenDecl(pass *analysis.Pass, gd *ast.GenDecl) {
+       if gd.Tok != token.VAR {
+               return
+       }
+       for _, spec := range gd.Specs {
+               valueSpec := spec.(*ast.ValueSpec)
+               for i, x := range valueSpec.Values {
+                       if path := lockPathRhs(pass, x); path != nil {
+                               pass.Reportf(x.Pos(), "variable declaration copies lock value to %v: %v", valueSpec.Names[i].Name, path)
+                       }
+               }
+       }
+}
+
+// checkCopyLocksCompositeLit detects lock copy inside a composite literal
+func checkCopyLocksCompositeLit(pass *analysis.Pass, cl *ast.CompositeLit) {
+       for _, x := range cl.Elts {
+               if node, ok := x.(*ast.KeyValueExpr); ok {
+                       x = node.Value
+               }
+               if path := lockPathRhs(pass, x); path != nil {
+                       pass.Reportf(x.Pos(), "literal copies lock value from %v: %v", analysisutil.Format(pass.Fset, x), path)
+               }
+       }
+}
+
+// checkCopyLocksReturnStmt detects lock copy in return statement
+func checkCopyLocksReturnStmt(pass *analysis.Pass, rs *ast.ReturnStmt) {
+       for _, x := range rs.Results {
+               if path := lockPathRhs(pass, x); path != nil {
+                       pass.Reportf(x.Pos(), "return copies lock value: %v", path)
+               }
+       }
+}
+
+// checkCopyLocksCallExpr detects lock copy in the arguments to a function call
+func checkCopyLocksCallExpr(pass *analysis.Pass, ce *ast.CallExpr) {
+       var id *ast.Ident
+       switch fun := ce.Fun.(type) {
+       case *ast.Ident:
+               id = fun
+       case *ast.SelectorExpr:
+               id = fun.Sel
+       }
+       if fun, ok := pass.TypesInfo.Uses[id].(*types.Builtin); ok {
+               switch fun.Name() {
+               case "new", "len", "cap", "Sizeof":
+                       return
+               }
+       }
+       for _, x := range ce.Args {
+               if path := lockPathRhs(pass, x); path != nil {
+                       pass.Reportf(x.Pos(), "call of %s copies lock value: %v", analysisutil.Format(pass.Fset, ce.Fun), path)
+               }
+       }
+}
+
+// checkCopyLocksFunc checks whether a function might
+// inadvertently copy a lock, by checking whether
+// its receiver, parameters, or return values
+// are locks.
+func checkCopyLocksFunc(pass *analysis.Pass, name string, recv *ast.FieldList, typ *ast.FuncType) {
+       if recv != nil && len(recv.List) > 0 {
+               expr := recv.List[0].Type
+               if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil {
+                       pass.Reportf(expr.Pos(), "%s passes lock by value: %v", name, path)
+               }
+       }
+
+       if typ.Params != nil {
+               for _, field := range typ.Params.List {
+                       expr := field.Type
+                       if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil {
+                               pass.Reportf(expr.Pos(), "%s passes lock by value: %v", name, path)
+                       }
+               }
+       }
+
+       // Don't check typ.Results. If T has a Lock field it's OK to write
+       //     return T{}
+       // because that is returning the zero value. Leave result checking
+       // to the return statement.
+}
+
+// checkCopyLocksRange checks whether a range statement
+// might inadvertently copy a lock by checking whether
+// any of the range variables are locks.
+func checkCopyLocksRange(pass *analysis.Pass, r *ast.RangeStmt) {
+       checkCopyLocksRangeVar(pass, r.Tok, r.Key)
+       checkCopyLocksRangeVar(pass, r.Tok, r.Value)
+}
+
+func checkCopyLocksRangeVar(pass *analysis.Pass, rtok token.Token, e ast.Expr) {
+       if e == nil {
+               return
+       }
+       id, isId := e.(*ast.Ident)
+       if isId && id.Name == "_" {
+               return
+       }
+
+       var typ types.Type
+       if rtok == token.DEFINE {
+               if !isId {
+                       return
+               }
+               obj := pass.TypesInfo.Defs[id]
+               if obj == nil {
+                       return
+               }
+               typ = obj.Type()
+       } else {
+               typ = pass.TypesInfo.Types[e].Type
+       }
+
+       if typ == nil {
+               return
+       }
+       if path := lockPath(pass.Pkg, typ); path != nil {
+               pass.Reportf(e.Pos(), "range var %s copies lock: %v", analysisutil.Format(pass.Fset, e), path)
+       }
+}
+
+type typePath []types.Type
+
+// String pretty-prints a typePath.
+func (path typePath) String() string {
+       n := len(path)
+       var buf bytes.Buffer
+       for i := range path {
+               if i > 0 {
+                       fmt.Fprint(&buf, " contains ")
+               }
+               // The human-readable path is in reverse order, outermost to innermost.
+               fmt.Fprint(&buf, path[n-i-1].String())
+       }
+       return buf.String()
+}
+
+func lockPathRhs(pass *analysis.Pass, x ast.Expr) typePath {
+       if _, ok := x.(*ast.CompositeLit); ok {
+               return nil
+       }
+       if _, ok := x.(*ast.CallExpr); ok {
+               // A call may return a zero value.
+               return nil
+       }
+       if star, ok := x.(*ast.StarExpr); ok {
+               if _, ok := star.X.(*ast.CallExpr); ok {
+                       // A call may return a pointer to a zero value.
+                       return nil
+               }
+       }
+       return lockPath(pass.Pkg, pass.TypesInfo.Types[x].Type)
+}
+
+// lockPath returns a typePath describing the location of a lock value
+// contained in typ. If there is no contained lock, it returns nil.
+func lockPath(tpkg *types.Package, typ types.Type) typePath {
+       if typ == nil {
+               return nil
+       }
+
+       for {
+               atyp, ok := typ.Underlying().(*types.Array)
+               if !ok {
+                       break
+               }
+               typ = atyp.Elem()
+       }
+
+       // We're only interested in the case in which the underlying
+       // type is a struct. (Interfaces and pointers are safe to copy.)
+       styp, ok := typ.Underlying().(*types.Struct)
+       if !ok {
+               return nil
+       }
+
+       // We're looking for cases in which a pointer to this type
+       // is a sync.Locker, but a value is not. This differentiates
+       // embedded interfaces from embedded values.
+       if types.Implements(types.NewPointer(typ), lockerType) && !types.Implements(typ, lockerType) {
+               return []types.Type{typ}
+       }
+
+       // In go1.10, sync.noCopy did not implement Locker.
+       // (The Unlock method was added only in CL 121876.)
+       // TODO(adonovan): remove workaround when we drop go1.10.
+       if named, ok := typ.(*types.Named); ok &&
+               named.Obj().Name() == "noCopy" &&
+               named.Obj().Pkg().Path() == "sync" {
+               return []types.Type{typ}
+       }
+
+       nfields := styp.NumFields()
+       for i := 0; i < nfields; i++ {
+               ftyp := styp.Field(i).Type()
+               subpath := lockPath(tpkg, ftyp)
+               if subpath != nil {
+                       return append(subpath, typ)
+               }
+       }
+
+       return nil
+}
+
+var lockerType *types.Interface
+
+// Construct a sync.Locker interface type.
+func init() {
+       nullary := types.NewSignature(nil, nil, nil, false) // func()
+       methods := []*types.Func{
+               types.NewFunc(token.NoPos, nil, "Lock", nullary),
+               types.NewFunc(token.NoPos, nil, "Unlock", nullary),
+       }
+       lockerType = types.NewInterface(methods, nil).Complete()
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go
new file mode 100644 (file)
index 0000000..75655c5
--- /dev/null
@@ -0,0 +1,225 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package ctrlflow is an analysis that provides a syntactic
+// control-flow graph (CFG) for the body of a function.
+// It records whether a function cannot return.
+// By itself, it does not report any diagnostics.
+package ctrlflow
+
+import (
+       "go/ast"
+       "go/types"
+       "log"
+       "reflect"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/ast/inspector"
+       "golang.org/x/tools/go/cfg"
+       "golang.org/x/tools/go/types/typeutil"
+)
+
+var Analyzer = &analysis.Analyzer{
+       Name:       "ctrlflow",
+       Doc:        "build a control-flow graph",
+       Run:        run,
+       ResultType: reflect.TypeOf(new(CFGs)),
+       FactTypes:  []analysis.Fact{new(noReturn)},
+       Requires:   []*analysis.Analyzer{inspect.Analyzer},
+}
+
+// noReturn is a fact indicating that a function does not return.
+type noReturn struct{}
+
+func (*noReturn) AFact() {}
+
+func (*noReturn) String() string { return "noReturn" }
+
+// A CFGs holds the control-flow graphs
+// for all the functions of the current package.
+type CFGs struct {
+       defs      map[*ast.Ident]types.Object // from Pass.TypesInfo.Defs
+       funcDecls map[*types.Func]*declInfo
+       funcLits  map[*ast.FuncLit]*litInfo
+       pass      *analysis.Pass // transient; nil after construction
+}
+
+// CFGs has two maps: funcDecls for named functions and funcLits for
+// unnamed ones. Unlike funcLits, the funcDecls map is not keyed by its
+// syntax node, *ast.FuncDecl, because callMayReturn needs to do a
+// look-up by *types.Func, and you can get from an *ast.FuncDecl to a
+// *types.Func but not the other way.
+
+type declInfo struct {
+       decl     *ast.FuncDecl
+       cfg      *cfg.CFG // iff decl.Body != nil
+       started  bool     // to break cycles
+       noReturn bool
+}
+
+type litInfo struct {
+       cfg      *cfg.CFG
+       noReturn bool
+}
+
+// FuncDecl returns the control-flow graph for a named function.
+// It returns nil if decl.Body==nil.
+func (c *CFGs) FuncDecl(decl *ast.FuncDecl) *cfg.CFG {
+       if decl.Body == nil {
+               return nil
+       }
+       fn := c.defs[decl.Name].(*types.Func)
+       return c.funcDecls[fn].cfg
+}
+
+// FuncLit returns the control-flow graph for a literal function.
+func (c *CFGs) FuncLit(lit *ast.FuncLit) *cfg.CFG {
+       return c.funcLits[lit].cfg
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       // Because CFG construction consumes and produces noReturn
+       // facts, CFGs for exported FuncDecls must be built before 'run'
+       // returns; we cannot construct them lazily.
+       // (We could build CFGs for FuncLits lazily,
+       // but the benefit is marginal.)
+
+       // Pass 1. Map types.Funcs to ast.FuncDecls in this package.
+       funcDecls := make(map[*types.Func]*declInfo) // functions and methods
+       funcLits := make(map[*ast.FuncLit]*litInfo)
+
+       var decls []*types.Func // keys(funcDecls), in order
+       var lits []*ast.FuncLit // keys(funcLits), in order
+
+       nodeFilter := []ast.Node{
+               (*ast.FuncDecl)(nil),
+               (*ast.FuncLit)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               switch n := n.(type) {
+               case *ast.FuncDecl:
+                       fn := pass.TypesInfo.Defs[n.Name].(*types.Func)
+                       funcDecls[fn] = &declInfo{decl: n}
+                       decls = append(decls, fn)
+
+               case *ast.FuncLit:
+                       funcLits[n] = new(litInfo)
+                       lits = append(lits, n)
+               }
+       })
+
+       c := &CFGs{
+               defs:      pass.TypesInfo.Defs,
+               funcDecls: funcDecls,
+               funcLits:  funcLits,
+               pass:      pass,
+       }
+
+       // Pass 2. Build CFGs.
+
+       // Build CFGs for named functions.
+       // Cycles in the static call graph are broken
+       // arbitrarily but deterministically.
+       // We create noReturn facts as discovered.
+       for _, fn := range decls {
+               c.buildDecl(fn, funcDecls[fn])
+       }
+
+       // Build CFGs for literal functions.
+       // These aren't relevant to facts (since they aren't named)
+       // but are required for the CFGs.FuncLit API.
+       for _, lit := range lits {
+               li := funcLits[lit]
+               if li.cfg == nil {
+                       li.cfg = cfg.New(lit.Body, c.callMayReturn)
+                       if !hasReachableReturn(li.cfg) {
+                               li.noReturn = true
+                       }
+               }
+       }
+
+       // All CFGs are now built.
+       c.pass = nil
+
+       return c, nil
+}
+
+// di.cfg may be nil on return.
+func (c *CFGs) buildDecl(fn *types.Func, di *declInfo) {
+       // buildDecl may call itself recursively for the same function,
+       // because cfg.New is passed the callMayReturn method, which
+       // builds the CFG of the callee, leading to recursion.
+       // The buildDecl call tree thus resembles the static call graph.
+       // We mark each node when we start working on it to break cycles.
+
+       if !di.started { // break cycle
+               di.started = true
+
+               if isIntrinsicNoReturn(fn) {
+                       di.noReturn = true
+               }
+               if di.decl.Body != nil {
+                       di.cfg = cfg.New(di.decl.Body, c.callMayReturn)
+                       if !hasReachableReturn(di.cfg) {
+                               di.noReturn = true
+                       }
+               }
+               if di.noReturn {
+                       c.pass.ExportObjectFact(fn, new(noReturn))
+               }
+
+               // debugging
+               if false {
+                       log.Printf("CFG for %s:\n%s (noreturn=%t)\n", fn, di.cfg.Format(c.pass.Fset), di.noReturn)
+               }
+       }
+}
+
+// callMayReturn reports whether the called function may return.
+// It is passed to the CFG builder.
+func (c *CFGs) callMayReturn(call *ast.CallExpr) (r bool) {
+       if id, ok := call.Fun.(*ast.Ident); ok && c.pass.TypesInfo.Uses[id] == panicBuiltin {
+               return false // panic never returns
+       }
+
+       // Is this a static call?
+       fn := typeutil.StaticCallee(c.pass.TypesInfo, call)
+       if fn == nil {
+               return true // callee not statically known; be conservative
+       }
+
+       // Function or method declared in this package?
+       if di, ok := c.funcDecls[fn]; ok {
+               c.buildDecl(fn, di)
+               return !di.noReturn
+       }
+
+       // Not declared in this package.
+       // Is there a fact from another package?
+       return !c.pass.ImportObjectFact(fn, new(noReturn))
+}
+
+var panicBuiltin = types.Universe.Lookup("panic").(*types.Builtin)
+
+func hasReachableReturn(g *cfg.CFG) bool {
+       for _, b := range g.Blocks {
+               if b.Live && b.Return() != nil {
+                       return true
+               }
+       }
+       return false
+}
+
+// isIntrinsicNoReturn reports whether a function intrinsically never
+// returns because it stops execution of the calling thread.
+// It is the base case in the recursion.
+func isIntrinsicNoReturn(fn *types.Func) bool {
+       // Add functions here as the need arises, but don't allocate memory.
+       path, name := fn.Pkg().Path(), fn.Name()
+       return path == "syscall" && (name == "Exit" || name == "ExitProcess" || name == "ExitThread") ||
+               path == "runtime" && name == "Goexit"
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go
new file mode 100644 (file)
index 0000000..0cf21b8
--- /dev/null
@@ -0,0 +1,177 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package httpresponse defines an Analyzer that checks for mistakes
+// using HTTP responses.
+package httpresponse
+
+import (
+       "go/ast"
+       "go/types"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+const Doc = `check for mistakes using HTTP responses
+
+A common mistake when using the net/http package is to defer a function
+call to close the http.Response Body before checking the error that
+determines whether the response is valid:
+
+       resp, err := http.Head(url)
+       defer resp.Body.Close()
+       if err != nil {
+               log.Fatal(err)
+       }
+       // (defer statement belongs here)
+
+This checker helps uncover latent nil dereference bugs by reporting a
+diagnostic for such mistakes.`
+
+var Analyzer = &analysis.Analyzer{
+       Name:     "httpresponse",
+       Doc:      Doc,
+       Requires: []*analysis.Analyzer{inspect.Analyzer},
+       Run:      run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       // Fast path: if the package doesn't import net/http,
+       // skip the traversal.
+       if !imports(pass.Pkg, "net/http") {
+               return nil, nil
+       }
+
+       nodeFilter := []ast.Node{
+               (*ast.CallExpr)(nil),
+       }
+       inspect.WithStack(nodeFilter, func(n ast.Node, push bool, stack []ast.Node) bool {
+               if !push {
+                       return true
+               }
+               call := n.(*ast.CallExpr)
+               if !isHTTPFuncOrMethodOnClient(pass.TypesInfo, call) {
+                       return true // the function call is not related to this check.
+               }
+
+               // Find the innermost containing block, and get the list
+               // of statements starting with the one containing call.
+               stmts := restOfBlock(stack)
+               if len(stmts) < 2 {
+                       return true // the call to the http function is the last statement of the block.
+               }
+
+               asg, ok := stmts[0].(*ast.AssignStmt)
+               if !ok {
+                       return true // the first statement is not assignment.
+               }
+               resp := rootIdent(asg.Lhs[0])
+               if resp == nil {
+                       return true // could not find the http.Response in the assignment.
+               }
+
+               def, ok := stmts[1].(*ast.DeferStmt)
+               if !ok {
+                       return true // the following statement is not a defer.
+               }
+               root := rootIdent(def.Call.Fun)
+               if root == nil {
+                       return true // could not find the receiver of the defer call.
+               }
+
+               if resp.Obj == root.Obj {
+                       pass.Reportf(root.Pos(), "using %s before checking for errors", resp.Name)
+               }
+               return true
+       })
+       return nil, nil
+}
+
+// isHTTPFuncOrMethodOnClient checks whether the given call expression is on
+// either a function of the net/http package or a method of http.Client that
+// returns (*http.Response, error).
+func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool {
+       fun, _ := expr.Fun.(*ast.SelectorExpr)
+       sig, _ := info.Types[fun].Type.(*types.Signature)
+       if sig == nil {
+               return false // the call is not of the form x.f()
+       }
+
+       res := sig.Results()
+       if res.Len() != 2 {
+               return false // the function called does not return two values.
+       }
+       if ptr, ok := res.At(0).Type().(*types.Pointer); !ok || !isNamedType(ptr.Elem(), "net/http", "Response") {
+               return false // the first return type is not *http.Response.
+       }
+
+       errorType := types.Universe.Lookup("error").Type()
+       if !types.Identical(res.At(1).Type(), errorType) {
+               return false // the second return type is not error
+       }
+
+       typ := info.Types[fun.X].Type
+       if typ == nil {
+               id, ok := fun.X.(*ast.Ident)
+               return ok && id.Name == "http" // function in net/http package.
+       }
+
+       if isNamedType(typ, "net/http", "Client") {
+               return true // method on http.Client.
+       }
+       ptr, ok := typ.(*types.Pointer)
+       return ok && isNamedType(ptr.Elem(), "net/http", "Client") // method on *http.Client.
+}
+
+// restOfBlock, given a traversal stack, finds the innermost containing
+// block and returns the suffix of its statements starting with the
+// current node (the last element of stack).
+func restOfBlock(stack []ast.Node) []ast.Stmt {
+       for i := len(stack) - 1; i >= 0; i-- {
+               if b, ok := stack[i].(*ast.BlockStmt); ok {
+                       for j, v := range b.List {
+                               if v == stack[i+1] {
+                                       return b.List[j:]
+                               }
+                       }
+                       break
+               }
+       }
+       return nil
+}
+
+// rootIdent finds the root identifier x in a chain of selections x.y.z, or nil if not found.
+func rootIdent(n ast.Node) *ast.Ident {
+       switch n := n.(type) {
+       case *ast.SelectorExpr:
+               return rootIdent(n.X)
+       case *ast.Ident:
+               return n
+       default:
+               return nil
+       }
+}
+
+// isNamedType reports whether t is the named type path.name.
+func isNamedType(t types.Type, path, name string) bool {
+       n, ok := t.(*types.Named)
+       if !ok {
+               return false
+       }
+       obj := n.Obj()
+       return obj.Name() == name && obj.Pkg() != nil && obj.Pkg().Path() == path
+}
+
+func imports(pkg *types.Package, path string) bool {
+       for _, imp := range pkg.Imports() {
+               if imp.Path() == path {
+                       return true
+               }
+       }
+       return false
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go
new file mode 100644 (file)
index 0000000..bd06549
--- /dev/null
@@ -0,0 +1,45 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package inspect defines an Analyzer that provides an AST inspector
+// (golang.org/x/tools/go/ast/inspect.Inspect) for the syntax trees of a
+// package. It is only a building block for other analyzers.
+//
+// Example of use in another analysis:
+//
+//     import "golang.org/x/tools/go/analysis/passes/inspect"
+//
+//     var Analyzer = &analysis.Analyzer{
+//             ...
+//             Requires:       reflect.TypeOf(new(inspect.Analyzer)),
+//     }
+//
+//     func run(pass *analysis.Pass) (interface{}, error) {
+//             inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+//             inspect.Preorder(nil, func(n ast.Node) {
+//                     ...
+//             })
+//             return nil
+//     }
+//
+package inspect
+
+import (
+       "reflect"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+var Analyzer = &analysis.Analyzer{
+       Name:             "inspect",
+       Doc:              "optimize AST traversal for later passes",
+       Run:              run,
+       RunDespiteErrors: true,
+       ResultType:       reflect.TypeOf(new(inspector.Inspector)),
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       return inspector.New(pass.Files), nil
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go
new file mode 100644 (file)
index 0000000..13a458d
--- /dev/null
@@ -0,0 +1,106 @@
+// Package analysisutil defines various helper functions
+// used by two or more packages beneath go/analysis.
+package analysisutil
+
+import (
+       "bytes"
+       "go/ast"
+       "go/printer"
+       "go/token"
+       "go/types"
+       "io/ioutil"
+)
+
+// Format returns a string representation of the expression.
+func Format(fset *token.FileSet, x ast.Expr) string {
+       var b bytes.Buffer
+       printer.Fprint(&b, fset, x)
+       return b.String()
+}
+
+// HasSideEffects reports whether evaluation of e has side effects.
+func HasSideEffects(info *types.Info, e ast.Expr) bool {
+       safe := true
+       ast.Inspect(e, func(node ast.Node) bool {
+               switch n := node.(type) {
+               case *ast.CallExpr:
+                       typVal := info.Types[n.Fun]
+                       switch {
+                       case typVal.IsType():
+                               // Type conversion, which is safe.
+                       case typVal.IsBuiltin():
+                               // Builtin func, conservatively assumed to not
+                               // be safe for now.
+                               safe = false
+                               return false
+                       default:
+                               // A non-builtin func or method call.
+                               // Conservatively assume that all of them have
+                               // side effects for now.
+                               safe = false
+                               return false
+                       }
+               case *ast.UnaryExpr:
+                       if n.Op == token.ARROW {
+                               safe = false
+                               return false
+                       }
+               }
+               return true
+       })
+       return !safe
+}
+
+// Unparen returns e with any enclosing parentheses stripped.
+func Unparen(e ast.Expr) ast.Expr {
+       for {
+               p, ok := e.(*ast.ParenExpr)
+               if !ok {
+                       return e
+               }
+               e = p.X
+       }
+}
+
+// ReadFile reads a file and adds it to the FileSet
+// so that we can report errors against it using lineStart.
+func ReadFile(fset *token.FileSet, filename string) ([]byte, *token.File, error) {
+       content, err := ioutil.ReadFile(filename)
+       if err != nil {
+               return nil, nil, err
+       }
+       tf := fset.AddFile(filename, -1, len(content))
+       tf.SetLinesForContent(content)
+       return content, tf, nil
+}
+
+// LineStart returns the position of the start of the specified line
+// within file f, or NoPos if there is no line of that number.
+func LineStart(f *token.File, line int) token.Pos {
+       // Use binary search to find the start offset of this line.
+       //
+       // TODO(adonovan): eventually replace this function with the
+       // simpler and more efficient (*go/token.File).LineStart, added
+       // in go1.12.
+
+       min := 0        // inclusive
+       max := f.Size() // exclusive
+       for {
+               offset := (min + max) / 2
+               pos := f.Pos(offset)
+               posn := f.Position(pos)
+               if posn.Line == line {
+                       return pos - (token.Pos(posn.Column) - 1)
+               }
+
+               if min+1 >= max {
+                       return token.NoPos
+               }
+
+               if posn.Line < line {
+                       min = offset
+               } else {
+                       max = offset
+               }
+       }
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go
new file mode 100644 (file)
index 0000000..da07140
--- /dev/null
@@ -0,0 +1,130 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package loopclosure defines an Analyzer that checks for references to
+// enclosing loop variables from within nested functions.
+package loopclosure
+
+import (
+       "go/ast"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+// TODO(adonovan): also report an error for the following structure,
+// which is often used to ensure that deferred calls do not accumulate
+// in a loop:
+//
+//     for i, x := range c {
+//             func() {
+//                     ...reference to i or x...
+//             }()
+//     }
+
+const Doc = `check references to loop variables from within nested functions
+
+This analyzer checks for references to loop variables from within a
+function literal inside the loop body. It checks only instances where
+the function literal is called in a defer or go statement that is the
+last statement in the loop body, as otherwise we would need whole
+program analysis.
+
+For example:
+
+       for i, v := range s {
+               go func() {
+                       println(i, v) // not what you might expect
+               }()
+       }
+
+See: https://golang.org/doc/go_faq.html#closures_and_goroutines`
+
+var Analyzer = &analysis.Analyzer{
+       Name:     "loopclosure",
+       Doc:      Doc,
+       Requires: []*analysis.Analyzer{inspect.Analyzer},
+       Run:      run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.RangeStmt)(nil),
+               (*ast.ForStmt)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               // Find the variables updated by the loop statement.
+               var vars []*ast.Ident
+               addVar := func(expr ast.Expr) {
+                       if id, ok := expr.(*ast.Ident); ok {
+                               vars = append(vars, id)
+                       }
+               }
+               var body *ast.BlockStmt
+               switch n := n.(type) {
+               case *ast.RangeStmt:
+                       body = n.Body
+                       addVar(n.Key)
+                       addVar(n.Value)
+               case *ast.ForStmt:
+                       body = n.Body
+                       switch post := n.Post.(type) {
+                       case *ast.AssignStmt:
+                               // e.g. for p = head; p != nil; p = p.next
+                               for _, lhs := range post.Lhs {
+                                       addVar(lhs)
+                               }
+                       case *ast.IncDecStmt:
+                               // e.g. for i := 0; i < n; i++
+                               addVar(post.X)
+                       }
+               }
+               if vars == nil {
+                       return
+               }
+
+               // Inspect a go or defer statement
+               // if it's the last one in the loop body.
+               // (We give up if there are following statements,
+               // because it's hard to prove go isn't followed by wait,
+               // or defer by return.)
+               if len(body.List) == 0 {
+                       return
+               }
+               var last *ast.CallExpr
+               switch s := body.List[len(body.List)-1].(type) {
+               case *ast.GoStmt:
+                       last = s.Call
+               case *ast.DeferStmt:
+                       last = s.Call
+               default:
+                       return
+               }
+               lit, ok := last.Fun.(*ast.FuncLit)
+               if !ok {
+                       return
+               }
+               ast.Inspect(lit.Body, func(n ast.Node) bool {
+                       id, ok := n.(*ast.Ident)
+                       if !ok || id.Obj == nil {
+                               return true
+                       }
+                       if pass.TypesInfo.Types[id].Type == nil {
+                               // Not referring to a variable (e.g. struct field name)
+                               return true
+                       }
+                       for _, v := range vars {
+                               if v.Obj == id.Obj {
+                                       pass.Reportf(id.Pos(), "loop variable %s captured by func literal",
+                                               id.Name)
+                               }
+                       }
+                       return true
+               })
+       })
+       return nil, nil
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go
new file mode 100644 (file)
index 0000000..fcf9f55
--- /dev/null
@@ -0,0 +1,304 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package lostcancel defines an Analyzer that checks for failure to
+// call a context cancelation function.
+package lostcancel
+
+import (
+       "fmt"
+       "go/ast"
+       "go/types"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/ctrlflow"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/ast/inspector"
+       "golang.org/x/tools/go/cfg"
+)
+
+const Doc = `check cancel func returned by context.WithCancel is called
+
+The cancelation function returned by context.WithCancel, WithTimeout,
+and WithDeadline must be called or the new context will remain live
+until its parent context is cancelled.
+(The background context is never cancelled.)`
+
+var Analyzer = &analysis.Analyzer{
+       Name: "lostcancel",
+       Doc:  Doc,
+       Run:  run,
+       Requires: []*analysis.Analyzer{
+               inspect.Analyzer,
+               ctrlflow.Analyzer,
+       },
+}
+
+const debug = false
+
+var contextPackage = "context"
+
+// checkLostCancel reports a failure to the call the cancel function
+// returned by context.WithCancel, either because the variable was
+// assigned to the blank identifier, or because there exists a
+// control-flow path from the call to a return statement and that path
+// does not "use" the cancel function.  Any reference to the variable
+// counts as a use, even within a nested function literal.
+//
+// checkLostCancel analyzes a single named or literal function.
+func run(pass *analysis.Pass) (interface{}, error) {
+       // Fast path: bypass check if file doesn't use context.WithCancel.
+       if !hasImport(pass.Pkg, contextPackage) {
+               return nil, nil
+       }
+
+       // Call runFunc for each Func{Decl,Lit}.
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+       nodeTypes := []ast.Node{
+               (*ast.FuncLit)(nil),
+               (*ast.FuncDecl)(nil),
+       }
+       inspect.Preorder(nodeTypes, func(n ast.Node) {
+               runFunc(pass, n)
+       })
+       return nil, nil
+}
+
+func runFunc(pass *analysis.Pass, node ast.Node) {
+       // Maps each cancel variable to its defining ValueSpec/AssignStmt.
+       cancelvars := make(map[*types.Var]ast.Node)
+
+       // TODO(adonovan): opt: refactor to make a single pass
+       // over the AST using inspect.WithStack and node types
+       // {FuncDecl,FuncLit,CallExpr,SelectorExpr}.
+
+       // Find the set of cancel vars to analyze.
+       stack := make([]ast.Node, 0, 32)
+       ast.Inspect(node, func(n ast.Node) bool {
+               switch n.(type) {
+               case *ast.FuncLit:
+                       if len(stack) > 0 {
+                               return false // don't stray into nested functions
+                       }
+               case nil:
+                       stack = stack[:len(stack)-1] // pop
+                       return true
+               }
+               stack = append(stack, n) // push
+
+               // Look for [{AssignStmt,ValueSpec} CallExpr SelectorExpr]:
+               //
+               //   ctx, cancel    := context.WithCancel(...)
+               //   ctx, cancel     = context.WithCancel(...)
+               //   var ctx, cancel = context.WithCancel(...)
+               //
+               if isContextWithCancel(pass.TypesInfo, n) && isCall(stack[len(stack)-2]) {
+                       var id *ast.Ident // id of cancel var
+                       stmt := stack[len(stack)-3]
+                       switch stmt := stmt.(type) {
+                       case *ast.ValueSpec:
+                               if len(stmt.Names) > 1 {
+                                       id = stmt.Names[1]
+                               }
+                       case *ast.AssignStmt:
+                               if len(stmt.Lhs) > 1 {
+                                       id, _ = stmt.Lhs[1].(*ast.Ident)
+                               }
+                       }
+                       if id != nil {
+                               if id.Name == "_" {
+                                       pass.Reportf(id.Pos(),
+                                               "the cancel function returned by context.%s should be called, not discarded, to avoid a context leak",
+                                               n.(*ast.SelectorExpr).Sel.Name)
+                               } else if v, ok := pass.TypesInfo.Uses[id].(*types.Var); ok {
+                                       cancelvars[v] = stmt
+                               } else if v, ok := pass.TypesInfo.Defs[id].(*types.Var); ok {
+                                       cancelvars[v] = stmt
+                               }
+                       }
+               }
+
+               return true
+       })
+
+       if len(cancelvars) == 0 {
+               return // no need to inspect CFG
+       }
+
+       // Obtain the CFG.
+       cfgs := pass.ResultOf[ctrlflow.Analyzer].(*ctrlflow.CFGs)
+       var g *cfg.CFG
+       var sig *types.Signature
+       switch node := node.(type) {
+       case *ast.FuncDecl:
+               g = cfgs.FuncDecl(node)
+               sig, _ = pass.TypesInfo.Defs[node.Name].Type().(*types.Signature)
+       case *ast.FuncLit:
+               g = cfgs.FuncLit(node)
+               sig, _ = pass.TypesInfo.Types[node.Type].Type.(*types.Signature)
+       }
+       if sig == nil {
+               return // missing type information
+       }
+
+       // Print CFG.
+       if debug {
+               fmt.Println(g.Format(pass.Fset))
+       }
+
+       // Examine the CFG for each variable in turn.
+       // (It would be more efficient to analyze all cancelvars in a
+       // single pass over the AST, but seldom is there more than one.)
+       for v, stmt := range cancelvars {
+               if ret := lostCancelPath(pass, g, v, stmt, sig); ret != nil {
+                       lineno := pass.Fset.Position(stmt.Pos()).Line
+                       pass.Reportf(stmt.Pos(), "the %s function is not used on all paths (possible context leak)", v.Name())
+                       pass.Reportf(ret.Pos(), "this return statement may be reached without using the %s var defined on line %d", v.Name(), lineno)
+               }
+       }
+}
+
+func isCall(n ast.Node) bool { _, ok := n.(*ast.CallExpr); return ok }
+
+func hasImport(pkg *types.Package, path string) bool {
+       for _, imp := range pkg.Imports() {
+               if imp.Path() == path {
+                       return true
+               }
+       }
+       return false
+}
+
+// isContextWithCancel reports whether n is one of the qualified identifiers
+// context.With{Cancel,Timeout,Deadline}.
+func isContextWithCancel(info *types.Info, n ast.Node) bool {
+       if sel, ok := n.(*ast.SelectorExpr); ok {
+               switch sel.Sel.Name {
+               case "WithCancel", "WithTimeout", "WithDeadline":
+                       if x, ok := sel.X.(*ast.Ident); ok {
+                               if pkgname, ok := info.Uses[x].(*types.PkgName); ok {
+                                       return pkgname.Imported().Path() == contextPackage
+                               }
+                               // Import failed, so we can't check package path.
+                               // Just check the local package name (heuristic).
+                               return x.Name == "context"
+                       }
+               }
+       }
+       return false
+}
+
+// lostCancelPath finds a path through the CFG, from stmt (which defines
+// the 'cancel' variable v) to a return statement, that doesn't "use" v.
+// If it finds one, it returns the return statement (which may be synthetic).
+// sig is the function's type, if known.
+func lostCancelPath(pass *analysis.Pass, g *cfg.CFG, v *types.Var, stmt ast.Node, sig *types.Signature) *ast.ReturnStmt {
+       vIsNamedResult := sig != nil && tupleContains(sig.Results(), v)
+
+       // uses reports whether stmts contain a "use" of variable v.
+       uses := func(pass *analysis.Pass, v *types.Var, stmts []ast.Node) bool {
+               found := false
+               for _, stmt := range stmts {
+                       ast.Inspect(stmt, func(n ast.Node) bool {
+                               switch n := n.(type) {
+                               case *ast.Ident:
+                                       if pass.TypesInfo.Uses[n] == v {
+                                               found = true
+                                       }
+                               case *ast.ReturnStmt:
+                                       // A naked return statement counts as a use
+                                       // of the named result variables.
+                                       if n.Results == nil && vIsNamedResult {
+                                               found = true
+                                       }
+                               }
+                               return !found
+                       })
+               }
+               return found
+       }
+
+       // blockUses computes "uses" for each block, caching the result.
+       memo := make(map[*cfg.Block]bool)
+       blockUses := func(pass *analysis.Pass, v *types.Var, b *cfg.Block) bool {
+               res, ok := memo[b]
+               if !ok {
+                       res = uses(pass, v, b.Nodes)
+                       memo[b] = res
+               }
+               return res
+       }
+
+       // Find the var's defining block in the CFG,
+       // plus the rest of the statements of that block.
+       var defblock *cfg.Block
+       var rest []ast.Node
+outer:
+       for _, b := range g.Blocks {
+               for i, n := range b.Nodes {
+                       if n == stmt {
+                               defblock = b
+                               rest = b.Nodes[i+1:]
+                               break outer
+                       }
+               }
+       }
+       if defblock == nil {
+               panic("internal error: can't find defining block for cancel var")
+       }
+
+       // Is v "used" in the remainder of its defining block?
+       if uses(pass, v, rest) {
+               return nil
+       }
+
+       // Does the defining block return without using v?
+       if ret := defblock.Return(); ret != nil {
+               return ret
+       }
+
+       // Search the CFG depth-first for a path, from defblock to a
+       // return block, in which v is never "used".
+       seen := make(map[*cfg.Block]bool)
+       var search func(blocks []*cfg.Block) *ast.ReturnStmt
+       search = func(blocks []*cfg.Block) *ast.ReturnStmt {
+               for _, b := range blocks {
+                       if !seen[b] {
+                               seen[b] = true
+
+                               // Prune the search if the block uses v.
+                               if blockUses(pass, v, b) {
+                                       continue
+                               }
+
+                               // Found path to return statement?
+                               if ret := b.Return(); ret != nil {
+                                       if debug {
+                                               fmt.Printf("found path to return in block %s\n", b)
+                                       }
+                                       return ret // found
+                               }
+
+                               // Recur
+                               if ret := search(b.Succs); ret != nil {
+                                       if debug {
+                                               fmt.Printf(" from block %s\n", b)
+                                       }
+                                       return ret
+                               }
+                       }
+               }
+               return nil
+       }
+       return search(defblock.Succs)
+}
+
+func tupleContains(tuple *types.Tuple, v *types.Var) bool {
+       for i := 0; i < tuple.Len(); i++ {
+               if tuple.At(i) == v {
+                       return true
+               }
+       }
+       return false
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go
new file mode 100644 (file)
index 0000000..9c2d4df
--- /dev/null
@@ -0,0 +1,74 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package nilfunc defines an Analyzer that checks for useless
+// comparisons against nil.
+package nilfunc
+
+import (
+       "go/ast"
+       "go/token"
+       "go/types"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+const Doc = `check for useless comparisons between functions and nil
+
+A useless comparison is one like f == nil as opposed to f() == nil.`
+
+var Analyzer = &analysis.Analyzer{
+       Name:     "nilfunc",
+       Doc:      Doc,
+       Requires: []*analysis.Analyzer{inspect.Analyzer},
+       Run:      run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.BinaryExpr)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               e := n.(*ast.BinaryExpr)
+
+               // Only want == or != comparisons.
+               if e.Op != token.EQL && e.Op != token.NEQ {
+                       return
+               }
+
+               // Only want comparisons with a nil identifier on one side.
+               var e2 ast.Expr
+               switch {
+               case pass.TypesInfo.Types[e.X].IsNil():
+                       e2 = e.Y
+               case pass.TypesInfo.Types[e.Y].IsNil():
+                       e2 = e.X
+               default:
+                       return
+               }
+
+               // Only want identifiers or selector expressions.
+               var obj types.Object
+               switch v := e2.(type) {
+               case *ast.Ident:
+                       obj = pass.TypesInfo.Uses[v]
+               case *ast.SelectorExpr:
+                       obj = pass.TypesInfo.Uses[v.Sel]
+               default:
+                       return
+               }
+
+               // Only want functions.
+               if _, ok := obj.(*types.Func); !ok {
+                       return
+               }
+
+               pass.Reportf(e.Pos(), "comparison of function %v %v nil is always %v", obj.Name(), e.Op, e.Op == token.NEQ)
+       })
+       return nil, nil
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go
new file mode 100644 (file)
index 0000000..e053086
--- /dev/null
@@ -0,0 +1,127 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// The pkgfact package is a demonstration and test of the package fact
+// mechanism.
+//
+// The output of the pkgfact analysis is a set of key/values pairs
+// gathered from the analyzed package and its imported dependencies.
+// Each key/value pair comes from a top-level constant declaration
+// whose name starts and ends with "_".  For example:
+//
+//      package p
+//
+//     const _greeting_  = "hello"
+//     const _audience_  = "world"
+//
+// the pkgfact analysis output for package p would be:
+//
+//   {"greeting": "hello", "audience": "world"}.
+//
+// In addition, the analysis reports a diagnostic at each import
+// showing which key/value pairs it contributes.
+package pkgfact
+
+import (
+       "fmt"
+       "go/ast"
+       "go/token"
+       "go/types"
+       "reflect"
+       "sort"
+       "strings"
+
+       "golang.org/x/tools/go/analysis"
+)
+
+var Analyzer = &analysis.Analyzer{
+       Name:       "pkgfact",
+       Doc:        "gather name/value pairs from constant declarations",
+       Run:        run,
+       FactTypes:  []analysis.Fact{new(pairsFact)},
+       ResultType: reflect.TypeOf(map[string]string{}),
+}
+
+// A pairsFact is a package-level fact that records
+// an set of key=value strings accumulated from constant
+// declarations in this package and its dependencies.
+// Elements are ordered by keys, which are unique.
+type pairsFact []string
+
+func (f *pairsFact) AFact()         {}
+func (f *pairsFact) String() string { return "pairs(" + strings.Join(*f, ", ") + ")" }
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       result := make(map[string]string)
+
+       // At each import, print the fact from the imported
+       // package and accumulate its information into the result.
+       // (Warning: accumulation leads to quadratic growth of work.)
+       doImport := func(spec *ast.ImportSpec) {
+               pkg := imported(pass.TypesInfo, spec)
+               var fact pairsFact
+               if pass.ImportPackageFact(pkg, &fact) {
+                       for _, pair := range fact {
+                               eq := strings.IndexByte(pair, '=')
+                               result[pair[:eq]] = pair[1+eq:]
+                       }
+                       pass.Reportf(spec.Pos(), "%s", strings.Join(fact, " "))
+               }
+       }
+
+       // At each "const _name_ = value", add a fact into env.
+       doConst := func(spec *ast.ValueSpec) {
+               if len(spec.Names) == len(spec.Values) {
+                       for i := range spec.Names {
+                               name := spec.Names[i].Name
+                               if strings.HasPrefix(name, "_") && strings.HasSuffix(name, "_") {
+
+                                       if key := strings.Trim(name, "_"); key != "" {
+                                               value := pass.TypesInfo.Types[spec.Values[i]].Value.String()
+                                               result[key] = value
+                                       }
+                               }
+                       }
+               }
+       }
+
+       for _, f := range pass.Files {
+               for _, decl := range f.Decls {
+                       if decl, ok := decl.(*ast.GenDecl); ok {
+                               for _, spec := range decl.Specs {
+                                       switch decl.Tok {
+                                       case token.IMPORT:
+                                               doImport(spec.(*ast.ImportSpec))
+                                       case token.CONST:
+                                               doConst(spec.(*ast.ValueSpec))
+                                       }
+                               }
+                       }
+               }
+       }
+
+       // Sort/deduplicate the result and save it as a package fact.
+       keys := make([]string, 0, len(result))
+       for key := range result {
+               keys = append(keys, key)
+       }
+       sort.Strings(keys)
+       var fact pairsFact
+       for _, key := range keys {
+               fact = append(fact, fmt.Sprintf("%s=%s", key, result[key]))
+       }
+       if len(fact) > 0 {
+               pass.ExportPackageFact(&fact)
+       }
+
+       return result, nil
+}
+
+func imported(info *types.Info, spec *ast.ImportSpec) *types.Package {
+       obj, ok := info.Implicits[spec]
+       if !ok {
+               obj = info.Defs[spec.Name] // renaming import
+       }
+       return obj.(*types.PkgName).Imported()
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go
new file mode 100644 (file)
index 0000000..23f634f
--- /dev/null
@@ -0,0 +1,964 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file contains the printf-checker.
+
+package printf
+
+import (
+       "bytes"
+       "fmt"
+       "go/ast"
+       "go/constant"
+       "go/token"
+       "go/types"
+       "regexp"
+       "sort"
+       "strconv"
+       "strings"
+       "unicode/utf8"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+       "golang.org/x/tools/go/ast/inspector"
+       "golang.org/x/tools/go/types/typeutil"
+)
+
+func init() {
+       Analyzer.Flags.Var(isPrint, "funcs", "comma-separated list of print function names to check")
+}
+
+var Analyzer = &analysis.Analyzer{
+       Name:      "printf",
+       Doc:       "check printf-like invocations",
+       Requires:  []*analysis.Analyzer{inspect.Analyzer},
+       Run:       run,
+       FactTypes: []analysis.Fact{new(isWrapper)},
+}
+
+const doc = `check consistency of Printf format strings and arguments
+
+The check applies to known functions (for example, those in package fmt)
+as well as any detected wrappers of known functions.
+
+A function that wants to avail itself of printf checking but does not
+get found by this analyzer's heuristics (for example, due to use of
+dynamic calls) can insert a bogus call:
+
+       if false {
+               fmt.Sprintf(format, args...) // enable printf checking
+       }
+
+The -funcs flag specifies a comma-separated list of names of additional
+known formatting functions or methods. If the name contains a period,
+it must denote a specific function using one of the following forms:
+
+       dir/pkg.Function
+       dir/pkg.Type.Method
+       (*dir/pkg.Type).Method
+
+Otherwise the name is interpreted as a case-insensitive unqualified
+identifier such as "errorf". Either way, if a listed name ends in f, the
+function is assumed to be Printf-like, taking a format string before the
+argument list. Otherwise it is assumed to be Print-like, taking a list
+of arguments with no format string.
+`
+
+// isWrapper is a fact indicating that a function is a print or printf wrapper.
+type isWrapper struct{ Printf bool }
+
+func (f *isWrapper) AFact() {}
+
+func (f *isWrapper) String() string {
+       if f.Printf {
+               return "printfWrapper"
+       } else {
+               return "printWrapper"
+       }
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       findPrintfLike(pass)
+       checkCall(pass)
+       return nil, nil
+}
+
+type printfWrapper struct {
+       obj     *types.Func
+       fdecl   *ast.FuncDecl
+       format  *types.Var
+       args    *types.Var
+       callers []printfCaller
+       failed  bool // if true, not a printf wrapper
+}
+
+type printfCaller struct {
+       w    *printfWrapper
+       call *ast.CallExpr
+}
+
+// maybePrintfWrapper decides whether decl (a declared function) may be a wrapper
+// around a fmt.Printf or fmt.Print function. If so it returns a printfWrapper
+// function describing the declaration. Later processing will analyze the
+// graph of potential printf wrappers to pick out the ones that are true wrappers.
+// A function may be a Printf or Print wrapper if its last argument is ...interface{}.
+// If the next-to-last argument is a string, then this may be a Printf wrapper.
+// Otherwise it may be a Print wrapper.
+func maybePrintfWrapper(info *types.Info, decl ast.Decl) *printfWrapper {
+       // Look for functions with final argument type ...interface{}.
+       fdecl, ok := decl.(*ast.FuncDecl)
+       if !ok || fdecl.Body == nil {
+               return nil
+       }
+       fn := info.Defs[fdecl.Name].(*types.Func)
+
+       sig := fn.Type().(*types.Signature)
+       if !sig.Variadic() {
+               return nil // not variadic
+       }
+
+       params := sig.Params()
+       nparams := params.Len() // variadic => nonzero
+
+       args := params.At(nparams - 1)
+       iface, ok := args.Type().(*types.Slice).Elem().(*types.Interface)
+       if !ok || !iface.Empty() {
+               return nil // final (args) param is not ...interface{}
+       }
+
+       // Is second last param 'format string'?
+       var format *types.Var
+       if nparams >= 2 {
+               if p := params.At(nparams - 2); p.Type() == types.Typ[types.String] {
+                       format = p
+               }
+       }
+
+       return &printfWrapper{
+               obj:    fn,
+               fdecl:  fdecl,
+               format: format,
+               args:   args,
+       }
+}
+
+// findPrintfLike scans the entire package to find printf-like functions.
+func findPrintfLike(pass *analysis.Pass) (interface{}, error) {
+       // Gather potential wrappers and call graph between them.
+       byObj := make(map[*types.Func]*printfWrapper)
+       var wrappers []*printfWrapper
+       for _, file := range pass.Files {
+               for _, decl := range file.Decls {
+                       w := maybePrintfWrapper(pass.TypesInfo, decl)
+                       if w == nil {
+                               continue
+                       }
+                       byObj[w.obj] = w
+                       wrappers = append(wrappers, w)
+               }
+       }
+
+       // Walk the graph to figure out which are really printf wrappers.
+       for _, w := range wrappers {
+               // Scan function for calls that could be to other printf-like functions.
+               ast.Inspect(w.fdecl.Body, func(n ast.Node) bool {
+                       if w.failed {
+                               return false
+                       }
+
+                       // TODO: Relax these checks; issue 26555.
+                       if assign, ok := n.(*ast.AssignStmt); ok {
+                               for _, lhs := range assign.Lhs {
+                                       if match(pass.TypesInfo, lhs, w.format) ||
+                                               match(pass.TypesInfo, lhs, w.args) {
+                                               // Modifies the format
+                                               // string or args in
+                                               // some way, so not a
+                                               // simple wrapper.
+                                               w.failed = true
+                                               return false
+                                       }
+                               }
+                       }
+                       if un, ok := n.(*ast.UnaryExpr); ok && un.Op == token.AND {
+                               if match(pass.TypesInfo, un.X, w.format) ||
+                                       match(pass.TypesInfo, un.X, w.args) {
+                                       // Taking the address of the
+                                       // format string or args,
+                                       // so not a simple wrapper.
+                                       w.failed = true
+                                       return false
+                               }
+                       }
+
+                       call, ok := n.(*ast.CallExpr)
+                       if !ok || len(call.Args) == 0 || !match(pass.TypesInfo, call.Args[len(call.Args)-1], w.args) {
+                               return true
+                       }
+
+                       fn, kind := printfNameAndKind(pass, call)
+                       if kind != 0 {
+                               checkPrintfFwd(pass, w, call, kind)
+                               return true
+                       }
+
+                       // If the call is to another function in this package,
+                       // maybe we will find out it is printf-like later.
+                       // Remember this call for later checking.
+                       if fn != nil && fn.Pkg() == pass.Pkg && byObj[fn] != nil {
+                               callee := byObj[fn]
+                               callee.callers = append(callee.callers, printfCaller{w, call})
+                       }
+
+                       return true
+               })
+       }
+       return nil, nil
+}
+
+func match(info *types.Info, arg ast.Expr, param *types.Var) bool {
+       id, ok := arg.(*ast.Ident)
+       return ok && info.ObjectOf(id) == param
+}
+
+const (
+       kindPrintf = 1
+       kindPrint  = 2
+)
+
+// checkPrintfFwd checks that a printf-forwarding wrapper is forwarding correctly.
+// It diagnoses writing fmt.Printf(format, args) instead of fmt.Printf(format, args...).
+func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, kind int) {
+       matched := kind == kindPrint ||
+               kind == kindPrintf && len(call.Args) >= 2 && match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format)
+       if !matched {
+               return
+       }
+
+       if !call.Ellipsis.IsValid() {
+               typ, ok := pass.TypesInfo.Types[call.Fun].Type.(*types.Signature)
+               if !ok {
+                       return
+               }
+               if len(call.Args) > typ.Params().Len() {
+                       // If we're passing more arguments than what the
+                       // print/printf function can take, adding an ellipsis
+                       // would break the program. For example:
+                       //
+                       //   func foo(arg1 string, arg2 ...interface{} {
+                       //       fmt.Printf("%s %v", arg1, arg2)
+                       //   }
+                       return
+               }
+               desc := "printf"
+               if kind == kindPrint {
+                       desc = "print"
+               }
+               pass.Reportf(call.Pos(), "missing ... in args forwarded to %s-like function", desc)
+               return
+       }
+       fn := w.obj
+       var fact isWrapper
+       if !pass.ImportObjectFact(fn, &fact) {
+               fact.Printf = kind == kindPrintf
+               pass.ExportObjectFact(fn, &fact)
+               for _, caller := range w.callers {
+                       checkPrintfFwd(pass, caller.w, caller.call, kind)
+               }
+       }
+}
+
+// isPrint records the print functions.
+// If a key ends in 'f' then it is assumed to be a formatted print.
+//
+// Keys are either values returned by (*types.Func).FullName,
+// or case-insensitive identifiers such as "errorf".
+//
+// The -funcs flag adds to this set.
+var isPrint = stringSet{
+       "fmt.Errorf":   true,
+       "fmt.Fprint":   true,
+       "fmt.Fprintf":  true,
+       "fmt.Fprintln": true,
+       "fmt.Print":    true, // technically these three
+       "fmt.Printf":   true, // are redundant because they
+       "fmt.Println":  true, // forward to Fprint{,f,ln}
+       "fmt.Sprint":   true,
+       "fmt.Sprintf":  true,
+       "fmt.Sprintln": true,
+
+       // *testing.T and B are detected by induction, but testing.TB is
+       // an interface and the inference can't follow dynamic calls.
+       "(testing.TB).Error":  true,
+       "(testing.TB).Errorf": true,
+       "(testing.TB).Fatal":  true,
+       "(testing.TB).Fatalf": true,
+       "(testing.TB).Log":    true,
+       "(testing.TB).Logf":   true,
+       "(testing.TB).Skip":   true,
+       "(testing.TB).Skipf":  true,
+}
+
+// formatString returns the format string argument and its index within
+// the given printf-like call expression.
+//
+// The last parameter before variadic arguments is assumed to be
+// a format string.
+//
+// The first string literal or string constant is assumed to be a format string
+// if the call's signature cannot be determined.
+//
+// If it cannot find any format string parameter, it returns ("", -1).
+func formatString(pass *analysis.Pass, call *ast.CallExpr) (format string, idx int) {
+       typ := pass.TypesInfo.Types[call.Fun].Type
+       if typ != nil {
+               if sig, ok := typ.(*types.Signature); ok {
+                       if !sig.Variadic() {
+                               // Skip checking non-variadic functions.
+                               return "", -1
+                       }
+                       idx := sig.Params().Len() - 2
+                       if idx < 0 {
+                               // Skip checking variadic functions without
+                               // fixed arguments.
+                               return "", -1
+                       }
+                       s, ok := stringConstantArg(pass, call, idx)
+                       if !ok {
+                               // The last argument before variadic args isn't a string.
+                               return "", -1
+                       }
+                       return s, idx
+               }
+       }
+
+       // Cannot determine call's signature. Fall back to scanning for the first
+       // string constant in the call.
+       for idx := range call.Args {
+               if s, ok := stringConstantArg(pass, call, idx); ok {
+                       return s, idx
+               }
+               if pass.TypesInfo.Types[call.Args[idx]].Type == types.Typ[types.String] {
+                       // Skip checking a call with a non-constant format
+                       // string argument, since its contents are unavailable
+                       // for validation.
+                       return "", -1
+               }
+       }
+       return "", -1
+}
+
+// stringConstantArg returns call's string constant argument at the index idx.
+//
+// ("", false) is returned if call's argument at the index idx isn't a string
+// constant.
+func stringConstantArg(pass *analysis.Pass, call *ast.CallExpr, idx int) (string, bool) {
+       if idx >= len(call.Args) {
+               return "", false
+       }
+       arg := call.Args[idx]
+       lit := pass.TypesInfo.Types[arg].Value
+       if lit != nil && lit.Kind() == constant.String {
+               return constant.StringVal(lit), true
+       }
+       return "", false
+}
+
+// checkCall triggers the print-specific checks if the call invokes a print function.
+func checkCall(pass *analysis.Pass) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+       nodeFilter := []ast.Node{
+               (*ast.CallExpr)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               call := n.(*ast.CallExpr)
+               fn, kind := printfNameAndKind(pass, call)
+               switch kind {
+               case kindPrintf:
+                       checkPrintf(pass, call, fn)
+               case kindPrint:
+                       checkPrint(pass, call, fn)
+               }
+       })
+}
+
+func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func, kind int) {
+       fn, _ = typeutil.Callee(pass.TypesInfo, call).(*types.Func)
+       if fn == nil {
+               return nil, 0
+       }
+
+       var fact isWrapper
+       if pass.ImportObjectFact(fn, &fact) {
+               if fact.Printf {
+                       return fn, kindPrintf
+               } else {
+                       return fn, kindPrint
+               }
+       }
+
+       _, ok := isPrint[fn.FullName()]
+       if !ok {
+               // Next look up just "printf", for use with -printf.funcs.
+               _, ok = isPrint[strings.ToLower(fn.Name())]
+       }
+       if ok {
+               if strings.HasSuffix(fn.Name(), "f") {
+                       kind = kindPrintf
+               } else {
+                       kind = kindPrint
+               }
+       }
+       return fn, kind
+}
+
+// isFormatter reports whether t satisfies fmt.Formatter.
+// Unlike fmt.Stringer, it's impossible to satisfy fmt.Formatter without importing fmt.
+func isFormatter(pass *analysis.Pass, t types.Type) bool {
+       for _, imp := range pass.Pkg.Imports() {
+               if imp.Path() == "fmt" {
+                       formatter := imp.Scope().Lookup("Formatter").Type().Underlying().(*types.Interface)
+                       return types.Implements(t, formatter)
+               }
+       }
+       return false
+}
+
+// formatState holds the parsed representation of a printf directive such as "%3.*[4]d".
+// It is constructed by parsePrintfVerb.
+type formatState struct {
+       verb     rune   // the format verb: 'd' for "%d"
+       format   string // the full format directive from % through verb, "%.3d".
+       name     string // Printf, Sprintf etc.
+       flags    []byte // the list of # + etc.
+       argNums  []int  // the successive argument numbers that are consumed, adjusted to refer to actual arg in call
+       firstArg int    // Index of first argument after the format in the Printf call.
+       // Used only during parse.
+       pass         *analysis.Pass
+       call         *ast.CallExpr
+       argNum       int  // Which argument we're expecting to format now.
+       hasIndex     bool // Whether the argument is indexed.
+       indexPending bool // Whether we have an indexed argument that has not resolved.
+       nbytes       int  // number of bytes of the format string consumed.
+}
+
+// checkPrintf checks a call to a formatted print routine such as Printf.
+func checkPrintf(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) {
+       format, idx := formatString(pass, call)
+       if idx < 0 {
+               if false {
+                       pass.Reportf(call.Lparen, "can't check non-constant format in call to %s", fn.Name())
+               }
+               return
+       }
+
+       firstArg := idx + 1 // Arguments are immediately after format string.
+       if !strings.Contains(format, "%") {
+               if len(call.Args) > firstArg {
+                       pass.Reportf(call.Lparen, "%s call has arguments but no formatting directives", fn.Name())
+               }
+               return
+       }
+       // Hard part: check formats against args.
+       argNum := firstArg
+       maxArgNum := firstArg
+       anyIndex := false
+       for i, w := 0, 0; i < len(format); i += w {
+               w = 1
+               if format[i] != '%' {
+                       continue
+               }
+               state := parsePrintfVerb(pass, call, fn.Name(), format[i:], firstArg, argNum)
+               if state == nil {
+                       return
+               }
+               w = len(state.format)
+               if !okPrintfArg(pass, call, state) { // One error per format is enough.
+                       return
+               }
+               if state.hasIndex {
+                       anyIndex = true
+               }
+               if len(state.argNums) > 0 {
+                       // Continue with the next sequential argument.
+                       argNum = state.argNums[len(state.argNums)-1] + 1
+               }
+               for _, n := range state.argNums {
+                       if n >= maxArgNum {
+                               maxArgNum = n + 1
+                       }
+               }
+       }
+       // Dotdotdot is hard.
+       if call.Ellipsis.IsValid() && maxArgNum >= len(call.Args)-1 {
+               return
+       }
+       // If any formats are indexed, extra arguments are ignored.
+       if anyIndex {
+               return
+       }
+       // There should be no leftover arguments.
+       if maxArgNum != len(call.Args) {
+               expect := maxArgNum - firstArg
+               numArgs := len(call.Args) - firstArg
+               pass.Reportf(call.Pos(), "%s call needs %v but has %v", fn.Name(), count(expect, "arg"), count(numArgs, "arg"))
+       }
+}
+
+// parseFlags accepts any printf flags.
+func (s *formatState) parseFlags() {
+       for s.nbytes < len(s.format) {
+               switch c := s.format[s.nbytes]; c {
+               case '#', '0', '+', '-', ' ':
+                       s.flags = append(s.flags, c)
+                       s.nbytes++
+               default:
+                       return
+               }
+       }
+}
+
+// scanNum advances through a decimal number if present.
+func (s *formatState) scanNum() {
+       for ; s.nbytes < len(s.format); s.nbytes++ {
+               c := s.format[s.nbytes]
+               if c < '0' || '9' < c {
+                       return
+               }
+       }
+}
+
+// parseIndex scans an index expression. It returns false if there is a syntax error.
+func (s *formatState) parseIndex() bool {
+       if s.nbytes == len(s.format) || s.format[s.nbytes] != '[' {
+               return true
+       }
+       // Argument index present.
+       s.nbytes++ // skip '['
+       start := s.nbytes
+       s.scanNum()
+       ok := true
+       if s.nbytes == len(s.format) || s.nbytes == start || s.format[s.nbytes] != ']' {
+               ok = false
+               s.nbytes = strings.Index(s.format, "]")
+               if s.nbytes < 0 {
+                       s.pass.Reportf(s.call.Pos(), "%s format %s is missing closing ]", s.name, s.format)
+                       return false
+               }
+       }
+       arg32, err := strconv.ParseInt(s.format[start:s.nbytes], 10, 32)
+       if err != nil || !ok || arg32 <= 0 || arg32 > int64(len(s.call.Args)-s.firstArg) {
+               s.pass.Reportf(s.call.Pos(), "%s format has invalid argument index [%s]", s.name, s.format[start:s.nbytes])
+               return false
+       }
+       s.nbytes++ // skip ']'
+       arg := int(arg32)
+       arg += s.firstArg - 1 // We want to zero-index the actual arguments.
+       s.argNum = arg
+       s.hasIndex = true
+       s.indexPending = true
+       return true
+}
+
+// parseNum scans a width or precision (or *). It returns false if there's a bad index expression.
+func (s *formatState) parseNum() bool {
+       if s.nbytes < len(s.format) && s.format[s.nbytes] == '*' {
+               if s.indexPending { // Absorb it.
+                       s.indexPending = false
+               }
+               s.nbytes++
+               s.argNums = append(s.argNums, s.argNum)
+               s.argNum++
+       } else {
+               s.scanNum()
+       }
+       return true
+}
+
+// parsePrecision scans for a precision. It returns false if there's a bad index expression.
+func (s *formatState) parsePrecision() bool {
+       // If there's a period, there may be a precision.
+       if s.nbytes < len(s.format) && s.format[s.nbytes] == '.' {
+               s.flags = append(s.flags, '.') // Treat precision as a flag.
+               s.nbytes++
+               if !s.parseIndex() {
+                       return false
+               }
+               if !s.parseNum() {
+                       return false
+               }
+       }
+       return true
+}
+
+// parsePrintfVerb looks the formatting directive that begins the format string
+// and returns a formatState that encodes what the directive wants, without looking
+// at the actual arguments present in the call. The result is nil if there is an error.
+func parsePrintfVerb(pass *analysis.Pass, call *ast.CallExpr, name, format string, firstArg, argNum int) *formatState {
+       state := &formatState{
+               format:   format,
+               name:     name,
+               flags:    make([]byte, 0, 5),
+               argNum:   argNum,
+               argNums:  make([]int, 0, 1),
+               nbytes:   1, // There's guaranteed to be a percent sign.
+               firstArg: firstArg,
+               pass:     pass,
+               call:     call,
+       }
+       // There may be flags.
+       state.parseFlags()
+       // There may be an index.
+       if !state.parseIndex() {
+               return nil
+       }
+       // There may be a width.
+       if !state.parseNum() {
+               return nil
+       }
+       // There may be a precision.
+       if !state.parsePrecision() {
+               return nil
+       }
+       // Now a verb, possibly prefixed by an index (which we may already have).
+       if !state.indexPending && !state.parseIndex() {
+               return nil
+       }
+       if state.nbytes == len(state.format) {
+               pass.Reportf(call.Pos(), "%s format %s is missing verb at end of string", name, state.format)
+               return nil
+       }
+       verb, w := utf8.DecodeRuneInString(state.format[state.nbytes:])
+       state.verb = verb
+       state.nbytes += w
+       if verb != '%' {
+               state.argNums = append(state.argNums, state.argNum)
+       }
+       state.format = state.format[:state.nbytes]
+       return state
+}
+
+// printfArgType encodes the types of expressions a printf verb accepts. It is a bitmask.
+type printfArgType int
+
+const (
+       argBool printfArgType = 1 << iota
+       argInt
+       argRune
+       argString
+       argFloat
+       argComplex
+       argPointer
+       anyType printfArgType = ^0
+)
+
+type printVerb struct {
+       verb  rune   // User may provide verb through Formatter; could be a rune.
+       flags string // known flags are all ASCII
+       typ   printfArgType
+}
+
+// Common flag sets for printf verbs.
+const (
+       noFlag       = ""
+       numFlag      = " -+.0"
+       sharpNumFlag = " -+.0#"
+       allFlags     = " -+.0#"
+)
+
+// printVerbs identifies which flags are known to printf for each verb.
+var printVerbs = []printVerb{
+       // '-' is a width modifier, always valid.
+       // '.' is a precision for float, max width for strings.
+       // '+' is required sign for numbers, Go format for %v.
+       // '#' is alternate format for several verbs.
+       // ' ' is spacer for numbers
+       {'%', noFlag, 0},
+       {'b', numFlag, argInt | argFloat | argComplex},
+       {'c', "-", argRune | argInt},
+       {'d', numFlag, argInt | argPointer},
+       {'e', sharpNumFlag, argFloat | argComplex},
+       {'E', sharpNumFlag, argFloat | argComplex},
+       {'f', sharpNumFlag, argFloat | argComplex},
+       {'F', sharpNumFlag, argFloat | argComplex},
+       {'g', sharpNumFlag, argFloat | argComplex},
+       {'G', sharpNumFlag, argFloat | argComplex},
+       {'o', sharpNumFlag, argInt},
+       {'p', "-#", argPointer},
+       {'q', " -+.0#", argRune | argInt | argString},
+       {'s', " -+.0", argString},
+       {'t', "-", argBool},
+       {'T', "-", anyType},
+       {'U', "-#", argRune | argInt},
+       {'v', allFlags, anyType},
+       {'x', sharpNumFlag, argRune | argInt | argString | argPointer},
+       {'X', sharpNumFlag, argRune | argInt | argString | argPointer},
+}
+
+// okPrintfArg compares the formatState to the arguments actually present,
+// reporting any discrepancies it can discern. If the final argument is ellipsissed,
+// there's little it can do for that.
+func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (ok bool) {
+       var v printVerb
+       found := false
+       // Linear scan is fast enough for a small list.
+       for _, v = range printVerbs {
+               if v.verb == state.verb {
+                       found = true
+                       break
+               }
+       }
+
+       // Does current arg implement fmt.Formatter?
+       formatter := false
+       if state.argNum < len(call.Args) {
+               if tv, ok := pass.TypesInfo.Types[call.Args[state.argNum]]; ok {
+                       formatter = isFormatter(pass, tv.Type)
+               }
+       }
+
+       if !formatter {
+               if !found {
+                       pass.Reportf(call.Pos(), "%s format %s has unknown verb %c", state.name, state.format, state.verb)
+                       return false
+               }
+               for _, flag := range state.flags {
+                       // TODO: Disable complaint about '0' for Go 1.10. To be fixed properly in 1.11.
+                       // See issues 23598 and 23605.
+                       if flag == '0' {
+                               continue
+                       }
+                       if !strings.ContainsRune(v.flags, rune(flag)) {
+                               pass.Reportf(call.Pos(), "%s format %s has unrecognized flag %c", state.name, state.format, flag)
+                               return false
+                       }
+               }
+       }
+       // Verb is good. If len(state.argNums)>trueArgs, we have something like %.*s and all
+       // but the final arg must be an integer.
+       trueArgs := 1
+       if state.verb == '%' {
+               trueArgs = 0
+       }
+       nargs := len(state.argNums)
+       for i := 0; i < nargs-trueArgs; i++ {
+               argNum := state.argNums[i]
+               if !argCanBeChecked(pass, call, i, state) {
+                       return
+               }
+               arg := call.Args[argNum]
+               if !matchArgType(pass, argInt, nil, arg) {
+                       pass.Reportf(call.Pos(), "%s format %s uses non-int %s as argument of *", state.name, state.format, analysisutil.Format(pass.Fset, arg))
+                       return false
+               }
+       }
+
+       if state.verb == '%' || formatter {
+               return true
+       }
+       argNum := state.argNums[len(state.argNums)-1]
+       if !argCanBeChecked(pass, call, len(state.argNums)-1, state) {
+               return false
+       }
+       arg := call.Args[argNum]
+       if isFunctionValue(pass, arg) && state.verb != 'p' && state.verb != 'T' {
+               pass.Reportf(call.Pos(), "%s format %s arg %s is a func value, not called", state.name, state.format, analysisutil.Format(pass.Fset, arg))
+               return false
+       }
+       if !matchArgType(pass, v.typ, nil, arg) {
+               typeString := ""
+               if typ := pass.TypesInfo.Types[arg].Type; typ != nil {
+                       typeString = typ.String()
+               }
+               pass.Reportf(call.Pos(), "%s format %s has arg %s of wrong type %s", state.name, state.format, analysisutil.Format(pass.Fset, arg), typeString)
+               return false
+       }
+       if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) && recursiveStringer(pass, arg) {
+               pass.Reportf(call.Pos(), "%s format %s with arg %s causes recursive String method call", state.name, state.format, analysisutil.Format(pass.Fset, arg))
+               return false
+       }
+       return true
+}
+
+// recursiveStringer reports whether the argument e is a potential
+// recursive call to stringer, such as t and &t in these examples:
+//
+//     func (t *T) String() string { printf("%s",  t) }
+//     func (t  T) String() string { printf("%s",  t) }
+//     func (t  T) String() string { printf("%s", &t) }
+//
+func recursiveStringer(pass *analysis.Pass, e ast.Expr) bool {
+       typ := pass.TypesInfo.Types[e].Type
+
+       // It's unlikely to be a recursive stringer if it has a Format method.
+       if isFormatter(pass, typ) {
+               return false
+       }
+
+       // Does e allow e.String()?
+       obj, _, _ := types.LookupFieldOrMethod(typ, false, pass.Pkg, "String")
+       stringMethod, ok := obj.(*types.Func)
+       if !ok {
+               return false
+       }
+
+       // Is the expression e within the body of that String method?
+       return stringMethod.Pkg() == pass.Pkg && stringMethod.Scope().Contains(e.Pos())
+}
+
+// isFunctionValue reports whether the expression is a function as opposed to a function call.
+// It is almost always a mistake to print a function value.
+func isFunctionValue(pass *analysis.Pass, e ast.Expr) bool {
+       if typ := pass.TypesInfo.Types[e].Type; typ != nil {
+               _, ok := typ.(*types.Signature)
+               return ok
+       }
+       return false
+}
+
+// argCanBeChecked reports whether the specified argument is statically present;
+// it may be beyond the list of arguments or in a terminal slice... argument, which
+// means we can't see it.
+func argCanBeChecked(pass *analysis.Pass, call *ast.CallExpr, formatArg int, state *formatState) bool {
+       argNum := state.argNums[formatArg]
+       if argNum <= 0 {
+               // Shouldn't happen, so catch it with prejudice.
+               panic("negative arg num")
+       }
+       if argNum < len(call.Args)-1 {
+               return true // Always OK.
+       }
+       if call.Ellipsis.IsValid() {
+               return false // We just can't tell; there could be many more arguments.
+       }
+       if argNum < len(call.Args) {
+               return true
+       }
+       // There are bad indexes in the format or there are fewer arguments than the format needs.
+       // This is the argument number relative to the format: Printf("%s", "hi") will give 1 for the "hi".
+       arg := argNum - state.firstArg + 1 // People think of arguments as 1-indexed.
+       pass.Reportf(call.Pos(), "%s format %s reads arg #%d, but call has %v", state.name, state.format, arg, count(len(call.Args)-state.firstArg, "arg"))
+       return false
+}
+
+// printFormatRE is the regexp we match and report as a possible format string
+// in the first argument to unformatted prints like fmt.Print.
+// We exclude the space flag, so that printing a string like "x % y" is not reported as a format.
+var printFormatRE = regexp.MustCompile(`%` + flagsRE + numOptRE + `\.?` + numOptRE + indexOptRE + verbRE)
+
+const (
+       flagsRE    = `[+\-#]*`
+       indexOptRE = `(\[[0-9]+\])?`
+       numOptRE   = `([0-9]+|` + indexOptRE + `\*)?`
+       verbRE     = `[bcdefgopqstvxEFGTUX]`
+)
+
+// checkPrint checks a call to an unformatted print routine such as Println.
+func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) {
+       firstArg := 0
+       typ := pass.TypesInfo.Types[call.Fun].Type
+       if typ == nil {
+               // Skip checking functions with unknown type.
+               return
+       }
+       if sig, ok := typ.(*types.Signature); ok {
+               if !sig.Variadic() {
+                       // Skip checking non-variadic functions.
+                       return
+               }
+               params := sig.Params()
+               firstArg = params.Len() - 1
+
+               typ := params.At(firstArg).Type()
+               typ = typ.(*types.Slice).Elem()
+               it, ok := typ.(*types.Interface)
+               if !ok || !it.Empty() {
+                       // Skip variadic functions accepting non-interface{} args.
+                       return
+               }
+       }
+       args := call.Args
+       if len(args) <= firstArg {
+               // Skip calls without variadic args.
+               return
+       }
+       args = args[firstArg:]
+
+       if firstArg == 0 {
+               if sel, ok := call.Args[0].(*ast.SelectorExpr); ok {
+                       if x, ok := sel.X.(*ast.Ident); ok {
+                               if x.Name == "os" && strings.HasPrefix(sel.Sel.Name, "Std") {
+                                       pass.Reportf(call.Pos(), "%s does not take io.Writer but has first arg %s", fn.Name(), analysisutil.Format(pass.Fset, call.Args[0]))
+                               }
+                       }
+               }
+       }
+
+       arg := args[0]
+       if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
+               // Ignore trailing % character in lit.Value.
+               // The % in "abc 0.0%" couldn't be a formatting directive.
+               s := strings.TrimSuffix(lit.Value, `%"`)
+               if strings.Contains(s, "%") {
+                       m := printFormatRE.FindStringSubmatch(s)
+                       if m != nil {
+                               pass.Reportf(call.Pos(), "%s call has possible formatting directive %s", fn.Name(), m[0])
+                       }
+               }
+       }
+       if strings.HasSuffix(fn.Name(), "ln") {
+               // The last item, if a string, should not have a newline.
+               arg = args[len(args)-1]
+               if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
+                       str, _ := strconv.Unquote(lit.Value)
+                       if strings.HasSuffix(str, "\n") {
+                               pass.Reportf(call.Pos(), "%s arg list ends with redundant newline", fn.Name())
+                       }
+               }
+       }
+       for _, arg := range args {
+               if isFunctionValue(pass, arg) {
+                       pass.Reportf(call.Pos(), "%s arg %s is a func value, not called", fn.Name(), analysisutil.Format(pass.Fset, arg))
+               }
+               if recursiveStringer(pass, arg) {
+                       pass.Reportf(call.Pos(), "%s arg %s causes recursive call to String method", fn.Name(), analysisutil.Format(pass.Fset, arg))
+               }
+       }
+}
+
+// count(n, what) returns "1 what" or "N whats"
+// (assuming the plural of what is whats).
+func count(n int, what string) string {
+       if n == 1 {
+               return "1 " + what
+       }
+       return fmt.Sprintf("%d %ss", n, what)
+}
+
+// stringSet is a set-of-nonempty-strings-valued flag.
+// Note: elements without a '.' get lower-cased.
+type stringSet map[string]bool
+
+func (ss stringSet) String() string {
+       var list []string
+       for name := range ss {
+               list = append(list, name)
+       }
+       sort.Strings(list)
+       return strings.Join(list, ",")
+}
+
+func (ss stringSet) Set(flag string) error {
+       for _, name := range strings.Split(flag, ",") {
+               if len(name) == 0 {
+                       return fmt.Errorf("empty string")
+               }
+               if !strings.Contains(name, ".") {
+                       name = strings.ToLower(name)
+               }
+               ss[name] = true
+       }
+       return nil
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go
new file mode 100644 (file)
index 0000000..701d08b
--- /dev/null
@@ -0,0 +1,223 @@
+package printf
+
+import (
+       "go/ast"
+       "go/build"
+       "go/types"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+)
+
+var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
+
+// matchArgType reports an error if printf verb t is not appropriate
+// for operand arg.
+//
+// typ is used only for recursive calls; external callers must supply nil.
+//
+// (Recursion arises from the compound types {map,chan,slice} which
+// may be printed with %d etc. if that is appropriate for their element
+// types.)
+func matchArgType(pass *analysis.Pass, t printfArgType, typ types.Type, arg ast.Expr) bool {
+       return matchArgTypeInternal(pass, t, typ, arg, make(map[types.Type]bool))
+}
+
+// matchArgTypeInternal is the internal version of matchArgType. It carries a map
+// remembering what types are in progress so we don't recur when faced with recursive
+// types or mutually recursive types.
+func matchArgTypeInternal(pass *analysis.Pass, t printfArgType, typ types.Type, arg ast.Expr, inProgress map[types.Type]bool) bool {
+       // %v, %T accept any argument type.
+       if t == anyType {
+               return true
+       }
+       if typ == nil {
+               // external call
+               typ = pass.TypesInfo.Types[arg].Type
+               if typ == nil {
+                       return true // probably a type check problem
+               }
+       }
+       // If the type implements fmt.Formatter, we have nothing to check.
+       if isFormatter(pass, typ) {
+               return true
+       }
+       // If we can use a string, might arg (dynamically) implement the Stringer or Error interface?
+       if t&argString != 0 && isConvertibleToString(pass, typ) {
+               return true
+       }
+
+       typ = typ.Underlying()
+       if inProgress[typ] {
+               // We're already looking at this type. The call that started it will take care of it.
+               return true
+       }
+       inProgress[typ] = true
+
+       switch typ := typ.(type) {
+       case *types.Signature:
+               return t&argPointer != 0
+
+       case *types.Map:
+               // Recur: map[int]int matches %d.
+               return t&argPointer != 0 ||
+                       (matchArgTypeInternal(pass, t, typ.Key(), arg, inProgress) && matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress))
+
+       case *types.Chan:
+               return t&argPointer != 0
+
+       case *types.Array:
+               // Same as slice.
+               if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
+                       return true // %s matches []byte
+               }
+               // Recur: []int matches %d.
+               return t&argPointer != 0 || matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress)
+
+       case *types.Slice:
+               // Same as array.
+               if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 {
+                       return true // %s matches []byte
+               }
+               // Recur: []int matches %d. But watch out for
+               //      type T []T
+               // If the element is a pointer type (type T[]*T), it's handled fine by the Pointer case below.
+               return t&argPointer != 0 || matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress)
+
+       case *types.Pointer:
+               // Ugly, but dealing with an edge case: a known pointer to an invalid type,
+               // probably something from a failed import.
+               if typ.Elem().String() == "invalid type" {
+                       if false {
+                               pass.Reportf(arg.Pos(), "printf argument %v is pointer to invalid or unknown type", analysisutil.Format(pass.Fset, arg))
+                       }
+                       return true // special case
+               }
+               // If it's actually a pointer with %p, it prints as one.
+               if t == argPointer {
+                       return true
+               }
+               // If it's pointer to struct, that's equivalent in our analysis to whether we can print the struct.
+               if str, ok := typ.Elem().Underlying().(*types.Struct); ok {
+                       return matchStructArgType(pass, t, str, arg, inProgress)
+               }
+               // Check whether the rest can print pointers.
+               return t&argPointer != 0
+
+       case *types.Struct:
+               return matchStructArgType(pass, t, typ, arg, inProgress)
+
+       case *types.Interface:
+               // There's little we can do.
+               // Whether any particular verb is valid depends on the argument.
+               // The user may have reasonable prior knowledge of the contents of the interface.
+               return true
+
+       case *types.Basic:
+               switch typ.Kind() {
+               case types.UntypedBool,
+                       types.Bool:
+                       return t&argBool != 0
+
+               case types.UntypedInt,
+                       types.Int,
+                       types.Int8,
+                       types.Int16,
+                       types.Int32,
+                       types.Int64,
+                       types.Uint,
+                       types.Uint8,
+                       types.Uint16,
+                       types.Uint32,
+                       types.Uint64,
+                       types.Uintptr:
+                       return t&argInt != 0
+
+               case types.UntypedFloat,
+                       types.Float32,
+                       types.Float64:
+                       return t&argFloat != 0
+
+               case types.UntypedComplex,
+                       types.Complex64,
+                       types.Complex128:
+                       return t&argComplex != 0
+
+               case types.UntypedString,
+                       types.String:
+                       return t&argString != 0
+
+               case types.UnsafePointer:
+                       return t&(argPointer|argInt) != 0
+
+               case types.UntypedRune:
+                       return t&(argInt|argRune) != 0
+
+               case types.UntypedNil:
+                       return false
+
+               case types.Invalid:
+                       if false {
+                               pass.Reportf(arg.Pos(), "printf argument %v has invalid or unknown type", analysisutil.Format(pass.Fset, arg))
+                       }
+                       return true // Probably a type check problem.
+               }
+               panic("unreachable")
+       }
+
+       return false
+}
+
+func isConvertibleToString(pass *analysis.Pass, typ types.Type) bool {
+       if bt, ok := typ.(*types.Basic); ok && bt.Kind() == types.UntypedNil {
+               // We explicitly don't want untyped nil, which is
+               // convertible to both of the interfaces below, as it
+               // would just panic anyway.
+               return false
+       }
+       if types.ConvertibleTo(typ, errorType) {
+               return true // via .Error()
+       }
+
+       // Does it implement fmt.Stringer?
+       if obj, _, _ := types.LookupFieldOrMethod(typ, false, nil, "String"); obj != nil {
+               if fn, ok := obj.(*types.Func); ok {
+                       sig := fn.Type().(*types.Signature)
+                       if sig.Params().Len() == 0 &&
+                               sig.Results().Len() == 1 &&
+                               sig.Results().At(0).Type() == types.Typ[types.String] {
+                               return true
+                       }
+               }
+       }
+
+       return false
+}
+
+// hasBasicType reports whether x's type is a types.Basic with the given kind.
+func hasBasicType(pass *analysis.Pass, x ast.Expr, kind types.BasicKind) bool {
+       t := pass.TypesInfo.Types[x].Type
+       if t != nil {
+               t = t.Underlying()
+       }
+       b, ok := t.(*types.Basic)
+       return ok && b.Kind() == kind
+}
+
+// matchStructArgType reports whether all the elements of the struct match the expected
+// type. For instance, with "%d" all the elements must be printable with the "%d" format.
+func matchStructArgType(pass *analysis.Pass, t printfArgType, typ *types.Struct, arg ast.Expr, inProgress map[types.Type]bool) bool {
+       for i := 0; i < typ.NumFields(); i++ {
+               typf := typ.Field(i)
+               if !matchArgTypeInternal(pass, t, typf.Type(), arg, inProgress) {
+                       return false
+               }
+               if t&argString != 0 && !typf.Exported() && isConvertibleToString(pass, typf.Type()) {
+                       // Issue #17798: unexported Stringer or error cannot be properly fomatted.
+                       return false
+               }
+       }
+       return true
+}
+
+var archSizes = types.SizesFor("gc", build.Default.GOARCH)
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/shift/dead.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/shift/dead.go
new file mode 100644 (file)
index 0000000..43415a9
--- /dev/null
@@ -0,0 +1,101 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package shift
+
+// Simplified dead code detector.
+// Used for skipping shift checks on unreachable arch-specific code.
+
+import (
+       "go/ast"
+       "go/constant"
+       "go/types"
+)
+
+// updateDead puts unreachable "if" and "case" nodes into dead.
+func updateDead(info *types.Info, dead map[ast.Node]bool, node ast.Node) {
+       if dead[node] {
+               // The node is already marked as dead.
+               return
+       }
+
+       // setDead marks the node and all the children as dead.
+       setDead := func(n ast.Node) {
+               ast.Inspect(n, func(node ast.Node) bool {
+                       if node != nil {
+                               dead[node] = true
+                       }
+                       return true
+               })
+       }
+
+       switch stmt := node.(type) {
+       case *ast.IfStmt:
+               // "if" branch is dead if its condition evaluates
+               // to constant false.
+               v := info.Types[stmt.Cond].Value
+               if v == nil {
+                       return
+               }
+               if !constant.BoolVal(v) {
+                       setDead(stmt.Body)
+                       return
+               }
+               if stmt.Else != nil {
+                       setDead(stmt.Else)
+               }
+       case *ast.SwitchStmt:
+               // Case clause with empty switch tag is dead if it evaluates
+               // to constant false.
+               if stmt.Tag == nil {
+               BodyLoopBool:
+                       for _, stmt := range stmt.Body.List {
+                               cc := stmt.(*ast.CaseClause)
+                               if cc.List == nil {
+                                       // Skip default case.
+                                       continue
+                               }
+                               for _, expr := range cc.List {
+                                       v := info.Types[expr].Value
+                                       if v == nil || v.Kind() != constant.Bool || constant.BoolVal(v) {
+                                               continue BodyLoopBool
+                                       }
+                               }
+                               setDead(cc)
+                       }
+                       return
+               }
+
+               // Case clause is dead if its constant value doesn't match
+               // the constant value from the switch tag.
+               // TODO: This handles integer comparisons only.
+               v := info.Types[stmt.Tag].Value
+               if v == nil || v.Kind() != constant.Int {
+                       return
+               }
+               tagN, ok := constant.Uint64Val(v)
+               if !ok {
+                       return
+               }
+       BodyLoopInt:
+               for _, x := range stmt.Body.List {
+                       cc := x.(*ast.CaseClause)
+                       if cc.List == nil {
+                               // Skip default case.
+                               continue
+                       }
+                       for _, expr := range cc.List {
+                               v := info.Types[expr].Value
+                               if v == nil {
+                                       continue BodyLoopInt
+                               }
+                               n, ok := constant.Uint64Val(v)
+                               if !ok || tagN == n {
+                                       continue BodyLoopInt
+                               }
+                       }
+                       setDead(cc)
+               }
+       }
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go
new file mode 100644 (file)
index 0000000..56b150b
--- /dev/null
@@ -0,0 +1,128 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package shift defines an Analyzer that checks for shifts that exceed
+// the width of an integer.
+package shift
+
+// TODO(adonovan): integrate with ctrflow (CFG-based) dead code analysis. May
+// have impedance mismatch due to its (non-)treatment of constant
+// expressions (such as runtime.GOARCH=="386").
+
+import (
+       "go/ast"
+       "go/build"
+       "go/constant"
+       "go/token"
+       "go/types"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+var Analyzer = &analysis.Analyzer{
+       Name:     "shift",
+       Doc:      "check for shifts that equal or exceed the width of the integer",
+       Requires: []*analysis.Analyzer{inspect.Analyzer},
+       Run:      run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       // Do a complete pass to compute dead nodes.
+       dead := make(map[ast.Node]bool)
+       nodeFilter := []ast.Node{
+               (*ast.IfStmt)(nil),
+               (*ast.SwitchStmt)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               // TODO(adonovan): move updateDead into this file.
+               updateDead(pass.TypesInfo, dead, n)
+       })
+
+       nodeFilter = []ast.Node{
+               (*ast.AssignStmt)(nil),
+               (*ast.BinaryExpr)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(node ast.Node) {
+               if dead[node] {
+                       // Skip shift checks on unreachable nodes.
+                       return
+               }
+
+               switch node := node.(type) {
+               case *ast.BinaryExpr:
+                       if node.Op == token.SHL || node.Op == token.SHR {
+                               checkLongShift(pass, node, node.X, node.Y)
+                       }
+               case *ast.AssignStmt:
+                       if len(node.Lhs) != 1 || len(node.Rhs) != 1 {
+                               return
+                       }
+                       if node.Tok == token.SHL_ASSIGN || node.Tok == token.SHR_ASSIGN {
+                               checkLongShift(pass, node, node.Lhs[0], node.Rhs[0])
+                       }
+               }
+       })
+       return nil, nil
+}
+
+// checkLongShift checks if shift or shift-assign operations shift by more than
+// the length of the underlying variable.
+func checkLongShift(pass *analysis.Pass, node ast.Node, x, y ast.Expr) {
+       if pass.TypesInfo.Types[x].Value != nil {
+               // Ignore shifts of constants.
+               // These are frequently used for bit-twiddling tricks
+               // like ^uint(0) >> 63 for 32/64 bit detection and compatibility.
+               return
+       }
+
+       v := pass.TypesInfo.Types[y].Value
+       if v == nil {
+               return
+       }
+       amt, ok := constant.Int64Val(v)
+       if !ok {
+               return
+       }
+       t := pass.TypesInfo.Types[x].Type
+       if t == nil {
+               return
+       }
+       b, ok := t.Underlying().(*types.Basic)
+       if !ok {
+               return
+       }
+       var size int64
+       switch b.Kind() {
+       case types.Uint8, types.Int8:
+               size = 8
+       case types.Uint16, types.Int16:
+               size = 16
+       case types.Uint32, types.Int32:
+               size = 32
+       case types.Uint64, types.Int64:
+               size = 64
+       case types.Int, types.Uint:
+               size = uintBitSize
+       case types.Uintptr:
+               size = uintptrBitSize
+       default:
+               return
+       }
+       if amt >= size {
+               ident := analysisutil.Format(pass.Fset, x)
+               pass.Reportf(node.Pos(), "%s (%d bits) too small for shift of %d", ident, size, amt)
+       }
+}
+
+var (
+       uintBitSize    = 8 * archSizes.Sizeof(types.Typ[types.Uint])
+       uintptrBitSize = 8 * archSizes.Sizeof(types.Typ[types.Uintptr])
+)
+
+var archSizes = types.SizesFor("gc", build.Default.GOARCH)
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go
new file mode 100644 (file)
index 0000000..eead289
--- /dev/null
@@ -0,0 +1,211 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package stdmethods defines an Analyzer that checks for misspellings
+// in the signatures of methods similar to well-known interfaces.
+package stdmethods
+
+import (
+       "bytes"
+       "fmt"
+       "go/ast"
+       "go/printer"
+       "go/token"
+       "go/types"
+       "strings"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+const Doc = `check signature of methods of well-known interfaces
+
+Sometimes a type may be intended to satisfy an interface but may fail to
+do so because of a mistake in its method signature.
+For example, the result of this WriteTo method should be (int64, error),
+not error, to satisfy io.WriterTo:
+
+       type myWriterTo struct{...}
+        func (myWriterTo) WriteTo(w io.Writer) error { ... }
+
+This check ensures that each method whose name matches one of several
+well-known interface methods from the standard library has the correct
+signature for that interface.
+
+Checked method names include:
+       Format GobEncode GobDecode MarshalJSON MarshalXML
+       Peek ReadByte ReadFrom ReadRune Scan Seek
+       UnmarshalJSON UnreadByte UnreadRune WriteByte
+       WriteTo
+`
+
+var Analyzer = &analysis.Analyzer{
+       Name:     "stdmethods",
+       Doc:      Doc,
+       Requires: []*analysis.Analyzer{inspect.Analyzer},
+       Run:      run,
+}
+
+// canonicalMethods lists the input and output types for Go methods
+// that are checked using dynamic interface checks. Because the
+// checks are dynamic, such methods would not cause a compile error
+// if they have the wrong signature: instead the dynamic check would
+// fail, sometimes mysteriously. If a method is found with a name listed
+// here but not the input/output types listed here, vet complains.
+//
+// A few of the canonical methods have very common names.
+// For example, a type might implement a Scan method that
+// has nothing to do with fmt.Scanner, but we still want to check
+// the methods that are intended to implement fmt.Scanner.
+// To do that, the arguments that have a = prefix are treated as
+// signals that the canonical meaning is intended: if a Scan
+// method doesn't have a fmt.ScanState as its first argument,
+// we let it go. But if it does have a fmt.ScanState, then the
+// rest has to match.
+var canonicalMethods = map[string]struct{ args, results []string }{
+       // "Flush": {{}, {"error"}}, // http.Flusher and jpeg.writer conflict
+       "Format":        {[]string{"=fmt.State", "rune"}, []string{}},                      // fmt.Formatter
+       "GobDecode":     {[]string{"[]byte"}, []string{"error"}},                           // gob.GobDecoder
+       "GobEncode":     {[]string{}, []string{"[]byte", "error"}},                         // gob.GobEncoder
+       "MarshalJSON":   {[]string{}, []string{"[]byte", "error"}},                         // json.Marshaler
+       "MarshalXML":    {[]string{"*xml.Encoder", "xml.StartElement"}, []string{"error"}}, // xml.Marshaler
+       "ReadByte":      {[]string{}, []string{"byte", "error"}},                           // io.ByteReader
+       "ReadFrom":      {[]string{"=io.Reader"}, []string{"int64", "error"}},              // io.ReaderFrom
+       "ReadRune":      {[]string{}, []string{"rune", "int", "error"}},                    // io.RuneReader
+       "Scan":          {[]string{"=fmt.ScanState", "rune"}, []string{"error"}},           // fmt.Scanner
+       "Seek":          {[]string{"=int64", "int"}, []string{"int64", "error"}},           // io.Seeker
+       "UnmarshalJSON": {[]string{"[]byte"}, []string{"error"}},                           // json.Unmarshaler
+       "UnmarshalXML":  {[]string{"*xml.Decoder", "xml.StartElement"}, []string{"error"}}, // xml.Unmarshaler
+       "UnreadByte":    {[]string{}, []string{"error"}},
+       "UnreadRune":    {[]string{}, []string{"error"}},
+       "WriteByte":     {[]string{"byte"}, []string{"error"}},                // jpeg.writer (matching bufio.Writer)
+       "WriteTo":       {[]string{"=io.Writer"}, []string{"int64", "error"}}, // io.WriterTo
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.FuncDecl)(nil),
+               (*ast.InterfaceType)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               switch n := n.(type) {
+               case *ast.FuncDecl:
+                       if n.Recv != nil {
+                               canonicalMethod(pass, n.Name, n.Type)
+                       }
+               case *ast.InterfaceType:
+                       for _, field := range n.Methods.List {
+                               for _, id := range field.Names {
+                                       canonicalMethod(pass, id, field.Type.(*ast.FuncType))
+                               }
+                       }
+               }
+       })
+       return nil, nil
+}
+
+func canonicalMethod(pass *analysis.Pass, id *ast.Ident, t *ast.FuncType) {
+       // Expected input/output.
+       expect, ok := canonicalMethods[id.Name]
+       if !ok {
+               return
+       }
+
+       // Actual input/output
+       args := typeFlatten(t.Params.List)
+       var results []ast.Expr
+       if t.Results != nil {
+               results = typeFlatten(t.Results.List)
+       }
+
+       // Do the =s (if any) all match?
+       if !matchParams(pass, expect.args, args, "=") || !matchParams(pass, expect.results, results, "=") {
+               return
+       }
+
+       // Everything must match.
+       if !matchParams(pass, expect.args, args, "") || !matchParams(pass, expect.results, results, "") {
+               expectFmt := id.Name + "(" + argjoin(expect.args) + ")"
+               if len(expect.results) == 1 {
+                       expectFmt += " " + argjoin(expect.results)
+               } else if len(expect.results) > 1 {
+                       expectFmt += " (" + argjoin(expect.results) + ")"
+               }
+
+               var buf bytes.Buffer
+               if err := printer.Fprint(&buf, pass.Fset, t); err != nil {
+                       fmt.Fprintf(&buf, "<%s>", err)
+               }
+               actual := buf.String()
+               actual = strings.TrimPrefix(actual, "func")
+               actual = id.Name + actual
+
+               pass.Reportf(id.Pos(), "method %s should have signature %s", actual, expectFmt)
+       }
+}
+
+func argjoin(x []string) string {
+       y := make([]string, len(x))
+       for i, s := range x {
+               if s[0] == '=' {
+                       s = s[1:]
+               }
+               y[i] = s
+       }
+       return strings.Join(y, ", ")
+}
+
+// Turn parameter list into slice of types
+// (in the ast, types are Exprs).
+// Have to handle f(int, bool) and f(x, y, z int)
+// so not a simple 1-to-1 conversion.
+func typeFlatten(l []*ast.Field) []ast.Expr {
+       var t []ast.Expr
+       for _, f := range l {
+               if len(f.Names) == 0 {
+                       t = append(t, f.Type)
+                       continue
+               }
+               for range f.Names {
+                       t = append(t, f.Type)
+               }
+       }
+       return t
+}
+
+// Does each type in expect with the given prefix match the corresponding type in actual?
+func matchParams(pass *analysis.Pass, expect []string, actual []ast.Expr, prefix string) bool {
+       for i, x := range expect {
+               if !strings.HasPrefix(x, prefix) {
+                       continue
+               }
+               if i >= len(actual) {
+                       return false
+               }
+               if !matchParamType(pass.Fset, pass.Pkg, x, actual[i]) {
+                       return false
+               }
+       }
+       if prefix == "" && len(actual) > len(expect) {
+               return false
+       }
+       return true
+}
+
+// Does this one type match?
+func matchParamType(fset *token.FileSet, pkg *types.Package, expect string, actual ast.Expr) bool {
+       expect = strings.TrimPrefix(expect, "=")
+       // Strip package name if we're in that package.
+       if n := len(pkg.Name()); len(expect) > n && expect[:n] == pkg.Name() && expect[n] == '.' {
+               expect = expect[n+1:]
+       }
+
+       // Overkill but easy.
+       var buf bytes.Buffer
+       printer.Fprint(&buf, fset, actual)
+       return buf.String() == expect
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go
new file mode 100644 (file)
index 0000000..78133fe
--- /dev/null
@@ -0,0 +1,260 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package structtag defines an Analyzer that checks struct field tags
+// are well formed.
+package structtag
+
+import (
+       "errors"
+       "go/ast"
+       "go/token"
+       "go/types"
+       "path/filepath"
+       "reflect"
+       "strconv"
+       "strings"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+const Doc = `check that struct field tags conform to reflect.StructTag.Get
+
+Also report certain struct tags (json, xml) used with unexported fields.`
+
+var Analyzer = &analysis.Analyzer{
+       Name:             "structtag",
+       Doc:              Doc,
+       Requires:         []*analysis.Analyzer{inspect.Analyzer},
+       RunDespiteErrors: true,
+       Run:              run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.StructType)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               styp := pass.TypesInfo.Types[n.(*ast.StructType)].Type.(*types.Struct)
+               var seen map[[2]string]token.Pos
+               for i := 0; i < styp.NumFields(); i++ {
+                       field := styp.Field(i)
+                       tag := styp.Tag(i)
+                       checkCanonicalFieldTag(pass, field, tag, &seen)
+               }
+       })
+       return nil, nil
+}
+
+var checkTagDups = []string{"json", "xml"}
+var checkTagSpaces = map[string]bool{"json": true, "xml": true, "asn1": true}
+
+// checkCanonicalFieldTag checks a single struct field tag.
+func checkCanonicalFieldTag(pass *analysis.Pass, field *types.Var, tag string, seen *map[[2]string]token.Pos) {
+       for _, key := range checkTagDups {
+               checkTagDuplicates(pass, tag, key, field, field, seen)
+       }
+
+       if err := validateStructTag(tag); err != nil {
+               pass.Reportf(field.Pos(), "struct field tag %#q not compatible with reflect.StructTag.Get: %s", tag, err)
+       }
+
+       // Check for use of json or xml tags with unexported fields.
+
+       // Embedded struct. Nothing to do for now, but that
+       // may change, depending on what happens with issue 7363.
+       // TODO(adonovan): investigate, now that that issue is fixed.
+       if field.Anonymous() {
+               return
+       }
+
+       if field.Exported() {
+               return
+       }
+
+       for _, enc := range [...]string{"json", "xml"} {
+               if reflect.StructTag(tag).Get(enc) != "" {
+                       pass.Reportf(field.Pos(), "struct field %s has %s tag but is not exported", field.Name(), enc)
+                       return
+               }
+       }
+}
+
+// checkTagDuplicates checks a single struct field tag to see if any tags are
+// duplicated. nearest is the field that's closest to the field being checked,
+// while still being part of the top-level struct type.
+func checkTagDuplicates(pass *analysis.Pass, tag, key string, nearest, field *types.Var, seen *map[[2]string]token.Pos) {
+       val := reflect.StructTag(tag).Get(key)
+       if val == "-" {
+               // Ignored, even if the field is anonymous.
+               return
+       }
+       if val == "" || val[0] == ',' {
+               if field.Anonymous() {
+                       typ, ok := field.Type().Underlying().(*types.Struct)
+                       if !ok {
+                               return
+                       }
+                       for i := 0; i < typ.NumFields(); i++ {
+                               field := typ.Field(i)
+                               if !field.Exported() {
+                                       continue
+                               }
+                               tag := typ.Tag(i)
+                               checkTagDuplicates(pass, tag, key, nearest, field, seen)
+                       }
+               }
+               // Ignored if the field isn't anonymous.
+               return
+       }
+       if key == "xml" && field.Name() == "XMLName" {
+               // XMLName defines the XML element name of the struct being
+               // checked. That name cannot collide with element or attribute
+               // names defined on other fields of the struct. Vet does not have a
+               // check for untagged fields of type struct defining their own name
+               // by containing a field named XMLName; see issue 18256.
+               return
+       }
+       if i := strings.Index(val, ","); i >= 0 {
+               if key == "xml" {
+                       // Use a separate namespace for XML attributes.
+                       for _, opt := range strings.Split(val[i:], ",") {
+                               if opt == "attr" {
+                                       key += " attribute" // Key is part of the error message.
+                                       break
+                               }
+                       }
+               }
+               val = val[:i]
+       }
+       if *seen == nil {
+               *seen = map[[2]string]token.Pos{}
+       }
+       if pos, ok := (*seen)[[2]string{key, val}]; ok {
+               posn := pass.Fset.Position(pos)
+               posn.Filename = filepath.Base(posn.Filename)
+               posn.Column = 0
+               pass.Reportf(nearest.Pos(), "struct field %s repeats %s tag %q also at %s", field.Name(), key, val, posn)
+       } else {
+               (*seen)[[2]string{key, val}] = field.Pos()
+       }
+}
+
+var (
+       errTagSyntax      = errors.New("bad syntax for struct tag pair")
+       errTagKeySyntax   = errors.New("bad syntax for struct tag key")
+       errTagValueSyntax = errors.New("bad syntax for struct tag value")
+       errTagValueSpace  = errors.New("suspicious space in struct tag value")
+       errTagSpace       = errors.New("key:\"value\" pairs not separated by spaces")
+)
+
+// validateStructTag parses the struct tag and returns an error if it is not
+// in the canonical format, which is a space-separated list of key:"value"
+// settings. The value may contain spaces.
+func validateStructTag(tag string) error {
+       // This code is based on the StructTag.Get code in package reflect.
+
+       n := 0
+       for ; tag != ""; n++ {
+               if n > 0 && tag != "" && tag[0] != ' ' {
+                       // More restrictive than reflect, but catches likely mistakes
+                       // like `x:"foo",y:"bar"`, which parses as `x:"foo" ,y:"bar"` with second key ",y".
+                       return errTagSpace
+               }
+               // Skip leading space.
+               i := 0
+               for i < len(tag) && tag[i] == ' ' {
+                       i++
+               }
+               tag = tag[i:]
+               if tag == "" {
+                       break
+               }
+
+               // Scan to colon. A space, a quote or a control character is a syntax error.
+               // Strictly speaking, control chars include the range [0x7f, 0x9f], not just
+               // [0x00, 0x1f], but in practice, we ignore the multi-byte control characters
+               // as it is simpler to inspect the tag's bytes than the tag's runes.
+               i = 0
+               for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f {
+                       i++
+               }
+               if i == 0 {
+                       return errTagKeySyntax
+               }
+               if i+1 >= len(tag) || tag[i] != ':' {
+                       return errTagSyntax
+               }
+               if tag[i+1] != '"' {
+                       return errTagValueSyntax
+               }
+               key := tag[:i]
+               tag = tag[i+1:]
+
+               // Scan quoted string to find value.
+               i = 1
+               for i < len(tag) && tag[i] != '"' {
+                       if tag[i] == '\\' {
+                               i++
+                       }
+                       i++
+               }
+               if i >= len(tag) {
+                       return errTagValueSyntax
+               }
+               qvalue := tag[:i+1]
+               tag = tag[i+1:]
+
+               value, err := strconv.Unquote(qvalue)
+               if err != nil {
+                       return errTagValueSyntax
+               }
+
+               if !checkTagSpaces[key] {
+                       continue
+               }
+
+               switch key {
+               case "xml":
+                       // If the first or last character in the XML tag is a space, it is
+                       // suspicious.
+                       if strings.Trim(value, " ") != value {
+                               return errTagValueSpace
+                       }
+
+                       // If there are multiple spaces, they are suspicious.
+                       if strings.Count(value, " ") > 1 {
+                               return errTagValueSpace
+                       }
+
+                       // If there is no comma, skip the rest of the checks.
+                       comma := strings.IndexRune(value, ',')
+                       if comma < 0 {
+                               continue
+                       }
+
+                       // If the character before a comma is a space, this is suspicious.
+                       if comma > 0 && value[comma-1] == ' ' {
+                               return errTagValueSpace
+                       }
+                       value = value[comma+1:]
+               case "json":
+                       // JSON allows using spaces in the name, so skip it.
+                       comma := strings.IndexRune(value, ',')
+                       if comma < 0 {
+                               continue
+                       }
+                       value = value[comma+1:]
+               }
+
+               if strings.IndexByte(value, ' ') >= 0 {
+                       return errTagValueSpace
+               }
+       }
+       return nil
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go
new file mode 100644 (file)
index 0000000..35b0a3e
--- /dev/null
@@ -0,0 +1,175 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package tests defines an Analyzer that checks for common mistaken
+// usages of tests and examples.
+package tests
+
+import (
+       "go/ast"
+       "go/types"
+       "strings"
+       "unicode"
+       "unicode/utf8"
+
+       "golang.org/x/tools/go/analysis"
+)
+
+const Doc = `check for common mistaken usages of tests and examples
+
+The tests checker walks Test, Benchmark and Example functions checking
+malformed names, wrong signatures and examples documenting non-existent
+identifiers.`
+
+var Analyzer = &analysis.Analyzer{
+       Name: "tests",
+       Doc:  Doc,
+       Run:  run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       for _, f := range pass.Files {
+               if !strings.HasSuffix(pass.Fset.File(f.Pos()).Name(), "_test.go") {
+                       continue
+               }
+               for _, decl := range f.Decls {
+                       fn, ok := decl.(*ast.FuncDecl)
+                       if !ok || fn.Recv != nil {
+                               // Ignore non-functions or functions with receivers.
+                               continue
+                       }
+
+                       switch {
+                       case strings.HasPrefix(fn.Name.Name, "Example"):
+                               checkExample(pass, fn)
+                       case strings.HasPrefix(fn.Name.Name, "Test"):
+                               checkTest(pass, fn, "Test")
+                       case strings.HasPrefix(fn.Name.Name, "Benchmark"):
+                               checkTest(pass, fn, "Benchmark")
+                       }
+               }
+       }
+       return nil, nil
+}
+
+func isExampleSuffix(s string) bool {
+       r, size := utf8.DecodeRuneInString(s)
+       return size > 0 && unicode.IsLower(r)
+}
+
+func isTestSuffix(name string) bool {
+       if len(name) == 0 {
+               // "Test" is ok.
+               return true
+       }
+       r, _ := utf8.DecodeRuneInString(name)
+       return !unicode.IsLower(r)
+}
+
+func isTestParam(typ ast.Expr, wantType string) bool {
+       ptr, ok := typ.(*ast.StarExpr)
+       if !ok {
+               // Not a pointer.
+               return false
+       }
+       // No easy way of making sure it's a *testing.T or *testing.B:
+       // ensure the name of the type matches.
+       if name, ok := ptr.X.(*ast.Ident); ok {
+               return name.Name == wantType
+       }
+       if sel, ok := ptr.X.(*ast.SelectorExpr); ok {
+               return sel.Sel.Name == wantType
+       }
+       return false
+}
+
+func lookup(pkg *types.Package, name string) types.Object {
+       if o := pkg.Scope().Lookup(name); o != nil {
+               return o
+       }
+
+       // If this package is ".../foo_test" and it imports a package
+       // ".../foo", try looking in the latter package.
+       // This heuristic should work even on build systems that do not
+       // record any special link between the packages.
+       if basePath := strings.TrimSuffix(pkg.Path(), "_test"); basePath != pkg.Path() {
+               for _, imp := range pkg.Imports() {
+                       if imp.Path() == basePath {
+                               return imp.Scope().Lookup(name)
+                       }
+               }
+       }
+       return nil
+}
+
+func checkExample(pass *analysis.Pass, fn *ast.FuncDecl) {
+       fnName := fn.Name.Name
+       if params := fn.Type.Params; len(params.List) != 0 {
+               pass.Reportf(fn.Pos(), "%s should be niladic", fnName)
+       }
+       if results := fn.Type.Results; results != nil && len(results.List) != 0 {
+               pass.Reportf(fn.Pos(), "%s should return nothing", fnName)
+       }
+
+       if fnName == "Example" {
+               // Nothing more to do.
+               return
+       }
+
+       var (
+               exName = strings.TrimPrefix(fnName, "Example")
+               elems  = strings.SplitN(exName, "_", 3)
+               ident  = elems[0]
+               obj    = lookup(pass.Pkg, ident)
+       )
+       if ident != "" && obj == nil {
+               // Check ExampleFoo and ExampleBadFoo.
+               pass.Reportf(fn.Pos(), "%s refers to unknown identifier: %s", fnName, ident)
+               // Abort since obj is absent and no subsequent checks can be performed.
+               return
+       }
+       if len(elems) < 2 {
+               // Nothing more to do.
+               return
+       }
+
+       if ident == "" {
+               // Check Example_suffix and Example_BadSuffix.
+               if residual := strings.TrimPrefix(exName, "_"); !isExampleSuffix(residual) {
+                       pass.Reportf(fn.Pos(), "%s has malformed example suffix: %s", fnName, residual)
+               }
+               return
+       }
+
+       mmbr := elems[1]
+       if !isExampleSuffix(mmbr) {
+               // Check ExampleFoo_Method and ExampleFoo_BadMethod.
+               if obj, _, _ := types.LookupFieldOrMethod(obj.Type(), true, obj.Pkg(), mmbr); obj == nil {
+                       pass.Reportf(fn.Pos(), "%s refers to unknown field or method: %s.%s", fnName, ident, mmbr)
+               }
+       }
+       if len(elems) == 3 && !isExampleSuffix(elems[2]) {
+               // Check ExampleFoo_Method_suffix and ExampleFoo_Method_Badsuffix.
+               pass.Reportf(fn.Pos(), "%s has malformed example suffix: %s", fnName, elems[2])
+       }
+}
+
+func checkTest(pass *analysis.Pass, fn *ast.FuncDecl, prefix string) {
+       // Want functions with 0 results and 1 parameter.
+       if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 ||
+               fn.Type.Params == nil ||
+               len(fn.Type.Params.List) != 1 ||
+               len(fn.Type.Params.List[0].Names) > 1 {
+               return
+       }
+
+       // The param must look like a *testing.T or *testing.B.
+       if !isTestParam(fn.Type.Params.List[0].Type, prefix[:1]) {
+               return
+       }
+
+       if !isTestSuffix(fn.Name.Name[len(prefix):]) {
+               pass.Reportf(fn.Pos(), "%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix)
+       }
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go
new file mode 100644 (file)
index 0000000..19bc9c2
--- /dev/null
@@ -0,0 +1,314 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package unreachable defines an Analyzer that checks for unreachable code.
+package unreachable
+
+// TODO(adonovan): use the new cfg package, which is more precise.
+
+import (
+       "go/ast"
+       "go/token"
+       "log"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+const Doc = `check for unreachable code
+
+The unreachable analyzer finds statements that execution can never reach
+because they are preceded by an return statement, a call to panic, an
+infinite loop, or similar constructs.`
+
+var Analyzer = &analysis.Analyzer{
+       Name:             "unreachable",
+       Doc:              Doc,
+       Requires:         []*analysis.Analyzer{inspect.Analyzer},
+       RunDespiteErrors: true,
+       Run:              run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.FuncDecl)(nil),
+               (*ast.FuncLit)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               var body *ast.BlockStmt
+               switch n := n.(type) {
+               case *ast.FuncDecl:
+                       body = n.Body
+               case *ast.FuncLit:
+                       body = n.Body
+               }
+               if body == nil {
+                       return
+               }
+               d := &deadState{
+                       pass:     pass,
+                       hasBreak: make(map[ast.Stmt]bool),
+                       hasGoto:  make(map[string]bool),
+                       labels:   make(map[string]ast.Stmt),
+               }
+               d.findLabels(body)
+               d.reachable = true
+               d.findDead(body)
+       })
+       return nil, nil
+}
+
+type deadState struct {
+       pass        *analysis.Pass
+       hasBreak    map[ast.Stmt]bool
+       hasGoto     map[string]bool
+       labels      map[string]ast.Stmt
+       breakTarget ast.Stmt
+
+       reachable bool
+}
+
+// findLabels gathers information about the labels defined and used by stmt
+// and about which statements break, whether a label is involved or not.
+func (d *deadState) findLabels(stmt ast.Stmt) {
+       switch x := stmt.(type) {
+       default:
+               log.Fatalf("%s: internal error in findLabels: unexpected statement %T", d.pass.Fset.Position(x.Pos()), x)
+
+       case *ast.AssignStmt,
+               *ast.BadStmt,
+               *ast.DeclStmt,
+               *ast.DeferStmt,
+               *ast.EmptyStmt,
+               *ast.ExprStmt,
+               *ast.GoStmt,
+               *ast.IncDecStmt,
+               *ast.ReturnStmt,
+               *ast.SendStmt:
+               // no statements inside
+
+       case *ast.BlockStmt:
+               for _, stmt := range x.List {
+                       d.findLabels(stmt)
+               }
+
+       case *ast.BranchStmt:
+               switch x.Tok {
+               case token.GOTO:
+                       if x.Label != nil {
+                               d.hasGoto[x.Label.Name] = true
+                       }
+
+               case token.BREAK:
+                       stmt := d.breakTarget
+                       if x.Label != nil {
+                               stmt = d.labels[x.Label.Name]
+                       }
+                       if stmt != nil {
+                               d.hasBreak[stmt] = true
+                       }
+               }
+
+       case *ast.IfStmt:
+               d.findLabels(x.Body)
+               if x.Else != nil {
+                       d.findLabels(x.Else)
+               }
+
+       case *ast.LabeledStmt:
+               d.labels[x.Label.Name] = x.Stmt
+               d.findLabels(x.Stmt)
+
+       // These cases are all the same, but the x.Body only works
+       // when the specific type of x is known, so the cases cannot
+       // be merged.
+       case *ast.ForStmt:
+               outer := d.breakTarget
+               d.breakTarget = x
+               d.findLabels(x.Body)
+               d.breakTarget = outer
+
+       case *ast.RangeStmt:
+               outer := d.breakTarget
+               d.breakTarget = x
+               d.findLabels(x.Body)
+               d.breakTarget = outer
+
+       case *ast.SelectStmt:
+               outer := d.breakTarget
+               d.breakTarget = x
+               d.findLabels(x.Body)
+               d.breakTarget = outer
+
+       case *ast.SwitchStmt:
+               outer := d.breakTarget
+               d.breakTarget = x
+               d.findLabels(x.Body)
+               d.breakTarget = outer
+
+       case *ast.TypeSwitchStmt:
+               outer := d.breakTarget
+               d.breakTarget = x
+               d.findLabels(x.Body)
+               d.breakTarget = outer
+
+       case *ast.CommClause:
+               for _, stmt := range x.Body {
+                       d.findLabels(stmt)
+               }
+
+       case *ast.CaseClause:
+               for _, stmt := range x.Body {
+                       d.findLabels(stmt)
+               }
+       }
+}
+
+// findDead walks the statement looking for dead code.
+// If d.reachable is false on entry, stmt itself is dead.
+// When findDead returns, d.reachable tells whether the
+// statement following stmt is reachable.
+func (d *deadState) findDead(stmt ast.Stmt) {
+       // Is this a labeled goto target?
+       // If so, assume it is reachable due to the goto.
+       // This is slightly conservative, in that we don't
+       // check that the goto is reachable, so
+       //      L: goto L
+       // will not provoke a warning.
+       // But it's good enough.
+       if x, isLabel := stmt.(*ast.LabeledStmt); isLabel && d.hasGoto[x.Label.Name] {
+               d.reachable = true
+       }
+
+       if !d.reachable {
+               switch stmt.(type) {
+               case *ast.EmptyStmt:
+                       // do not warn about unreachable empty statements
+               default:
+                       d.pass.Reportf(stmt.Pos(), "unreachable code")
+                       d.reachable = true // silence error about next statement
+               }
+       }
+
+       switch x := stmt.(type) {
+       default:
+               log.Fatalf("%s: internal error in findDead: unexpected statement %T", d.pass.Fset.Position(x.Pos()), x)
+
+       case *ast.AssignStmt,
+               *ast.BadStmt,
+               *ast.DeclStmt,
+               *ast.DeferStmt,
+               *ast.EmptyStmt,
+               *ast.GoStmt,
+               *ast.IncDecStmt,
+               *ast.SendStmt:
+               // no control flow
+
+       case *ast.BlockStmt:
+               for _, stmt := range x.List {
+                       d.findDead(stmt)
+               }
+
+       case *ast.BranchStmt:
+               switch x.Tok {
+               case token.BREAK, token.GOTO, token.FALLTHROUGH:
+                       d.reachable = false
+               case token.CONTINUE:
+                       // NOTE: We accept "continue" statements as terminating.
+                       // They are not necessary in the spec definition of terminating,
+                       // because a continue statement cannot be the final statement
+                       // before a return. But for the more general problem of syntactically
+                       // identifying dead code, continue redirects control flow just
+                       // like the other terminating statements.
+                       d.reachable = false
+               }
+
+       case *ast.ExprStmt:
+               // Call to panic?
+               call, ok := x.X.(*ast.CallExpr)
+               if ok {
+                       name, ok := call.Fun.(*ast.Ident)
+                       if ok && name.Name == "panic" && name.Obj == nil {
+                               d.reachable = false
+                       }
+               }
+
+       case *ast.ForStmt:
+               d.findDead(x.Body)
+               d.reachable = x.Cond != nil || d.hasBreak[x]
+
+       case *ast.IfStmt:
+               d.findDead(x.Body)
+               if x.Else != nil {
+                       r := d.reachable
+                       d.reachable = true
+                       d.findDead(x.Else)
+                       d.reachable = d.reachable || r
+               } else {
+                       // might not have executed if statement
+                       d.reachable = true
+               }
+
+       case *ast.LabeledStmt:
+               d.findDead(x.Stmt)
+
+       case *ast.RangeStmt:
+               d.findDead(x.Body)
+               d.reachable = true
+
+       case *ast.ReturnStmt:
+               d.reachable = false
+
+       case *ast.SelectStmt:
+               // NOTE: Unlike switch and type switch below, we don't care
+               // whether a select has a default, because a select without a
+               // default blocks until one of the cases can run. That's different
+               // from a switch without a default, which behaves like it has
+               // a default with an empty body.
+               anyReachable := false
+               for _, comm := range x.Body.List {
+                       d.reachable = true
+                       for _, stmt := range comm.(*ast.CommClause).Body {
+                               d.findDead(stmt)
+                       }
+                       anyReachable = anyReachable || d.reachable
+               }
+               d.reachable = anyReachable || d.hasBreak[x]
+
+       case *ast.SwitchStmt:
+               anyReachable := false
+               hasDefault := false
+               for _, cas := range x.Body.List {
+                       cc := cas.(*ast.CaseClause)
+                       if cc.List == nil {
+                               hasDefault = true
+                       }
+                       d.reachable = true
+                       for _, stmt := range cc.Body {
+                               d.findDead(stmt)
+                       }
+                       anyReachable = anyReachable || d.reachable
+               }
+               d.reachable = anyReachable || d.hasBreak[x] || !hasDefault
+
+       case *ast.TypeSwitchStmt:
+               anyReachable := false
+               hasDefault := false
+               for _, cas := range x.Body.List {
+                       cc := cas.(*ast.CaseClause)
+                       if cc.List == nil {
+                               hasDefault = true
+                       }
+                       d.reachable = true
+                       for _, stmt := range cc.Body {
+                               d.findDead(stmt)
+                       }
+                       anyReachable = anyReachable || d.reachable
+               }
+               d.reachable = anyReachable || d.hasBreak[x] || !hasDefault
+       }
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go
new file mode 100644 (file)
index 0000000..116d622
--- /dev/null
@@ -0,0 +1,130 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package unsafeptr defines an Analyzer that checks for invalid
+// conversions of uintptr to unsafe.Pointer.
+package unsafeptr
+
+import (
+       "go/ast"
+       "go/token"
+       "go/types"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+const Doc = `check for invalid conversions of uintptr to unsafe.Pointer
+
+The unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer
+to convert integers to pointers. A conversion from uintptr to
+unsafe.Pointer is invalid if it implies that there is a uintptr-typed
+word in memory that holds a pointer value, because that word will be
+invisible to stack copying and to the garbage collector.`
+
+var Analyzer = &analysis.Analyzer{
+       Name:     "unsafeptr",
+       Doc:      Doc,
+       Requires: []*analysis.Analyzer{inspect.Analyzer},
+       Run:      run,
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.CallExpr)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               x := n.(*ast.CallExpr)
+               if len(x.Args) != 1 {
+                       return
+               }
+               if hasBasicType(pass.TypesInfo, x.Fun, types.UnsafePointer) &&
+                       hasBasicType(pass.TypesInfo, x.Args[0], types.Uintptr) &&
+                       !isSafeUintptr(pass.TypesInfo, x.Args[0]) {
+                       pass.Reportf(x.Pos(), "possible misuse of unsafe.Pointer")
+               }
+       })
+       return nil, nil
+}
+
+// isSafeUintptr reports whether x - already known to be a uintptr -
+// is safe to convert to unsafe.Pointer. It is safe if x is itself derived
+// directly from an unsafe.Pointer via conversion and pointer arithmetic
+// or if x is the result of reflect.Value.Pointer or reflect.Value.UnsafeAddr
+// or obtained from the Data field of a *reflect.SliceHeader or *reflect.StringHeader.
+func isSafeUintptr(info *types.Info, x ast.Expr) bool {
+       switch x := x.(type) {
+       case *ast.ParenExpr:
+               return isSafeUintptr(info, x.X)
+
+       case *ast.SelectorExpr:
+               switch x.Sel.Name {
+               case "Data":
+                       // reflect.SliceHeader and reflect.StringHeader are okay,
+                       // but only if they are pointing at a real slice or string.
+                       // It's not okay to do:
+                       //      var x SliceHeader
+                       //      x.Data = uintptr(unsafe.Pointer(...))
+                       //      ... use x ...
+                       //      p := unsafe.Pointer(x.Data)
+                       // because in the middle the garbage collector doesn't
+                       // see x.Data as a pointer and so x.Data may be dangling
+                       // by the time we get to the conversion at the end.
+                       // For now approximate by saying that *Header is okay
+                       // but Header is not.
+                       pt, ok := info.Types[x.X].Type.(*types.Pointer)
+                       if ok {
+                               t, ok := pt.Elem().(*types.Named)
+                               if ok && t.Obj().Pkg().Path() == "reflect" {
+                                       switch t.Obj().Name() {
+                                       case "StringHeader", "SliceHeader":
+                                               return true
+                                       }
+                               }
+                       }
+               }
+
+       case *ast.CallExpr:
+               switch len(x.Args) {
+               case 0:
+                       // maybe call to reflect.Value.Pointer or reflect.Value.UnsafeAddr.
+                       sel, ok := x.Fun.(*ast.SelectorExpr)
+                       if !ok {
+                               break
+                       }
+                       switch sel.Sel.Name {
+                       case "Pointer", "UnsafeAddr":
+                               t, ok := info.Types[sel.X].Type.(*types.Named)
+                               if ok && t.Obj().Pkg().Path() == "reflect" && t.Obj().Name() == "Value" {
+                                       return true
+                               }
+                       }
+
+               case 1:
+                       // maybe conversion of uintptr to unsafe.Pointer
+                       return hasBasicType(info, x.Fun, types.Uintptr) &&
+                               hasBasicType(info, x.Args[0], types.UnsafePointer)
+               }
+
+       case *ast.BinaryExpr:
+               switch x.Op {
+               case token.ADD, token.SUB, token.AND_NOT:
+                       return isSafeUintptr(info, x.X) && !isSafeUintptr(info, x.Y)
+               }
+       }
+       return false
+}
+
+// hasBasicType reports whether x's type is a types.Basic with the given kind.
+func hasBasicType(info *types.Info, x ast.Expr, kind types.BasicKind) bool {
+       t := info.Types[x].Type
+       if t != nil {
+               t = t.Underlying()
+       }
+       b, ok := t.(*types.Basic)
+       return ok && b.Kind() == kind
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go
new file mode 100644 (file)
index 0000000..76d4ab2
--- /dev/null
@@ -0,0 +1,131 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package unusedresult defines an analyzer that checks for unused
+// results of calls to certain pure functions.
+package unusedresult
+
+import (
+       "go/ast"
+       "go/token"
+       "go/types"
+       "sort"
+       "strings"
+
+       "golang.org/x/tools/go/analysis"
+       "golang.org/x/tools/go/analysis/passes/inspect"
+       "golang.org/x/tools/go/analysis/passes/internal/analysisutil"
+       "golang.org/x/tools/go/ast/inspector"
+)
+
+// TODO(adonovan): make this analysis modular: export a mustUseResult
+// fact for each function that tail-calls one of the functions that we
+// check, and check those functions too.
+
+const Doc = `check for unused results of calls to some functions
+
+Some functions like fmt.Errorf return a result and have no side effects,
+so it is always a mistake to discard the result. This analyzer reports
+calls to certain functions in which the result of the call is ignored.
+
+The set of functions may be controlled using flags.`
+
+var Analyzer = &analysis.Analyzer{
+       Name:     "unusedresult",
+       Doc:      Doc,
+       Requires: []*analysis.Analyzer{inspect.Analyzer},
+       Run:      run,
+}
+
+// flags
+var funcs, stringMethods stringSetFlag
+
+func init() {
+       // TODO(adonovan): provide a comment syntax to allow users to
+       // add their functions to this set using facts.
+       funcs.Set("errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint,sort.Reverse")
+       Analyzer.Flags.Var(&funcs, "funcs",
+               "comma-separated list of functions whose results must be used")
+
+       stringMethods.Set("Error,String")
+       Analyzer.Flags.Var(&stringMethods, "stringmethods",
+               "comma-separated list of names of methods of type func() string whose results must be used")
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+       inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+       nodeFilter := []ast.Node{
+               (*ast.ExprStmt)(nil),
+       }
+       inspect.Preorder(nodeFilter, func(n ast.Node) {
+               call, ok := analysisutil.Unparen(n.(*ast.ExprStmt).X).(*ast.CallExpr)
+               if !ok {
+                       return // not a call statement
+               }
+               fun := analysisutil.Unparen(call.Fun)
+
+               if pass.TypesInfo.Types[fun].IsType() {
+                       return // a conversion, not a call
+               }
+
+               selector, ok := fun.(*ast.SelectorExpr)
+               if !ok {
+                       return // neither a method call nor a qualified ident
+               }
+
+               sel, ok := pass.TypesInfo.Selections[selector]
+               if ok && sel.Kind() == types.MethodVal {
+                       // method (e.g. foo.String())
+                       obj := sel.Obj().(*types.Func)
+                       sig := sel.Type().(*types.Signature)
+                       if types.Identical(sig, sigNoArgsStringResult) {
+                               if stringMethods[obj.Name()] {
+                                       pass.Reportf(call.Lparen, "result of (%s).%s call not used",
+                                               sig.Recv().Type(), obj.Name())
+                               }
+                       }
+               } else if !ok {
+                       // package-qualified function (e.g. fmt.Errorf)
+                       obj := pass.TypesInfo.Uses[selector.Sel]
+                       if obj, ok := obj.(*types.Func); ok {
+                               qname := obj.Pkg().Path() + "." + obj.Name()
+                               if funcs[qname] {
+                                       pass.Reportf(call.Lparen, "result of %v call not used", qname)
+                               }
+                       }
+               }
+       })
+       return nil, nil
+}
+
+// func() string
+var sigNoArgsStringResult = types.NewSignature(nil, nil,
+       types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.String])),
+       false)
+
+type stringSetFlag map[string]bool
+
+func (ss *stringSetFlag) String() string {
+       var items []string
+       for item := range *ss {
+               items = append(items, item)
+       }
+       sort.Strings(items)
+       return strings.Join(items, ",")
+}
+
+func (ss *stringSetFlag) Set(s string) error {
+       m := make(map[string]bool) // clobber previous value
+       if s != "" {
+               for _, name := range strings.Split(s, ",") {
+                       if name == "" {
+                               continue // TODO: report error? proceed?
+                       }
+                       m[name] = true
+               }
+       }
+       *ss = m
+       return nil
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/analysis/validate.go b/src/cmd/vendor/golang.org/x/tools/go/analysis/validate.go
new file mode 100644 (file)
index 0000000..6e6cf49
--- /dev/null
@@ -0,0 +1,104 @@
+package analysis
+
+import (
+       "fmt"
+       "reflect"
+       "unicode"
+)
+
+// Validate reports an error if any of the analyzers are misconfigured.
+// Checks include:
+// that the name is a valid identifier;
+// that analyzer names are unique;
+// that the Requires graph is acylic;
+// that analyzer fact types are unique;
+// that each fact type is a pointer.
+func Validate(analyzers []*Analyzer) error {
+       names := make(map[string]bool)
+
+       // Map each fact type to its sole generating analyzer.
+       factTypes := make(map[reflect.Type]*Analyzer)
+
+       // Traverse the Requires graph, depth first.
+       const (
+               white = iota
+               grey
+               black
+               finished
+       )
+       color := make(map[*Analyzer]uint8)
+       var visit func(a *Analyzer) error
+       visit = func(a *Analyzer) error {
+               if a == nil {
+                       return fmt.Errorf("nil *Analyzer")
+               }
+               if color[a] == white {
+                       color[a] = grey
+
+                       // names
+                       if !validIdent(a.Name) {
+                               return fmt.Errorf("invalid analyzer name %q", a)
+                       }
+                       if names[a.Name] {
+                               return fmt.Errorf("duplicate analyzer name %q", a)
+                       }
+                       names[a.Name] = true
+
+                       if a.Doc == "" {
+                               return fmt.Errorf("analyzer %q is undocumented", a)
+                       }
+
+                       // fact types
+                       for _, f := range a.FactTypes {
+                               if f == nil {
+                                       return fmt.Errorf("analyzer %s has nil FactType", a)
+                               }
+                               t := reflect.TypeOf(f)
+                               if prev := factTypes[t]; prev != nil {
+                                       return fmt.Errorf("fact type %s registered by two analyzers: %v, %v",
+                                               t, a, prev)
+                               }
+                               if t.Kind() != reflect.Ptr {
+                                       return fmt.Errorf("%s: fact type %s is not a pointer", a, t)
+                               }
+                               factTypes[t] = a
+                       }
+
+                       // recursion
+                       for i, req := range a.Requires {
+                               if err := visit(req); err != nil {
+                                       return fmt.Errorf("%s.Requires[%d]: %v", a.Name, i, err)
+                               }
+                       }
+                       color[a] = black
+               }
+
+               return nil
+       }
+       for _, a := range analyzers {
+               if err := visit(a); err != nil {
+                       return err
+               }
+       }
+
+       // Reject duplicates among analyzers.
+       // Precondition:  color[a] == black.
+       // Postcondition: color[a] == finished.
+       for _, a := range analyzers {
+               if color[a] == finished {
+                       return fmt.Errorf("duplicate analyzer: %s", a.Name)
+               }
+               color[a] = finished
+       }
+
+       return nil
+}
+
+func validIdent(name string) bool {
+       for i, r := range name {
+               if !(r == '_' || unicode.IsLetter(r) || i > 0 && unicode.IsDigit(r)) {
+                       return false
+               }
+       }
+       return name != ""
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
new file mode 100644 (file)
index 0000000..6b7052b
--- /dev/null
@@ -0,0 +1,627 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+// This file defines utilities for working with source positions.
+
+import (
+       "fmt"
+       "go/ast"
+       "go/token"
+       "sort"
+)
+
+// PathEnclosingInterval returns the node that encloses the source
+// interval [start, end), and all its ancestors up to the AST root.
+//
+// The definition of "enclosing" used by this function considers
+// additional whitespace abutting a node to be enclosed by it.
+// In this example:
+//
+//              z := x + y // add them
+//                   <-A->
+//                  <----B----->
+//
+// the ast.BinaryExpr(+) node is considered to enclose interval B
+// even though its [Pos()..End()) is actually only interval A.
+// This behaviour makes user interfaces more tolerant of imperfect
+// input.
+//
+// This function treats tokens as nodes, though they are not included
+// in the result. e.g. PathEnclosingInterval("+") returns the
+// enclosing ast.BinaryExpr("x + y").
+//
+// If start==end, the 1-char interval following start is used instead.
+//
+// The 'exact' result is true if the interval contains only path[0]
+// and perhaps some adjacent whitespace.  It is false if the interval
+// overlaps multiple children of path[0], or if it contains only
+// interior whitespace of path[0].
+// In this example:
+//
+//              z := x + y // add them
+//                <--C-->     <---E-->
+//                  ^
+//                  D
+//
+// intervals C, D and E are inexact.  C is contained by the
+// z-assignment statement, because it spans three of its children (:=,
+// x, +).  So too is the 1-char interval D, because it contains only
+// interior whitespace of the assignment.  E is considered interior
+// whitespace of the BlockStmt containing the assignment.
+//
+// Precondition: [start, end) both lie within the same file as root.
+// TODO(adonovan): return (nil, false) in this case and remove precond.
+// Requires FileSet; see loader.tokenFileContainsPos.
+//
+// Postcondition: path is never nil; it always contains at least 'root'.
+//
+func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
+       // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
+
+       // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
+       var visit func(node ast.Node) bool
+       visit = func(node ast.Node) bool {
+               path = append(path, node)
+
+               nodePos := node.Pos()
+               nodeEnd := node.End()
+
+               // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
+
+               // Intersect [start, end) with interval of node.
+               if start < nodePos {
+                       start = nodePos
+               }
+               if end > nodeEnd {
+                       end = nodeEnd
+               }
+
+               // Find sole child that contains [start, end).
+               children := childrenOf(node)
+               l := len(children)
+               for i, child := range children {
+                       // [childPos, childEnd) is unaugmented interval of child.
+                       childPos := child.Pos()
+                       childEnd := child.End()
+
+                       // [augPos, augEnd) is whitespace-augmented interval of child.
+                       augPos := childPos
+                       augEnd := childEnd
+                       if i > 0 {
+                               augPos = children[i-1].End() // start of preceding whitespace
+                       }
+                       if i < l-1 {
+                               nextChildPos := children[i+1].Pos()
+                               // Does [start, end) lie between child and next child?
+                               if start >= augEnd && end <= nextChildPos {
+                                       return false // inexact match
+                               }
+                               augEnd = nextChildPos // end of following whitespace
+                       }
+
+                       // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
+                       //      i, augPos, augEnd, start, end) // debugging
+
+                       // Does augmented child strictly contain [start, end)?
+                       if augPos <= start && end <= augEnd {
+                               _, isToken := child.(tokenNode)
+                               return isToken || visit(child)
+                       }
+
+                       // Does [start, end) overlap multiple children?
+                       // i.e. left-augmented child contains start
+                       // but LR-augmented child does not contain end.
+                       if start < childEnd && end > augEnd {
+                               break
+                       }
+               }
+
+               // No single child contained [start, end),
+               // so node is the result.  Is it exact?
+
+               // (It's tempting to put this condition before the
+               // child loop, but it gives the wrong result in the
+               // case where a node (e.g. ExprStmt) and its sole
+               // child have equal intervals.)
+               if start == nodePos && end == nodeEnd {
+                       return true // exact match
+               }
+
+               return false // inexact: overlaps multiple children
+       }
+
+       if start > end {
+               start, end = end, start
+       }
+
+       if start < root.End() && end > root.Pos() {
+               if start == end {
+                       end = start + 1 // empty interval => interval of size 1
+               }
+               exact = visit(root)
+
+               // Reverse the path:
+               for i, l := 0, len(path); i < l/2; i++ {
+                       path[i], path[l-1-i] = path[l-1-i], path[i]
+               }
+       } else {
+               // Selection lies within whitespace preceding the
+               // first (or following the last) declaration in the file.
+               // The result nonetheless always includes the ast.File.
+               path = append(path, root)
+       }
+
+       return
+}
+
+// tokenNode is a dummy implementation of ast.Node for a single token.
+// They are used transiently by PathEnclosingInterval but never escape
+// this package.
+//
+type tokenNode struct {
+       pos token.Pos
+       end token.Pos
+}
+
+func (n tokenNode) Pos() token.Pos {
+       return n.pos
+}
+
+func (n tokenNode) End() token.Pos {
+       return n.end
+}
+
+func tok(pos token.Pos, len int) ast.Node {
+       return tokenNode{pos, pos + token.Pos(len)}
+}
+
+// childrenOf returns the direct non-nil children of ast.Node n.
+// It may include fake ast.Node implementations for bare tokens.
+// it is not safe to call (e.g.) ast.Walk on such nodes.
+//
+func childrenOf(n ast.Node) []ast.Node {
+       var children []ast.Node
+
+       // First add nodes for all true subtrees.
+       ast.Inspect(n, func(node ast.Node) bool {
+               if node == n { // push n
+                       return true // recur
+               }
+               if node != nil { // push child
+                       children = append(children, node)
+               }
+               return false // no recursion
+       })
+
+       // Then add fake Nodes for bare tokens.
+       switch n := n.(type) {
+       case *ast.ArrayType:
+               children = append(children,
+                       tok(n.Lbrack, len("[")),
+                       tok(n.Elt.End(), len("]")))
+
+       case *ast.AssignStmt:
+               children = append(children,
+                       tok(n.TokPos, len(n.Tok.String())))
+
+       case *ast.BasicLit:
+               children = append(children,
+                       tok(n.ValuePos, len(n.Value)))
+
+       case *ast.BinaryExpr:
+               children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+       case *ast.BlockStmt:
+               children = append(children,
+                       tok(n.Lbrace, len("{")),
+                       tok(n.Rbrace, len("}")))
+
+       case *ast.BranchStmt:
+               children = append(children,
+                       tok(n.TokPos, len(n.Tok.String())))
+
+       case *ast.CallExpr:
+               children = append(children,
+                       tok(n.Lparen, len("(")),
+                       tok(n.Rparen, len(")")))
+               if n.Ellipsis != 0 {
+                       children = append(children, tok(n.Ellipsis, len("...")))
+               }
+
+       case *ast.CaseClause:
+               if n.List == nil {
+                       children = append(children,
+                               tok(n.Case, len("default")))
+               } else {
+                       children = append(children,
+                               tok(n.Case, len("case")))
+               }
+               children = append(children, tok(n.Colon, len(":")))
+
+       case *ast.ChanType:
+               switch n.Dir {
+               case ast.RECV:
+                       children = append(children, tok(n.Begin, len("<-chan")))
+               case ast.SEND:
+                       children = append(children, tok(n.Begin, len("chan<-")))
+               case ast.RECV | ast.SEND:
+                       children = append(children, tok(n.Begin, len("chan")))
+               }
+
+       case *ast.CommClause:
+               if n.Comm == nil {
+                       children = append(children,
+                               tok(n.Case, len("default")))
+               } else {
+                       children = append(children,
+                               tok(n.Case, len("case")))
+               }
+               children = append(children, tok(n.Colon, len(":")))
+
+       case *ast.Comment:
+               // nop
+
+       case *ast.CommentGroup:
+               // nop
+
+       case *ast.CompositeLit:
+               children = append(children,
+                       tok(n.Lbrace, len("{")),
+                       tok(n.Rbrace, len("{")))
+
+       case *ast.DeclStmt:
+               // nop
+
+       case *ast.DeferStmt:
+               children = append(children,
+                       tok(n.Defer, len("defer")))
+
+       case *ast.Ellipsis:
+               children = append(children,
+                       tok(n.Ellipsis, len("...")))
+
+       case *ast.EmptyStmt:
+               // nop
+
+       case *ast.ExprStmt:
+               // nop
+
+       case *ast.Field:
+               // TODO(adonovan): Field.{Doc,Comment,Tag}?
+
+       case *ast.FieldList:
+               children = append(children,
+                       tok(n.Opening, len("(")),
+                       tok(n.Closing, len(")")))
+
+       case *ast.File:
+               // TODO test: Doc
+               children = append(children,
+                       tok(n.Package, len("package")))
+
+       case *ast.ForStmt:
+               children = append(children,
+                       tok(n.For, len("for")))
+
+       case *ast.FuncDecl:
+               // TODO(adonovan): FuncDecl.Comment?
+
+               // Uniquely, FuncDecl breaks the invariant that
+               // preorder traversal yields tokens in lexical order:
+               // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
+               //
+               // As a workaround, we inline the case for FuncType
+               // here and order things correctly.
+               //
+               children = nil // discard ast.Walk(FuncDecl) info subtrees
+               children = append(children, tok(n.Type.Func, len("func")))
+               if n.Recv != nil {
+                       children = append(children, n.Recv)
+               }
+               children = append(children, n.Name)
+               if n.Type.Params != nil {
+                       children = append(children, n.Type.Params)
+               }
+               if n.Type.Results != nil {
+                       children = append(children, n.Type.Results)
+               }
+               if n.Body != nil {
+                       children = append(children, n.Body)
+               }
+
+       case *ast.FuncLit:
+               // nop
+
+       case *ast.FuncType:
+               if n.Func != 0 {
+                       children = append(children,
+                               tok(n.Func, len("func")))
+               }
+
+       case *ast.GenDecl:
+               children = append(children,
+                       tok(n.TokPos, len(n.Tok.String())))
+               if n.Lparen != 0 {
+                       children = append(children,
+                               tok(n.Lparen, len("(")),
+                               tok(n.Rparen, len(")")))
+               }
+
+       case *ast.GoStmt:
+               children = append(children,
+                       tok(n.Go, len("go")))
+
+       case *ast.Ident:
+               children = append(children,
+                       tok(n.NamePos, len(n.Name)))
+
+       case *ast.IfStmt:
+               children = append(children,
+                       tok(n.If, len("if")))
+
+       case *ast.ImportSpec:
+               // TODO(adonovan): ImportSpec.{Doc,EndPos}?
+
+       case *ast.IncDecStmt:
+               children = append(children,
+                       tok(n.TokPos, len(n.Tok.String())))
+
+       case *ast.IndexExpr:
+               children = append(children,
+                       tok(n.Lbrack, len("{")),
+                       tok(n.Rbrack, len("}")))
+
+       case *ast.InterfaceType:
+               children = append(children,
+                       tok(n.Interface, len("interface")))
+
+       case *ast.KeyValueExpr:
+               children = append(children,
+                       tok(n.Colon, len(":")))
+
+       case *ast.LabeledStmt:
+               children = append(children,
+                       tok(n.Colon, len(":")))
+
+       case *ast.MapType:
+               children = append(children,
+                       tok(n.Map, len("map")))
+
+       case *ast.ParenExpr:
+               children = append(children,
+                       tok(n.Lparen, len("(")),
+                       tok(n.Rparen, len(")")))
+
+       case *ast.RangeStmt:
+               children = append(children,
+                       tok(n.For, len("for")),
+                       tok(n.TokPos, len(n.Tok.String())))
+
+       case *ast.ReturnStmt:
+               children = append(children,
+                       tok(n.Return, len("return")))
+
+       case *ast.SelectStmt:
+               children = append(children,
+                       tok(n.Select, len("select")))
+
+       case *ast.SelectorExpr:
+               // nop
+
+       case *ast.SendStmt:
+               children = append(children,
+                       tok(n.Arrow, len("<-")))
+
+       case *ast.SliceExpr:
+               children = append(children,
+                       tok(n.Lbrack, len("[")),
+                       tok(n.Rbrack, len("]")))
+
+       case *ast.StarExpr:
+               children = append(children, tok(n.Star, len("*")))
+
+       case *ast.StructType:
+               children = append(children, tok(n.Struct, len("struct")))
+
+       case *ast.SwitchStmt:
+               children = append(children, tok(n.Switch, len("switch")))
+
+       case *ast.TypeAssertExpr:
+               children = append(children,
+                       tok(n.Lparen-1, len(".")),
+                       tok(n.Lparen, len("(")),
+                       tok(n.Rparen, len(")")))
+
+       case *ast.TypeSpec:
+               // TODO(adonovan): TypeSpec.{Doc,Comment}?
+
+       case *ast.TypeSwitchStmt:
+               children = append(children, tok(n.Switch, len("switch")))
+
+       case *ast.UnaryExpr:
+               children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+       case *ast.ValueSpec:
+               // TODO(adonovan): ValueSpec.{Doc,Comment}?
+
+       case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
+               // nop
+       }
+
+       // TODO(adonovan): opt: merge the logic of ast.Inspect() into
+       // the switch above so we can make interleaved callbacks for
+       // both Nodes and Tokens in the right order and avoid the need
+       // to sort.
+       sort.Sort(byPos(children))
+
+       return children
+}
+
+type byPos []ast.Node
+
+func (sl byPos) Len() int {
+       return len(sl)
+}
+func (sl byPos) Less(i, j int) bool {
+       return sl[i].Pos() < sl[j].Pos()
+}
+func (sl byPos) Swap(i, j int) {
+       sl[i], sl[j] = sl[j], sl[i]
+}
+
+// NodeDescription returns a description of the concrete type of n suitable
+// for a user interface.
+//
+// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
+// StarExpr) we could be much more specific given the path to the AST
+// root.  Perhaps we should do that.
+//
+func NodeDescription(n ast.Node) string {
+       switch n := n.(type) {
+       case *ast.ArrayType:
+               return "array type"
+       case *ast.AssignStmt:
+               return "assignment"
+       case *ast.BadDecl:
+               return "bad declaration"
+       case *ast.BadExpr:
+               return "bad expression"
+       case *ast.BadStmt:
+               return "bad statement"
+       case *ast.BasicLit:
+               return "basic literal"
+       case *ast.BinaryExpr:
+               return fmt.Sprintf("binary %s operation", n.Op)
+       case *ast.BlockStmt:
+               return "block"
+       case *ast.BranchStmt:
+               switch n.Tok {
+               case token.BREAK:
+                       return "break statement"
+               case token.CONTINUE:
+                       return "continue statement"
+               case token.GOTO:
+                       return "goto statement"
+               case token.FALLTHROUGH:
+                       return "fall-through statement"
+               }
+       case *ast.CallExpr:
+               if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
+                       return "function call (or conversion)"
+               }
+               return "function call"
+       case *ast.CaseClause:
+               return "case clause"
+       case *ast.ChanType:
+               return "channel type"
+       case *ast.CommClause:
+               return "communication clause"
+       case *ast.Comment:
+               return "comment"
+       case *ast.CommentGroup:
+               return "comment group"
+       case *ast.CompositeLit:
+               return "composite literal"
+       case *ast.DeclStmt:
+               return NodeDescription(n.Decl) + " statement"
+       case *ast.DeferStmt:
+               return "defer statement"
+       case *ast.Ellipsis:
+               return "ellipsis"
+       case *ast.EmptyStmt:
+               return "empty statement"
+       case *ast.ExprStmt:
+               return "expression statement"
+       case *ast.Field:
+               // Can be any of these:
+               // struct {x, y int}  -- struct field(s)
+               // struct {T}         -- anon struct field
+               // interface {I}      -- interface embedding
+               // interface {f()}    -- interface method
+               // func (A) func(B) C -- receiver, param(s), result(s)
+               return "field/method/parameter"
+       case *ast.FieldList:
+               return "field/method/parameter list"
+       case *ast.File:
+               return "source file"
+       case *ast.ForStmt:
+               return "for loop"
+       case *ast.FuncDecl:
+               return "function declaration"
+       case *ast.FuncLit:
+               return "function literal"
+       case *ast.FuncType:
+               return "function type"
+       case *ast.GenDecl:
+               switch n.Tok {
+               case token.IMPORT:
+                       return "import declaration"
+               case token.CONST:
+                       return "constant declaration"
+               case token.TYPE:
+                       return "type declaration"
+               case token.VAR:
+                       return "variable declaration"
+               }
+       case *ast.GoStmt:
+               return "go statement"
+       case *ast.Ident:
+               return "identifier"
+       case *ast.IfStmt:
+               return "if statement"
+       case *ast.ImportSpec:
+               return "import specification"
+       case *ast.IncDecStmt:
+               if n.Tok == token.INC {
+                       return "increment statement"
+               }
+               return "decrement statement"
+       case *ast.IndexExpr:
+               return "index expression"
+       case *ast.InterfaceType:
+               return "interface type"
+       case *ast.KeyValueExpr:
+               return "key/value association"
+       case *ast.LabeledStmt:
+               return "statement label"
+       case *ast.MapType:
+               return "map type"
+       case *ast.Package:
+               return "package"
+       case *ast.ParenExpr:
+               return "parenthesized " + NodeDescription(n.X)
+       case *ast.RangeStmt:
+               return "range loop"
+       case *ast.ReturnStmt:
+               return "return statement"
+       case *ast.SelectStmt:
+               return "select statement"
+       case *ast.SelectorExpr:
+               return "selector"
+       case *ast.SendStmt:
+               return "channel send"
+       case *ast.SliceExpr:
+               return "slice expression"
+       case *ast.StarExpr:
+               return "*-operation" // load/store expr or pointer type
+       case *ast.StructType:
+               return "struct type"
+       case *ast.SwitchStmt:
+               return "switch statement"
+       case *ast.TypeAssertExpr:
+               return "type assertion"
+       case *ast.TypeSpec:
+               return "type specification"
+       case *ast.TypeSwitchStmt:
+               return "type switch"
+       case *ast.UnaryExpr:
+               return fmt.Sprintf("unary %s operation", n.Op)
+       case *ast.ValueSpec:
+               return "value specification"
+
+       }
+       panic(fmt.Sprintf("unexpected node type: %T", n))
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/imports.go
new file mode 100644 (file)
index 0000000..04ad679
--- /dev/null
@@ -0,0 +1,471 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package astutil contains common utilities for working with the Go AST.
+package astutil // import "golang.org/x/tools/go/ast/astutil"
+
+import (
+       "fmt"
+       "go/ast"
+       "go/token"
+       "strconv"
+       "strings"
+)
+
+// AddImport adds the import path to the file f, if absent.
+func AddImport(fset *token.FileSet, f *ast.File, ipath string) (added bool) {
+       return AddNamedImport(fset, f, "", ipath)
+}
+
+// AddNamedImport adds the import path to the file f, if absent.
+// If name is not empty, it is used to rename the import.
+//
+// For example, calling
+//     AddNamedImport(fset, f, "pathpkg", "path")
+// adds
+//     import pathpkg "path"
+func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added bool) {
+       if imports(f, ipath) {
+               return false
+       }
+
+       newImport := &ast.ImportSpec{
+               Path: &ast.BasicLit{
+                       Kind:  token.STRING,
+                       Value: strconv.Quote(ipath),
+               },
+       }
+       if name != "" {
+               newImport.Name = &ast.Ident{Name: name}
+       }
+
+       // Find an import decl to add to.
+       // The goal is to find an existing import
+       // whose import path has the longest shared
+       // prefix with ipath.
+       var (
+               bestMatch  = -1         // length of longest shared prefix
+               lastImport = -1         // index in f.Decls of the file's final import decl
+               impDecl    *ast.GenDecl // import decl containing the best match
+               impIndex   = -1         // spec index in impDecl containing the best match
+
+               isThirdPartyPath = isThirdParty(ipath)
+       )
+       for i, decl := range f.Decls {
+               gen, ok := decl.(*ast.GenDecl)
+               if ok && gen.Tok == token.IMPORT {
+                       lastImport = i
+                       // Do not add to import "C", to avoid disrupting the
+                       // association with its doc comment, breaking cgo.
+                       if declImports(gen, "C") {
+                               continue
+                       }
+
+                       // Match an empty import decl if that's all that is available.
+                       if len(gen.Specs) == 0 && bestMatch == -1 {
+                               impDecl = gen
+                       }
+
+                       // Compute longest shared prefix with imports in this group and find best
+                       // matched import spec.
+                       // 1. Always prefer import spec with longest shared prefix.
+                       // 2. While match length is 0,
+                       // - for stdlib package: prefer first import spec.
+                       // - for third party package: prefer first third party import spec.
+                       // We cannot use last import spec as best match for third party package
+                       // because grouped imports are usually placed last by goimports -local
+                       // flag.
+                       // See issue #19190.
+                       seenAnyThirdParty := false
+                       for j, spec := range gen.Specs {
+                               impspec := spec.(*ast.ImportSpec)
+                               p := importPath(impspec)
+                               n := matchLen(p, ipath)
+                               if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
+                                       bestMatch = n
+                                       impDecl = gen
+                                       impIndex = j
+                               }
+                               seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
+                       }
+               }
+       }
+
+       // If no import decl found, add one after the last import.
+       if impDecl == nil {
+               impDecl = &ast.GenDecl{
+                       Tok: token.IMPORT,
+               }
+               if lastImport >= 0 {
+                       impDecl.TokPos = f.Decls[lastImport].End()
+               } else {
+                       // There are no existing imports.
+                       // Our new import, preceded by a blank line,  goes after the package declaration
+                       // and after the comment, if any, that starts on the same line as the
+                       // package declaration.
+                       impDecl.TokPos = f.Package
+
+                       file := fset.File(f.Package)
+                       pkgLine := file.Line(f.Package)
+                       for _, c := range f.Comments {
+                               if file.Line(c.Pos()) > pkgLine {
+                                       break
+                               }
+                               // +2 for a blank line
+                               impDecl.TokPos = c.End() + 2
+                       }
+               }
+               f.Decls = append(f.Decls, nil)
+               copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
+               f.Decls[lastImport+1] = impDecl
+       }
+
+       // Insert new import at insertAt.
+       insertAt := 0
+       if impIndex >= 0 {
+               // insert after the found import
+               insertAt = impIndex + 1
+       }
+       impDecl.Specs = append(impDecl.Specs, nil)
+       copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
+       impDecl.Specs[insertAt] = newImport
+       pos := impDecl.Pos()
+       if insertAt > 0 {
+               // If there is a comment after an existing import, preserve the comment
+               // position by adding the new import after the comment.
+               if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
+                       pos = spec.Comment.End()
+               } else {
+                       // Assign same position as the previous import,
+                       // so that the sorter sees it as being in the same block.
+                       pos = impDecl.Specs[insertAt-1].Pos()
+               }
+       }
+       if newImport.Name != nil {
+               newImport.Name.NamePos = pos
+       }
+       newImport.Path.ValuePos = pos
+       newImport.EndPos = pos
+
+       // Clean up parens. impDecl contains at least one spec.
+       if len(impDecl.Specs) == 1 {
+               // Remove unneeded parens.
+               impDecl.Lparen = token.NoPos
+       } else if !impDecl.Lparen.IsValid() {
+               // impDecl needs parens added.
+               impDecl.Lparen = impDecl.Specs[0].Pos()
+       }
+
+       f.Imports = append(f.Imports, newImport)
+
+       if len(f.Decls) <= 1 {
+               return true
+       }
+
+       // Merge all the import declarations into the first one.
+       var first *ast.GenDecl
+       for i := 0; i < len(f.Decls); i++ {
+               decl := f.Decls[i]
+               gen, ok := decl.(*ast.GenDecl)
+               if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
+                       continue
+               }
+               if first == nil {
+                       first = gen
+                       continue // Don't touch the first one.
+               }
+               // We now know there is more than one package in this import
+               // declaration. Ensure that it ends up parenthesized.
+               first.Lparen = first.Pos()
+               // Move the imports of the other import declaration to the first one.
+               for _, spec := range gen.Specs {
+                       spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
+                       first.Specs = append(first.Specs, spec)
+               }
+               f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+               i--
+       }
+
+       return true
+}
+
+func isThirdParty(importPath string) bool {
+       // Third party package import path usually contains "." (".com", ".org", ...)
+       // This logic is taken from golang.org/x/tools/imports package.
+       return strings.Contains(importPath, ".")
+}
+
+// DeleteImport deletes the import path from the file f, if present.
+func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
+       return DeleteNamedImport(fset, f, "", path)
+}
+
+// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
+func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
+       var delspecs []*ast.ImportSpec
+       var delcomments []*ast.CommentGroup
+
+       // Find the import nodes that import path, if any.
+       for i := 0; i < len(f.Decls); i++ {
+               decl := f.Decls[i]
+               gen, ok := decl.(*ast.GenDecl)
+               if !ok || gen.Tok != token.IMPORT {
+                       continue
+               }
+               for j := 0; j < len(gen.Specs); j++ {
+                       spec := gen.Specs[j]
+                       impspec := spec.(*ast.ImportSpec)
+                       if impspec.Name == nil && name != "" {
+                               continue
+                       }
+                       if impspec.Name != nil && impspec.Name.Name != name {
+                               continue
+                       }
+                       if importPath(impspec) != path {
+                               continue
+                       }
+
+                       // We found an import spec that imports path.
+                       // Delete it.
+                       delspecs = append(delspecs, impspec)
+                       deleted = true
+                       copy(gen.Specs[j:], gen.Specs[j+1:])
+                       gen.Specs = gen.Specs[:len(gen.Specs)-1]
+
+                       // If this was the last import spec in this decl,
+                       // delete the decl, too.
+                       if len(gen.Specs) == 0 {
+                               copy(f.Decls[i:], f.Decls[i+1:])
+                               f.Decls = f.Decls[:len(f.Decls)-1]
+                               i--
+                               break
+                       } else if len(gen.Specs) == 1 {
+                               if impspec.Doc != nil {
+                                       delcomments = append(delcomments, impspec.Doc)
+                               }
+                               if impspec.Comment != nil {
+                                       delcomments = append(delcomments, impspec.Comment)
+                               }
+                               for _, cg := range f.Comments {
+                                       // Found comment on the same line as the import spec.
+                                       if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
+                                               delcomments = append(delcomments, cg)
+                                               break
+                                       }
+                               }
+
+                               spec := gen.Specs[0].(*ast.ImportSpec)
+
+                               // Move the documentation right after the import decl.
+                               if spec.Doc != nil {
+                                       for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
+                                               fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+                                       }
+                               }
+                               for _, cg := range f.Comments {
+                                       if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
+                                               for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
+                                                       fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+                                               }
+                                               break
+                                       }
+                               }
+                       }
+                       if j > 0 {
+                               lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
+                               lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
+                               line := fset.Position(impspec.Path.ValuePos).Line
+
+                               // We deleted an entry but now there may be
+                               // a blank line-sized hole where the import was.
+                               if line-lastLine > 1 {
+                                       // There was a blank line immediately preceding the deleted import,
+                                       // so there's no need to close the hole.
+                                       // Do nothing.
+                               } else if line != fset.File(gen.Rparen).LineCount() {
+                                       // There was no blank line. Close the hole.
+                                       fset.File(gen.Rparen).MergeLine(line)
+                               }
+                       }
+                       j--
+               }
+       }
+
+       // Delete imports from f.Imports.
+       for i := 0; i < len(f.Imports); i++ {
+               imp := f.Imports[i]
+               for j, del := range delspecs {
+                       if imp == del {
+                               copy(f.Imports[i:], f.Imports[i+1:])
+                               f.Imports = f.Imports[:len(f.Imports)-1]
+                               copy(delspecs[j:], delspecs[j+1:])
+                               delspecs = delspecs[:len(delspecs)-1]
+                               i--
+                               break
+                       }
+               }
+       }
+
+       // Delete comments from f.Comments.
+       for i := 0; i < len(f.Comments); i++ {
+               cg := f.Comments[i]
+               for j, del := range delcomments {
+                       if cg == del {
+                               copy(f.Comments[i:], f.Comments[i+1:])
+                               f.Comments = f.Comments[:len(f.Comments)-1]
+                               copy(delcomments[j:], delcomments[j+1:])
+                               delcomments = delcomments[:len(delcomments)-1]
+                               i--
+                               break
+                       }
+               }
+       }
+
+       if len(delspecs) > 0 {
+               panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
+       }
+
+       return
+}
+
+// RewriteImport rewrites any import of path oldPath to path newPath.
+func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
+       for _, imp := range f.Imports {
+               if importPath(imp) == oldPath {
+                       rewrote = true
+                       // record old End, because the default is to compute
+                       // it using the length of imp.Path.Value.
+                       imp.EndPos = imp.End()
+                       imp.Path.Value = strconv.Quote(newPath)
+               }
+       }
+       return
+}
+
+// UsesImport reports whether a given import is used.
+func UsesImport(f *ast.File, path string) (used bool) {
+       spec := importSpec(f, path)
+       if spec == nil {
+               return
+       }
+
+       name := spec.Name.String()
+       switch name {
+       case "<nil>":
+               // If the package name is not explicitly specified,
+               // make an educated guess. This is not guaranteed to be correct.
+               lastSlash := strings.LastIndex(path, "/")
+               if lastSlash == -1 {
+                       name = path
+               } else {
+                       name = path[lastSlash+1:]
+               }
+       case "_", ".":
+               // Not sure if this import is used - err on the side of caution.
+               return true
+       }
+
+       ast.Walk(visitFn(func(n ast.Node) {
+               sel, ok := n.(*ast.SelectorExpr)
+               if ok && isTopName(sel.X, name) {
+                       used = true
+               }
+       }), f)
+
+       return
+}
+
+type visitFn func(node ast.Node)
+
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+       fn(node)
+       return fn
+}
+
+// imports returns true if f imports path.
+func imports(f *ast.File, path string) bool {
+       return importSpec(f, path) != nil
+}
+
+// importSpec returns the import spec if f imports path,
+// or nil otherwise.
+func importSpec(f *ast.File, path string) *ast.ImportSpec {
+       for _, s := range f.Imports {
+               if importPath(s) == path {
+                       return s
+               }
+       }
+       return nil
+}
+
+// importPath returns the unquoted import path of s,
+// or "" if the path is not properly quoted.
+func importPath(s *ast.ImportSpec) string {
+       t, err := strconv.Unquote(s.Path.Value)
+       if err == nil {
+               return t
+       }
+       return ""
+}
+
+// declImports reports whether gen contains an import of path.
+func declImports(gen *ast.GenDecl, path string) bool {
+       if gen.Tok != token.IMPORT {
+               return false
+       }
+       for _, spec := range gen.Specs {
+               impspec := spec.(*ast.ImportSpec)
+               if importPath(impspec) == path {
+                       return true
+               }
+       }
+       return false
+}
+
+// matchLen returns the length of the longest path segment prefix shared by x and y.
+func matchLen(x, y string) int {
+       n := 0
+       for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
+               if x[i] == '/' {
+                       n++
+               }
+       }
+       return n
+}
+
+// isTopName returns true if n is a top-level unresolved identifier with the given name.
+func isTopName(n ast.Expr, name string) bool {
+       id, ok := n.(*ast.Ident)
+       return ok && id.Name == name && id.Obj == nil
+}
+
+// Imports returns the file imports grouped by paragraph.
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
+       var groups [][]*ast.ImportSpec
+
+       for _, decl := range f.Decls {
+               genDecl, ok := decl.(*ast.GenDecl)
+               if !ok || genDecl.Tok != token.IMPORT {
+                       break
+               }
+
+               group := []*ast.ImportSpec{}
+
+               var lastLine int
+               for _, spec := range genDecl.Specs {
+                       importSpec := spec.(*ast.ImportSpec)
+                       pos := importSpec.Path.ValuePos
+                       line := fset.Position(pos).Line
+                       if lastLine > 0 && pos > 0 && line-lastLine > 1 {
+                               groups = append(groups, group)
+                               group = []*ast.ImportSpec{}
+                       }
+                       group = append(group, importSpec)
+                       lastLine = line
+               }
+               groups = append(groups, group)
+       }
+
+       return groups
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
new file mode 100644 (file)
index 0000000..cf72ea9
--- /dev/null
@@ -0,0 +1,477 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import (
+       "fmt"
+       "go/ast"
+       "reflect"
+       "sort"
+)
+
+// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
+// before and/or after the node's children, using a Cursor describing
+// the current node and providing operations on it.
+//
+// The return value of ApplyFunc controls the syntax tree traversal.
+// See Apply for details.
+type ApplyFunc func(*Cursor) bool
+
+// Apply traverses a syntax tree recursively, starting with root,
+// and calling pre and post for each node as described below.
+// Apply returns the syntax tree, possibly modified.
+//
+// If pre is not nil, it is called for each node before the node's
+// children are traversed (pre-order). If pre returns false, no
+// children are traversed, and post is not called for that node.
+//
+// If post is not nil, and a prior call of pre didn't return false,
+// post is called for each node after its children are traversed
+// (post-order). If post returns false, traversal is terminated and
+// Apply returns immediately.
+//
+// Only fields that refer to AST nodes are considered children;
+// i.e., token.Pos, Scopes, Objects, and fields of basic types
+// (strings, etc.) are ignored.
+//
+// Children are traversed in the order in which they appear in the
+// respective node's struct definition. A package's files are
+// traversed in the filenames' alphabetical order.
+//
+func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
+       parent := &struct{ ast.Node }{root}
+       defer func() {
+               if r := recover(); r != nil && r != abort {
+                       panic(r)
+               }
+               result = parent.Node
+       }()
+       a := &application{pre: pre, post: post}
+       a.apply(parent, "Node", nil, root)
+       return
+}
+
+var abort = new(int) // singleton, to signal termination of Apply
+
+// A Cursor describes a node encountered during Apply.
+// Information about the node and its parent is available
+// from the Node, Parent, Name, and Index methods.
+//
+// If p is a variable of type and value of the current parent node
+// c.Parent(), and f is the field identifier with name c.Name(),
+// the following invariants hold:
+//
+//   p.f            == c.Node()  if c.Index() <  0
+//   p.f[c.Index()] == c.Node()  if c.Index() >= 0
+//
+// The methods Replace, Delete, InsertBefore, and InsertAfter
+// can be used to change the AST without disrupting Apply.
+type Cursor struct {
+       parent ast.Node
+       name   string
+       iter   *iterator // valid if non-nil
+       node   ast.Node
+}
+
+// Node returns the current Node.
+func (c *Cursor) Node() ast.Node { return c.node }
+
+// Parent returns the parent of the current Node.
+func (c *Cursor) Parent() ast.Node { return c.parent }
+
+// Name returns the name of the parent Node field that contains the current Node.
+// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
+// the filename for the current Node.
+func (c *Cursor) Name() string { return c.name }
+
+// Index reports the index >= 0 of the current Node in the slice of Nodes that
+// contains it, or a value < 0 if the current Node is not part of a slice.
+// The index of the current node changes if InsertBefore is called while
+// processing the current node.
+func (c *Cursor) Index() int {
+       if c.iter != nil {
+               return c.iter.index
+       }
+       return -1
+}
+
+// field returns the current node's parent field value.
+func (c *Cursor) field() reflect.Value {
+       return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
+}
+
+// Replace replaces the current Node with n.
+// The replacement node is not walked by Apply.
+func (c *Cursor) Replace(n ast.Node) {
+       if _, ok := c.node.(*ast.File); ok {
+               file, ok := n.(*ast.File)
+               if !ok {
+                       panic("attempt to replace *ast.File with non-*ast.File")
+               }
+               c.parent.(*ast.Package).Files[c.name] = file
+               return
+       }
+
+       v := c.field()
+       if i := c.Index(); i >= 0 {
+               v = v.Index(i)
+       }
+       v.Set(reflect.ValueOf(n))
+}
+
+// Delete deletes the current Node from its containing slice.
+// If the current Node is not part of a slice, Delete panics.
+// As a special case, if the current node is a package file,
+// Delete removes it from the package's Files map.
+func (c *Cursor) Delete() {
+       if _, ok := c.node.(*ast.File); ok {
+               delete(c.parent.(*ast.Package).Files, c.name)
+               return
+       }
+
+       i := c.Index()
+       if i < 0 {
+               panic("Delete node not contained in slice")
+       }
+       v := c.field()
+       l := v.Len()
+       reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
+       v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
+       v.SetLen(l - 1)
+       c.iter.step--
+}
+
+// InsertAfter inserts n after the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertAfter panics.
+// Apply does not walk n.
+func (c *Cursor) InsertAfter(n ast.Node) {
+       i := c.Index()
+       if i < 0 {
+               panic("InsertAfter node not contained in slice")
+       }
+       v := c.field()
+       v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+       l := v.Len()
+       reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
+       v.Index(i + 1).Set(reflect.ValueOf(n))
+       c.iter.step++
+}
+
+// InsertBefore inserts n before the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertBefore panics.
+// Apply will not walk n.
+func (c *Cursor) InsertBefore(n ast.Node) {
+       i := c.Index()
+       if i < 0 {
+               panic("InsertBefore node not contained in slice")
+       }
+       v := c.field()
+       v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+       l := v.Len()
+       reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
+       v.Index(i).Set(reflect.ValueOf(n))
+       c.iter.index++
+}
+
+// application carries all the shared data so we can pass it around cheaply.
+type application struct {
+       pre, post ApplyFunc
+       cursor    Cursor
+       iter      iterator
+}
+
+func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
+       // convert typed nil into untyped nil
+       if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() {
+               n = nil
+       }
+
+       // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
+       saved := a.cursor
+       a.cursor.parent = parent
+       a.cursor.name = name
+       a.cursor.iter = iter
+       a.cursor.node = n
+
+       if a.pre != nil && !a.pre(&a.cursor) {
+               a.cursor = saved
+               return
+       }
+
+       // walk children
+       // (the order of the cases matches the order of the corresponding node types in go/ast)
+       switch n := n.(type) {
+       case nil:
+               // nothing to do
+
+       // Comments and fields
+       case *ast.Comment:
+               // nothing to do
+
+       case *ast.CommentGroup:
+               if n != nil {
+                       a.applyList(n, "List")
+               }
+
+       case *ast.Field:
+               a.apply(n, "Doc", nil, n.Doc)
+               a.applyList(n, "Names")
+               a.apply(n, "Type", nil, n.Type)
+               a.apply(n, "Tag", nil, n.Tag)
+               a.apply(n, "Comment", nil, n.Comment)
+
+       case *ast.FieldList:
+               a.applyList(n, "List")
+
+       // Expressions
+       case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
+               // nothing to do
+
+       case *ast.Ellipsis:
+               a.apply(n, "Elt", nil, n.Elt)
+
+       case *ast.FuncLit:
+               a.apply(n, "Type", nil, n.Type)
+               a.apply(n, "Body", nil, n.Body)
+
+       case *ast.CompositeLit:
+               a.apply(n, "Type", nil, n.Type)
+               a.applyList(n, "Elts")
+
+       case *ast.ParenExpr:
+               a.apply(n, "X", nil, n.X)
+
+       case *ast.SelectorExpr:
+               a.apply(n, "X", nil, n.X)
+               a.apply(n, "Sel", nil, n.Sel)
+
+       case *ast.IndexExpr:
+               a.apply(n, "X", nil, n.X)
+               a.apply(n, "Index", nil, n.Index)
+
+       case *ast.SliceExpr:
+               a.apply(n, "X", nil, n.X)
+               a.apply(n, "Low", nil, n.Low)
+               a.apply(n, "High", nil, n.High)
+               a.apply(n, "Max", nil, n.Max)
+
+       case *ast.TypeAssertExpr:
+               a.apply(n, "X", nil, n.X)
+               a.apply(n, "Type", nil, n.Type)
+
+       case *ast.CallExpr:
+               a.apply(n, "Fun", nil, n.Fun)
+               a.applyList(n, "Args")
+
+       case *ast.StarExpr:
+               a.apply(n, "X", nil, n.X)
+
+       case *ast.UnaryExpr:
+               a.apply(n, "X", nil, n.X)
+
+       case *ast.BinaryExpr:
+               a.apply(n, "X", nil, n.X)
+               a.apply(n, "Y", nil, n.Y)
+
+       case *ast.KeyValueExpr:
+               a.apply(n, "Key", nil, n.Key)
+               a.apply(n, "Value", nil, n.Value)
+
+       // Types
+       case *ast.ArrayType:
+               a.apply(n, "Len", nil, n.Len)
+               a.apply(n, "Elt", nil, n.Elt)
+
+       case *ast.StructType:
+               a.apply(n, "Fields", nil, n.Fields)
+
+       case *ast.FuncType:
+               a.apply(n, "Params", nil, n.Params)
+               a.apply(n, "Results", nil, n.Results)
+
+       case *ast.InterfaceType:
+               a.apply(n, "Methods", nil, n.Methods)
+
+       case *ast.MapType:
+               a.apply(n, "Key", nil, n.Key)
+               a.apply(n, "Value", nil, n.Value)
+
+       case *ast.ChanType:
+               a.apply(n, "Value", nil, n.Value)
+
+       // Statements
+       case *ast.BadStmt:
+               // nothing to do
+
+       case *ast.DeclStmt:
+               a.apply(n, "Decl", nil, n.Decl)
+
+       case *ast.EmptyStmt:
+               // nothing to do
+
+       case *ast.LabeledStmt:
+               a.apply(n, "Label", nil, n.Label)
+               a.apply(n, "Stmt", nil, n.Stmt)
+
+       case *ast.ExprStmt:
+               a.apply(n, "X", nil, n.X)
+
+       case *ast.SendStmt:
+               a.apply(n, "Chan", nil, n.Chan)
+               a.apply(n, "Value", nil, n.Value)
+
+       case *ast.IncDecStmt:
+               a.apply(n, "X", nil, n.X)
+
+       case *ast.AssignStmt:
+               a.applyList(n, "Lhs")
+               a.applyList(n, "Rhs")
+
+       case *ast.GoStmt:
+               a.apply(n, "Call", nil, n.Call)
+
+       case *ast.DeferStmt:
+               a.apply(n, "Call", nil, n.Call)
+
+       case *ast.ReturnStmt:
+               a.applyList(n, "Results")
+
+       case *ast.BranchStmt:
+               a.apply(n, "Label", nil, n.Label)
+
+       case *ast.BlockStmt:
+               a.applyList(n, "List")
+
+       case *ast.IfStmt:
+               a.apply(n, "Init", nil, n.Init)
+               a.apply(n, "Cond", nil, n.Cond)
+               a.apply(n, "Body", nil, n.Body)
+               a.apply(n, "Else", nil, n.Else)
+
+       case *ast.CaseClause:
+               a.applyList(n, "List")
+               a.applyList(n, "Body")
+
+       case *ast.SwitchStmt:
+               a.apply(n, "Init", nil, n.Init)
+               a.apply(n, "Tag", nil, n.Tag)
+               a.apply(n, "Body", nil, n.Body)
+
+       case *ast.TypeSwitchStmt:
+               a.apply(n, "Init", nil, n.Init)
+               a.apply(n, "Assign", nil, n.Assign)
+               a.apply(n, "Body", nil, n.Body)
+
+       case *ast.CommClause:
+               a.apply(n, "Comm", nil, n.Comm)
+               a.applyList(n, "Body")
+
+       case *ast.SelectStmt:
+               a.apply(n, "Body", nil, n.Body)
+
+       case *ast.ForStmt:
+               a.apply(n, "Init", nil, n.Init)
+               a.apply(n, "Cond", nil, n.Cond)
+               a.apply(n, "Post", nil, n.Post)
+               a.apply(n, "Body", nil, n.Body)
+
+       case *ast.RangeStmt:
+               a.apply(n, "Key", nil, n.Key)
+               a.apply(n, "Value", nil, n.Value)
+               a.apply(n, "X", nil, n.X)
+               a.apply(n, "Body", nil, n.Body)
+
+       // Declarations
+       case *ast.ImportSpec:
+               a.apply(n, "Doc", nil, n.Doc)
+               a.apply(n, "Name", nil, n.Name)
+               a.apply(n, "Path", nil, n.Path)
+               a.apply(n, "Comment", nil, n.Comment)
+
+       case *ast.ValueSpec:
+               a.apply(n, "Doc", nil, n.Doc)
+               a.applyList(n, "Names")
+               a.apply(n, "Type", nil, n.Type)
+               a.applyList(n, "Values")
+               a.apply(n, "Comment", nil, n.Comment)
+
+       case *ast.TypeSpec:
+               a.apply(n, "Doc", nil, n.Doc)
+               a.apply(n, "Name", nil, n.Name)
+               a.apply(n, "Type", nil, n.Type)
+               a.apply(n, "Comment", nil, n.Comment)
+
+       case *ast.BadDecl:
+               // nothing to do
+
+       case *ast.GenDecl:
+               a.apply(n, "Doc", nil, n.Doc)
+               a.applyList(n, "Specs")
+
+       case *ast.FuncDecl:
+               a.apply(n, "Doc", nil, n.Doc)
+               a.apply(n, "Recv", nil, n.Recv)
+               a.apply(n, "Name", nil, n.Name)
+               a.apply(n, "Type", nil, n.Type)
+               a.apply(n, "Body", nil, n.Body)
+
+       // Files and packages
+       case *ast.File:
+               a.apply(n, "Doc", nil, n.Doc)
+               a.apply(n, "Name", nil, n.Name)
+               a.applyList(n, "Decls")
+               // Don't walk n.Comments; they have either been walked already if
+               // they are Doc comments, or they can be easily walked explicitly.
+
+       case *ast.Package:
+               // collect and sort names for reproducible behavior
+               var names []string
+               for name := range n.Files {
+                       names = append(names, name)
+               }
+               sort.Strings(names)
+               for _, name := range names {
+                       a.apply(n, name, nil, n.Files[name])
+               }
+
+       default:
+               panic(fmt.Sprintf("Apply: unexpected node type %T", n))
+       }
+
+       if a.post != nil && !a.post(&a.cursor) {
+               panic(abort)
+       }
+
+       a.cursor = saved
+}
+
+// An iterator controls iteration over a slice of nodes.
+type iterator struct {
+       index, step int
+}
+
+func (a *application) applyList(parent ast.Node, name string) {
+       // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
+       saved := a.iter
+       a.iter.index = 0
+       for {
+               // must reload parent.name each time, since cursor modifications might change it
+               v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
+               if a.iter.index >= v.Len() {
+                       break
+               }
+
+               // element x may be nil in a bad AST - be cautious
+               var x ast.Node
+               if e := v.Index(a.iter.index); e.IsValid() {
+                       x = e.Interface().(ast.Node)
+               }
+
+               a.iter.step = 1
+               a.apply(parent, name, &a.iter, x)
+               a.iter.index += a.iter.step
+       }
+       a.iter = saved
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/util.go b/src/cmd/vendor/golang.org/x/tools/go/ast/astutil/util.go
new file mode 100644 (file)
index 0000000..7630629
--- /dev/null
@@ -0,0 +1,14 @@
+package astutil
+
+import "go/ast"
+
+// Unparen returns e with any enclosing parentheses stripped.
+func Unparen(e ast.Expr) ast.Expr {
+       for {
+               p, ok := e.(*ast.ParenExpr)
+               if !ok {
+                       return e
+               }
+               e = p.X
+       }
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/ast/inspector/inspector.go b/src/cmd/vendor/golang.org/x/tools/go/ast/inspector/inspector.go
new file mode 100644 (file)
index 0000000..db88a95
--- /dev/null
@@ -0,0 +1,182 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package inspector provides helper functions for traversal over the
+// syntax trees of a package, including node filtering by type, and
+// materialization of the traversal stack.
+//
+// During construction, the inspector does a complete traversal and
+// builds a list of push/pop events and their node type. Subsequent
+// method calls that request a traversal scan this list, rather than walk
+// the AST, and perform type filtering using efficient bit sets.
+//
+// Experiments suggest the inspector's traversals are about 2.5x faster
+// than ast.Inspect, but it may take around 5 traversals for this
+// benefit to amortize the inspector's construction cost.
+// If efficiency is the primary concern, do not use use Inspector for
+// one-off traversals.
+package inspector
+
+// There are four orthogonal features in a traversal:
+//  1 type filtering
+//  2 pruning
+//  3 postorder calls to f
+//  4 stack
+// Rather than offer all of them in the API,
+// only a few combinations are exposed:
+// - Preorder is the fastest and has fewest features,
+//   but is the most commonly needed traversal.
+// - Nodes and WithStack both provide pruning and postorder calls,
+//   even though few clients need it, because supporting two versions
+//   is not justified.
+// More combinations could be supported by expressing them as
+// wrappers around a more generic traversal, but this was measured
+// and found to degrade performance significantly (30%).
+
+import (
+       "go/ast"
+)
+
+// An Inspector provides methods for inspecting
+// (traversing) the syntax trees of a package.
+type Inspector struct {
+       events []event
+}
+
+// New returns an Inspector for the specified syntax trees.
+func New(files []*ast.File) *Inspector {
+       return &Inspector{traverse(files)}
+}
+
+// An event represents a push or a pop
+// of an ast.Node during a traversal.
+type event struct {
+       node  ast.Node
+       typ   uint64 // typeOf(node)
+       index int    // 1 + index of corresponding pop event, or 0 if this is a pop
+}
+
+// Preorder visits all the nodes of the files supplied to New in
+// depth-first order. It calls f(n) for each node n before it visits
+// n's children.
+//
+// The types argument, if non-empty, enables type-based filtering of
+// events. The function f if is called only for nodes whose type
+// matches an element of the types slice.
+func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) {
+       // Because it avoids postorder calls to f, and the pruning
+       // check, Preorder is almost twice as fast as Nodes. The two
+       // features seem to contribute similar slowdowns (~1.4x each).
+
+       mask := maskOf(types)
+       for i := 0; i < len(in.events); {
+               ev := in.events[i]
+               if ev.typ&mask != 0 {
+                       if ev.index > 0 {
+                               f(ev.node)
+                       }
+               }
+               i++
+       }
+}
+
+// Nodes visits the nodes of the files supplied to New in depth-first
+// order. It calls f(n, true) for each node n before it visits n's
+// children. If f returns true, Nodes invokes f recursively for each
+// of the non-nil children of the node, followed by a call of
+// f(n, false).
+//
+// The types argument, if non-empty, enables type-based filtering of
+// events. The function f if is called only for nodes whose type
+// matches an element of the types slice.
+func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (prune bool)) {
+       mask := maskOf(types)
+       for i := 0; i < len(in.events); {
+               ev := in.events[i]
+               if ev.typ&mask != 0 {
+                       if ev.index > 0 {
+                               // push
+                               if !f(ev.node, true) {
+                                       i = ev.index // jump to corresponding pop + 1
+                                       continue
+                               }
+                       } else {
+                               // pop
+                               f(ev.node, false)
+                       }
+               }
+               i++
+       }
+}
+
+// WithStack visits nodes in a similar manner to Nodes, but it
+// supplies each call to f an additional argument, the current
+// traversal stack. The stack's first element is the outermost node,
+// an *ast.File; its last is the innermost, n.
+func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, stack []ast.Node) (prune bool)) {
+       mask := maskOf(types)
+       var stack []ast.Node
+       for i := 0; i < len(in.events); {
+               ev := in.events[i]
+               if ev.index > 0 {
+                       // push
+                       stack = append(stack, ev.node)
+                       if ev.typ&mask != 0 {
+                               if !f(ev.node, true, stack) {
+                                       i = ev.index
+                                       stack = stack[:len(stack)-1]
+                                       continue
+                               }
+                       }
+               } else {
+                       // pop
+                       if ev.typ&mask != 0 {
+                               f(ev.node, false, stack)
+                       }
+                       stack = stack[:len(stack)-1]
+               }
+               i++
+       }
+}
+
+// traverse builds the table of events representing a traversal.
+func traverse(files []*ast.File) []event {
+       // Preallocate approximate number of events
+       // based on source file extent.
+       // This makes traverse faster by 4x (!).
+       var extent int
+       for _, f := range files {
+               extent += int(f.End() - f.Pos())
+       }
+       // This estimate is based on the net/http package.
+       events := make([]event, 0, extent*33/100)
+
+       var stack []event
+       for _, f := range files {
+               ast.Inspect(f, func(n ast.Node) bool {
+                       if n != nil {
+                               // push
+                               ev := event{
+                                       node:  n,
+                                       typ:   typeOf(n),
+                                       index: len(events), // push event temporarily holds own index
+                               }
+                               stack = append(stack, ev)
+                               events = append(events, ev)
+                       } else {
+                               // pop
+                               ev := stack[len(stack)-1]
+                               stack = stack[:len(stack)-1]
+
+                               events[ev.index].index = len(events) + 1 // make push refer to pop
+
+                               ev.index = 0 // turn ev into a pop event
+                               events = append(events, ev)
+                       }
+                       return true
+               })
+       }
+
+       return events
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/ast/inspector/typeof.go b/src/cmd/vendor/golang.org/x/tools/go/ast/inspector/typeof.go
new file mode 100644 (file)
index 0000000..d61301b
--- /dev/null
@@ -0,0 +1,216 @@
+package inspector
+
+// This file defines func typeOf(ast.Node) uint64.
+//
+// The initial map-based implementation was too slow;
+// see https://go-review.googlesource.com/c/tools/+/135655/1/go/ast/inspector/inspector.go#196
+
+import "go/ast"
+
+const (
+       nArrayType = iota
+       nAssignStmt
+       nBadDecl
+       nBadExpr
+       nBadStmt
+       nBasicLit
+       nBinaryExpr
+       nBlockStmt
+       nBranchStmt
+       nCallExpr
+       nCaseClause
+       nChanType
+       nCommClause
+       nComment
+       nCommentGroup
+       nCompositeLit
+       nDeclStmt
+       nDeferStmt
+       nEllipsis
+       nEmptyStmt
+       nExprStmt
+       nField
+       nFieldList
+       nFile
+       nForStmt
+       nFuncDecl
+       nFuncLit
+       nFuncType
+       nGenDecl
+       nGoStmt
+       nIdent
+       nIfStmt
+       nImportSpec
+       nIncDecStmt
+       nIndexExpr
+       nInterfaceType
+       nKeyValueExpr
+       nLabeledStmt
+       nMapType
+       nPackage
+       nParenExpr
+       nRangeStmt
+       nReturnStmt
+       nSelectStmt
+       nSelectorExpr
+       nSendStmt
+       nSliceExpr
+       nStarExpr
+       nStructType
+       nSwitchStmt
+       nTypeAssertExpr
+       nTypeSpec
+       nTypeSwitchStmt
+       nUnaryExpr
+       nValueSpec
+)
+
+// typeOf returns a distinct single-bit value that represents the type of n.
+//
+// Various implementations were benchmarked with BenchmarkNewInspector:
+//                                                             GOGC=off
+// - type switch                               4.9-5.5ms       2.1ms
+// - binary search over a sorted list of types  5.5-5.9ms      2.5ms
+// - linear scan, frequency-ordered list       5.9-6.1ms       2.7ms
+// - linear scan, unordered list               6.4ms           2.7ms
+// - hash table                                        6.5ms           3.1ms
+// A perfect hash seemed like overkill.
+//
+// The compiler's switch statement is the clear winner
+// as it produces a binary tree in code,
+// with constant conditions and good branch prediction.
+// (Sadly it is the most verbose in source code.)
+// Binary search suffered from poor branch prediction.
+//
+func typeOf(n ast.Node) uint64 {
+       // Fast path: nearly half of all nodes are identifiers.
+       if _, ok := n.(*ast.Ident); ok {
+               return 1 << nIdent
+       }
+
+       // These cases include all nodes encountered by ast.Inspect.
+       switch n.(type) {
+       case *ast.ArrayType:
+               return 1 << nArrayType
+       case *ast.AssignStmt:
+               return 1 << nAssignStmt
+       case *ast.BadDecl:
+               return 1 << nBadDecl
+       case *ast.BadExpr:
+               return 1 << nBadExpr
+       case *ast.BadStmt:
+               return 1 << nBadStmt
+       case *ast.BasicLit:
+               return 1 << nBasicLit
+       case *ast.BinaryExpr:
+               return 1 << nBinaryExpr
+       case *ast.BlockStmt:
+               return 1 << nBlockStmt
+       case *ast.BranchStmt:
+               return 1 << nBranchStmt
+       case *ast.CallExpr:
+               return 1 << nCallExpr
+       case *ast.CaseClause:
+               return 1 << nCaseClause
+       case *ast.ChanType:
+               return 1 << nChanType
+       case *ast.CommClause:
+               return 1 << nCommClause
+       case *ast.Comment:
+               return 1 << nComment
+       case *ast.CommentGroup:
+               return 1 << nCommentGroup
+       case *ast.CompositeLit:
+               return 1 << nCompositeLit
+       case *ast.DeclStmt:
+               return 1 << nDeclStmt
+       case *ast.DeferStmt:
+               return 1 << nDeferStmt
+       case *ast.Ellipsis:
+               return 1 << nEllipsis
+       case *ast.EmptyStmt:
+               return 1 << nEmptyStmt
+       case *ast.ExprStmt:
+               return 1 << nExprStmt
+       case *ast.Field:
+               return 1 << nField
+       case *ast.FieldList:
+               return 1 << nFieldList
+       case *ast.File:
+               return 1 << nFile
+       case *ast.ForStmt:
+               return 1 << nForStmt
+       case *ast.FuncDecl:
+               return 1 << nFuncDecl
+       case *ast.FuncLit:
+               return 1 << nFuncLit
+       case *ast.FuncType:
+               return 1 << nFuncType
+       case *ast.GenDecl:
+               return 1 << nGenDecl
+       case *ast.GoStmt:
+               return 1 << nGoStmt
+       case *ast.Ident:
+               return 1 << nIdent
+       case *ast.IfStmt:
+               return 1 << nIfStmt
+       case *ast.ImportSpec:
+               return 1 << nImportSpec
+       case *ast.IncDecStmt:
+               return 1 << nIncDecStmt
+       case *ast.IndexExpr:
+               return 1 << nIndexExpr
+       case *ast.InterfaceType:
+               return 1 << nInterfaceType
+       case *ast.KeyValueExpr:
+               return 1 << nKeyValueExpr
+       case *ast.LabeledStmt:
+               return 1 << nLabeledStmt
+       case *ast.MapType:
+               return 1 << nMapType
+       case *ast.Package:
+               return 1 << nPackage
+       case *ast.ParenExpr:
+               return 1 << nParenExpr
+       case *ast.RangeStmt:
+               return 1 << nRangeStmt
+       case *ast.ReturnStmt:
+               return 1 << nReturnStmt
+       case *ast.SelectStmt:
+               return 1 << nSelectStmt
+       case *ast.SelectorExpr:
+               return 1 << nSelectorExpr
+       case *ast.SendStmt:
+               return 1 << nSendStmt
+       case *ast.SliceExpr:
+               return 1 << nSliceExpr
+       case *ast.StarExpr:
+               return 1 << nStarExpr
+       case *ast.StructType:
+               return 1 << nStructType
+       case *ast.SwitchStmt:
+               return 1 << nSwitchStmt
+       case *ast.TypeAssertExpr:
+               return 1 << nTypeAssertExpr
+       case *ast.TypeSpec:
+               return 1 << nTypeSpec
+       case *ast.TypeSwitchStmt:
+               return 1 << nTypeSwitchStmt
+       case *ast.UnaryExpr:
+               return 1 << nUnaryExpr
+       case *ast.ValueSpec:
+               return 1 << nValueSpec
+       }
+       return 0
+}
+
+func maskOf(nodes []ast.Node) uint64 {
+       if nodes == nil {
+               return 1<<64 - 1 // match all node types
+       }
+       var mask uint64
+       for _, n := range nodes {
+               mask |= typeOf(n)
+       }
+       return mask
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/cfg/builder.go b/src/cmd/vendor/golang.org/x/tools/go/cfg/builder.go
new file mode 100644 (file)
index 0000000..24e1aba
--- /dev/null
@@ -0,0 +1,510 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cfg
+
+// This file implements the CFG construction pass.
+
+import (
+       "fmt"
+       "go/ast"
+       "go/token"
+)
+
+type builder struct {
+       cfg       *CFG
+       mayReturn func(*ast.CallExpr) bool
+       current   *Block
+       lblocks   map[*ast.Object]*lblock // labeled blocks
+       targets   *targets                // linked stack of branch targets
+}
+
+func (b *builder) stmt(_s ast.Stmt) {
+       // The label of the current statement.  If non-nil, its _goto
+       // target is always set; its _break and _continue are set only
+       // within the body of switch/typeswitch/select/for/range.
+       // It is effectively an additional default-nil parameter of stmt().
+       var label *lblock
+start:
+       switch s := _s.(type) {
+       case *ast.BadStmt,
+               *ast.SendStmt,
+               *ast.IncDecStmt,
+               *ast.GoStmt,
+               *ast.DeferStmt,
+               *ast.EmptyStmt,
+               *ast.AssignStmt:
+               // No effect on control flow.
+               b.add(s)
+
+       case *ast.ExprStmt:
+               b.add(s)
+               if call, ok := s.X.(*ast.CallExpr); ok && !b.mayReturn(call) {
+                       // Calls to panic, os.Exit, etc, never return.
+                       b.current = b.newBlock("unreachable.call")
+               }
+
+       case *ast.DeclStmt:
+               // Treat each var ValueSpec as a separate statement.
+               d := s.Decl.(*ast.GenDecl)
+               if d.Tok == token.VAR {
+                       for _, spec := range d.Specs {
+                               if spec, ok := spec.(*ast.ValueSpec); ok {
+                                       b.add(spec)
+                               }
+                       }
+               }
+
+       case *ast.LabeledStmt:
+               label = b.labeledBlock(s.Label)
+               b.jump(label._goto)
+               b.current = label._goto
+               _s = s.Stmt
+               goto start // effectively: tailcall stmt(g, s.Stmt, label)
+
+       case *ast.ReturnStmt:
+               b.add(s)
+               b.current = b.newBlock("unreachable.return")
+
+       case *ast.BranchStmt:
+               b.branchStmt(s)
+
+       case *ast.BlockStmt:
+               b.stmtList(s.List)
+
+       case *ast.IfStmt:
+               if s.Init != nil {
+                       b.stmt(s.Init)
+               }
+               then := b.newBlock("if.then")
+               done := b.newBlock("if.done")
+               _else := done
+               if s.Else != nil {
+                       _else = b.newBlock("if.else")
+               }
+               b.add(s.Cond)
+               b.ifelse(then, _else)
+               b.current = then
+               b.stmt(s.Body)
+               b.jump(done)
+
+               if s.Else != nil {
+                       b.current = _else
+                       b.stmt(s.Else)
+                       b.jump(done)
+               }
+
+               b.current = done
+
+       case *ast.SwitchStmt:
+               b.switchStmt(s, label)
+
+       case *ast.TypeSwitchStmt:
+               b.typeSwitchStmt(s, label)
+
+       case *ast.SelectStmt:
+               b.selectStmt(s, label)
+
+       case *ast.ForStmt:
+               b.forStmt(s, label)
+
+       case *ast.RangeStmt:
+               b.rangeStmt(s, label)
+
+       default:
+               panic(fmt.Sprintf("unexpected statement kind: %T", s))
+       }
+}
+
+func (b *builder) stmtList(list []ast.Stmt) {
+       for _, s := range list {
+               b.stmt(s)
+       }
+}
+
+func (b *builder) branchStmt(s *ast.BranchStmt) {
+       var block *Block
+       switch s.Tok {
+       case token.BREAK:
+               if s.Label != nil {
+                       if lb := b.labeledBlock(s.Label); lb != nil {
+                               block = lb._break
+                       }
+               } else {
+                       for t := b.targets; t != nil && block == nil; t = t.tail {
+                               block = t._break
+                       }
+               }
+
+       case token.CONTINUE:
+               if s.Label != nil {
+                       if lb := b.labeledBlock(s.Label); lb != nil {
+                               block = lb._continue
+                       }
+               } else {
+                       for t := b.targets; t != nil && block == nil; t = t.tail {
+                               block = t._continue
+                       }
+               }
+
+       case token.FALLTHROUGH:
+               for t := b.targets; t != nil; t = t.tail {
+                       block = t._fallthrough
+               }
+
+       case token.GOTO:
+               if s.Label != nil {
+                       block = b.labeledBlock(s.Label)._goto
+               }
+       }
+       if block == nil {
+               block = b.newBlock("undefined.branch")
+       }
+       b.jump(block)
+       b.current = b.newBlock("unreachable.branch")
+}
+
+func (b *builder) switchStmt(s *ast.SwitchStmt, label *lblock) {
+       if s.Init != nil {
+               b.stmt(s.Init)
+       }
+       if s.Tag != nil {
+               b.add(s.Tag)
+       }
+       done := b.newBlock("switch.done")
+       if label != nil {
+               label._break = done
+       }
+       // We pull the default case (if present) down to the end.
+       // But each fallthrough label must point to the next
+       // body block in source order, so we preallocate a
+       // body block (fallthru) for the next case.
+       // Unfortunately this makes for a confusing block order.
+       var defaultBody *[]ast.Stmt
+       var defaultFallthrough *Block
+       var fallthru, defaultBlock *Block
+       ncases := len(s.Body.List)
+       for i, clause := range s.Body.List {
+               body := fallthru
+               if body == nil {
+                       body = b.newBlock("switch.body") // first case only
+               }
+
+               // Preallocate body block for the next case.
+               fallthru = done
+               if i+1 < ncases {
+                       fallthru = b.newBlock("switch.body")
+               }
+
+               cc := clause.(*ast.CaseClause)
+               if cc.List == nil {
+                       // Default case.
+                       defaultBody = &cc.Body
+                       defaultFallthrough = fallthru
+                       defaultBlock = body
+                       continue
+               }
+
+               var nextCond *Block
+               for _, cond := range cc.List {
+                       nextCond = b.newBlock("switch.next")
+                       b.add(cond) // one half of the tag==cond condition
+                       b.ifelse(body, nextCond)
+                       b.current = nextCond
+               }
+               b.current = body
+               b.targets = &targets{
+                       tail:         b.targets,
+                       _break:       done,
+                       _fallthrough: fallthru,
+               }
+               b.stmtList(cc.Body)
+               b.targets = b.targets.tail
+               b.jump(done)
+               b.current = nextCond
+       }
+       if defaultBlock != nil {
+               b.jump(defaultBlock)
+               b.current = defaultBlock
+               b.targets = &targets{
+                       tail:         b.targets,
+                       _break:       done,
+                       _fallthrough: defaultFallthrough,
+               }
+               b.stmtList(*defaultBody)
+               b.targets = b.targets.tail
+       }
+       b.jump(done)
+       b.current = done
+}
+
+func (b *builder) typeSwitchStmt(s *ast.TypeSwitchStmt, label *lblock) {
+       if s.Init != nil {
+               b.stmt(s.Init)
+       }
+       if s.Assign != nil {
+               b.add(s.Assign)
+       }
+
+       done := b.newBlock("typeswitch.done")
+       if label != nil {
+               label._break = done
+       }
+       var default_ *ast.CaseClause
+       for _, clause := range s.Body.List {
+               cc := clause.(*ast.CaseClause)
+               if cc.List == nil {
+                       default_ = cc
+                       continue
+               }
+               body := b.newBlock("typeswitch.body")
+               var next *Block
+               for _, casetype := range cc.List {
+                       next = b.newBlock("typeswitch.next")
+                       // casetype is a type, so don't call b.add(casetype).
+                       // This block logically contains a type assertion,
+                       // x.(casetype), but it's unclear how to represent x.
+                       _ = casetype
+                       b.ifelse(body, next)
+                       b.current = next
+               }
+               b.current = body
+               b.typeCaseBody(cc, done)
+               b.current = next
+       }
+       if default_ != nil {
+               b.typeCaseBody(default_, done)
+       } else {
+               b.jump(done)
+       }
+       b.current = done
+}
+
+func (b *builder) typeCaseBody(cc *ast.CaseClause, done *Block) {
+       b.targets = &targets{
+               tail:   b.targets,
+               _break: done,
+       }
+       b.stmtList(cc.Body)
+       b.targets = b.targets.tail
+       b.jump(done)
+}
+
+func (b *builder) selectStmt(s *ast.SelectStmt, label *lblock) {
+       // First evaluate channel expressions.
+       // TODO(adonovan): fix: evaluate only channel exprs here.
+       for _, clause := range s.Body.List {
+               if comm := clause.(*ast.CommClause).Comm; comm != nil {
+                       b.stmt(comm)
+               }
+       }
+
+       done := b.newBlock("select.done")
+       if label != nil {
+               label._break = done
+       }
+
+       var defaultBody *[]ast.Stmt
+       for _, cc := range s.Body.List {
+               clause := cc.(*ast.CommClause)
+               if clause.Comm == nil {
+                       defaultBody = &clause.Body
+                       continue
+               }
+               body := b.newBlock("select.body")
+               next := b.newBlock("select.next")
+               b.ifelse(body, next)
+               b.current = body
+               b.targets = &targets{
+                       tail:   b.targets,
+                       _break: done,
+               }
+               switch comm := clause.Comm.(type) {
+               case *ast.ExprStmt: // <-ch
+                       // nop
+               case *ast.AssignStmt: // x := <-states[state].Chan
+                       b.add(comm.Lhs[0])
+               }
+               b.stmtList(clause.Body)
+               b.targets = b.targets.tail
+               b.jump(done)
+               b.current = next
+       }
+       if defaultBody != nil {
+               b.targets = &targets{
+                       tail:   b.targets,
+                       _break: done,
+               }
+               b.stmtList(*defaultBody)
+               b.targets = b.targets.tail
+               b.jump(done)
+       }
+       b.current = done
+}
+
+func (b *builder) forStmt(s *ast.ForStmt, label *lblock) {
+       //      ...init...
+       //      jump loop
+       // loop:
+       //      if cond goto body else done
+       // body:
+       //      ...body...
+       //      jump post
+       // post:                                 (target of continue)
+       //      ...post...
+       //      jump loop
+       // done:                                 (target of break)
+       if s.Init != nil {
+               b.stmt(s.Init)
+       }
+       body := b.newBlock("for.body")
+       done := b.newBlock("for.done") // target of 'break'
+       loop := body                   // target of back-edge
+       if s.Cond != nil {
+               loop = b.newBlock("for.loop")
+       }
+       cont := loop // target of 'continue'
+       if s.Post != nil {
+               cont = b.newBlock("for.post")
+       }
+       if label != nil {
+               label._break = done
+               label._continue = cont
+       }
+       b.jump(loop)
+       b.current = loop
+       if loop != body {
+               b.add(s.Cond)
+               b.ifelse(body, done)
+               b.current = body
+       }
+       b.targets = &targets{
+               tail:      b.targets,
+               _break:    done,
+               _continue: cont,
+       }
+       b.stmt(s.Body)
+       b.targets = b.targets.tail
+       b.jump(cont)
+
+       if s.Post != nil {
+               b.current = cont
+               b.stmt(s.Post)
+               b.jump(loop) // back-edge
+       }
+       b.current = done
+}
+
+func (b *builder) rangeStmt(s *ast.RangeStmt, label *lblock) {
+       b.add(s.X)
+
+       if s.Key != nil {
+               b.add(s.Key)
+       }
+       if s.Value != nil {
+               b.add(s.Value)
+       }
+
+       //      ...
+       // loop:                                   (target of continue)
+       //      if ... goto body else done
+       // body:
+       //      ...
+       //      jump loop
+       // done:                                   (target of break)
+
+       loop := b.newBlock("range.loop")
+       b.jump(loop)
+       b.current = loop
+
+       body := b.newBlock("range.body")
+       done := b.newBlock("range.done")
+       b.ifelse(body, done)
+       b.current = body
+
+       if label != nil {
+               label._break = done
+               label._continue = loop
+       }
+       b.targets = &targets{
+               tail:      b.targets,
+               _break:    done,
+               _continue: loop,
+       }
+       b.stmt(s.Body)
+       b.targets = b.targets.tail
+       b.jump(loop) // back-edge
+       b.current = done
+}
+
+// -------- helpers --------
+
+// Destinations associated with unlabeled for/switch/select stmts.
+// We push/pop one of these as we enter/leave each construct and for
+// each BranchStmt we scan for the innermost target of the right type.
+//
+type targets struct {
+       tail         *targets // rest of stack
+       _break       *Block
+       _continue    *Block
+       _fallthrough *Block
+}
+
+// Destinations associated with a labeled block.
+// We populate these as labels are encountered in forward gotos or
+// labeled statements.
+//
+type lblock struct {
+       _goto     *Block
+       _break    *Block
+       _continue *Block
+}
+
+// labeledBlock returns the branch target associated with the
+// specified label, creating it if needed.
+//
+func (b *builder) labeledBlock(label *ast.Ident) *lblock {
+       lb := b.lblocks[label.Obj]
+       if lb == nil {
+               lb = &lblock{_goto: b.newBlock(label.Name)}
+               if b.lblocks == nil {
+                       b.lblocks = make(map[*ast.Object]*lblock)
+               }
+               b.lblocks[label.Obj] = lb
+       }
+       return lb
+}
+
+// newBlock appends a new unconnected basic block to b.cfg's block
+// slice and returns it.
+// It does not automatically become the current block.
+// comment is an optional string for more readable debugging output.
+func (b *builder) newBlock(comment string) *Block {
+       g := b.cfg
+       block := &Block{
+               Index:   int32(len(g.Blocks)),
+               comment: comment,
+       }
+       block.Succs = block.succs2[:0]
+       g.Blocks = append(g.Blocks, block)
+       return block
+}
+
+func (b *builder) add(n ast.Node) {
+       b.current.Nodes = append(b.current.Nodes, n)
+}
+
+// jump adds an edge from the current block to the target block,
+// and sets b.current to nil.
+func (b *builder) jump(target *Block) {
+       b.current.Succs = append(b.current.Succs, target)
+       b.current = nil
+}
+
+// ifelse emits edges from the current block to the t and f blocks,
+// and sets b.current to nil.
+func (b *builder) ifelse(t, f *Block) {
+       b.current.Succs = append(b.current.Succs, t, f)
+       b.current = nil
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/cfg/cfg.go b/src/cmd/vendor/golang.org/x/tools/go/cfg/cfg.go
new file mode 100644 (file)
index 0000000..b075034
--- /dev/null
@@ -0,0 +1,150 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This package constructs a simple control-flow graph (CFG) of the
+// statements and expressions within a single function.
+//
+// Use cfg.New to construct the CFG for a function body.
+//
+// The blocks of the CFG contain all the function's non-control
+// statements.  The CFG does not contain control statements such as If,
+// Switch, Select, and Branch, but does contain their subexpressions.
+// For example, this source code:
+//
+//     if x := f(); x != nil {
+//             T()
+//     } else {
+//             F()
+//     }
+//
+// produces this CFG:
+//
+//    1:  x := f()
+//        x != nil
+//        succs: 2, 3
+//    2:  T()
+//        succs: 4
+//    3:  F()
+//        succs: 4
+//    4:
+//
+// The CFG does contain Return statements; even implicit returns are
+// materialized (at the position of the function's closing brace).
+//
+// The CFG does not record conditions associated with conditional branch
+// edges, nor the short-circuit semantics of the && and || operators,
+// nor abnormal control flow caused by panic.  If you need this
+// information, use golang.org/x/tools/go/ssa instead.
+//
+package cfg
+
+import (
+       "bytes"
+       "fmt"
+       "go/ast"
+       "go/format"
+       "go/token"
+)
+
+// A CFG represents the control-flow graph of a single function.
+//
+// The entry point is Blocks[0]; there may be multiple return blocks.
+type CFG struct {
+       Blocks []*Block // block[0] is entry; order otherwise undefined
+}
+
+// A Block represents a basic block: a list of statements and
+// expressions that are always evaluated sequentially.
+//
+// A block may have 0-2 successors: zero for a return block or a block
+// that calls a function such as panic that never returns; one for a
+// normal (jump) block; and two for a conditional (if) block.
+type Block struct {
+       Nodes []ast.Node // statements, expressions, and ValueSpecs
+       Succs []*Block   // successor nodes in the graph
+       Index int32      // index within CFG.Blocks
+       Live  bool       // block is reachable from entry
+
+       comment string    // for debugging
+       succs2  [2]*Block // underlying array for Succs
+}
+
+// New returns a new control-flow graph for the specified function body,
+// which must be non-nil.
+//
+// The CFG builder calls mayReturn to determine whether a given function
+// call may return.  For example, calls to panic, os.Exit, and log.Fatal
+// do not return, so the builder can remove infeasible graph edges
+// following such calls.  The builder calls mayReturn only for a
+// CallExpr beneath an ExprStmt.
+func New(body *ast.BlockStmt, mayReturn func(*ast.CallExpr) bool) *CFG {
+       b := builder{
+               mayReturn: mayReturn,
+               cfg:       new(CFG),
+       }
+       b.current = b.newBlock("entry")
+       b.stmt(body)
+
+       // Compute liveness (reachability from entry point), breadth-first.
+       q := make([]*Block, 0, len(b.cfg.Blocks))
+       q = append(q, b.cfg.Blocks[0]) // entry point
+       for len(q) > 0 {
+               b := q[len(q)-1]
+               q = q[:len(q)-1]
+
+               if !b.Live {
+                       b.Live = true
+                       q = append(q, b.Succs...)
+               }
+       }
+
+       // Does control fall off the end of the function's body?
+       // Make implicit return explicit.
+       if b.current != nil && b.current.Live {
+               b.add(&ast.ReturnStmt{
+                       Return: body.End() - 1,
+               })
+       }
+
+       return b.cfg
+}
+
+func (b *Block) String() string {
+       return fmt.Sprintf("block %d (%s)", b.Index, b.comment)
+}
+
+// Return returns the return statement at the end of this block if present, nil otherwise.
+func (b *Block) Return() (ret *ast.ReturnStmt) {
+       if len(b.Nodes) > 0 {
+               ret, _ = b.Nodes[len(b.Nodes)-1].(*ast.ReturnStmt)
+       }
+       return
+}
+
+// Format formats the control-flow graph for ease of debugging.
+func (g *CFG) Format(fset *token.FileSet) string {
+       var buf bytes.Buffer
+       for _, b := range g.Blocks {
+               fmt.Fprintf(&buf, ".%d: # %s\n", b.Index, b.comment)
+               for _, n := range b.Nodes {
+                       fmt.Fprintf(&buf, "\t%s\n", formatNode(fset, n))
+               }
+               if len(b.Succs) > 0 {
+                       fmt.Fprintf(&buf, "\tsuccs:")
+                       for _, succ := range b.Succs {
+                               fmt.Fprintf(&buf, " %d", succ.Index)
+                       }
+                       buf.WriteByte('\n')
+               }
+               buf.WriteByte('\n')
+       }
+       return buf.String()
+}
+
+func formatNode(fset *token.FileSet, n ast.Node) string {
+       var buf bytes.Buffer
+       format.Node(&buf, fset, n)
+       // Indent secondary lines by a tab.
+       return string(bytes.Replace(buf.Bytes(), []byte("\n"), []byte("\n\t"), -1))
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go b/src/cmd/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
new file mode 100644 (file)
index 0000000..0d85488
--- /dev/null
@@ -0,0 +1,523 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package objectpath defines a naming scheme for types.Objects
+// (that is, named entities in Go programs) relative to their enclosing
+// package.
+//
+// Type-checker objects are canonical, so they are usually identified by
+// their address in memory (a pointer), but a pointer has meaning only
+// within one address space. By contrast, objectpath names allow the
+// identity of an object to be sent from one program to another,
+// establishing a correspondence between types.Object variables that are
+// distinct but logically equivalent.
+//
+// A single object may have multiple paths. In this example,
+//     type A struct{ X int }
+//     type B A
+// the field X has two paths due to its membership of both A and B.
+// The For(obj) function always returns one of these paths, arbitrarily
+// but consistently.
+package objectpath
+
+import (
+       "fmt"
+       "strconv"
+       "strings"
+
+       "go/types"
+)
+
+// A Path is an opaque name that identifies a types.Object
+// relative to its package. Conceptually, the name consists of a
+// sequence of destructuring operations applied to the package scope
+// to obtain the original object.
+// The name does not include the package itself.
+type Path string
+
+// Encoding
+//
+// An object path is a textual and (with training) human-readable encoding
+// of a sequence of destructuring operators, starting from a types.Package.
+// The sequences represent a path through the package/object/type graph.
+// We classify these operators by their type:
+//
+//   PO package->object        Package.Scope.Lookup
+//   OT  object->type  Object.Type
+//   TT    type->type  Type.{Elem,Key,Params,Results,Underlying} [EKPRU]
+//   TO   type->object Type.{At,Field,Method,Obj} [AFMO]
+//
+// All valid paths start with a package and end at an object
+// and thus may be defined by the regular language:
+//
+//   objectpath = PO (OT TT* TO)*
+//
+// The concrete encoding follows directly:
+// - The only PO operator is Package.Scope.Lookup, which requires an identifier.
+// - The only OT operator is Object.Type,
+//   which we encode as '.' because dot cannot appear in an identifier.
+// - The TT operators are encoded as [EKPRU].
+// - The OT operators are encoded as [AFMO];
+//   three of these (At,Field,Method) require an integer operand,
+//   which is encoded as a string of decimal digits.
+//   These indices are stable across different representations
+//   of the same package, even source and export data.
+//
+// In the example below,
+//
+//     package p
+//
+//     type T interface {
+//             f() (a string, b struct{ X int })
+//     }
+//
+// field X has the path "T.UM0.RA1.F0",
+// representing the following sequence of operations:
+//
+//    p.Lookup("T")                                    T
+//    .Type().Underlying().Method(0).                  f
+//    .Type().Results().At(1)                          b
+//    .Type().Field(0)                                 X
+//
+// The encoding is not maximally compact---every R or P is
+// followed by an A, for example---but this simplifies the
+// encoder and decoder.
+//
+const (
+       // object->type operators
+       opType = '.' // .Type()           (Object)
+
+       // type->type operators
+       opElem       = 'E' // .Elem()           (Pointer, Slice, Array, Chan, Map)
+       opKey        = 'K' // .Key()            (Map)
+       opParams     = 'P' // .Params()         (Signature)
+       opResults    = 'R' // .Results()        (Signature)
+       opUnderlying = 'U' // .Underlying()     (Named)
+
+       // type->object operators
+       opAt     = 'A' // .At(i)                (Tuple)
+       opField  = 'F' // .Field(i)             (Struct)
+       opMethod = 'M' // .Method(i)            (Named or Interface; not Struct: "promoted" names are ignored)
+       opObj    = 'O' // .Obj()                (Named)
+)
+
+// The For function returns the path to an object relative to its package,
+// or an error if the object is not accessible from the package's Scope.
+//
+// The For function guarantees to return a path only for the following objects:
+// - package-level types
+// - exported package-level non-types
+// - methods
+// - parameter and result variables
+// - struct fields
+// These objects are sufficient to define the API of their package.
+// The objects described by a package's export data are drawn from this set.
+//
+// For does not return a path for predeclared names, imported package
+// names, local names, and unexported package-level names (except
+// types).
+//
+// Example: given this definition,
+//
+//     package p
+//
+//     type T interface {
+//             f() (a string, b struct{ X int })
+//     }
+//
+// For(X) would return a path that denotes the following sequence of operations:
+//
+//    p.Scope().Lookup("T")                            (TypeName T)
+//    .Type().Underlying().Method(0).                  (method Func f)
+//    .Type().Results().At(1)                          (field Var b)
+//    .Type().Field(0)                                 (field Var X)
+//
+// where p is the package (*types.Package) to which X belongs.
+func For(obj types.Object) (Path, error) {
+       pkg := obj.Pkg()
+
+       // This table lists the cases of interest.
+       //
+       // Object                               Action
+       // ------                               ------
+       // nil                                  reject
+       // builtin                              reject
+       // pkgname                              reject
+       // label                                reject
+       // var
+       //    package-level                     accept
+       //    func param/result                 accept
+       //    local                             reject
+       //    struct field                      accept
+       // const
+       //    package-level                     accept
+       //    local                             reject
+       // func
+       //    package-level                     accept
+       //    init functions                    reject
+       //    concrete method                   accept
+       //    interface method                  accept
+       // type
+       //    package-level                     accept
+       //    local                             reject
+       //
+       // The only accessible package-level objects are members of pkg itself.
+       //
+       // The cases are handled in four steps:
+       //
+       // 1. reject nil and builtin
+       // 2. accept package-level objects
+       // 3. reject obviously invalid objects
+       // 4. search the API for the path to the param/result/field/method.
+
+       // 1. reference to nil or builtin?
+       if pkg == nil {
+               return "", fmt.Errorf("predeclared %s has no path", obj)
+       }
+       scope := pkg.Scope()
+
+       // 2. package-level object?
+       if scope.Lookup(obj.Name()) == obj {
+               // Only exported objects (and non-exported types) have a path.
+               // Non-exported types may be referenced by other objects.
+               if _, ok := obj.(*types.TypeName); !ok && !obj.Exported() {
+                       return "", fmt.Errorf("no path for non-exported %v", obj)
+               }
+               return Path(obj.Name()), nil
+       }
+
+       // 3. Not a package-level object.
+       //    Reject obviously non-viable cases.
+       switch obj := obj.(type) {
+       case *types.Const, // Only package-level constants have a path.
+               *types.TypeName, // Only package-level types have a path.
+               *types.Label,    // Labels are function-local.
+               *types.PkgName:  // PkgNames are file-local.
+               return "", fmt.Errorf("no path for %v", obj)
+
+       case *types.Var:
+               // Could be:
+               // - a field (obj.IsField())
+               // - a func parameter or result
+               // - a local var.
+               // Sadly there is no way to distinguish
+               // a param/result from a local
+               // so we must proceed to the find.
+
+       case *types.Func:
+               // A func, if not package-level, must be a method.
+               if recv := obj.Type().(*types.Signature).Recv(); recv == nil {
+                       return "", fmt.Errorf("func is not a method: %v", obj)
+               }
+               // TODO(adonovan): opt: if the method is concrete,
+               // do a specialized version of the rest of this function so
+               // that it's O(1) not O(|scope|).  Basically 'find' is needed
+               // only for struct fields and interface methods.
+
+       default:
+               panic(obj)
+       }
+
+       // 4. Search the API for the path to the var (field/param/result) or method.
+
+       // First inspect package-level named types.
+       // In the presence of path aliases, these give
+       // the best paths because non-types may
+       // refer to types, but not the reverse.
+       empty := make([]byte, 0, 48) // initial space
+       for _, name := range scope.Names() {
+               o := scope.Lookup(name)
+               tname, ok := o.(*types.TypeName)
+               if !ok {
+                       continue // handle non-types in second pass
+               }
+
+               path := append(empty, name...)
+               path = append(path, opType)
+
+               T := o.Type()
+
+               if tname.IsAlias() {
+                       // type alias
+                       if r := find(obj, T, path); r != nil {
+                               return Path(r), nil
+                       }
+               } else {
+                       // defined (named) type
+                       if r := find(obj, T.Underlying(), append(path, opUnderlying)); r != nil {
+                               return Path(r), nil
+                       }
+               }
+       }
+
+       // Then inspect everything else:
+       // non-types, and declared methods of defined types.
+       for _, name := range scope.Names() {
+               o := scope.Lookup(name)
+               path := append(empty, name...)
+               if _, ok := o.(*types.TypeName); !ok {
+                       if o.Exported() {
+                               // exported non-type (const, var, func)
+                               if r := find(obj, o.Type(), append(path, opType)); r != nil {
+                                       return Path(r), nil
+                               }
+                       }
+                       continue
+               }
+
+               // Inspect declared methods of defined types.
+               if T, ok := o.Type().(*types.Named); ok {
+                       path = append(path, opType)
+                       for i := 0; i < T.NumMethods(); i++ {
+                               m := T.Method(i)
+                               path2 := appendOpArg(path, opMethod, i)
+                               if m == obj {
+                                       return Path(path2), nil // found declared method
+                               }
+                               if r := find(obj, m.Type(), append(path2, opType)); r != nil {
+                                       return Path(r), nil
+                               }
+                       }
+               }
+       }
+
+       return "", fmt.Errorf("can't find path for %v in %s", obj, pkg.Path())
+}
+
+func appendOpArg(path []byte, op byte, arg int) []byte {
+       path = append(path, op)
+       path = strconv.AppendInt(path, int64(arg), 10)
+       return path
+}
+
+// find finds obj within type T, returning the path to it, or nil if not found.
+func find(obj types.Object, T types.Type, path []byte) []byte {
+       switch T := T.(type) {
+       case *types.Basic, *types.Named:
+               // Named types belonging to pkg were handled already,
+               // so T must belong to another package. No path.
+               return nil
+       case *types.Pointer:
+               return find(obj, T.Elem(), append(path, opElem))
+       case *types.Slice:
+               return find(obj, T.Elem(), append(path, opElem))
+       case *types.Array:
+               return find(obj, T.Elem(), append(path, opElem))
+       case *types.Chan:
+               return find(obj, T.Elem(), append(path, opElem))
+       case *types.Map:
+               if r := find(obj, T.Key(), append(path, opKey)); r != nil {
+                       return r
+               }
+               return find(obj, T.Elem(), append(path, opElem))
+       case *types.Signature:
+               if r := find(obj, T.Params(), append(path, opParams)); r != nil {
+                       return r
+               }
+               return find(obj, T.Results(), append(path, opResults))
+       case *types.Struct:
+               for i := 0; i < T.NumFields(); i++ {
+                       f := T.Field(i)
+                       path2 := appendOpArg(path, opField, i)
+                       if f == obj {
+                               return path2 // found field var
+                       }
+                       if r := find(obj, f.Type(), append(path2, opType)); r != nil {
+                               return r
+                       }
+               }
+               return nil
+       case *types.Tuple:
+               for i := 0; i < T.Len(); i++ {
+                       v := T.At(i)
+                       path2 := appendOpArg(path, opAt, i)
+                       if v == obj {
+                               return path2 // found param/result var
+                       }
+                       if r := find(obj, v.Type(), append(path2, opType)); r != nil {
+                               return r
+                       }
+               }
+               return nil
+       case *types.Interface:
+               for i := 0; i < T.NumMethods(); i++ {
+                       m := T.Method(i)
+                       path2 := appendOpArg(path, opMethod, i)
+                       if m == obj {
+                               return path2 // found interface method
+                       }
+                       if r := find(obj, m.Type(), append(path2, opType)); r != nil {
+                               return r
+                       }
+               }
+               return nil
+       }
+       panic(T)
+}
+
+// Object returns the object denoted by path p within the package pkg.
+func Object(pkg *types.Package, p Path) (types.Object, error) {
+       if p == "" {
+               return nil, fmt.Errorf("empty path")
+       }
+
+       pathstr := string(p)
+       var pkgobj, suffix string
+       if dot := strings.IndexByte(pathstr, opType); dot < 0 {
+               pkgobj = pathstr
+       } else {
+               pkgobj = pathstr[:dot]
+               suffix = pathstr[dot:] // suffix starts with "."
+       }
+
+       obj := pkg.Scope().Lookup(pkgobj)
+       if obj == nil {
+               return nil, fmt.Errorf("package %s does not contain %q", pkg.Path(), pkgobj)
+       }
+
+       // abtraction of *types.{Pointer,Slice,Array,Chan,Map}
+       type hasElem interface {
+               Elem() types.Type
+       }
+       // abstraction of *types.{Interface,Named}
+       type hasMethods interface {
+               Method(int) *types.Func
+               NumMethods() int
+       }
+
+       // The loop state is the pair (t, obj),
+       // exactly one of which is non-nil, initially obj.
+       // All suffixes start with '.' (the only object->type operation),
+       // followed by optional type->type operations,
+       // then a type->object operation.
+       // The cycle then repeats.
+       var t types.Type
+       for suffix != "" {
+               code := suffix[0]
+               suffix = suffix[1:]
+
+               // Codes [AFM] have an integer operand.
+               var index int
+               switch code {
+               case opAt, opField, opMethod:
+                       rest := strings.TrimLeft(suffix, "0123456789")
+                       numerals := suffix[:len(suffix)-len(rest)]
+                       suffix = rest
+                       i, err := strconv.Atoi(numerals)
+                       if err != nil {
+                               return nil, fmt.Errorf("invalid path: bad numeric operand %q for code %q", numerals, code)
+                       }
+                       index = int(i)
+               case opObj:
+                       // no operand
+               default:
+                       // The suffix must end with a type->object operation.
+                       if suffix == "" {
+                               return nil, fmt.Errorf("invalid path: ends with %q, want [AFMO]", code)
+                       }
+               }
+
+               if code == opType {
+                       if t != nil {
+                               return nil, fmt.Errorf("invalid path: unexpected %q in type context", opType)
+                       }
+                       t = obj.Type()
+                       obj = nil
+                       continue
+               }
+
+               if t == nil {
+                       return nil, fmt.Errorf("invalid path: code %q in object context", code)
+               }
+
+               // Inv: t != nil, obj == nil
+
+               switch code {
+               case opElem:
+                       hasElem, ok := t.(hasElem) // Pointer, Slice, Array, Chan, Map
+                       if !ok {
+                               return nil, fmt.Errorf("cannot apply %q to %s (got %T, want pointer, slice, array, chan or map)", code, t, t)
+                       }
+                       t = hasElem.Elem()
+
+               case opKey:
+                       mapType, ok := t.(*types.Map)
+                       if !ok {
+                               return nil, fmt.Errorf("cannot apply %q to %s (got %T, want map)", code, t, t)
+                       }
+                       t = mapType.Key()
+
+               case opParams:
+                       sig, ok := t.(*types.Signature)
+                       if !ok {
+                               return nil, fmt.Errorf("cannot apply %q to %s (got %T, want signature)", code, t, t)
+                       }
+                       t = sig.Params()
+
+               case opResults:
+                       sig, ok := t.(*types.Signature)
+                       if !ok {
+                               return nil, fmt.Errorf("cannot apply %q to %s (got %T, want signature)", code, t, t)
+                       }
+                       t = sig.Results()
+
+               case opUnderlying:
+                       named, ok := t.(*types.Named)
+                       if !ok {
+                               return nil, fmt.Errorf("cannot apply %q to %s (got %s, want named)", code, t, t)
+                       }
+                       t = named.Underlying()
+
+               case opAt:
+                       tuple, ok := t.(*types.Tuple)
+                       if !ok {
+                               return nil, fmt.Errorf("cannot apply %q to %s (got %s, want tuple)", code, t, t)
+                       }
+                       if n := tuple.Len(); index >= n {
+                               return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n)
+                       }
+                       obj = tuple.At(index)
+                       t = nil
+
+               case opField:
+                       structType, ok := t.(*types.Struct)
+                       if !ok {
+                               return nil, fmt.Errorf("cannot apply %q to %s (got %T, want struct)", code, t, t)
+                       }
+                       if n := structType.NumFields(); index >= n {
+                               return nil, fmt.Errorf("field index %d out of range [0-%d)", index, n)
+                       }
+                       obj = structType.Field(index)
+                       t = nil
+
+               case opMethod:
+                       hasMethods, ok := t.(hasMethods) // Interface or Named
+                       if !ok {
+                               return nil, fmt.Errorf("cannot apply %q to %s (got %s, want interface or named)", code, t, t)
+                       }
+                       if n := hasMethods.NumMethods(); index >= n {
+                               return nil, fmt.Errorf("method index %d out of range [0-%d)", index, n)
+                       }
+                       obj = hasMethods.Method(index)
+                       t = nil
+
+               case opObj:
+                       named, ok := t.(*types.Named)
+                       if !ok {
+                               return nil, fmt.Errorf("cannot apply %q to %s (got %s, want named)", code, t, t)
+                       }
+                       obj = named.Obj()
+                       t = nil
+
+               default:
+                       return nil, fmt.Errorf("invalid path: unknown code %q", code)
+               }
+       }
+
+       if obj.Pkg() != pkg {
+               return nil, fmt.Errorf("path denotes %s, which belongs to a different package", obj)
+       }
+
+       return obj, nil // success
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/types/typeutil/callee.go b/src/cmd/vendor/golang.org/x/tools/go/types/typeutil/callee.go
new file mode 100644 (file)
index 0000000..38f596d
--- /dev/null
@@ -0,0 +1,46 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+import (
+       "go/ast"
+       "go/types"
+
+       "golang.org/x/tools/go/ast/astutil"
+)
+
+// Callee returns the named target of a function call, if any:
+// a function, method, builtin, or variable.
+func Callee(info *types.Info, call *ast.CallExpr) types.Object {
+       var obj types.Object
+       switch fun := astutil.Unparen(call.Fun).(type) {
+       case *ast.Ident:
+               obj = info.Uses[fun] // type, var, builtin, or declared func
+       case *ast.SelectorExpr:
+               if sel, ok := info.Selections[fun]; ok {
+                       obj = sel.Obj() // method or field
+               } else {
+                       obj = info.Uses[fun.Sel] // qualified identifier?
+               }
+       }
+       if _, ok := obj.(*types.TypeName); ok {
+               return nil // T(x) is a conversion, not a call
+       }
+       return obj
+}
+
+// StaticCallee returns the target (function or method) of a static
+// function call, if any. It returns nil for calls to builtins.
+func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func {
+       if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) {
+               return f
+       }
+       return nil
+}
+
+func interfaceMethod(f *types.Func) bool {
+       recv := f.Type().(*types.Signature).Recv()
+       return recv != nil && types.IsInterface(recv.Type())
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/types/typeutil/imports.go b/src/cmd/vendor/golang.org/x/tools/go/types/typeutil/imports.go
new file mode 100644 (file)
index 0000000..9c441db
--- /dev/null
@@ -0,0 +1,31 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+import "go/types"
+
+// Dependencies returns all dependencies of the specified packages.
+//
+// Dependent packages appear in topological order: if package P imports
+// package Q, Q appears earlier than P in the result.
+// The algorithm follows import statements in the order they
+// appear in the source code, so the result is a total order.
+//
+func Dependencies(pkgs ...*types.Package) []*types.Package {
+       var result []*types.Package
+       seen := make(map[*types.Package]bool)
+       var visit func(pkgs []*types.Package)
+       visit = func(pkgs []*types.Package) {
+               for _, p := range pkgs {
+                       if !seen[p] {
+                               seen[p] = true
+                               visit(p.Imports())
+                               result = append(result, p)
+                       }
+               }
+       }
+       visit(pkgs)
+       return result
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/types/typeutil/map.go b/src/cmd/vendor/golang.org/x/tools/go/types/typeutil/map.go
new file mode 100644 (file)
index 0000000..c7f7545
--- /dev/null
@@ -0,0 +1,313 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package typeutil defines various utilities for types, such as Map,
+// a mapping from types.Type to interface{} values.
+package typeutil // import "golang.org/x/tools/go/types/typeutil"
+
+import (
+       "bytes"
+       "fmt"
+       "go/types"
+       "reflect"
+)
+
+// Map is a hash-table-based mapping from types (types.Type) to
+// arbitrary interface{} values.  The concrete types that implement
+// the Type interface are pointers.  Since they are not canonicalized,
+// == cannot be used to check for equivalence, and thus we cannot
+// simply use a Go map.
+//
+// Just as with map[K]V, a nil *Map is a valid empty map.
+//
+// Not thread-safe.
+//
+type Map struct {
+       hasher Hasher             // shared by many Maps
+       table  map[uint32][]entry // maps hash to bucket; entry.key==nil means unused
+       length int                // number of map entries
+}
+
+// entry is an entry (key/value association) in a hash bucket.
+type entry struct {
+       key   types.Type
+       value interface{}
+}
+
+// SetHasher sets the hasher used by Map.
+//
+// All Hashers are functionally equivalent but contain internal state
+// used to cache the results of hashing previously seen types.
+//
+// A single Hasher created by MakeHasher() may be shared among many
+// Maps.  This is recommended if the instances have many keys in
+// common, as it will amortize the cost of hash computation.
+//
+// A Hasher may grow without bound as new types are seen.  Even when a
+// type is deleted from the map, the Hasher never shrinks, since other
+// types in the map may reference the deleted type indirectly.
+//
+// Hashers are not thread-safe, and read-only operations such as
+// Map.Lookup require updates to the hasher, so a full Mutex lock (not a
+// read-lock) is require around all Map operations if a shared
+// hasher is accessed from multiple threads.
+//
+// If SetHasher is not called, the Map will create a private hasher at
+// the first call to Insert.
+//
+func (m *Map) SetHasher(hasher Hasher) {
+       m.hasher = hasher
+}
+
+// Delete removes the entry with the given key, if any.
+// It returns true if the entry was found.
+//
+func (m *Map) Delete(key types.Type) bool {
+       if m != nil && m.table != nil {
+               hash := m.hasher.Hash(key)
+               bucket := m.table[hash]
+               for i, e := range bucket {
+                       if e.key != nil && types.Identical(key, e.key) {
+                               // We can't compact the bucket as it
+                               // would disturb iterators.
+                               bucket[i] = entry{}
+                               m.length--
+                               return true
+                       }
+               }
+       }
+       return false
+}
+
+// At returns the map entry for the given key.
+// The result is nil if the entry is not present.
+//
+func (m *Map) At(key types.Type) interface{} {
+       if m != nil && m.table != nil {
+               for _, e := range m.table[m.hasher.Hash(key)] {
+                       if e.key != nil && types.Identical(key, e.key) {
+                               return e.value
+                       }
+               }
+       }
+       return nil
+}
+
+// Set sets the map entry for key to val,
+// and returns the previous entry, if any.
+func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) {
+       if m.table != nil {
+               hash := m.hasher.Hash(key)
+               bucket := m.table[hash]
+               var hole *entry
+               for i, e := range bucket {
+                       if e.key == nil {
+                               hole = &bucket[i]
+                       } else if types.Identical(key, e.key) {
+                               prev = e.value
+                               bucket[i].value = value
+                               return
+                       }
+               }
+
+               if hole != nil {
+                       *hole = entry{key, value} // overwrite deleted entry
+               } else {
+                       m.table[hash] = append(bucket, entry{key, value})
+               }
+       } else {
+               if m.hasher.memo == nil {
+                       m.hasher = MakeHasher()
+               }
+               hash := m.hasher.Hash(key)
+               m.table = map[uint32][]entry{hash: {entry{key, value}}}
+       }
+
+       m.length++
+       return
+}
+
+// Len returns the number of map entries.
+func (m *Map) Len() int {
+       if m != nil {
+               return m.length
+       }
+       return 0
+}
+
+// Iterate calls function f on each entry in the map in unspecified order.
+//
+// If f should mutate the map, Iterate provides the same guarantees as
+// Go maps: if f deletes a map entry that Iterate has not yet reached,
+// f will not be invoked for it, but if f inserts a map entry that
+// Iterate has not yet reached, whether or not f will be invoked for
+// it is unspecified.
+//
+func (m *Map) Iterate(f func(key types.Type, value interface{})) {
+       if m != nil {
+               for _, bucket := range m.table {
+                       for _, e := range bucket {
+                               if e.key != nil {
+                                       f(e.key, e.value)
+                               }
+                       }
+               }
+       }
+}
+
+// Keys returns a new slice containing the set of map keys.
+// The order is unspecified.
+func (m *Map) Keys() []types.Type {
+       keys := make([]types.Type, 0, m.Len())
+       m.Iterate(func(key types.Type, _ interface{}) {
+               keys = append(keys, key)
+       })
+       return keys
+}
+
+func (m *Map) toString(values bool) string {
+       if m == nil {
+               return "{}"
+       }
+       var buf bytes.Buffer
+       fmt.Fprint(&buf, "{")
+       sep := ""
+       m.Iterate(func(key types.Type, value interface{}) {
+               fmt.Fprint(&buf, sep)
+               sep = ", "
+               fmt.Fprint(&buf, key)
+               if values {
+                       fmt.Fprintf(&buf, ": %q", value)
+               }
+       })
+       fmt.Fprint(&buf, "}")
+       return buf.String()
+}
+
+// String returns a string representation of the map's entries.
+// Values are printed using fmt.Sprintf("%v", v).
+// Order is unspecified.
+//
+func (m *Map) String() string {
+       return m.toString(true)
+}
+
+// KeysString returns a string representation of the map's key set.
+// Order is unspecified.
+//
+func (m *Map) KeysString() string {
+       return m.toString(false)
+}
+
+////////////////////////////////////////////////////////////////////////
+// Hasher
+
+// A Hasher maps each type to its hash value.
+// For efficiency, a hasher uses memoization; thus its memory
+// footprint grows monotonically over time.
+// Hashers are not thread-safe.
+// Hashers have reference semantics.
+// Call MakeHasher to create a Hasher.
+type Hasher struct {
+       memo map[types.Type]uint32
+}
+
+// MakeHasher returns a new Hasher instance.
+func MakeHasher() Hasher {
+       return Hasher{make(map[types.Type]uint32)}
+}
+
+// Hash computes a hash value for the given type t such that
+// Identical(t, t') => Hash(t) == Hash(t').
+func (h Hasher) Hash(t types.Type) uint32 {
+       hash, ok := h.memo[t]
+       if !ok {
+               hash = h.hashFor(t)
+               h.memo[t] = hash
+       }
+       return hash
+}
+
+// hashString computes the Fowler–Noll–Vo hash of s.
+func hashString(s string) uint32 {
+       var h uint32
+       for i := 0; i < len(s); i++ {
+               h ^= uint32(s[i])
+               h *= 16777619
+       }
+       return h
+}
+
+// hashFor computes the hash of t.
+func (h Hasher) hashFor(t types.Type) uint32 {
+       // See Identical for rationale.
+       switch t := t.(type) {
+       case *types.Basic:
+               return uint32(t.Kind())
+
+       case *types.Array:
+               return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem())
+
+       case *types.Slice:
+               return 9049 + 2*h.Hash(t.Elem())
+
+       case *types.Struct:
+               var hash uint32 = 9059
+               for i, n := 0, t.NumFields(); i < n; i++ {
+                       f := t.Field(i)
+                       if f.Anonymous() {
+                               hash += 8861
+                       }
+                       hash += hashString(t.Tag(i))
+                       hash += hashString(f.Name()) // (ignore f.Pkg)
+                       hash += h.Hash(f.Type())
+               }
+               return hash
+
+       case *types.Pointer:
+               return 9067 + 2*h.Hash(t.Elem())
+
+       case *types.Signature:
+               var hash uint32 = 9091
+               if t.Variadic() {
+                       hash *= 8863
+               }
+               return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results())
+
+       case *types.Interface:
+               var hash uint32 = 9103
+               for i, n := 0, t.NumMethods(); i < n; i++ {
+                       // See go/types.identicalMethods for rationale.
+                       // Method order is not significant.
+                       // Ignore m.Pkg().
+                       m := t.Method(i)
+                       hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type())
+               }
+               return hash
+
+       case *types.Map:
+               return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem())
+
+       case *types.Chan:
+               return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem())
+
+       case *types.Named:
+               // Not safe with a copying GC; objects may move.
+               return uint32(reflect.ValueOf(t.Obj()).Pointer())
+
+       case *types.Tuple:
+               return h.hashTuple(t)
+       }
+       panic(t)
+}
+
+func (h Hasher) hashTuple(tuple *types.Tuple) uint32 {
+       // See go/types.identicalTypes for rationale.
+       n := tuple.Len()
+       var hash uint32 = 9137 + 2*uint32(n)
+       for i := 0; i < n; i++ {
+               hash += 3 * h.Hash(tuple.At(i).Type())
+       }
+       return hash
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go b/src/cmd/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go
new file mode 100644 (file)
index 0000000..3208461
--- /dev/null
@@ -0,0 +1,72 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file implements a cache of method sets.
+
+package typeutil
+
+import (
+       "go/types"
+       "sync"
+)
+
+// A MethodSetCache records the method set of each type T for which
+// MethodSet(T) is called so that repeat queries are fast.
+// The zero value is a ready-to-use cache instance.
+type MethodSetCache struct {
+       mu     sync.Mutex
+       named  map[*types.Named]struct{ value, pointer *types.MethodSet } // method sets for named N and *N
+       others map[types.Type]*types.MethodSet                            // all other types
+}
+
+// MethodSet returns the method set of type T.  It is thread-safe.
+//
+// If cache is nil, this function is equivalent to types.NewMethodSet(T).
+// Utility functions can thus expose an optional *MethodSetCache
+// parameter to clients that care about performance.
+//
+func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet {
+       if cache == nil {
+               return types.NewMethodSet(T)
+       }
+       cache.mu.Lock()
+       defer cache.mu.Unlock()
+
+       switch T := T.(type) {
+       case *types.Named:
+               return cache.lookupNamed(T).value
+
+       case *types.Pointer:
+               if N, ok := T.Elem().(*types.Named); ok {
+                       return cache.lookupNamed(N).pointer
+               }
+       }
+
+       // all other types
+       // (The map uses pointer equivalence, not type identity.)
+       mset := cache.others[T]
+       if mset == nil {
+               mset = types.NewMethodSet(T)
+               if cache.others == nil {
+                       cache.others = make(map[types.Type]*types.MethodSet)
+               }
+               cache.others[T] = mset
+       }
+       return mset
+}
+
+func (cache *MethodSetCache) lookupNamed(named *types.Named) struct{ value, pointer *types.MethodSet } {
+       if cache.named == nil {
+               cache.named = make(map[*types.Named]struct{ value, pointer *types.MethodSet })
+       }
+       // Avoid recomputing mset(*T) for each distinct Pointer
+       // instance whose underlying type is a named type.
+       msets, ok := cache.named[named]
+       if !ok {
+               msets.value = types.NewMethodSet(named)
+               msets.pointer = types.NewMethodSet(types.NewPointer(named))
+               cache.named[named] = msets
+       }
+       return msets
+}
diff --git a/src/cmd/vendor/golang.org/x/tools/go/types/typeutil/ui.go b/src/cmd/vendor/golang.org/x/tools/go/types/typeutil/ui.go
new file mode 100644 (file)
index 0000000..9849c24
--- /dev/null
@@ -0,0 +1,52 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeutil
+
+// This file defines utilities for user interfaces that display types.
+
+import "go/types"
+
+// IntuitiveMethodSet returns the intuitive method set of a type T,
+// which is the set of methods you can call on an addressable value of
+// that type.
+//
+// The result always contains MethodSet(T), and is exactly MethodSet(T)
+// for interface types and for pointer-to-concrete types.
+// For all other concrete types T, the result additionally
+// contains each method belonging to *T if there is no identically
+// named method on T itself.
+//
+// This corresponds to user intuition about method sets;
+// this function is intended only for user interfaces.
+//
+// The order of the result is as for types.MethodSet(T).
+//
+func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection {
+       isPointerToConcrete := func(T types.Type) bool {
+               ptr, ok := T.(*types.Pointer)
+               return ok && !types.IsInterface(ptr.Elem())
+       }
+
+       var result []*types.Selection
+       mset := msets.MethodSet(T)
+       if types.IsInterface(T) || isPointerToConcrete(T) {
+               for i, n := 0, mset.Len(); i < n; i++ {
+                       result = append(result, mset.At(i))
+               }
+       } else {
+               // T is some other concrete type.
+               // Report methods of T and *T, preferring those of T.
+               pmset := msets.MethodSet(types.NewPointer(T))
+               for i, n := 0, pmset.Len(); i < n; i++ {
+                       meth := pmset.At(i)
+                       if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil {
+                               meth = m
+                       }
+                       result = append(result, meth)
+               }
+
+       }
+       return result
+}
index 6e077e4ae17c577c6267563b4d33c5f5ac790b25..b952b93c08a5905565c6888d9da86193fad05594 100644 (file)
                        "path": "golang.org/x/sys/windows/svc/mgr",
                        "revision": "90868a75fefd03942536221d7c0e2f84ec62a668",
                        "revisionTime": "2018-08-01T20:46:00Z"
+               },
+               {
+                       "checksumSHA1": "witNkDO7koGO7+oxpBMZBvoxz3c=",
+                       "path": "golang.org/x/tools/go/analysis",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "NPcubwbqmr2yGfGztLqizwbXrwM=",
+                       "path": "golang.org/x/tools/go/analysis/cmd/vet-lite",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "kWG+JiD2mA+2pnSeYJrKLHHgT+s=",
+                       "path": "golang.org/x/tools/go/analysis/internal/analysisflags",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "c4FY3+yRC2GHON66hIU254nQxA8=",
+                       "path": "golang.org/x/tools/go/analysis/internal/facts",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "Zuz7FbEMWtUNCKTA+ofVkkDl1Ic=",
+                       "path": "golang.org/x/tools/go/analysis/internal/unitchecker",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "fxi2KL0typcqGp87Qa9CxSp89Sk=",
+                       "path": "golang.org/x/tools/go/analysis/passes/asmdecl",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "AK5vKjJmQD1u/6v/s107upAF03w=",
+                       "path": "golang.org/x/tools/go/analysis/passes/assign",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "qRQNlOhRmTPecqsjJMf3Rxd7M1g=",
+                       "path": "golang.org/x/tools/go/analysis/passes/atomic",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "zhnbma06ExmGYTu5QaGAi5+QciY=",
+                       "path": "golang.org/x/tools/go/analysis/passes/bools",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "HWcvlzqG20E9BaG4/j3u9tnUyZ4=",
+                       "path": "golang.org/x/tools/go/analysis/passes/buildtag",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "ekVwfAw224CT/eBihMCzAOzIHiE=",
+                       "path": "golang.org/x/tools/go/analysis/passes/cgocall",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "dwtQdPi0Jb9BYVr0Gynh5NpCSz8=",
+                       "path": "golang.org/x/tools/go/analysis/passes/composite",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "6M8xb//gcLk3dSpRq6/fb/8Wvqk=",
+                       "path": "golang.org/x/tools/go/analysis/passes/copylock",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "DPQnIktTEV7cNBNDRIpg0OK6v9Q=",
+                       "path": "golang.org/x/tools/go/analysis/passes/ctrlflow",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "GJjZZhXYqMoGmym/2DpExqHP+Cw=",
+                       "path": "golang.org/x/tools/go/analysis/passes/httpresponse",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "Q76YV1xYtBCBsZk7uKXqih7iHL4=",
+                       "path": "golang.org/x/tools/go/analysis/passes/inspect",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "Y7NBmaqiGnVWf3yn16cwbWmgUhI=",
+                       "path": "golang.org/x/tools/go/analysis/passes/internal/analysisutil",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "Fjj6sV+qmJwvxGt/i8fLIma9Lzs=",
+                       "path": "golang.org/x/tools/go/analysis/passes/loopclosure",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "VZE2qx/m2esvfEreS0RCaVoWYhc=",
+                       "path": "golang.org/x/tools/go/analysis/passes/lostcancel",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "xf9nMwSbFWJXDC9W+Gnus+uU0Nw=",
+                       "path": "golang.org/x/tools/go/analysis/passes/nilfunc",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "PKByrfYKilYhkhAE01z5Om0Tr+w=",
+                       "path": "golang.org/x/tools/go/analysis/passes/pkgfact",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "BrlVsK8u6SPMyvoWdkwS4IAXVRI=",
+                       "path": "golang.org/x/tools/go/analysis/passes/printf",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "3w9Q99Mxrf8qEU+FH7lSyy5hwc4=",
+                       "path": "golang.org/x/tools/go/analysis/passes/shift",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "oeR6BB6OmfoYReHyNLEX9BbF1cI=",
+                       "path": "golang.org/x/tools/go/analysis/passes/stdmethods",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "rPSH7/W3vsomdmSIgdEDrzaCQyk=",
+                       "path": "golang.org/x/tools/go/analysis/passes/structtag",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "yHrglPUc3Ia12nwO0l/I0ArT3to=",
+                       "path": "golang.org/x/tools/go/analysis/passes/tests",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "en0VsP2OoNX40F/bNfbO6geSgi4=",
+                       "path": "golang.org/x/tools/go/analysis/passes/unreachable",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "641akvyeQUx5MqoHiyKwRps4vEg=",
+                       "path": "golang.org/x/tools/go/analysis/passes/unsafeptr",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "cm27h0jINv4jlgiHMn7q572FXTY=",
+                       "path": "golang.org/x/tools/go/analysis/passes/unusedresult",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "/bQnex6L/nyDuZCIIRbM6Is/IRY=",
+                       "path": "golang.org/x/tools/go/ast/astutil",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "qnZLWirp4hAxafiKvH+nnmgGf8Q=",
+                       "path": "golang.org/x/tools/go/ast/inspector",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "+g97ZSLGNNbqfBzpYje8fA5PvXs=",
+                       "path": "golang.org/x/tools/go/cfg",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "JWIR0GVqbDYhTW9mh4zpY/ve6Ro=",
+                       "path": "golang.org/x/tools/go/types/objectpath",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
+               },
+               {
+                       "checksumSHA1": "kyVWOWK3PkDKCtXRJffE60MrfOo=",
+                       "path": "golang.org/x/tools/go/types/typeutil",
+                       "revision": "c76e1ad98a635a7c069d7ab43d31fcf38381facc",
+                       "revisionTime": "2018-11-05T19:48:08Z"
                }
        ],
        "rootPath": "/cmd"