diff --git a/.golangci.example.yml b/.golangci.example.yml index b2296b839d6b..0c019f9d53c8 100644 --- a/.golangci.example.yml +++ b/.golangci.example.yml @@ -62,20 +62,6 @@ linters-settings: govet: # report about shadowed variables check-shadowing: true - - # Obtain type information from installed (to $GOPATH/pkg) package files: - # golangci-lint will execute `go install -i` and `go test -i` for analyzed packages - # before analyzing them. - # By default this option is disabled and govet gets type information by loader from source code. - # Loading from source code is slow, but it's done only once for all linters. - # Go-installing of packages first time is much slower than loading them from source code, - # therefore this option is disabled by default. - # But repeated installation is fast in go >= 1.10 because of build caching. - # Enable this option only if all conditions are met: - # 1. you use only "fast" linters (--fast e.g.): no program loading occurs - # 2. you use go >= 1.10 - # 3. you do repeated runs (false for CI) or cache $GOPATH/pkg or `go env GOCACHE` dir in CI. 
- use-installed-packages: false golint: # minimal confidence for issues, default is 0.8 min-confidence: 0.8 diff --git a/.golangci.yml b/.golangci.yml index c2b25a59d67b..9b34a15e622a 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -20,6 +20,8 @@ linters-settings: - github.com/sirupsen/logrus misspell: locale: US + lll: + line-length: 140 linters: enable-all: true diff --git a/Gopkg.lock b/Gopkg.lock index 9b6ef98e2d30..2318801b8152 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -265,7 +265,7 @@ [[projects]] branch = "master" - digest = "1:ba7223001c6b9de88a3e5beb6e441b3d559e09e3e9bc8ff866b2c0e72a6f7f8f" + digest = "1:0d459e50f313bce282754dfccb74a03b6caf7864eefd9660ffc2866c640b9ca1" name = "github.com/golangci/tools" packages = [ "go/callgraph", @@ -276,7 +276,7 @@ "go/ssa/ssautil", ] pruneopts = "UT" - revision = "98e75f53b4b9b9243ddc6f08b76f7682294b53b5" + revision = "01dd7756e01d993519ad7a8415ad8f2de8797be8" [[projects]] branch = "master" @@ -497,9 +497,12 @@ version = "v1.0.2" [[projects]] - digest = "1:f85e109eda8f6080877185d1c39e98dd8795e1780c08beca28304b87fd855a1c" + digest = "1:7e8d267900c7fa7f35129a2a37596e38ed0f11ca746d6d9ba727980ee138f9f6" name = "github.com/stretchr/testify" - packages = ["assert"] + packages = [ + "assert", + "require", + ] pruneopts = "UT" revision = "12b6f73e6084dad08a7c6e575284b177ecafbc71" version = "v1.2.1" @@ -549,7 +552,7 @@ [[projects]] branch = "master" - digest = "1:1f0808c71cfd0b33831391bc26c7ba2862138bfaa10b7f8ed695b2a0c67f896c" + digest = "1:e5f00913432ec90163061b29e00176fd12b71fe400adeb0cb4071273ada6542d" name = "golang.org/x/tools" packages = [ "go/ast/astutil", @@ -558,13 +561,15 @@ "go/internal/cgo", "go/internal/gcimporter", "go/loader", + "go/packages", "go/types/typeutil", "imports", "internal/fastwalk", "internal/gopathwalk", + "internal/semver", ] pruneopts = "UT" - revision = "5d4988d199e2aeefda3528f599e06410c43caa29" + revision = "3e7aa9e59977626dc60433e9aeadf1bb63d28295" [[projects]] digest = 
"1:342378ac4dcb378a5448dd723f0784ae519383532f5e70ade24132c4c8693202" @@ -634,7 +639,9 @@ "github.com/spf13/pflag", "github.com/spf13/viper", "github.com/stretchr/testify/assert", + "github.com/stretchr/testify/require", "golang.org/x/tools/go/loader", + "golang.org/x/tools/go/packages", "sourcegraph.com/sourcegraph/go-diff/diff", ] solver-name = "gps-cdcl" diff --git a/README.md b/README.md index 3738e3f4c400..94c812ba4b10 100644 --- a/README.md +++ b/README.md @@ -110,16 +110,16 @@ GolangCI-Lint can be used with zero configuration. By default the following lint ``` $ golangci-lint help linters Enabled by default linters: -govet: Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string [fast: false] -errcheck: Errcheck is a program for checking for unchecked errors in go programs. These unchecked errors can be critical bugs in some cases [fast: false] +govet: Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string [fast: true] +errcheck: Errcheck is a program for checking for unchecked errors in go programs. 
These unchecked errors can be critical bugs in some cases [fast: true] staticcheck: Staticcheck is a go vet on steroids, applying a ton of static analysis checks [fast: false] unused: Checks Go code for unused constants, variables, functions and types [fast: false] gosimple: Linter for Go source code that specializes in simplifying a code [fast: false] -structcheck: Finds an unused struct fields [fast: false] -varcheck: Finds unused global variables and constants [fast: false] +structcheck: Finds an unused struct fields [fast: true] +varcheck: Finds unused global variables and constants [fast: true] ineffassign: Detects when assignments to existing variables are not used [fast: true] -deadcode: Finds unused code [fast: false] -typecheck: Like the front-end of a Go compiler, parses and type-checks Go code [fast: false] +deadcode: Finds unused code [fast: true] +typecheck: Like the front-end of a Go compiler, parses and type-checks Go code [fast: true] ``` and the following linters are disabled by default: @@ -128,17 +128,17 @@ $ golangci-lint help linters ... Disabled by default linters: golint: Golint differs from gofmt. Gofmt reformats Go source code, whereas golint prints out style mistakes [fast: true] -gosec (gas): Inspects source code for security problems [fast: false] +gosec (gas): Inspects source code for security problems [fast: true] interfacer: Linter that suggests narrower interface types [fast: false] -unconvert: Remove unnecessary type conversions [fast: false] +unconvert: Remove unnecessary type conversions [fast: true] dupl: Tool for code clone detection [fast: true] goconst: Finds repeated strings that could be replaced by a constant [fast: true] gocyclo: Computes and checks the cyclomatic complexity of functions [fast: true] gofmt: Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification [fast: true] goimports: Goimports does everything that gofmt does. 
Additionally it checks unused imports [fast: true] -maligned: Tool to detect Go structs that would take less memory if their fields were sorted [fast: false] +maligned: Tool to detect Go structs that would take less memory if their fields were sorted [fast: true] megacheck: 3 sub-linters in one: unused, gosimple and staticcheck [fast: false] -depguard: Go linter that checks if package imports are in a list of acceptable packages [fast: false] +depguard: Go linter that checks if package imports are in a list of acceptable packages [fast: true] misspell: Finds commonly misspelled English words in comments [fast: true] lll: Reports long lines [fast: true] unparam: Reports unused function parameters [fast: false] @@ -369,7 +369,7 @@ Flags: --enable-all Enable all linters --disable-all Disable all linters -p, --presets strings Enable presets (bugs|unused|format|style|complexity|performance) of linters. Run 'golangci-lint linters' to see them. This option implies option --disable-all - --fast Run only fast linters from enabled linters set + --fast Run only fast linters from enabled linters set (first run won't be fast) -e, --exclude strings Exclude issue by regexp --exclude-use-default Use or not use default excludes: # errcheck: Almost all programs ignore errors on these functions and in most cases it's ok @@ -499,20 +499,6 @@ linters-settings: govet: # report about shadowed variables check-shadowing: true - - # Obtain type information from installed (to $GOPATH/pkg) package files: - # golangci-lint will execute `go install -i` and `go test -i` for analyzed packages - # before analyzing them. - # By default this option is disabled and govet gets type information by loader from source code. - # Loading from source code is slow, but it's done only once for all linters. - # Go-installing of packages first time is much slower than loading them from source code, - # therefore this option is disabled by default. 
- # But repeated installation is fast in go >= 1.10 because of build caching. - # Enable this option only if all conditions are met: - # 1. you use only "fast" linters (--fast e.g.): no program loading occurs - # 2. you use go >= 1.10 - # 3. you do repeated runs (false for CI) or cache $GOPATH/pkg or `go env GOCACHE` dir in CI. - use-installed-packages: false golint: # minimal confidence for issues, default is 0.8 min-confidence: 0.8 @@ -654,6 +640,8 @@ linters-settings: - github.com/sirupsen/logrus misspell: locale: US + lll: + line-length: 140 linters: enable-all: true @@ -698,9 +686,7 @@ We don't recommend vendoring `golangci-lint` in your repo: you will get troubles **Does I need to run `go install`?** -No, you don't need to do it anymore. We will run `go install -i` and `go test -i` -for analyzed packages ourselves. We will run them only -if option `govet.use-installed-packages` is `true`. +No, you don't need to do it anymore. **`golangci-lint` doesn't work** @@ -708,7 +694,12 @@ if option `govet.use-installed-packages` is `true`. 2. Run it with `-v` option and check the output. 3. If it doesn't help create a [GitHub issue](https://github.com/golangci/golangci-lint/issues/new) with the output from the error and #2 above. +**Why is running with `--fast` slow on the first run?** +Because the first run caches type information. All subsequent runs will be fast. +Usually this option is used during development on a local machine where compilation has already been performed. + # Thanks + Thanks to [alecthomas/gometalinter](https://github.com/alecthomas/gometalinter) for inspiration and amazing work. Thanks to [bradleyfalzon/revgrep](https://github.com/bradleyfalzon/revgrep) for cool diff tool. 
diff --git a/README.tmpl.md b/README.tmpl.md index a4195afa5817..19c5649d376d 100644 --- a/README.tmpl.md +++ b/README.tmpl.md @@ -358,9 +358,7 @@ We don't recommend vendoring `golangci-lint` in your repo: you will get troubles **Does I need to run `go install`?** -No, you don't need to do it anymore. We will run `go install -i` and `go test -i` -for analyzed packages ourselves. We will run them only -if option `govet.use-installed-packages` is `true`. +No, you don't need to do it anymore. **`golangci-lint` doesn't work** @@ -368,7 +366,12 @@ if option `govet.use-installed-packages` is `true`. 2. Run it with `-v` option and check the output. 3. If it doesn't help create a [GitHub issue](https://github.com/golangci/golangci-lint/issues/new) with the output from the error and #2 above. +**Why is running with `--fast` slow on the first run?** +Because the first run caches type information. All subsequent runs will be fast. +Usually this option is used during development on a local machine where compilation has already been performed. + # Thanks + Thanks to [alecthomas/gometalinter](https://github.com/alecthomas/gometalinter) for inspiration and amazing work. Thanks to [bradleyfalzon/revgrep](https://github.com/bradleyfalzon/revgrep) for cool diff tool. 
diff --git a/go.mod b/go.mod index ffd3ecca1536..8c93a68b11ca 100644 --- a/go.mod +++ b/go.mod @@ -28,7 +28,7 @@ require ( github.com/golangci/misspell v0.0.0-20180809174111-950f5d19e770 github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21 github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0 - github.com/golangci/tools v0.0.0-20180902102414-98e75f53b4b9 + github.com/golangci/tools v0.0.0-20180902102414-01dd7756e01d github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 github.com/golangci/unparam v0.0.0-20180902112548-7ad9dbcccc16 github.com/hashicorp/hcl v0.0.0-20180404174102-ef8a98b0bbce // indirect @@ -56,7 +56,7 @@ require ( github.com/spf13/viper v1.0.2 github.com/stretchr/testify v1.2.1 golang.org/x/crypto v0.0.0-20180505025534-4ec37c66abab // indirect - golang.org/x/tools v0.0.0-20180831211245-5d4988d199e2 + golang.org/x/tools v0.0.0-20180831211245-3e7aa9e59977 gopkg.in/airbrake/gobrake.v2 v2.0.9 // indirect gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2 // indirect sourcegraph.com/sourcegraph/go-diff v0.0.0-20171119081133-3f415a150aec diff --git a/go.sum b/go.sum index 40167bd0f3ce..5b8c97485100 100644 --- a/go.sum +++ b/go.sum @@ -59,6 +59,7 @@ github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21 h1:leSNB7iYzLYSS github.com/golangci/prealloc v0.0.0-20180630174525-215b22d4de21/go.mod h1:tf5+bzsHdTM0bsB7+8mt0GUMvjCgwLpTapNZHU8AajI= github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0 h1:HVfrLniijszjS1aiNg8JbBMO2+E1WIQ+j/gL4SQqGPg= github.com/golangci/revgrep v0.0.0-20180526074752-d9c87f5ffaf0/go.mod h1:qOQCunEYvmd/TLamH+7LlVccLvUH5kZNhbCgTHoBbp4= +github.com/golangci/tools v0.0.0-20180902102414-01dd7756e01d/go.mod h1:zgj6NOYXOC1cexsdtDceI4/mj3aXK4JOVg9AV3C5LWI= github.com/golangci/tools v0.0.0-20180902102414-98e75f53b4b9 h1:JGHGJqnbD9OMyjgQqyja7DZd0/to1LKFpN31Fq8byxc= github.com/golangci/tools v0.0.0-20180902102414-98e75f53b4b9/go.mod h1:zgj6NOYXOC1cexsdtDceI4/mj3aXK4JOVg9AV3C5LWI= 
github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys= @@ -131,6 +132,7 @@ golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180826000951-f6ba57429505/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180831211245-3e7aa9e59977/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180831211245-5d4988d199e2/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180831211245-7ca132754999 h1:mf2VYfMpSMTlp0I/UXrX13w5LejDx34QeUUHH4TrUA8= golang.org/x/tools v0.0.0-20180831211245-7ca132754999/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/pkg/commands/executor.go b/pkg/commands/executor.go index 17a37f4e1f85..55ff865d5fcf 100644 --- a/pkg/commands/executor.go +++ b/pkg/commands/executor.go @@ -2,6 +2,8 @@ package commands import ( "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/goutil" + "github.com/golangci/golangci-lint/pkg/lint" "github.com/golangci/golangci-lint/pkg/lint/lintersdb" "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/report" @@ -21,6 +23,8 @@ type Executor struct { reportData report.Data DBManager *lintersdb.Manager EnabledLintersSet *lintersdb.EnabledSet + contextLoader *lint.ContextLoader + goenv *goutil.Env } func NewExecutor(version, commit, date string) *Executor { @@ -65,6 +69,8 @@ func NewExecutor(version, commit, date string) *Executor { e.EnabledLintersSet = lintersdb.NewEnabledSet(e.DBManager, lintersdb.NewValidator(e.DBManager), e.log.Child("lintersdb"), e.cfg) + e.goenv = goutil.NewEnv(e.log.Child("goenv")) + e.contextLoader = lint.NewContextLoader(e.cfg, 
e.log.Child("loader"), e.goenv) return e } diff --git a/pkg/commands/help.go b/pkg/commands/help.go index 109591a49c41..ffcfc2d52223 100644 --- a/pkg/commands/help.go +++ b/pkg/commands/help.go @@ -38,7 +38,7 @@ func printLinterConfigs(lcs []linter.Config) { altNamesStr = fmt.Sprintf(" (%s)", strings.Join(lc.AlternativeNames, ", ")) } fmt.Fprintf(logutils.StdOut, "%s%s: %s [fast: %t]\n", color.YellowString(lc.Name()), - altNamesStr, lc.Linter.Desc(), !lc.DoesFullImport) + altNamesStr, lc.Linter.Desc(), !lc.NeedsSSARepr) } } diff --git a/pkg/commands/run.go b/pkg/commands/run.go index 72095666bf4a..6ad7a6a2e20c 100644 --- a/pkg/commands/run.go +++ b/pkg/commands/run.go @@ -141,7 +141,7 @@ func initFlagSet(fs *pflag.FlagSet, cfg *config.Config, m *lintersdb.Manager) { fs.StringSliceVarP(&lc.Presets, "presets", "p", nil, wh(fmt.Sprintf("Enable presets (%s) of linters. Run 'golangci-lint linters' to see "+ "them. This option implies option --disable-all", strings.Join(m.AllPresets(), "|")))) - fs.BoolVar(&lc.Fast, "fast", false, wh("Run only fast linters from enabled linters set")) + fs.BoolVar(&lc.Fast, "fast", false, wh("Run only fast linters from enabled linters set (first run won't be fast)")) // Issues config ic := &cfg.Issues @@ -257,12 +257,13 @@ func (e *Executor) runAnalysis(ctx context.Context, args []string) (<-chan resul e.reportData.AddLinter(lc.Name(), isEnabled, lc.EnabledByDefault) } - lintCtx, err := lint.LoadContext(enabledLinters, e.cfg, e.log.Child("load")) + lintCtx, err := e.contextLoader.Load(ctx, enabledLinters) if err != nil { return nil, errors.Wrap(err, "context loading failed") } + lintCtx.Log = e.log.Child("linters context") - runner, err := lint.NewRunner(lintCtx.ASTCache, e.cfg, e.log.Child("runner")) + runner, err := lint.NewRunner(lintCtx.ASTCache, e.cfg, e.log.Child("runner"), e.goenv) if err != nil { return nil, err } @@ -303,6 +304,10 @@ func (e *Executor) setExitCodeIfIssuesFound(issues <-chan result.Issue) <-chan r } func (e 
*Executor) runAndPrint(ctx context.Context, args []string) error { + if err := e.goenv.Discover(ctx); err != nil { + e.log.Warnf("Failed to discover go env: %s", err) + } + if !logutils.HaveDebugTag("linters_output") { // Don't allow linters and loader to print anything log.SetOutput(ioutil.Discard) @@ -379,8 +384,20 @@ func (e *Executor) executeRun(cmd *cobra.Command, args []string) { } } - if e.exitCode == exitcodes.Success && ctx.Err() != nil { + e.setupExitCode(ctx) +} + +func (e *Executor) setupExitCode(ctx context.Context) { + if ctx.Err() != nil { e.exitCode = exitcodes.Timeout + e.log.Errorf("Deadline exceeded: try increase it by passing --deadline option") + } + + if e.exitCode == exitcodes.Success && + os.Getenv("GL_TEST_RUN") == "1" && + len(e.reportData.Warnings) != 0 { + + e.exitCode = exitcodes.WarningInTest } } diff --git a/pkg/config/config.go b/pkg/config/config.go index 883f531d9847..d3a99dfd86aa 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -119,8 +119,7 @@ type Run struct { type LintersSettings struct { Govet struct { - CheckShadowing bool `mapstructure:"check-shadowing"` - UseInstalledPackages bool `mapstructure:"use-installed-packages"` + CheckShadowing bool `mapstructure:"check-shadowing"` } Golint struct { MinConfidence float64 `mapstructure:"min-confidence"` diff --git a/pkg/golinters/dupl.go b/pkg/golinters/dupl.go index f48c32afc579..a0d8ecc61da8 100644 --- a/pkg/golinters/dupl.go +++ b/pkg/golinters/dupl.go @@ -6,8 +6,10 @@ import ( "go/token" duplAPI "github.com/golangci/dupl" + "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/result" + "github.com/pkg/errors" ) type Dupl struct{} @@ -21,7 +23,7 @@ func (Dupl) Desc() string { } func (d Dupl) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) { - issues, err := duplAPI.Run(lintCtx.PkgProgram.Files(lintCtx.Cfg.Run.AnalyzeTests), 
lintCtx.Settings().Dupl.Threshold) + issues, err := duplAPI.Run(getAllFileNames(lintCtx), lintCtx.Settings().Dupl.Threshold) if err != nil { return nil, err } @@ -32,7 +34,11 @@ func (d Dupl) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, res := make([]result.Issue, 0, len(issues)) for _, i := range issues { - dupl := fmt.Sprintf("%s:%d-%d", i.To.Filename(), i.To.LineStart(), i.To.LineEnd()) + toFilename, err := fsutils.ShortestRelPath(i.To.Filename(), "") + if err != nil { + return nil, errors.Wrapf(err, "failed to get shortest rel path for %q", i.To.Filename()) + } + dupl := fmt.Sprintf("%s:%d-%d", toFilename, i.To.LineStart(), i.To.LineEnd()) text := fmt.Sprintf("%d-%d lines are duplicate of %s", i.From.LineStart(), i.From.LineEnd(), formatCode(dupl, lintCtx.Cfg)) diff --git a/pkg/golinters/goconst.go b/pkg/golinters/goconst.go index fdc7c63b39e7..7ac8b0f6bcea 100644 --- a/pkg/golinters/goconst.go +++ b/pkg/golinters/goconst.go @@ -26,8 +26,8 @@ func (lint Goconst) Run(ctx context.Context, lintCtx *linter.Context) ([]result. MinStringLength: lintCtx.Settings().Goconst.MinStringLen, MinOccurrences: lintCtx.Settings().Goconst.MinOccurrencesCount, } - for _, pkg := range lintCtx.PkgProgram.Packages() { - files, fset, err := getASTFilesForPkg(lintCtx, &pkg) + for _, pkg := range lintCtx.Packages { + files, fset, err := getASTFilesForGoPkg(lintCtx, pkg) if err != nil { return nil, err } diff --git a/pkg/golinters/gofmt.go b/pkg/golinters/gofmt.go index 3a5dd3145335..5bd622ff4e83 100644 --- a/pkg/golinters/gofmt.go +++ b/pkg/golinters/gofmt.go @@ -105,7 +105,7 @@ func (g Gofmt) extractIssuesFromPatch(patch string, log logutils.Log) ([]result. 
func (g Gofmt) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) { var issues []result.Issue - for _, f := range lintCtx.PkgProgram.Files(lintCtx.Cfg.Run.AnalyzeTests) { + for _, f := range getAllFileNames(lintCtx) { var diff []byte var err error if g.UseGoimports { diff --git a/pkg/golinters/golint.go b/pkg/golinters/golint.go index 170337c44d2a..cf2515183671 100644 --- a/pkg/golinters/golint.go +++ b/pkg/golinters/golint.go @@ -24,8 +24,8 @@ func (Golint) Desc() string { func (g Golint) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) { var issues []result.Issue var lintErr error - for _, pkg := range lintCtx.PkgProgram.Packages() { - files, fset, err := getASTFilesForPkg(lintCtx, &pkg) + for _, pkg := range lintCtx.Packages { + files, fset, err := getASTFilesForGoPkg(lintCtx, pkg) if err != nil { return nil, err } diff --git a/pkg/golinters/govet.go b/pkg/golinters/govet.go index e8d83564ab52..64335b97f64d 100644 --- a/pkg/golinters/govet.go +++ b/pkg/golinters/govet.go @@ -2,20 +2,12 @@ package golinters import ( "context" - "fmt" "go/ast" "go/token" - "os" - "os/exec" - "strings" - "time" "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/goutils" "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/result" - "github.com/golangci/golangci-lint/pkg/timeutils" govetAPI "github.com/golangci/govet" ) @@ -30,19 +22,12 @@ func (Govet) Desc() string { "such as Printf calls whose arguments do not align with the format string" } -func (g Govet) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) { +func (g Govet) Run(_ context.Context, lintCtx *linter.Context) ([]result.Issue, error) { var govetIssues []govetAPI.Issue var err error - if lintCtx.Settings().Govet.UseInstalledPackages { - govetIssues, err = g.runOnInstalledPackages(ctx, lintCtx) - if err != nil { - return nil, 
fmt.Errorf("can't run govet on installed packages: %s", err) - } - } else { - govetIssues, err = g.runOnSourcePackages(ctx, lintCtx) - if err != nil { - return nil, fmt.Errorf("can't run govet on source packages: %s", err) - } + govetIssues, err = g.runImpl(lintCtx) + if err != nil { + return nil, err } if len(govetIssues) == 0 { @@ -60,175 +45,7 @@ func (g Govet) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue return res, nil } -func (g Govet) runOnInstalledPackages(ctx context.Context, lintCtx *linter.Context) ([]govetAPI.Issue, error) { - if err := g.installPackages(ctx, lintCtx); err != nil { - return nil, fmt.Errorf("can't install packages (it's required for govet): %s", err) - } - - // TODO: check .S asm files: govet can do it if pass dirs - var govetIssues []govetAPI.Issue - for _, pkg := range lintCtx.PkgProgram.Packages() { - var astFiles []*ast.File - var fset *token.FileSet - for _, fname := range pkg.Files(lintCtx.Cfg.Run.AnalyzeTests) { - af := lintCtx.ASTCache.Get(fname) - if af == nil || af.Err != nil { - return nil, fmt.Errorf("can't get parsed file %q from ast cache: %#v", fname, af) - } - astFiles = append(astFiles, af.F) - fset = af.Fset - } - if len(astFiles) == 0 { - continue - } - issues, err := govetAPI.Analyze(astFiles, fset, nil, - lintCtx.Settings().Govet.CheckShadowing, getPath) - if err != nil { - return nil, err - } - govetIssues = append(govetIssues, issues...) 
- } - - return govetIssues, nil -} - -func (g Govet) installPackages(ctx context.Context, lintCtx *linter.Context) error { - inGoRoot, err := goutils.InGoRoot() - if err != nil { - return fmt.Errorf("can't check whether we are in $GOROOT: %s", err) - } - - if inGoRoot { - // Go source packages already should be installed into $GOROOT/pkg with go distribution - lintCtx.Log.Infof("In $GOROOT, don't install packages") - return nil - } - - if err := g.installNonTestPackages(ctx, lintCtx); err != nil { - return err - } - - if err := g.installTestDependencies(ctx, lintCtx); err != nil { - return err - } - - return nil -} - -func (g Govet) installTestDependencies(ctx context.Context, lintCtx *linter.Context) error { - log := lintCtx.Log - packages := lintCtx.PkgProgram.Packages() - var testDirs []string - for _, pkg := range packages { - dir := pkg.Dir() - if dir == "" { - log.Warnf("Package %#v has empty dir", pkg) - continue - } - - if !strings.HasPrefix(dir, ".") { - // go install can't work without that - dir = "./" + dir - } - - if len(pkg.TestFiles()) != 0 { - testDirs = append(testDirs, dir) - } - } - - if len(testDirs) == 0 { - log.Infof("No test files in packages %#v", packages) - return nil - } - - args := append([]string{"test", "-i"}, testDirs...) - return runGoCommand(ctx, log, args...) 
-} - -func (g Govet) installNonTestPackages(ctx context.Context, lintCtx *linter.Context) error { - log := lintCtx.Log - packages := lintCtx.PkgProgram.Packages() - var importPaths []string - for _, pkg := range packages { - if pkg.IsTestOnly() { - // test-only package will be processed by installTestDependencies - continue - } - - dir := pkg.Dir() - if dir == "" { - log.Warnf("Package %#v has empty dir", pkg) - continue - } - - if !strings.HasPrefix(dir, ".") { - // go install can't work without that - dir = "./" + dir - } - - importPaths = append(importPaths, dir) - } - - if len(importPaths) == 0 { - log.Infof("No packages to install, all packages: %#v", packages) - return nil - } - - // we need type information of dependencies of analyzed packages - // so we pass -i option to install it - if err := runGoInstall(ctx, log, importPaths, true); err != nil { - // try without -i option: go < 1.10 doesn't support this option - // and install dependencies by default. - return runGoInstall(ctx, log, importPaths, false) - } - - return nil -} - -func runGoInstall(ctx context.Context, log logutils.Log, importPaths []string, withIOption bool) error { - args := []string{"install"} - if withIOption { - args = append(args, "-i") - } - args = append(args, importPaths...) - - return runGoCommand(ctx, log, args...) -} - -func runGoCommand(ctx context.Context, log logutils.Log, args ...string) error { - argsStr := strings.Join(args, " ") - defer timeutils.Track(time.Now(), log, "go %s", argsStr) - - cmd := exec.CommandContext(ctx, "go", args...) - cmd.Env = append([]string{}, os.Environ()...) 
- cmd.Env = append(cmd.Env, "GOMAXPROCS=1") // don't consume more than 1 cpu - - // use .Output but not .Run to capture StdErr in err - _, err := cmd.Output() - if err != nil { - var stderr string - if ee, ok := err.(*exec.ExitError); ok && ee.Stderr != nil { - stderr = ": " + string(ee.Stderr) - } - - return fmt.Errorf("can't run [go %s]: %s%s", argsStr, err, stderr) - } - - return nil -} - -func filterFiles(files []*ast.File, fset *token.FileSet) []*ast.File { - newFiles := make([]*ast.File, 0, len(files)) - for _, f := range files { - if !goutils.IsCgoFilename(fset.Position(f.Pos()).Filename) { - newFiles = append(newFiles, f) - } - } - - return newFiles -} - -func (g Govet) runOnSourcePackages(_ context.Context, lintCtx *linter.Context) ([]govetAPI.Issue, error) { +func (g Govet) runImpl(lintCtx *linter.Context) ([]govetAPI.Issue, error) { // TODO: check .S asm files: govet can do it if pass dirs var govetIssues []govetAPI.Issue for _, pkg := range lintCtx.Program.InitialPackages() { @@ -236,8 +53,7 @@ func (g Govet) runOnSourcePackages(_ context.Context, lintCtx *linter.Context) ( continue } - filteredFiles := filterFiles(pkg.Files, lintCtx.Program.Fset) - issues, err := govetAPI.Analyze(filteredFiles, lintCtx.Program.Fset, pkg, + issues, err := govetAPI.Analyze(pkg.Files, lintCtx.Program.Fset, pkg, lintCtx.Settings().Govet.CheckShadowing, getPath) if err != nil { return nil, err diff --git a/pkg/golinters/ineffassign.go b/pkg/golinters/ineffassign.go index a18e76314dbc..72a9f310a284 100644 --- a/pkg/golinters/ineffassign.go +++ b/pkg/golinters/ineffassign.go @@ -20,7 +20,7 @@ func (Ineffassign) Desc() string { } func (lint Ineffassign) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) { - issues := ineffassignAPI.Run(lintCtx.PkgProgram.Files(lintCtx.Cfg.Run.AnalyzeTests)) + issues := ineffassignAPI.Run(getAllFileNames(lintCtx)) if len(issues) == 0 { return nil, nil } diff --git a/pkg/golinters/lll.go b/pkg/golinters/lll.go index 
09b1f926dcb2..ad90830df4ad 100644 --- a/pkg/golinters/lll.go +++ b/pkg/golinters/lll.go @@ -83,7 +83,7 @@ func (lint Lll) getIssuesForFile(filename string, maxLineLen int, tabSpaces stri func (lint Lll) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) { var res []result.Issue spaces := strings.Repeat(" ", lintCtx.Settings().Lll.TabWidth) - for _, f := range lintCtx.PkgProgram.Files(lintCtx.Cfg.Run.AnalyzeTests) { + for _, f := range getAllFileNames(lintCtx) { issues, err := lint.getIssuesForFile(f, lintCtx.Settings().Lll.LineLength, spaces) if err != nil { return nil, err diff --git a/pkg/golinters/megacheck.go b/pkg/golinters/megacheck.go index 792fd61f17ed..d33d9e5d7f50 100644 --- a/pkg/golinters/megacheck.go +++ b/pkg/golinters/megacheck.go @@ -16,8 +16,11 @@ import ( "github.com/golangci/golangci-lint/pkg/result" "github.com/golangci/tools/go/ssa" "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" ) +const megacheckName = "megacheck" + type Megacheck struct { UnusedEnabled bool GosimpleEnabled bool @@ -41,7 +44,7 @@ func (m Megacheck) Name() string { } if len(names) == 3 { - return "megacheck" // all enabled + return megacheckName // all enabled } return fmt.Sprintf("megacheck.{%s}", strings.Join(names, ",")) @@ -58,7 +61,7 @@ func (m Megacheck) Desc() string { return descs[m.Name()] } -func prettifyCompilationError(err error) error { +func prettifyCompilationError(err packages.Error) error { i, _ := TypeCheck{}.parseError(err) if i == nil { return err @@ -79,15 +82,15 @@ func prettifyCompilationError(err error) error { func (m Megacheck) Run(ctx context.Context, lintCtx *linter.Context) ([]result.Issue, error) { if len(lintCtx.NotCompilingPackages) != 0 { - var packages []string - var errors []error + var errPkgs []string + var errors []packages.Error for _, p := range lintCtx.NotCompilingPackages { - packages = append(packages, p.String()) + errPkgs = append(errPkgs, p.String()) errors = append(errors, p.Errors...) 
} warnText := fmt.Sprintf("Can't run megacheck because of compilation errors in packages %s", - packages) + errPkgs) if len(errors) != 0 { warnText += fmt.Sprintf(": %s", prettifyCompilationError(errors[0])) if len(errors) > 1 { @@ -146,6 +149,6 @@ func runMegacheck(program *loader.Program, ssaProg *ssa.Program, conf *loader.Co }) } - fs := lintutil.FlagSet("megacheck") + fs := lintutil.FlagSet(megacheckName) return lintutil.ProcessFlagSet(checkers, fs, program, ssaProg, conf) } diff --git a/pkg/golinters/misspell.go b/pkg/golinters/misspell.go index 896134a5d71f..46fcccc99b71 100644 --- a/pkg/golinters/misspell.go +++ b/pkg/golinters/misspell.go @@ -43,7 +43,7 @@ func (lint Misspell) Run(ctx context.Context, lintCtx *linter.Context) ([]result r.Compile() var res []result.Issue - for _, f := range lintCtx.PkgProgram.Files(lintCtx.Cfg.Run.AnalyzeTests) { + for _, f := range getAllFileNames(lintCtx) { fileContent, err := ioutil.ReadFile(f) if err != nil { return nil, fmt.Errorf("can't read file %s: %s", f, err) diff --git a/pkg/golinters/typecheck.go b/pkg/golinters/typecheck.go index 18ed6b7abc69..c893196c9f63 100644 --- a/pkg/golinters/typecheck.go +++ b/pkg/golinters/typecheck.go @@ -2,7 +2,6 @@ package golinters import ( "context" - "errors" "fmt" "go/token" "strconv" @@ -10,6 +9,8 @@ import ( "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/result" + "github.com/pkg/errors" + "golang.org/x/tools/go/packages" ) type TypeCheck struct{} @@ -22,13 +23,11 @@ func (TypeCheck) Desc() string { return "Like the front-end of a Go compiler, parses and type-checks Go code" } -func (lint TypeCheck) parseError(srcErr error) (*result.Issue, error) { - // TODO: cast srcErr to types.Error and just use it - - // file:line(:colon): message - parts := strings.Split(srcErr.Error(), ":") - if len(parts) < 3 { - return nil, errors.New("too few colons") +func (lint TypeCheck) parseError(srcErr packages.Error) (*result.Issue, error) { + // 
file:line(:colon) + parts := strings.Split(srcErr.Pos, ":") + if len(parts) == 1 { + return nil, errors.New("no colons") } file := parts[0] @@ -38,30 +37,20 @@ func (lint TypeCheck) parseError(srcErr error) (*result.Issue, error) { } var column int - var message string if len(parts) == 3 { // no column - message = parts[2] - } else { column, err = strconv.Atoi(parts[2]) - if err == nil { // column was parsed - message = strings.Join(parts[3:], ":") - } else { - message = strings.Join(parts[2:], ":") + if err != nil { + return nil, errors.Wrapf(err, "failed to parse column from %q", parts[2]) } } - message = strings.TrimSpace(message) - if message == "" { - return nil, fmt.Errorf("empty message") - } - return &result.Issue{ Pos: token.Position{ Filename: file, Line: line, Column: column, }, - Text: markIdentifiers(message), + Text: srcErr.Msg, FromLinter: lint.Name(), }, nil } @@ -72,7 +61,10 @@ func (lint TypeCheck) Run(ctx context.Context, lintCtx *linter.Context) ([]resul for _, err := range pkg.Errors { i, perr := lint.parseError(err) if perr != nil { - lintCtx.Log.Warnf("Can't parse type error %s: %s", err, perr) + res = append(res, result.Issue{ + Text: err.Msg, + FromLinter: lint.Name(), + }) } else { res = append(res, *i) } diff --git a/pkg/golinters/typecheck_test.go b/pkg/golinters/typecheck_test.go index bf420a875983..032180f3d97b 100644 --- a/pkg/golinters/typecheck_test.go +++ b/pkg/golinters/typecheck_test.go @@ -1,11 +1,11 @@ package golinters import ( - "errors" "fmt" "testing" "github.com/stretchr/testify/assert" + "golang.org/x/tools/go/packages" ) func TestParseError(t *testing.T) { @@ -13,29 +13,18 @@ func TestParseError(t *testing.T) { in, out string good bool }{ - {"f.go:1:2: text", "", true}, - {"f.go:1:2: text: with: colons", "", true}, - - {"f.go:1:2:text wo leading space", "f.go:1:2: text wo leading space", true}, - - {"f.go:1:2:", "", false}, - {"f.go:1:2: ", "", false}, - - {"f.go:1:2", "f.go:1: 2", true}, - {"f.go:1: text no column", "", 
true}, - {"f.go:1: text no column: but with colon", "", true}, - {"f.go:1:text no column", "f.go:1: text no column", true}, - - {"f.go: no line", "", false}, - {"f.go: 1: text", "", false}, - - {"f.go:", "", false}, + {"f.go:1:2", "", true}, + {"f.go:1", "", true}, {"f.go", "", false}, + {"f.go: 1", "", false}, } lint := TypeCheck{} for _, c := range cases { - i, _ := lint.parseError(errors.New(c.in)) + i, _ := lint.parseError(packages.Error{ + Pos: c.in, + Msg: "msg", + }) if !c.good { assert.Nil(t, i) continue @@ -47,7 +36,7 @@ func TestParseError(t *testing.T) { if i.Pos.Column != 0 { pos += fmt.Sprintf(":%d", i.Pos.Column) } - out := fmt.Sprintf("%s: %s", pos, i.Text) + out := pos expOut := c.out if expOut == "" { expOut = c.in @@ -55,5 +44,6 @@ func TestParseError(t *testing.T) { assert.Equal(t, expOut, out) assert.Equal(t, "typecheck", i.FromLinter) + assert.Equal(t, "msg", i.Text) } } diff --git a/pkg/golinters/utils.go b/pkg/golinters/util.go similarity index 82% rename from pkg/golinters/utils.go rename to pkg/golinters/util.go index fe9ef1be3ebc..09938d089f46 100644 --- a/pkg/golinters/utils.go +++ b/pkg/golinters/util.go @@ -10,7 +10,7 @@ import ( "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/packages" + gopackages "golang.org/x/tools/go/packages" ) func formatCode(code string, _ *config.Config) string { @@ -88,11 +88,25 @@ func markIdentifiers(s string) string { return s } -func getASTFilesForPkg(ctx *linter.Context, pkg *packages.Package) ([]*ast.File, *token.FileSet, error) { - filenames := pkg.Files(ctx.Cfg.Run.AnalyzeTests) - files := make([]*ast.File, 0, len(filenames)) +func getAllFileNames(ctx *linter.Context) []string { + var ret []string + uniqFiles := map[string]bool{} // files are duplicated for test packages + for _, pkg := range ctx.Packages { + for _, f := range pkg.GoFiles { + if uniqFiles[f] { + continue + } + uniqFiles[f] = true + ret = 
append(ret, f) + } + } + return ret +} + +func getASTFilesForGoPkg(ctx *linter.Context, pkg *gopackages.Package) ([]*ast.File, *token.FileSet, error) { + var files []*ast.File var fset *token.FileSet - for _, filename := range filenames { + for _, filename := range pkg.GoFiles { f := ctx.ASTCache.Get(filename) if f == nil { return nil, nil, fmt.Errorf("no AST for file %s in cache: %+v", filename, *ctx.ASTCache) diff --git a/pkg/goutil/env.go b/pkg/goutil/env.go new file mode 100644 index 000000000000..acc472b2137f --- /dev/null +++ b/pkg/goutil/env.go @@ -0,0 +1,65 @@ +package goutil + +import ( + "bufio" + "bytes" + "context" + "os" + "os/exec" + "strconv" + "strings" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/pkg/errors" +) + +type Env struct { + vars map[string]string + log logutils.Log + debugf logutils.DebugFunc +} + +func NewEnv(log logutils.Log) *Env { + return &Env{ + vars: map[string]string{}, + log: log, + debugf: logutils.Debug("env"), + } +} + +func (e *Env) Discover(ctx context.Context) error { + out, err := exec.CommandContext(ctx, "go", "env").Output() + if err != nil { + return errors.Wrap(err, "failed to run 'go env'") + } + + scanner := bufio.NewScanner(bytes.NewReader(out)) + scanner.Split(bufio.ScanLines) + for scanner.Scan() { + parts := strings.SplitN(scanner.Text(), "=", 2) + if len(parts) != 2 { + e.log.Warnf("Can't parse go env line %q: got %d parts", scanner.Text(), len(parts)) + continue + } + + v, err := strconv.Unquote(parts[1]) + if err != nil { + e.log.Warnf("Invalid key %q with value %q: %s", parts[0], parts[1], err) + continue + } + + e.vars[parts[0]] = v + } + + e.debugf("Read go env: %#v", e.vars) + return nil +} + +func (e Env) Get(k string) string { + envValue := os.Getenv(k) + if envValue != "" { + return envValue + } + + return e.vars[k] +} diff --git a/pkg/goutils/goutils.go b/pkg/goutils/goutils.go deleted file mode 100644 index 9e9d76ca4501..000000000000 --- a/pkg/goutils/goutils.go +++ /dev/null @@ 
-1,57 +0,0 @@ -package goutils - -import ( - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "sync" - - "github.com/golangci/golangci-lint/pkg/fsutils" -) - -var discoverGoRootOnce sync.Once -var discoveredGoRoot string -var discoveredGoRootError error - -func DiscoverGoRoot() (string, error) { - discoverGoRootOnce.Do(func() { - discoveredGoRoot, discoveredGoRootError = discoverGoRootImpl() - }) - - return discoveredGoRoot, discoveredGoRootError -} - -func discoverGoRootImpl() (string, error) { - goroot := os.Getenv("GOROOT") - if goroot != "" { - return goroot, nil - } - - output, err := exec.Command("go", "env", "GOROOT").Output() - if err != nil { - return "", fmt.Errorf("can't execute go env GOROOT: %s", err) - } - - return strings.TrimSpace(string(output)), nil -} - -func InGoRoot() (bool, error) { - goroot, err := DiscoverGoRoot() - if err != nil { - return false, err - } - - wd, err := fsutils.Getwd() - if err != nil { - return false, err - } - - // TODO: strip, then add slashes - return strings.HasPrefix(wd, goroot), nil -} - -func IsCgoFilename(f string) bool { - return filepath.Base(f) == "C" -} diff --git a/pkg/lint/astcache/astcache.go b/pkg/lint/astcache/astcache.go index 7d0e2ed86d01..f4227b971be1 100644 --- a/pkg/lint/astcache/astcache.go +++ b/pkg/lint/astcache/astcache.go @@ -5,11 +5,11 @@ import ( "go/parser" "go/token" "path/filepath" + "strings" + "time" - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/goutils" "github.com/golangci/golangci-lint/pkg/logutils" - "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" ) type File struct { @@ -20,7 +20,7 @@ type File struct { } type Cache struct { - m map[string]*File + m map[string]*File // map from absolute file path to file data s []*File log logutils.Log } @@ -36,14 +36,32 @@ func (c Cache) Get(filename string) *File { return c.m[filepath.Clean(filename)] } -func (c Cache) GetOrParse(filename string) *File { +func (c Cache) keys() 
[]string { + var keys []string + for k := range c.m { + keys = append(keys, k) + } + return keys +} + +func (c Cache) GetOrParse(filename string, fset *token.FileSet) *File { + if !filepath.IsAbs(filename) { + absFilename, err := filepath.Abs(filename) + if err != nil { + c.log.Warnf("Can't abs-ify filename %s: %s", filename, err) + } else { + filename = absFilename + } + } + f := c.m[filename] if f != nil { return f } - c.log.Infof("Parse AST for file %s on demand", filename) - c.parseFile(filename, nil) + c.log.Infof("Parse AST for file %s on demand, existing files are %s", + filename, strings.Join(c.keys(), ",")) + c.parseFile(filename, fset) return c.m[filename] } @@ -62,45 +80,54 @@ func (c *Cache) prepareValidFiles() { c.s = files } -func LoadFromProgram(prog *loader.Program, log logutils.Log) (*Cache, error) { +func LoadFromPackages(pkgs []*packages.Package, log logutils.Log) (*Cache, error) { c := NewCache(log) - for _, pkg := range prog.InitialPackages() { - for _, f := range pkg.Files { - pos := prog.Fset.Position(f.Pos()) - if pos.Filename == "" { - continue - } - - if goutils.IsCgoFilename(pos.Filename) { - continue - } - - path, err := fsutils.ShortestRelPath(pos.Filename, "") - if err != nil { - c.log.Warnf("Can't get relative path for %s: %s", - pos.Filename, err) - continue - } - - c.m[path] = &File{ - F: f, - Fset: prog.Fset, - Name: path, - } - } + for _, pkg := range pkgs { + c.loadFromPackage(pkg) } c.prepareValidFiles() return c, nil } +func (c *Cache) loadFromPackage(pkg *packages.Package) { + if len(pkg.Syntax) == 0 || len(pkg.GoFiles) != len(pkg.CompiledGoFiles) { + // len(pkg.Syntax) == 0 if only filenames are loaded + // lengths aren't equal if there are preprocessed files (cgo) + startedAt := time.Now() + + // can't use pkg.Fset: it will overwrite offsets by preprocessed files + fset := token.NewFileSet() + for _, f := range pkg.GoFiles { + c.parseFile(f, fset) + } + + c.log.Infof("Parsed AST of all pkg.GoFiles: %s for %s", pkg.GoFiles, 
time.Since(startedAt)) + return + } + + for _, f := range pkg.Syntax { + pos := pkg.Fset.Position(f.Pos()) + if pos.Filename == "" { + continue + } + + c.m[pos.Filename] = &File{ + F: f, + Fset: pkg.Fset, + Name: pos.Filename, + } + } +} + func (c *Cache) parseFile(filePath string, fset *token.FileSet) { if fset == nil { fset = token.NewFileSet() } - f, err := parser.ParseFile(fset, filePath, nil, parser.ParseComments) // comments needed by e.g. golint + // comments needed by e.g. golint + f, err := parser.ParseFile(fset, filePath, nil, parser.ParseComments) c.m[filePath] = &File{ F: f, Fset: fset, @@ -111,16 +138,3 @@ func (c *Cache) parseFile(filePath string, fset *token.FileSet) { c.log.Warnf("Can't parse AST of %s: %s", filePath, err) } } - -func LoadFromFiles(files []string, log logutils.Log) (*Cache, error) { //nolint:unparam - c := NewCache(log) - - fset := token.NewFileSet() - for _, filePath := range files { - filePath = filepath.Clean(filePath) - c.parseFile(filePath, fset) - } - - c.prepareValidFiles() - return c, nil -} diff --git a/pkg/lint/linter/config.go b/pkg/lint/linter/config.go index 9820b6ae2b67..0c5d9cd87a33 100644 --- a/pkg/lint/linter/config.go +++ b/pkg/lint/linter/config.go @@ -12,8 +12,10 @@ const ( type Config struct { Linter Linter EnabledByDefault bool - DoesFullImport bool - NeedsSSARepr bool + + NeedsTypeInfo bool + NeedsSSARepr bool + InPresets []string Speed int // more value means faster execution of linter AlternativeNames []string @@ -21,13 +23,13 @@ type Config struct { OriginalURL string // URL of original (not forked) repo, needed for autogenerated README } -func (lc Config) WithFullImport() Config { - lc.DoesFullImport = true +func (lc Config) WithTypeInfo() Config { + lc.NeedsTypeInfo = true return lc } func (lc Config) WithSSA() Config { - lc.DoesFullImport = true + lc.NeedsTypeInfo = true lc.NeedsSSARepr = true return lc } @@ -52,14 +54,6 @@ func (lc Config) WithAlternativeNames(names ...string) Config { return lc } -func 
(lc Config) NeedsProgramLoading() bool { - return lc.DoesFullImport -} - -func (lc Config) NeedsSSARepresentation() bool { - return lc.NeedsSSARepr -} - func (lc Config) GetSpeed() int { return lc.Speed } diff --git a/pkg/lint/linter/context.go b/pkg/lint/linter/context.go index 4788ce0edab1..27eea8a2a0f7 100644 --- a/pkg/lint/linter/context.go +++ b/pkg/lint/linter/context.go @@ -4,20 +4,23 @@ import ( "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/lint/astcache" "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/packages" "github.com/golangci/tools/go/ssa" "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" ) type Context struct { - PkgProgram *packages.Program - Cfg *config.Config - Program *loader.Program - SSAProgram *ssa.Program - LoaderConfig *loader.Config - ASTCache *astcache.Cache - NotCompilingPackages []*loader.PackageInfo - Log logutils.Log + Packages []*packages.Package + NotCompilingPackages []*packages.Package + + LoaderConfig *loader.Config // deprecated, don't use for new linters + Program *loader.Program // deprecated, use Packages for new linters + + SSAProgram *ssa.Program + + Cfg *config.Config + ASTCache *astcache.Cache + Log logutils.Log } func (c *Context) Settings() *config.LintersSettings { diff --git a/pkg/lint/lintersdb/enabled_set.go b/pkg/lint/lintersdb/enabled_set.go index 755aeedecb5d..f4484c6193dc 100644 --- a/pkg/lint/lintersdb/enabled_set.go +++ b/pkg/lint/lintersdb/enabled_set.go @@ -52,7 +52,7 @@ func (es EnabledSet) build(lcfg *config.Linters, enabledByDefaultLinters []linte // It should be before --enable and --disable to be able to enable or disable specific linter. 
if lcfg.Fast { for name := range resultLintersSet { - if es.m.GetLinterConfig(name).DoesFullImport { + if es.m.GetLinterConfig(name).NeedsSSARepr { delete(resultLintersSet, name) } } diff --git a/pkg/lint/lintersdb/manager.go b/pkg/lint/lintersdb/manager.go index 56c047c79bf2..30dc135351de 100644 --- a/pkg/lint/lintersdb/manager.go +++ b/pkg/lint/lintersdb/manager.go @@ -59,12 +59,12 @@ func enableLinterConfigs(lcs []linter.Config, isEnabled func(lc *linter.Config) func (Manager) GetAllSupportedLinterConfigs() []linter.Config { lcs := []linter.Config{ linter.NewConfig(golinters.Govet{}). - WithFullImport(). // TODO: depend on it's configuration here + WithTypeInfo(). WithPresets(linter.PresetBugs). WithSpeed(4). WithURL("https://golang.org/cmd/vet/"), linter.NewConfig(golinters.Errcheck{}). - WithFullImport(). + WithTypeInfo(). WithPresets(linter.PresetBugs). WithSpeed(10). WithURL("https://github.com/kisielk/errcheck"), @@ -90,18 +90,18 @@ func (Manager) GetAllSupportedLinterConfigs() []linter.Config { WithURL("https://github.com/dominikh/go-tools/tree/master/cmd/gosimple"), linter.NewConfig(golinters.Gosec{}). - WithFullImport(). + WithTypeInfo(). WithPresets(linter.PresetBugs). WithSpeed(8). WithURL("https://github.com/securego/gosec"). WithAlternativeNames("gas"), linter.NewConfig(golinters.Structcheck{}). - WithFullImport(). + WithTypeInfo(). WithPresets(linter.PresetUnused). WithSpeed(10). WithURL("https://github.com/opennota/check"), linter.NewConfig(golinters.Varcheck{}). - WithFullImport(). + WithTypeInfo(). WithPresets(linter.PresetUnused). WithSpeed(10). WithURL("https://github.com/opennota/check"), @@ -111,7 +111,7 @@ func (Manager) GetAllSupportedLinterConfigs() []linter.Config { WithSpeed(6). WithURL("https://github.com/mvdan/interfacer"), linter.NewConfig(golinters.Unconvert{}). - WithFullImport(). + WithTypeInfo(). WithPresets(linter.PresetStyle). WithSpeed(10). 
WithURL("https://github.com/mdempsky/unconvert"), @@ -128,7 +128,7 @@ func (Manager) GetAllSupportedLinterConfigs() []linter.Config { WithSpeed(9). WithURL("https://github.com/jgautheron/goconst"), linter.NewConfig(golinters.Deadcode{}). - WithFullImport(). + WithTypeInfo(). WithPresets(linter.PresetUnused). WithSpeed(10). WithURL("https://github.com/remyoudompheng/go-misc/tree/master/deadcode"), @@ -137,7 +137,7 @@ func (Manager) GetAllSupportedLinterConfigs() []linter.Config { WithSpeed(8). WithURL("https://github.com/alecthomas/gocyclo"), linter.NewConfig(golinters.TypeCheck{}). - WithFullImport(). + WithTypeInfo(). WithPresets(linter.PresetBugs). WithSpeed(10). WithURL(""), @@ -151,7 +151,7 @@ func (Manager) GetAllSupportedLinterConfigs() []linter.Config { WithSpeed(5). WithURL("https://godoc.org/golang.org/x/tools/cmd/goimports"), linter.NewConfig(golinters.Maligned{}). - WithFullImport(). + WithTypeInfo(). WithPresets(linter.PresetPerformance). WithSpeed(10). WithURL("https://github.com/mdempsky/maligned"), @@ -161,7 +161,7 @@ func (Manager) GetAllSupportedLinterConfigs() []linter.Config { WithSpeed(1). WithURL("https://github.com/dominikh/go-tools/tree/master/cmd/megacheck"), linter.NewConfig(golinters.Depguard{}). - WithFullImport(). + WithTypeInfo(). WithPresets(linter.PresetStyle). WithSpeed(6). WithURL("https://github.com/OpenPeeDeeP/depguard"), @@ -176,7 +176,6 @@ func (Manager) GetAllSupportedLinterConfigs() []linter.Config { linter.NewConfig(golinters.Unparam{}). WithPresets(linter.PresetUnused). WithSpeed(3). - WithFullImport(). WithSSA(). WithURL("https://github.com/mvdan/unparam"), linter.NewConfig(golinters.Nakedret{}). 
diff --git a/pkg/lint/load.go b/pkg/lint/load.go index 225aeed437ce..e259b88620d4 100644 --- a/pkg/lint/load.go +++ b/pkg/lint/load.go @@ -1,9 +1,9 @@ package lint import ( + "context" "fmt" "go/build" - "go/parser" "go/types" "os" "path/filepath" @@ -12,335 +12,294 @@ import ( "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/exitcodes" - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/goutils" + "github.com/golangci/golangci-lint/pkg/goutil" "github.com/golangci/golangci-lint/pkg/lint/astcache" "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/packages" "github.com/golangci/tools/go/ssa" "github.com/golangci/tools/go/ssa/ssautil" + "github.com/pkg/errors" "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" ) -var loadDebugf = logutils.Debug("load") - -func isFullImportNeeded(linters []linter.Config, cfg *config.Config) bool { - for _, lc := range linters { - if lc.NeedsProgramLoading() { - if lc.Name() == "govet" && cfg.LintersSettings.Govet.UseInstalledPackages { - // TODO: remove this hack - continue - } - - return true - } - } - - return false +type ContextLoader struct { + cfg *config.Config + log logutils.Log + debugf logutils.DebugFunc + goenv *goutil.Env } -func isSSAReprNeeded(linters []linter.Config) bool { - for _, linter := range linters { - if linter.NeedsSSARepresentation() { - return true - } +func NewContextLoader(cfg *config.Config, log logutils.Log, goenv *goutil.Env) *ContextLoader { + return &ContextLoader{ + cfg: cfg, + log: log, + debugf: logutils.Debug("loader"), + goenv: goenv, } - - return false } -func normalizePaths(paths []string) ([]string, error) { - ret := make([]string, 0, len(paths)) - for _, p := range paths { - relPath, err := fsutils.ShortestRelPath(p, "") - if err != nil { - return nil, fmt.Errorf("can't get relative path for path %s: %s", p, err) 
- } - p = relPath - - ret = append(ret, "./"+p) +func (cl ContextLoader) prepareBuildContext() { + // Set GOROOT to have working cross-compilation: cross-compiled binaries + // have invalid GOROOT. XXX: can't use runtime.GOROOT(). + goroot := cl.goenv.Get("GOROOT") + if goroot == "" { + return } - return ret, nil + os.Setenv("GOROOT", goroot) + build.Default.GOROOT = goroot + build.Default.BuildTags = cl.cfg.Run.BuildTags } -func getCurrentProjectImportPath() (string, error) { - gopath := os.Getenv("GOPATH") - if gopath == "" { - return "", fmt.Errorf("no GOPATH env variable") +func (cl ContextLoader) makeFakeLoaderPackageInfo(pkg *packages.Package) *loader.PackageInfo { + var errs []error + for _, err := range pkg.Errors { + errs = append(errs, err) } - wd, err := fsutils.Getwd() - if err != nil { - return "", fmt.Errorf("can't get workind directory: %s", err) + typeInfo := &types.Info{} + if pkg.TypesInfo != nil { + typeInfo = pkg.TypesInfo } - if !strings.HasPrefix(wd, gopath) { - return "", fmt.Errorf("currently no in gopath: %q isn't a prefix of %q", gopath, wd) - } + return &loader.PackageInfo{ + Pkg: pkg.Types, + Importable: true, // not used + TransitivelyErrorFree: !pkg.IllTyped, - path := strings.TrimPrefix(wd, gopath) - path = strings.TrimPrefix(path, string(os.PathSeparator)) // if GOPATH contains separator at the end - src := "src" + string(os.PathSeparator) - if !strings.HasPrefix(path, src) { - return "", fmt.Errorf("currently no in gopath/src: %q isn't a prefix of %q", src, path) + // use compiled (preprocessed) go files AST; + // AST linters use not preprocessed go files AST + Files: pkg.Syntax, + Errors: errs, + Info: *typeInfo, } - - path = strings.TrimPrefix(path, src) - path = strings.Replace(path, string(os.PathSeparator), "/", -1) - return path, nil } -func isLocalProjectAnalysis(args []string) bool { - for _, arg := range args { - if strings.HasPrefix(arg, "..") || filepath.IsAbs(arg) { - return false - } - } - - return true +func 
shouldSkipPkg(pkg *packages.Package) bool { + // it's an implicit testmain package + return pkg.Name == "main" && strings.HasSuffix(pkg.PkgPath, ".test") } -func getTypeCheckFuncBodies(cfg *config.Run, linters []linter.Config, - pkgProg *packages.Program, log logutils.Log) func(string) bool { - - if !isLocalProjectAnalysis(cfg.Args) { - loadDebugf("analysis in nonlocal, don't optimize loading by not typechecking func bodies") - return nil - } +func (cl ContextLoader) makeFakeLoaderProgram(pkgs []*packages.Package) *loader.Program { + var createdPkgs []*loader.PackageInfo + for _, pkg := range pkgs { + if len(pkg.Errors) != 0 { + // some linters crash on packages with errors, + // skip them and warn about them in another place + continue + } - if isSSAReprNeeded(linters) { - loadDebugf("ssa repr is needed, don't optimize loading by not typechecking func bodies") - return nil + pkgInfo := cl.makeFakeLoaderPackageInfo(pkg) + createdPkgs = append(createdPkgs, pkgInfo) } - if len(pkgProg.Dirs()) == 0 { - // files run, in this mode packages are fake: can't check their path properly - return nil + allPkgs := map[*types.Package]*loader.PackageInfo{} + for _, pkg := range createdPkgs { + pkg := pkg + allPkgs[pkg.Pkg] = pkg } - - projPath, err := getCurrentProjectImportPath() - if err != nil { - log.Infof("Can't get cur project path: %s", err) - return nil - } - - return func(path string) bool { - if strings.HasPrefix(path, ".") { - loadDebugf("%s: dot import: typecheck func bodies", path) - return true + for _, pkg := range pkgs { + if len(pkg.Errors) != 0 { + // some linters crash on packages with errors, + // skip them and warn about them in another place + continue } - isLocalPath := strings.HasPrefix(path, projPath) - if isLocalPath { - localPath := strings.TrimPrefix(path, projPath) - localPath = strings.TrimPrefix(localPath, "/") - if strings.HasPrefix(localPath, "vendor/") { - loadDebugf("%s: local vendor import: DO NOT typecheck func bodies", path) - return false - 
} - - loadDebugf("%s: local import: typecheck func bodies", path) - return true + for _, impPkg := range pkg.Imports { + // don't use astcache for imported packages: we don't find issues in cgo imported deps + pkgInfo := cl.makeFakeLoaderPackageInfo(impPkg) + allPkgs[pkgInfo.Pkg] = pkgInfo } - - loadDebugf("%s: not local import: DO NOT typecheck func bodies", path) - return false } -} - -func loadWholeAppIfNeeded(linters []linter.Config, cfg *config.Config, - pkgProg *packages.Program, log logutils.Log) (*loader.Program, *loader.Config, error) { - if !isFullImportNeeded(linters, cfg) { - return nil, nil, nil + return &loader.Program{ + Fset: pkgs[0].Fset, + Imported: nil, // not used without .Created in any linter + Created: createdPkgs, // all initial packages + AllPackages: allPkgs, // all initial packages and their depndencies } +} +func (cl ContextLoader) buildSSAProgram(pkgs []*packages.Package) *ssa.Program { startedAt := time.Now() defer func() { - log.Infof("Program loading took %s", time.Since(startedAt)) + cl.log.Infof("SSA repr building took %s", time.Since(startedAt)) }() - bctx := pkgProg.BuildContext() - loadcfg := &loader.Config{ - Build: bctx, - AllowErrors: true, // Try to analyze partially - ParserMode: parser.ParseComments, // AST will be reused by linters - TypeCheckFuncBodies: getTypeCheckFuncBodies(&cfg.Run, linters, pkgProg, log), - TypeChecker: types.Config{ - Sizes: types.SizesFor(build.Default.Compiler, build.Default.GOARCH), - }, - } + ssaProg, _ := ssautil.Packages(pkgs, ssa.GlobalDebug) + ssaProg.Build() + return ssaProg +} - var loaderArgs []string - dirs := pkgProg.Dirs() - if len(dirs) != 0 { - loaderArgs = dirs // dirs run - } else { - loaderArgs = pkgProg.Files(cfg.Run.AnalyzeTests) // files run +func (cl ContextLoader) findLoadMode(linters []linter.Config) packages.LoadMode { + maxLoadMode := packages.LoadFiles + for _, lc := range linters { + curLoadMode := packages.LoadFiles + if lc.NeedsTypeInfo { + curLoadMode = 
packages.LoadSyntax + } + if lc.NeedsSSARepr { + curLoadMode = packages.LoadAllSyntax + } + if curLoadMode > maxLoadMode { + maxLoadMode = curLoadMode + } } - nLoaderArgs, err := normalizePaths(loaderArgs) - if err != nil { - return nil, nil, err - } + return maxLoadMode +} - rest, err := loadcfg.FromArgs(nLoaderArgs, cfg.Run.AnalyzeTests) - if err != nil { - return nil, nil, fmt.Errorf("can't parepare load config with paths: %s", err) - } - if len(rest) > 0 { - return nil, nil, fmt.Errorf("unhandled loading paths: %v", rest) - } +func stringifyLoadMode(mode packages.LoadMode) string { + switch mode { + case packages.LoadFiles: + return "load files" + case packages.LoadImports: + return "load imports" + case packages.LoadTypes: + return "load types" + case packages.LoadSyntax: + return "load types and syntax" + case packages.LoadAllSyntax: + return "load deps types and syntax" + } + return "unknown" +} - prog, err := loadcfg.Load() - if err != nil { - return nil, nil, fmt.Errorf("can't load program from paths %v: %s", loaderArgs, err) +func (cl ContextLoader) buildArgs() []string { + args := cl.cfg.Run.Args + if len(args) == 0 { + return []string{"./..."} } - if len(prog.InitialPackages()) == 1 { - pkg := prog.InitialPackages()[0] - var files []string - for _, f := range pkg.Files { - files = append(files, prog.Fset.Position(f.Pos()).Filename) + var retArgs []string + for _, arg := range args { + if strings.HasPrefix(arg, ".") { + retArgs = append(retArgs, arg) + } else { + // go/packages doesn't work well if we don't have prefix ./ for local packages + retArgs = append(retArgs, fmt.Sprintf(".%c%s", filepath.Separator, arg)) } - log.Infof("pkg %s files: %s", pkg, files) } - return prog, loadcfg, nil + return retArgs } -func buildSSAProgram(lprog *loader.Program, log logutils.Log) *ssa.Program { - startedAt := time.Now() - defer func() { - log.Infof("SSA repr building took %s", time.Since(startedAt)) - }() +func (cl ContextLoader) loadPackages(ctx context.Context, 
loadMode packages.LoadMode) ([]*packages.Package, error) { + defer func(startedAt time.Time) { + cl.log.Infof("Go packages loading at mode %s took %s", stringifyLoadMode(loadMode), time.Since(startedAt)) + }(time.Now()) - ssaProg := ssautil.CreateProgram(lprog, ssa.GlobalDebug) - ssaProg.Build() - return ssaProg -} + cl.prepareBuildContext() -// separateNotCompilingPackages moves not compiling packages into separate slices: -// a lot of linters crash on such packages. Leave them only for those linters -// which can work with them. -//nolint:gocyclo -func separateNotCompilingPackages(lintCtx *linter.Context) { - prog := lintCtx.Program - - notCompilingPackagesSet := map[*loader.PackageInfo]bool{} - - if prog.Created != nil { - compilingCreated := make([]*loader.PackageInfo, 0, len(prog.Created)) - for _, info := range prog.Created { - if len(info.Errors) != 0 { - lintCtx.NotCompilingPackages = append(lintCtx.NotCompilingPackages, info) - notCompilingPackagesSet[info] = true - } else { - compilingCreated = append(compilingCreated, info) - } - } - prog.Created = compilingCreated + var buildFlags []string + if len(cl.cfg.Run.BuildTags) != 0 { + // go help build + buildFlags = []string{fmt.Sprintf("-tags '%s'", strings.Join(cl.cfg.Run.BuildTags, " "))} + } + conf := &packages.Config{ + Mode: loadMode, + Tests: cl.cfg.Run.AnalyzeTests, + Context: ctx, + BuildFlags: buildFlags, + //TODO: use fset, parsefile, overlay } - if prog.Imported != nil { - for k, info := range prog.Imported { - if len(info.Errors) == 0 { - continue - } - - lintCtx.NotCompilingPackages = append(lintCtx.NotCompilingPackages, info) - notCompilingPackagesSet[info] = true - delete(prog.Imported, k) + args := cl.buildArgs() + cl.debugf("Built loader args are %s", args) + pkgs, err := packages.Load(conf, args...) 
+ if err != nil { + return nil, errors.Wrap(err, "failed to load program with go/packages") + } + cl.debugf("loaded %d pkgs", len(pkgs)) + for i, pkg := range pkgs { + var syntaxFiles []string + for _, sf := range pkg.Syntax { + syntaxFiles = append(syntaxFiles, pkg.Fset.Position(sf.Pos()).Filename) } + cl.debugf("Loaded pkg #%d: ID=%s GoFiles=%s CompiledGoFiles=%s Syntax=%s", + i, pkg.ID, pkg.GoFiles, pkg.CompiledGoFiles, syntaxFiles) } - if prog.AllPackages != nil { - for k, info := range prog.AllPackages { - if len(info.Errors) == 0 { - continue - } - - if !notCompilingPackagesSet[info] { - lintCtx.NotCompilingPackages = append(lintCtx.NotCompilingPackages, info) - notCompilingPackagesSet[info] = true + var retPkgs []*packages.Package + for _, pkg := range pkgs { + for _, err := range pkg.Errors { + if strings.Contains(err.Msg, "no Go files") { + return nil, errors.Wrapf(exitcodes.ErrNoGoFiles, "package %s", pkg.PkgPath) } - delete(prog.AllPackages, k) + } + if !shouldSkipPkg(pkg) { + retPkgs = append(retPkgs, pkg) } } - if len(lintCtx.NotCompilingPackages) != 0 { - lintCtx.Log.Infof("Not compiling packages: %+v", lintCtx.NotCompilingPackages) - } + return retPkgs, nil } -//nolint:gocyclo -func LoadContext(linters []linter.Config, cfg *config.Config, log logutils.Log) (*linter.Context, error) { - // Set GOROOT to have working cross-compilation: cross-compiled binaries - // have invalid GOROOT. XXX: can't use runtime.GOROOT(). - goroot, err := goutils.DiscoverGoRoot() - if err != nil { - return nil, fmt.Errorf("can't discover GOROOT: %s", err) - } - os.Setenv("GOROOT", goroot) - build.Default.GOROOT = goroot - - args := cfg.Run.Args - if len(args) == 0 { - args = []string{"./..."} - } - - skipDirs := append([]string{}, packages.StdExcludeDirRegexps...) - skipDirs = append(skipDirs, cfg.Run.SkipDirs...) 
- r, err := packages.NewResolver(cfg.Run.BuildTags, skipDirs, log.Child("path_resolver")) - if err != nil { - return nil, err - } - - pkgProg, err := r.Resolve(args...) +func (cl ContextLoader) Load(ctx context.Context, linters []linter.Config) (*linter.Context, error) { + loadMode := cl.findLoadMode(linters) + pkgs, err := cl.loadPackages(ctx, loadMode) if err != nil { return nil, err } - if len(pkgProg.Packages()) == 0 { + if len(pkgs) == 0 { return nil, exitcodes.ErrNoGoFiles } - prog, loaderConfig, err := loadWholeAppIfNeeded(linters, cfg, pkgProg, log) - if err != nil { - return nil, err + var prog *loader.Program + if loadMode >= packages.LoadSyntax { + prog = cl.makeFakeLoaderProgram(pkgs) } var ssaProg *ssa.Program - if prog != nil && isSSAReprNeeded(linters) { - ssaProg = buildSSAProgram(prog, log) + if loadMode == packages.LoadAllSyntax { + ssaProg = cl.buildSSAProgram(pkgs) } - astLog := log.Child("astcache") - var astCache *astcache.Cache - if prog != nil { - astCache, err = astcache.LoadFromProgram(prog, astLog) - } else { - astCache, err = astcache.LoadFromFiles(pkgProg.Files(cfg.Run.AnalyzeTests), astLog) - } + astLog := cl.log.Child("astcache") + astCache, err := astcache.LoadFromPackages(pkgs, astLog) if err != nil { return nil, err } ret := &linter.Context{ - PkgProgram: pkgProg, - Cfg: cfg, - Program: prog, - SSAProgram: ssaProg, - LoaderConfig: loaderConfig, - ASTCache: astCache, - Log: log, + Packages: pkgs, + Program: prog, + SSAProgram: ssaProg, + LoaderConfig: &loader.Config{ + Cwd: "", // used by depguard and fallbacked to os.Getcwd + Build: nil, // used by depguard and megacheck and fallbacked to build.Default + }, + Cfg: cl.cfg, + ASTCache: astCache, + Log: cl.log, } if prog != nil { - separateNotCompilingPackages(ret) + saveNotCompilingPackages(ret) + } else { + for _, pkg := range pkgs { + if len(pkg.Errors) != 0 { + cl.log.Infof("Pkg %s errors: %v", pkg.ID, pkg.Errors) + } + } } return ret, nil } + +// saveNotCompilingPackages saves 
not compiling packages into separate slice: +// a lot of linters crash on such packages. Leave them only for those linters +// which can work with them. +func saveNotCompilingPackages(lintCtx *linter.Context) { + for _, pkg := range lintCtx.Packages { + if len(pkg.Errors) != 0 { + lintCtx.NotCompilingPackages = append(lintCtx.NotCompilingPackages, pkg) + } + } + + if len(lintCtx.NotCompilingPackages) != 0 { + lintCtx.Log.Infof("Not compiling packages: %+v", lintCtx.NotCompilingPackages) + } +} diff --git a/pkg/lint/load_test.go b/pkg/lint/load_test.go deleted file mode 100644 index b38a45baea2b..000000000000 --- a/pkg/lint/load_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package lint - -import ( - "testing" - - "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/golinters" - "github.com/golangci/golangci-lint/pkg/lint/astcache" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/packages" - "github.com/stretchr/testify/assert" -) - -func TestASTCacheLoading(t *testing.T) { - linters := []linter.Config{ - linter.NewConfig(golinters.Errcheck{}).WithFullImport(), - } - - inputPaths := []string{"./...", "./", "./load.go", "load.go"} - log := logutils.NewStderrLog("") - for _, inputPath := range inputPaths { - r, err := packages.NewResolver(nil, nil, log) - assert.NoError(t, err) - - pkgProg, err := r.Resolve(inputPath) - assert.NoError(t, err) - - assert.NoError(t, err) - assert.NotEmpty(t, pkgProg.Files(true)) - - cfg := &config.Config{ - Run: config.Run{ - AnalyzeTests: true, - }, - } - prog, _, err := loadWholeAppIfNeeded(linters, cfg, pkgProg, logutils.NewStderrLog("")) - assert.NoError(t, err) - - astCacheFromProg, err := astcache.LoadFromProgram(prog, log) - assert.NoError(t, err) - - astCacheFromFiles, err := astcache.LoadFromFiles(pkgProg.Files(true), log) - assert.NoError(t, err) - - filesFromProg := astCacheFromProg.GetAllValidFiles() 
- filesFromFiles := astCacheFromFiles.GetAllValidFiles() - if len(filesFromProg) != len(filesFromFiles) { - t.Logf("files: %s", pkgProg.Files(true)) - t.Logf("from prog:") - for _, f := range filesFromProg { - t.Logf("%+v", *f) - } - t.Logf("from files:") - for _, f := range filesFromFiles { - t.Logf("%+v", *f) - } - t.Fatalf("lengths differ") - } - - if len(filesFromProg) != len(pkgProg.Files(true)) { - t.Fatalf("filesFromProg differ from path.Files") - } - } -} diff --git a/pkg/lint/runner.go b/pkg/lint/runner.go index 02e5862b4f8a..f67e7c601251 100644 --- a/pkg/lint/runner.go +++ b/pkg/lint/runner.go @@ -10,9 +10,11 @@ import ( "time" "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/goutil" "github.com/golangci/golangci-lint/pkg/lint/astcache" "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/packages" "github.com/golangci/golangci-lint/pkg/result" "github.com/golangci/golangci-lint/pkg/result/processors" "github.com/golangci/golangci-lint/pkg/timeutils" @@ -23,7 +25,7 @@ type Runner struct { Log logutils.Log } -func NewRunner(astCache *astcache.Cache, cfg *config.Config, log logutils.Log) (*Runner, error) { +func NewRunner(astCache *astcache.Cache, cfg *config.Config, log logutils.Log, goenv *goutil.Env) (*Runner, error) { icfg := cfg.Issues excludePatterns := icfg.ExcludePatterns if icfg.UseDefaultExcludes { @@ -40,11 +42,19 @@ func NewRunner(astCache *astcache.Cache, cfg *config.Config, log logutils.Log) ( return nil, err } + skipDirs := append([]string{}, packages.StdExcludeDirRegexps...) + skipDirs = append(skipDirs, cfg.Run.SkipDirs...) 
+ skipDirsProcessor, err := processors.NewSkipDirs(skipDirs, log.Child("skip dirs"), cfg.Run.Args) + if err != nil { + return nil, err + } + return &Runner{ Processors: []processors.Processor{ processors.NewPathPrettifier(), // must be before diff, nolint and exclude autogenerated processor at least - processors.NewCgo(), + processors.NewCgo(goenv), skipFilesProcessor, + skipDirsProcessor, processors.NewAutogeneratedExclude(astCache), processors.NewExclude(excludeTotalPattern), @@ -56,6 +66,7 @@ func NewRunner(astCache *astcache.Cache, cfg *config.Config, log logutils.Log) ( processors.NewMaxSameIssues(icfg.MaxSameIssues, log.Child("max_same_issues")), processors.NewMaxFromLinter(icfg.MaxIssuesPerLinter, log.Child("max_from_linter")), processors.NewSourceCode(log.Child("source_code")), + processors.NewPathShortener(), }, Log: log, }, nil @@ -251,7 +262,7 @@ func (r Runner) Run(ctx context.Context, linters []linter.Config, lintCtx *linte finishedLintersN++ } - r.Log.Errorf("%d/%d linters finished: deadline exceeded: try increase it by passing --deadline option", + r.Log.Errorf("%d/%d linters finished: deadline exceeded", finishedLintersN, len(linters)) } diff --git a/pkg/logutils/stderr_log.go b/pkg/logutils/stderr_log.go index 0cf444884d77..ffccb11f3fd4 100644 --- a/pkg/logutils/stderr_log.go +++ b/pkg/logutils/stderr_log.go @@ -8,8 +8,6 @@ import ( "github.com/sirupsen/logrus" //nolint:depguard ) -var isTestRun = os.Getenv("GL_TEST_RUN") == "1" - type StderrLog struct { name string logger *logrus.Logger @@ -34,12 +32,6 @@ func NewStderrLog(name string) *StderrLog { return sl } -func exitIfTest() { - if isTestRun { - os.Exit(exitcodes.WarningInTest) - } -} - func (sl StderrLog) prefix() string { prefix := "" if sl.name != "" { @@ -71,7 +63,6 @@ func (sl StderrLog) Warnf(format string, args ...interface{}) { } sl.logger.Warnf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) - exitIfTest() } func (sl StderrLog) Infof(format string, args ...interface{}) { diff --git 
a/pkg/packages/exclude.go b/pkg/packages/exclude.go deleted file mode 100644 index be57ebd92fd4..000000000000 --- a/pkg/packages/exclude.go +++ /dev/null @@ -1,8 +0,0 @@ -package packages - -var StdExcludeDirRegexps = []string{ - "vendor$", "third_party$", - "testdata$", "examples$", - "Godeps$", - "builtin$", -} diff --git a/pkg/packages/package.go b/pkg/packages/package.go deleted file mode 100644 index faf16962cff6..000000000000 --- a/pkg/packages/package.go +++ /dev/null @@ -1,59 +0,0 @@ -package packages - -import ( - "go/build" - "path/filepath" - - "github.com/golangci/golangci-lint/pkg/goutils" -) - -type Package struct { - bp *build.Package - - isFake bool - dir string // dir != bp.dir only if isFake == true -} - -func (pkg *Package) Files(includeTest bool) []string { - var pkgFiles []string - for _, f := range pkg.bp.GoFiles { - if !goutils.IsCgoFilename(f) { - // skip cgo at all levels to prevent failures on file reading - pkgFiles = append(pkgFiles, f) - } - } - - // TODO: add cgo files - if includeTest { - pkgFiles = append(pkgFiles, pkg.TestFiles()...) - } - - for i, f := range pkgFiles { - pkgFiles[i] = filepath.Join(pkg.bp.Dir, f) - } - - return pkgFiles -} - -func (pkg *Package) Dir() string { - if pkg.dir != "" { // for fake packages - return pkg.dir - } - - return pkg.bp.Dir -} - -func (pkg *Package) IsTestOnly() bool { - return len(pkg.bp.GoFiles) == 0 -} - -func (pkg *Package) TestFiles() []string { - var pkgFiles []string - pkgFiles = append(pkgFiles, pkg.bp.TestGoFiles...) - pkgFiles = append(pkgFiles, pkg.bp.XTestGoFiles...) 
- return pkgFiles -} - -func (pkg *Package) BuildPackage() *build.Package { - return pkg.bp -} diff --git a/pkg/packages/program.go b/pkg/packages/program.go deleted file mode 100644 index 638a36e1f797..000000000000 --- a/pkg/packages/program.go +++ /dev/null @@ -1,71 +0,0 @@ -package packages - -import ( - "fmt" - "go/build" -) - -type Program struct { - packages []Package - - bctx build.Context -} - -func (p *Program) String() string { - files := p.Files(true) - if len(files) == 1 { - return files[0] - } - - return fmt.Sprintf("%s", p.Dirs()) -} - -func (p *Program) BuildContext() *build.Context { - return &p.bctx -} - -func (p Program) Packages() []Package { - return p.packages -} - -func (p *Program) addPackage(pkg *Package) { - packagesToAdd := []Package{*pkg} - if len(pkg.bp.XTestGoFiles) != 0 { - // create separate package because xtest files have different package name - xbp := build.Package{ - Dir: pkg.bp.Dir, - ImportPath: pkg.bp.ImportPath + "_test", - XTestGoFiles: pkg.bp.XTestGoFiles, - XTestImportPos: pkg.bp.XTestImportPos, - XTestImports: pkg.bp.XTestImports, - } - packagesToAdd = append(packagesToAdd, Package{ - bp: &xbp, - }) - pkg.bp.XTestGoFiles = nil - pkg.bp.XTestImportPos = nil - pkg.bp.XTestImports = nil - } - - p.packages = append(p.packages, packagesToAdd...) -} - -func (p *Program) Files(includeTest bool) []string { - var ret []string - for _, pkg := range p.packages { - ret = append(ret, pkg.Files(includeTest)...) 
- } - - return ret -} - -func (p *Program) Dirs() []string { - var ret []string - for _, pkg := range p.packages { - if !pkg.isFake { - ret = append(ret, pkg.Dir()) - } - } - - return ret -} diff --git a/pkg/packages/resolver.go b/pkg/packages/resolver.go deleted file mode 100644 index 29f8ad31ea0e..000000000000 --- a/pkg/packages/resolver.go +++ /dev/null @@ -1,230 +0,0 @@ -package packages - -import ( - "fmt" - "go/build" - "io/ioutil" - "os" - "path/filepath" - "regexp" - "strings" - "time" - - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/logutils" -) - -type Resolver struct { - excludeDirs map[string]*regexp.Regexp - buildTags []string - - skippedDirs []string - log logutils.Log - - wd string // working directory - importErrorsOccured int // count of errors because too bad files in packages -} - -func NewResolver(buildTags, excludeDirs []string, log logutils.Log) (*Resolver, error) { - excludeDirsMap := map[string]*regexp.Regexp{} - for _, dir := range excludeDirs { - re, err := regexp.Compile(dir) - if err != nil { - return nil, fmt.Errorf("can't compile regexp %q: %s", dir, err) - } - - excludeDirsMap[dir] = re - } - - wd, err := fsutils.Getwd() - if err != nil { - return nil, fmt.Errorf("can't get working dir: %s", err) - } - - return &Resolver{ - excludeDirs: excludeDirsMap, - buildTags: buildTags, - log: log, - wd: wd, - }, nil -} - -func (r Resolver) isIgnoredDir(dir string) bool { - cleanName := filepath.Clean(dir) - - dirName := filepath.Base(cleanName) - - // https://github.com/golang/dep/issues/298 - // https://github.com/tools/godep/issues/140 - if strings.HasPrefix(dirName, ".") && dirName != "." && dirName != ".." 
{ - return true - } - if strings.HasPrefix(dirName, "_") { - return true - } - - for _, dirExludeRe := range r.excludeDirs { - if dirExludeRe.MatchString(cleanName) { - return true - } - } - - return false -} - -func (r *Resolver) resolveRecursively(root string, prog *Program) error { - // import root - if err := r.resolveDir(root, prog); err != nil { - return err - } - - fis, err := ioutil.ReadDir(root) - if err != nil { - return fmt.Errorf("can't read dir %s: %s", root, err) - } - // TODO: pass cached fis to build.Context - - for _, fi := range fis { - if !fi.IsDir() { - // ignore files: they were already imported by resolveDir(root) - continue - } - - subdir := filepath.Join(root, fi.Name()) - - // Normalize each subdir because working directory can be one of these subdirs: - // working dir = /app/subdir, resolve root is ../, without this normalization - // path of subdir will be "../subdir" but it must be ".". - // Normalize path before checking is ignored dir. - subdir, err := r.normalizePath(subdir) - if err != nil { - return err - } - - if r.isIgnoredDir(subdir) { - r.skippedDirs = append(r.skippedDirs, subdir) - continue - } - - if err := r.resolveRecursively(subdir, prog); err != nil { - return err - } - } - - return nil -} - -func (r *Resolver) resolveDir(dir string, prog *Program) error { - // TODO: fork build.Import to reuse AST parsing - bp, err := prog.bctx.ImportDir(dir, build.ImportComment|build.IgnoreVendor) - if err != nil { - if _, nogo := err.(*build.NoGoError); nogo { - // Don't complain if the failure is due to no Go source files. 
- return nil - } - - err = fmt.Errorf("can't import dir %q: %s", dir, err) - r.importErrorsOccured++ - if r.importErrorsOccured >= 10 { - return err - } - - r.log.Warnf("Can't analyze dir %q: %s", dir, err) - return nil - } - - pkg := Package{ - bp: bp, - } - prog.addPackage(&pkg) - return nil -} - -func (r Resolver) addFakePackage(filePath string, prog *Program) { - // Don't take build tags, is it test file or not, etc - // into account. If user explicitly wants to analyze this file - // do it. - p := Package{ - bp: &build.Package{ - // TODO: detect is it test file or not: without that we can't analyze only one test file - GoFiles: []string{filePath}, - }, - isFake: true, - dir: filepath.Dir(filePath), - } - prog.addPackage(&p) -} - -func (r Resolver) Resolve(paths ...string) (prog *Program, err error) { - startedAt := time.Now() - defer func() { - r.log.Infof("Paths resolving took %s: %s", time.Since(startedAt), prog) - }() - - if len(paths) == 0 { - return nil, fmt.Errorf("no paths are set") - } - - bctx := build.Default - bctx.BuildTags = append(bctx.BuildTags, r.buildTags...) 
- prog = &Program{ - bctx: bctx, - } - - for _, path := range paths { - if err := r.resolvePath(path, prog); err != nil { - return nil, err - } - } - - if len(r.skippedDirs) != 0 { - r.log.Infof("Skipped dirs: %s", r.skippedDirs) - } - - return prog, nil -} - -func (r *Resolver) normalizePath(path string) (string, error) { - return fsutils.ShortestRelPath(path, r.wd) -} - -func (r *Resolver) resolvePath(path string, prog *Program) error { - needRecursive := strings.HasSuffix(path, "/...") - if needRecursive { - path = filepath.Dir(path) - } - - evalPath, err := filepath.EvalSymlinks(path) - if err != nil { - return fmt.Errorf("can't eval symlinks for path %s: %s", path, err) - } - path = evalPath - - path, err = r.normalizePath(path) - if err != nil { - return err - } - - if needRecursive { - if err = r.resolveRecursively(path, prog); err != nil { - return fmt.Errorf("can't recursively resolve %s: %s", path, err) - } - - return nil - } - - fi, err := os.Stat(path) - if err != nil { - return fmt.Errorf("can't find path %s: %s", path, err) - } - - if fi.IsDir() { - if err := r.resolveDir(path, prog); err != nil { - return fmt.Errorf("can't resolve dir %s: %s", path, err) - } - return nil - } - - r.addFakePackage(path, prog) - return nil -} diff --git a/pkg/packages/resolver_test.go b/pkg/packages/resolver_test.go deleted file mode 100644 index 54e96f1707b4..000000000000 --- a/pkg/packages/resolver_test.go +++ /dev/null @@ -1,284 +0,0 @@ -package packages_test - -import ( - "io/ioutil" - "os" - "path/filepath" - "sort" - "strings" - "testing" - - "github.com/golangci/golangci-lint/pkg/fsutils" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/packages" - "github.com/stretchr/testify/assert" -) - -type fsPreparer struct { - t *testing.T - root string - prevWD string -} - -func (fp fsPreparer) clean() { - err := os.Chdir(fp.prevWD) - assert.NoError(fp.t, err) - - err = os.RemoveAll(fp.root) - assert.NoError(fp.t, err) -} - -func 
prepareFS(t *testing.T, paths ...string) *fsPreparer { - root, err := ioutil.TempDir("/tmp", "golangci.test.path_resolver") - assert.NoError(t, err) - - prevWD, err := fsutils.Getwd() - assert.NoError(t, err) - - err = os.Chdir(root) - assert.NoError(t, err) - - for _, p := range paths { - err = os.MkdirAll(filepath.Dir(p), os.ModePerm) - assert.NoError(t, err) - - if strings.HasSuffix(p, "/") { - continue - } - - goFile := "package p\n" - err = ioutil.WriteFile(p, []byte(goFile), os.ModePerm) - assert.NoError(t, err) - } - - return &fsPreparer{ - root: root, - t: t, - prevWD: prevWD, - } -} - -func newTestResolver(t *testing.T, excludeDirs []string) *packages.Resolver { - r, err := packages.NewResolver(nil, excludeDirs, logutils.NewStderrLog("")) - assert.NoError(t, err) - - return r -} - -func TestPathResolverNotExistingPath(t *testing.T) { - fp := prepareFS(t) - defer fp.clean() - - _, err := newTestResolver(t, nil).Resolve("a") - assert.EqualError(t, err, "can't eval symlinks for path a: lstat a: no such file or directory") -} - -func TestPathResolverCommonCases(t *testing.T) { - type testCase struct { - name string - prepare []string - resolve []string - expFiles []string - expDirs []string - includeTests bool - } - - testCases := []testCase{ - { - name: "empty root recursively", - resolve: []string{"./..."}, - }, - { - name: "empty root", - resolve: []string{"./"}, - }, - { - name: "vendor is excluded recursively", - prepare: []string{"vendor/a/b.go"}, - resolve: []string{"./..."}, - }, - { - name: "vendor is excluded", - prepare: []string{"vendor/a.go"}, - resolve: []string{"./..."}, - }, - { - name: "nested vendor is excluded", - prepare: []string{"d/vendor/a.go"}, - resolve: []string{"./..."}, - }, - { - name: "vendor dir is excluded by regexp, not the exact match", - prepare: []string{"vendors/a.go", "novendor/b.go"}, - resolve: []string{"./..."}, - expDirs: []string{"vendors"}, - expFiles: []string{"vendors/a.go"}, - }, - { - name: "vendor explicitly 
resolved", - prepare: []string{"vendor/a.go"}, - resolve: []string{"./vendor"}, - expDirs: []string{"vendor"}, - expFiles: []string{"vendor/a.go"}, - }, - { - name: "nested vendor explicitly resolved", - prepare: []string{"d/vendor/a.go"}, - resolve: []string{"d/vendor"}, - expDirs: []string{"d/vendor"}, - expFiles: []string{"d/vendor/a.go"}, - }, - { - name: "extensions filter recursively", - prepare: []string{"a/b.go", "a/c.txt", "d.go", "e.csv"}, - resolve: []string{"./..."}, - expDirs: []string{".", "a"}, - expFiles: []string{"a/b.go", "d.go"}, - }, - { - name: "extensions filter", - prepare: []string{"a/b.go", "a/c.txt", "d.go"}, - resolve: []string{"a"}, - expDirs: []string{"a"}, - expFiles: []string{"a/b.go"}, - }, - { - name: "one level dirs exclusion", - prepare: []string{"a/b/d.go", "a/c.go"}, - resolve: []string{"./a"}, - expDirs: []string{"a"}, - expFiles: []string{"a/c.go"}, - }, - { - name: "explicitly resolved files", - prepare: []string{"a/b/c.go", "a/d.txt"}, - resolve: []string{"./a/...", "a/d.txt"}, - expDirs: []string{"a/b"}, - expFiles: []string{"a/b/c.go", "a/d.txt"}, - }, - { - name: ".* dotfiles are always ignored", - prepare: []string{".git/a.go", ".circleci/b.go"}, - resolve: []string{"./..."}, - }, - { - name: "exclude dirs on any depth level", - prepare: []string{"ok/.git/a.go", "ok/b.go"}, - resolve: []string{"./..."}, - expDirs: []string{"ok"}, - expFiles: []string{"ok/b.go"}, - }, - { - name: "exclude path, not name", - prepare: []string{"ex/clude/me/a.go", "c/d.go"}, - resolve: []string{"./..."}, - expDirs: []string{"c"}, - expFiles: []string{"c/d.go"}, - }, - { - name: "exclude partial path", - prepare: []string{"prefix/ex/clude/me/a.go", "prefix/ex/clude/me/subdir/c.go", "prefix/b.go"}, - resolve: []string{"./..."}, - expDirs: []string{"prefix"}, - expFiles: []string{"prefix/b.go"}, - }, - { - name: "don't exclude file instead of dir", - prepare: []string{"a/exclude.go"}, - resolve: []string{"a"}, - expDirs: []string{"a"}, - 
expFiles: []string{"a/exclude.go"}, - }, - { - name: "don't exclude file instead of dir: check dir is excluded", - prepare: []string{"a/exclude.go/b.go"}, - resolve: []string{"a/..."}, - }, - { - name: "ignore _*", - prepare: []string{"_any/a.go"}, - resolve: []string{"./..."}, - }, - { - name: "include tests", - prepare: []string{"a/b.go", "a/b_test.go"}, - resolve: []string{"./..."}, - expDirs: []string{"a"}, - expFiles: []string{"a/b.go", "a/b_test.go"}, - includeTests: true, - }, - { - name: "exclude tests", - prepare: []string{"a/b.go", "a/b_test.go"}, - resolve: []string{"./..."}, - expDirs: []string{"a"}, - expFiles: []string{"a/b.go"}, - }, - { - name: "exclude tests except explicitly set", - prepare: []string{"a/b.go", "a/b_test.go", "a/c_test.go"}, - resolve: []string{"./...", "a/c_test.go"}, - expDirs: []string{"a"}, - expFiles: []string{"a/b.go", "a/c_test.go"}, - }, - { - name: "exclude dirs with no go files", - prepare: []string{"a/b.txt", "a/c/d.go"}, - resolve: []string{"./..."}, - expDirs: []string{"a/c"}, - expFiles: []string{"a/c/d.go"}, - }, - { - name: "exclude dirs with no go files with root dir", - prepare: []string{"a/b.txt", "a/c/d.go", "e.go"}, - resolve: []string{"./..."}, - expDirs: []string{".", "a/c"}, - expFiles: []string{"a/c/d.go", "e.go"}, - }, - { - name: "resolve absolute paths", - prepare: []string{"a/b.go", "a/c.txt", "d.go", "e.csv"}, - resolve: []string{"${CWD}/..."}, - expDirs: []string{".", "a"}, - expFiles: []string{"a/b.go", "d.go"}, - }, - } - - fsutils.UseWdCache(false) - for _, tc := range testCases { - t.Run(tc.name, func(t *testing.T) { - fp := prepareFS(t, tc.prepare...) - defer fp.clean() - - for i, rp := range tc.resolve { - tc.resolve[i] = strings.Replace(rp, "${CWD}", fp.root, -1) - } - - r := newTestResolver(t, []string{"vendor$", "ex/clude/me", "exclude"}) - - prog, err := r.Resolve(tc.resolve...) 
- assert.NoError(t, err) - assert.NotNil(t, prog) - - progFiles := prog.Files(tc.includeTests) - sort.StringSlice(progFiles).Sort() - sort.StringSlice(tc.expFiles).Sort() - - progDirs := prog.Dirs() - sort.StringSlice(progDirs).Sort() - sort.StringSlice(tc.expDirs).Sort() - - if tc.expFiles == nil { - assert.Empty(t, progFiles) - } else { - assert.Equal(t, tc.expFiles, progFiles, "files") - } - - if tc.expDirs == nil { - assert.Empty(t, progDirs) - } else { - assert.Equal(t, tc.expDirs, progDirs, "dirs") - } - }) - } -} diff --git a/pkg/packages/skip.go b/pkg/packages/skip.go new file mode 100644 index 000000000000..d9f54effa2af --- /dev/null +++ b/pkg/packages/skip.go @@ -0,0 +1,8 @@ +package packages + +var StdExcludeDirRegexps = []string{ + "^vendor$", "^third_party$", + "^testdata$", "^examples$", + "^Godeps$", + "^builtin$", +} diff --git a/pkg/result/processors/autogenerated_exclude.go b/pkg/result/processors/autogenerated_exclude.go index ac5bdc6d8cae..6e6db8d5102e 100644 --- a/pkg/result/processors/autogenerated_exclude.go +++ b/pkg/result/processors/autogenerated_exclude.go @@ -83,7 +83,7 @@ func (p *AutogeneratedExclude) getOrCreateFileSummary(i *result.Issue) (*ageFile fs = &ageFileSummary{} p.fileSummaryCache[i.FilePath()] = fs - f := p.astCache.GetOrParse(i.FilePath()) + f := p.astCache.GetOrParse(i.FilePath(), nil) if f.Err != nil { return nil, fmt.Errorf("can't parse file %s: %s", i.FilePath(), f.Err) } diff --git a/pkg/result/processors/autogenerated_exclude_test.go b/pkg/result/processors/autogenerated_exclude_test.go index 53a25dd69bb7..cd86befe5af7 100644 --- a/pkg/result/processors/autogenerated_exclude_test.go +++ b/pkg/result/processors/autogenerated_exclude_test.go @@ -57,7 +57,10 @@ func TestIsAutogeneratedDetection(t *testing.T) { assert.True(t, isGenerated) } - notGeneratedCases := []string{"code not generated by", "test"} + notGeneratedCases := []string{ + "code not generated by", + "test", + } for _, ngc := range notGeneratedCases { 
isGenerated := isGeneratedFileByComment(ngc) assert.False(t, isGenerated) diff --git a/pkg/result/processors/cgo.go b/pkg/result/processors/cgo.go index c3cae42e7764..1095bef320c8 100644 --- a/pkg/result/processors/cgo.go +++ b/pkg/result/processors/cgo.go @@ -1,17 +1,24 @@ package processors import ( - "github.com/golangci/golangci-lint/pkg/goutils" + "path/filepath" + "strings" + + "github.com/golangci/golangci-lint/pkg/goutil" "github.com/golangci/golangci-lint/pkg/result" + "github.com/pkg/errors" ) type Cgo struct { + goCacheDir string } var _ Processor = Cgo{} -func NewCgo() *Cgo { - return &Cgo{} +func NewCgo(goenv *goutil.Env) *Cgo { + return &Cgo{ + goCacheDir: goenv.Get("GOCACHE"), + } } func (p Cgo) Name() string { @@ -19,11 +26,32 @@ func (p Cgo) Name() string { } func (p Cgo) Process(issues []result.Issue) ([]result.Issue, error) { - return filterIssues(issues, func(i *result.Issue) bool { + return filterIssuesErr(issues, func(i *result.Issue) (bool, error) { // some linters (.e.g gosec, deadcode) return incorrect filepaths for cgo issues, - // it breaks next processing, so skip them - return !goutils.IsCgoFilename(i.FilePath()) - }), nil + // also cgo files have strange issues looking like false positives. 
+ + // cache dir contains all preprocessed files including cgo files + + issueFilePath := i.FilePath() + if !filepath.IsAbs(i.FilePath()) { + absPath, err := filepath.Abs(i.FilePath()) + if err != nil { + return false, errors.Wrapf(err, "failed to build abs path for %q", i.FilePath()) + } + issueFilePath = absPath + } + + if strings.HasPrefix(issueFilePath, p.goCacheDir) { + return false, nil + } + + if filepath.Base(i.FilePath()) == "_cgo_gotypes.go" { + // skip cgo warning for go1.10 + return false, nil + } + + return true, nil + }) } func (Cgo) Finish() {} diff --git a/pkg/result/processors/nolint.go b/pkg/result/processors/nolint.go index fde3fd78a8af..87c8702b7479 100644 --- a/pkg/result/processors/nolint.go +++ b/pkg/result/processors/nolint.go @@ -83,7 +83,7 @@ func (p *Nolint) getOrCreateFileData(i *result.Issue) (*fileData, error) { fd = &fileData{} p.cache[i.FilePath()] = fd - file := p.astCache.GetOrParse(i.FilePath()) + file := p.astCache.GetOrParse(i.FilePath(), nil) if file.Err != nil { return nil, fmt.Errorf("can't parse file %s: %s", i.FilePath(), file.Err) } diff --git a/pkg/result/processors/path_shortener.go b/pkg/result/processors/path_shortener.go new file mode 100644 index 000000000000..484f7f1f115c --- /dev/null +++ b/pkg/result/processors/path_shortener.go @@ -0,0 +1,40 @@ +package processors + +import ( + "fmt" + "strings" + + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type PathShortener struct { + wd string +} + +var _ Processor = PathShortener{} + +func NewPathShortener() *PathShortener { + wd, err := fsutils.Getwd() + if err != nil { + panic(fmt.Sprintf("Can't get working dir: %s", err)) + } + return &PathShortener{ + wd: wd, + } +} + +func (p PathShortener) Name() string { + return "path_shortener" +} + +func (p PathShortener) Process(issues []result.Issue) ([]result.Issue, error) { + return transformIssues(issues, func(i *result.Issue) *result.Issue { + newI := i + newI.Text = 
strings.Replace(newI.Text, p.wd+"/", "", -1) + newI.Text = strings.Replace(newI.Text, p.wd, "", -1) + return newI + }), nil +} + +func (p PathShortener) Finish() {} diff --git a/pkg/result/processors/skip_dirs.go b/pkg/result/processors/skip_dirs.go new file mode 100644 index 000000000000..2728d5af191f --- /dev/null +++ b/pkg/result/processors/skip_dirs.go @@ -0,0 +1,137 @@ +package processors + +import ( + "path/filepath" + "regexp" + "sort" + "strings" + + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" + "github.com/pkg/errors" +) + +type SkipDirs struct { + patterns []*regexp.Regexp + log logutils.Log + skippedDirs map[string]bool + sortedAbsArgs []string +} + +var _ Processor = SkipFiles{} + +type sortedByLenStrings []string + +func (s sortedByLenStrings) Len() int { return len(s) } +func (s sortedByLenStrings) Less(i, j int) bool { return len(s[i]) > len(s[j]) } +func (s sortedByLenStrings) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +func NewSkipDirs(patterns []string, log logutils.Log, runArgs []string) (*SkipDirs, error) { + var patternsRe []*regexp.Regexp + for _, p := range patterns { + patternRe, err := regexp.Compile(p) + if err != nil { + return nil, errors.Wrapf(err, "can't compile regexp %q", p) + } + patternsRe = append(patternsRe, patternRe) + } + + if len(runArgs) == 0 { + runArgs = append(runArgs, "./...") + } + var sortedAbsArgs []string + for _, arg := range runArgs { + if filepath.Base(arg) == "..." 
{ + arg = filepath.Dir(arg) + } + absArg, err := filepath.Abs(arg) + if err != nil { + return nil, errors.Wrapf(err, "failed to abs-ify arg %q", arg) + } + sortedAbsArgs = append(sortedAbsArgs, absArg) + } + sort.Sort(sortedByLenStrings(sortedAbsArgs)) + log.Infof("sorted abs args: %s", sortedAbsArgs) + + return &SkipDirs{ + patterns: patternsRe, + log: log, + skippedDirs: map[string]bool{}, + sortedAbsArgs: sortedAbsArgs, + }, nil +} + +func (p SkipDirs) Name() string { + return "skip_dirs" +} + +func (p *SkipDirs) Process(issues []result.Issue) ([]result.Issue, error) { + if len(p.patterns) == 0 { + return issues, nil + } + + return filterIssues(issues, p.shouldPassIssue), nil +} + +func (p *SkipDirs) getLongestArgRelativeIssuePath(i *result.Issue) (string, string) { + issueAbsPath, err := filepath.Abs(i.FilePath()) + if err != nil { + p.log.Warnf("Can't abs-ify path %q: %s", i.FilePath(), err) + return "", "" + } + + for _, arg := range p.sortedAbsArgs { + if !strings.HasPrefix(issueAbsPath, arg) { + continue + } + + relPath := strings.TrimPrefix(issueAbsPath, arg) + relPath = strings.TrimPrefix(relPath, string(filepath.Separator)) + return relPath, arg + } + + p.log.Infof("Issue path %q isn't relative to any of run args", i.FilePath()) + return "", "" +} + +func (p *SkipDirs) shouldPassIssue(i *result.Issue) bool { + relIssuePath, issueArg := p.getLongestArgRelativeIssuePath(i) + if relIssuePath == "" { + return true + } + + if strings.HasSuffix(filepath.Base(relIssuePath), ".go") { + relIssuePath = filepath.Dir(relIssuePath) + } + + relIssueDirParts := strings.Split(relIssuePath, string(filepath.Separator)) + + for _, pattern := range p.patterns { + skippedDir := issueArg + for _, part := range relIssueDirParts { + skippedDir = filepath.Join(skippedDir, part) + if pattern.MatchString(part) { + relSkippedDir, err := fsutils.ShortestRelPath(skippedDir, "") + if err != nil { + p.log.Warnf("Can't construct short relative path for %q: %s", skippedDir, err) + return 
true + } + p.skippedDirs[relSkippedDir] = true + return false + } + } + } + + return true +} + +func (p SkipDirs) Finish() { + if len(p.skippedDirs) != 0 { + var skippedDirs []string + for dir := range p.skippedDirs { + skippedDirs = append(skippedDirs, dir) + } + p.log.Infof("Skipped dirs: %s", skippedDirs) + } +} diff --git a/test/linters_test.go b/test/linters_test.go index 8fdb0770abb9..5d14e357f135 100644 --- a/test/linters_test.go +++ b/test/linters_test.go @@ -10,7 +10,7 @@ import ( "testing" "github.com/golangci/golangci-lint/pkg/exitcodes" - "github.com/stretchr/testify/assert" + assert "github.com/stretchr/testify/require" ) func runGoErrchk(c *exec.Cmd, t *testing.T) { diff --git a/test/run_test.go b/test/run_test.go index ba5e5a127d6c..393c4d44d076 100644 --- a/test/run_test.go +++ b/test/run_test.go @@ -15,7 +15,7 @@ import ( "github.com/golangci/golangci-lint/pkg/exitcodes" "github.com/golangci/golangci-lint/pkg/lint/lintersdb" - "github.com/stretchr/testify/assert" + assert "github.com/stretchr/testify/require" ) var root = filepath.Join("..", "...") @@ -36,8 +36,8 @@ func checkNoIssuesRun(t *testing.T, out string, exitCode int) { } func TestNoCongratsMessage(t *testing.T) { - out, exitCode := runGolangciLint(t, "../...") - assert.Equal(t, exitcodes.Success, exitCode) + out, exitCode := runGolangciLint(t, filepath.Join("..", "...")) + assert.Equal(t, exitcodes.Success, exitCode, out) assert.Equal(t, "", out) } @@ -51,26 +51,27 @@ func TestAutogeneratedNoIssues(t *testing.T) { checkNoIssuesRun(t, out, exitCode) } -func TestSymlinkLoop(t *testing.T) { - out, exitCode := runGolangciLint(t, filepath.Join(testdataDir, "symlink_loop", "...")) - checkNoIssuesRun(t, out, exitCode) +func TestEmptyDirRun(t *testing.T) { + out, exitCode := runGolangciLint(t, filepath.Join(testdataDir, "nogofiles")) + assert.Equal(t, exitcodes.NoGoFiles, exitCode) + assert.Contains(t, out, ": no go files to analyze") } -func TestRunOnAbsPath(t *testing.T) { - absPath, err := 
filepath.Abs(filepath.Join(testdataDir, "..")) - assert.NoError(t, err) - - out, exitCode := runGolangciLint(t, "--no-config", "--fast", absPath) - checkNoIssuesRun(t, out, exitCode) +func TestNotExistingDirRun(t *testing.T) { + out, exitCode := runGolangciLint(t, filepath.Join(testdataDir, "no_such_dir")) + assert.True(t, exitCode == exitcodes.WarningInTest || exitCode == exitcodes.IssuesFound) + assert.Contains(t, out, `cannot find package \"./testdata/no_such_dir\"`) +} - out, exitCode = runGolangciLint(t, "--no-config", absPath) +func TestSymlinkLoop(t *testing.T) { + out, exitCode := runGolangciLint(t, filepath.Join(testdataDir, "symlink_loop", "...")) checkNoIssuesRun(t, out, exitCode) } func TestDeadline(t *testing.T) { out, exitCode := runGolangciLint(t, "--deadline=1ms", root) assert.Equal(t, exitcodes.Timeout, exitCode) - assert.Contains(t, out, "deadline exceeded: try increase it by passing --deadline option") + assert.Contains(t, strings.ToLower(out), "deadline exceeded: try increase it by passing --deadline option") } func runGolangciLint(t *testing.T, args ...string) (string, int) { @@ -125,8 +126,9 @@ func runGolangciLintWithYamlConfigWithCode(t *testing.T, cfg string, args ...str } func TestTestsAreLintedByDefault(t *testing.T) { - out, exitCode := runGolangciLint(t, "./testdata/withtests") - assert.Equal(t, exitcodes.Success, exitCode, out) + out, exitCode := runGolangciLint(t, filepath.Join(testdataDir, "withtests")) + assert.Equal(t, exitcodes.IssuesFound, exitCode) + assert.Contains(t, out, "if block ends with a return") } func TestCgoOk(t *testing.T) { @@ -134,11 +136,26 @@ func TestCgoOk(t *testing.T) { checkNoIssuesRun(t, out, exitCode) } +func TestCgoWithIssues(t *testing.T) { + out, exitCode := runGolangciLint(t, "--enable-all", filepath.Join(testdataDir, "cgo_with_issues")) + assert.Equal(t, exitcodes.IssuesFound, exitCode) + assert.Contains(t, out, "Printf format %t has arg cs of wrong type") +} + func TestUnsafeOk(t *testing.T) { out, 
exitCode := runGolangciLint(t, "--enable-all", filepath.Join(testdataDir, "unsafe")) checkNoIssuesRun(t, out, exitCode) } +func TestSkippedDirs(t *testing.T) { + out, exitCode := runGolangciLint(t, "--print-issued-lines=false", "--no-config", "--skip-dirs", "skip_me", "-Egolint", + filepath.Join(testdataDir, "skipdirs", "...")) + assert.Equal(t, exitcodes.IssuesFound, exitCode) + assert.Equal(t, out, + "testdata/skipdirs/examples_no_skip/with_issue.go:8:9: if block ends with "+ + "a return statement, so drop this else and outdent its block (golint)\n") +} + func TestDeadcodeNoFalsePositivesInMainPkg(t *testing.T) { out, exitCode := runGolangciLint(t, "--no-config", "--disable-all", "-Edeadcode", filepath.Join(testdataDir, "deadcode_main_pkg")) @@ -173,7 +190,7 @@ func getEnabledByDefaultFastLintersExcept(except ...string) []string { ebdl := m.GetAllEnabledByDefaultLinters() ret := []string{} for _, lc := range ebdl { - if lc.DoesFullImport { + if lc.NeedsSSARepr { continue } @@ -189,7 +206,7 @@ func getAllFastLintersWith(with ...string) []string { linters := lintersdb.NewManager().GetAllSupportedLinterConfigs() ret := append([]string{}, with...) for _, lc := range linters { - if lc.DoesFullImport { + if lc.NeedsSSARepr { continue } ret = append(ret, lc.Name()) @@ -212,7 +229,7 @@ func getEnabledByDefaultFastLintersWith(with ...string) []string { ebdl := lintersdb.NewManager().GetAllEnabledByDefaultLinters() ret := append([]string{}, with...) 
for _, lc := range ebdl { - if lc.DoesFullImport { + if lc.NeedsSSARepr { continue } @@ -337,8 +354,8 @@ func TestEnabledLinters(t *testing.T) { }, { name: "fast option combined with enable and enable-all", - args: "--enable-all --fast --enable=typecheck", - el: getAllFastLintersWith("typecheck"), + args: "--enable-all --fast --enable=megacheck", + el: getAllFastLintersWith("megacheck"), noImplicitFast: true, }, } @@ -361,15 +378,6 @@ func TestEnabledLinters(t *testing.T) { } } -func TestGovetInFastMode(t *testing.T) { - cfg := ` - linters-settings: - use-installed-packages: true - ` - out := runGolangciLintWithYamlConfig(t, cfg, "--fast", "-Egovet", root) - assert.Equal(t, noIssuesOut, out) -} - func TestEnabledPresetsAreNotDuplicated(t *testing.T) { out, _ := runGolangciLint(t, "--no-config", "-v", "-p", "style,bugs") assert.Contains(t, out, "Active presets: [bugs style]") diff --git a/test/testdata/cgo/main.go b/test/testdata/cgo/main.go index 832bbbcc2e1c..cbb692fe7068 100644 --- a/test/testdata/cgo/main.go +++ b/test/testdata/cgo/main.go @@ -10,7 +10,9 @@ void myprint(char* s) { */ import "C" -import "unsafe" +import ( + "unsafe" +) func Example() { cs := C.CString("Hello from stdio\n") diff --git a/test/testdata/cgo_with_issues/main.go b/test/testdata/cgo_with_issues/main.go new file mode 100644 index 000000000000..6eab19a25e18 --- /dev/null +++ b/test/testdata/cgo_with_issues/main.go @@ -0,0 +1,23 @@ +package cgoexample + +/* +#include +#include + +void myprint(char* s) { + printf("%s\n", s); +} +*/ +import "C" + +import ( + "fmt" + "unsafe" +) + +func Example() { + cs := C.CString("Hello from stdio\n") + C.myprint(cs) + fmt.Printf("bad format %t", cs) + C.free(unsafe.Pointer(cs)) +} diff --git a/test/testdata/nogofiles/test.txt b/test/testdata/nogofiles/test.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/testdata/skipdirs/examples/with_issue.go b/test/testdata/skipdirs/examples/with_issue.go new file mode 100644 index 
000000000000..e82aec73fdcd --- /dev/null +++ b/test/testdata/skipdirs/examples/with_issue.go @@ -0,0 +1,11 @@ +package main + +import "fmt" + +func main() { + if true { + return + } else { + fmt.Printf("") + } +} diff --git a/test/testdata/skipdirs/examples_no_skip/with_issue.go b/test/testdata/skipdirs/examples_no_skip/with_issue.go new file mode 100644 index 000000000000..e82aec73fdcd --- /dev/null +++ b/test/testdata/skipdirs/examples_no_skip/with_issue.go @@ -0,0 +1,11 @@ +package main + +import "fmt" + +func main() { + if true { + return + } else { + fmt.Printf("") + } +} diff --git a/test/testdata/skipdirs/skip_me/nested/with_issue.go b/test/testdata/skipdirs/skip_me/nested/with_issue.go new file mode 100644 index 000000000000..e82aec73fdcd --- /dev/null +++ b/test/testdata/skipdirs/skip_me/nested/with_issue.go @@ -0,0 +1,11 @@ +package main + +import "fmt" + +func main() { + if true { + return + } else { + fmt.Printf("") + } +} diff --git a/test/testdata/withtests/p.go b/test/testdata/withtests/p.go index ce0e6b49d3ef..be933f30a02d 100644 --- a/test/testdata/withtests/p.go +++ b/test/testdata/withtests/p.go @@ -2,22 +2,6 @@ package withtests import "fmt" -var varUsedOnlyInTests bool - -func usedOnlyInTests() {} - -type someType struct { - fieldUsedOnlyInTests bool - fieldUsedHere bool -} - -func usedHere() { - v := someType{ - fieldUsedHere: true, - } - fmt.Println(v) -} - func init() { - usedHere() + fmt.Printf("init") } diff --git a/test/testdata/withtests/p_test.go b/test/testdata/withtests/p_test.go index eaaba6a093d2..a238e6e9c6e6 100644 --- a/test/testdata/withtests/p_test.go +++ b/test/testdata/withtests/p_test.go @@ -6,9 +6,9 @@ import ( ) func TestSomething(t *testing.T) { - v := someType{ - fieldUsedOnlyInTests: true, + if true { + return + } else { + fmt.Printf("test") } - fmt.Println(v, varUsedOnlyInTests) - usedOnlyInTests() } diff --git a/vendor/github.com/golangci/tools/go/ssa/builder.go b/vendor/github.com/golangci/tools/go/ssa/builder.go 
index bfb7a2b76b85..602b27188e21 100644 --- a/vendor/github.com/golangci/tools/go/ssa/builder.go +++ b/vendor/github.com/golangci/tools/go/ssa/builder.go @@ -58,7 +58,7 @@ var ( tString = types.Typ[types.String] tUntypedNil = types.Typ[types.UntypedNil] tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators - tEface = new(types.Interface) + tEface = types.NewInterface(nil, nil).Complete() // SSA Value constants. vZero = intConst(0) @@ -154,7 +154,7 @@ func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { // All edges from e.X to done carry the short-circuit value. var edges []Value - for _ = range done.Preds { + for range done.Preds { edges = append(edges, short) } @@ -2263,10 +2263,6 @@ func (p *Package) build() { if p.info == nil { return // synthetic package, e.g. "testmain" } - if p.files == nil { - p.info = nil - return // package loaded from export data - } // Ensure we have runtime type info for all exported members. // TODO(adonovan): ideally belongs in memberFromObject, but diff --git a/vendor/github.com/golangci/tools/go/ssa/const.go b/vendor/github.com/golangci/tools/go/ssa/const.go index 8b6dbabf581c..4d0a707f5286 100644 --- a/vendor/github.com/golangci/tools/go/ssa/const.go +++ b/vendor/github.com/golangci/tools/go/ssa/const.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.6 - package ssa // This file defines the Const SSA value type. diff --git a/vendor/github.com/golangci/tools/go/ssa/emit.go b/vendor/github.com/golangci/tools/go/ssa/emit.go index 400da2122639..1036988adcb8 100644 --- a/vendor/github.com/golangci/tools/go/ssa/emit.go +++ b/vendor/github.com/golangci/tools/go/ssa/emit.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.5 - package ssa // Helpers for emitting SSA instructions. 
@@ -267,11 +265,6 @@ func emitJump(f *Function, target *BasicBlock) { f.currentBlock = nil } -func (b *BasicBlock) emitJump(target *BasicBlock) { - b.emit(new(Jump)) - addEdge(b, target) -} - // emitIf emits to f a conditional jump to tblock or fblock based on // cond, and updates the control-flow graph. // Postcondition: f.currentBlock is nil. diff --git a/vendor/github.com/golangci/tools/go/ssa/func.go b/vendor/github.com/golangci/tools/go/ssa/func.go index 86a3da74dab5..53635ba0114e 100644 --- a/vendor/github.com/golangci/tools/go/ssa/func.go +++ b/vendor/github.com/golangci/tools/go/ssa/func.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.5 - package ssa // This file implements the Function and BasicBlock types. diff --git a/vendor/github.com/golangci/tools/go/ssa/lift.go b/vendor/github.com/golangci/tools/go/ssa/lift.go index 02707970e52c..048e9b03260b 100644 --- a/vendor/github.com/golangci/tools/go/ssa/lift.go +++ b/vendor/github.com/golangci/tools/go/ssa/lift.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.5 - package ssa // This file defines the lifting pass which tries to "lift" Alloc @@ -38,9 +36,6 @@ package ssa // Consider exploiting liveness information to avoid creating dead // φ-nodes which we then immediately remove. // -// Integrate lifting with scalar replacement of aggregates (SRA) since -// the two are synergistic. -// // Also see many other "TODO: opt" suggestions in the code. import ( @@ -51,8 +46,8 @@ import ( "os" ) -// If true, perform sanity checking and show diagnostic information at -// each step of lifting. Very verbose. +// If true, show diagnostic information at each step of lifting. +// Very verbose. 
const debugLifting = false // domFrontier maps each block to the set of blocks in its dominance @@ -109,10 +104,6 @@ func buildDomFrontier(fn *Function) domFrontier { return df } -func RemoveInstr(refs []Instruction, instr Instruction) []Instruction { - return removeInstr(refs, instr) -} - func removeInstr(refs []Instruction, instr Instruction) []Instruction { i := 0 for _, ref := range refs { @@ -128,7 +119,7 @@ func removeInstr(refs []Instruction, instr Instruction) []Instruction { return refs[:i] } -// lift attempts to replace local and new Allocs accessed only with +// lift replaces local and new Allocs accessed only with // load/store by SSA registers, inserting φ-nodes where necessary. // The result is a program in classical pruned SSA form. // @@ -184,6 +175,11 @@ func lift(fn *Function) { // instructions. usesDefer := false + // A counter used to generate ~unique ids for Phi nodes, as an + // aid to debugging. We use large numbers to make them highly + // visible. All nodes are renumbered later. + fresh := 1000 + // Determine which allocs we can lift and number them densely. // The renaming phase uses this numbering for compact maps. numAllocs := 0 @@ -194,7 +190,7 @@ func lift(fn *Function) { switch instr := instr.(type) { case *Alloc: index := -1 - if liftAlloc(df, instr, newPhis) { + if liftAlloc(df, instr, newPhis, &fresh) { index = numAllocs numAllocs++ } @@ -217,29 +213,13 @@ func lift(fn *Function) { // Renaming. rename(fn.Blocks[0], renaming, newPhis) - // Eliminate dead new phis, then prepend the live ones to each block. - for _, b := range fn.Blocks { + // Eliminate dead φ-nodes. + removeDeadPhis(fn.Blocks, newPhis) - // Compress the newPhis slice to eliminate unused phis. - // TODO(adonovan): opt: compute liveness to avoid - // placing phis in blocks for which the alloc cell is - // not live. + // Prepend remaining live φ-nodes to each block. 
+ for _, b := range fn.Blocks { nps := newPhis[b] - j := 0 - for _, np := range nps { - if !phiIsLive(np.phi) { - // discard it, first removing it from referrers - for _, newval := range np.phi.Edges { - if refs := newval.Referrers(); refs != nil { - *refs = removeInstr(*refs, np.phi) - } - } - continue - } - nps[j] = np - j++ - } - nps = nps[:j] + j := len(nps) rundefersToKill := b.rundefers if usesDefer { @@ -251,8 +231,8 @@ func lift(fn *Function) { } // Compact nps + non-nil Instrs into a new slice. - // TODO(adonovan): opt: compact in situ if there is - // sufficient space or slack in the slice. + // TODO(adonovan): opt: compact in situ (rightwards) + // if Instrs has sufficient space or slack. dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill) for i, np := range nps { dst[i] = np.phi @@ -269,9 +249,6 @@ func lift(fn *Function) { dst[j] = instr j++ } - for i, np := range nps { - dst[i] = np.phi - } b.Instrs = dst } @@ -290,15 +267,76 @@ func lift(fn *Function) { fn.Locals = fn.Locals[:j] } -func phiIsLive(phi *Phi) bool { - for _, instr := range *phi.Referrers() { - if instr == phi { - continue // self-refs don't count +// removeDeadPhis removes φ-nodes not transitively needed by a +// non-Phi, non-DebugRef instruction. +func removeDeadPhis(blocks []*BasicBlock, newPhis newPhiMap) { + // First pass: find the set of "live" φ-nodes: those reachable + // from some non-Phi instruction. + // + // We compute reachability in reverse, starting from each φ, + // rather than forwards, starting from each live non-Phi + // instruction, because this way visits much less of the + // Value graph. + livePhis := make(map[*Phi]bool) + for _, npList := range newPhis { + for _, np := range npList { + phi := np.phi + if !livePhis[phi] && phiHasDirectReferrer(phi) { + markLivePhi(livePhis, phi) + } + } + } + + // Existing φ-nodes due to && and || operators + // are all considered live (see Go issue 19622). 
+ for _, b := range blocks { + for _, phi := range b.phis() { + markLivePhi(livePhis, phi.(*Phi)) } - if _, ok := instr.(*DebugRef); ok { - continue // debug refs don't count + } + + // Second pass: eliminate unused phis from newPhis. + for block, npList := range newPhis { + j := 0 + for _, np := range npList { + if livePhis[np.phi] { + npList[j] = np + j++ + } else { + // discard it, first removing it from referrers + for _, val := range np.phi.Edges { + if refs := val.Referrers(); refs != nil { + *refs = removeInstr(*refs, np.phi) + } + } + np.phi.block = nil + } + } + newPhis[block] = npList[:j] + } +} + +// markLivePhi marks phi, and all φ-nodes transitively reachable via +// its Operands, live. +func markLivePhi(livePhis map[*Phi]bool, phi *Phi) { + livePhis[phi] = true + for _, rand := range phi.Operands(nil) { + if q, ok := (*rand).(*Phi); ok { + if !livePhis[q] { + markLivePhi(livePhis, q) + } + } + } +} + +// phiHasDirectReferrer reports whether phi is directly referred to by +// a non-Phi instruction. Such instructions are the +// roots of the liveness traversal. +func phiHasDirectReferrer(phi *Phi) bool { + for _, instr := range *phi.Referrers() { + if _, ok := instr.(*Phi); !ok { + return true } - return true } return false } @@ -343,7 +381,9 @@ type newPhiMap map[*BasicBlock][]newPhi // and if so, it populates newPhis with all the φ-nodes it may require // and returns true. // -func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap) bool { +// fresh is a source of fresh ids for phi nodes. +// +func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool { // Don't lift aggregates into registers, because we don't have // a way to express their zero-constants. 
switch deref(alloc.Type()).Underlying().(type) { @@ -426,6 +466,10 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap) bool { Edges: make([]Value, len(v.Preds)), Comment: alloc.Comment, } + // This is merely a debugging aid: + phi.setNum(*fresh) + *fresh++ + phi.pos = alloc.Pos() phi.setType(deref(alloc.Type())) phi.block = v @@ -444,10 +488,6 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap) bool { return true } -func ReplaceAll(x, y Value) { - replaceAll(x, y) -} - // replaceAll replaces all intraprocedural uses of x with y, // updating x.Referrers and y.Referrers. // Precondition: x.Referrers() != nil, i.e. x must be local to some function. @@ -599,10 +639,15 @@ func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) { // Continue depth-first recursion over domtree, pushing a // fresh copy of the renaming map for each subtree. - for _, v := range u.dom.children { - // TODO(adonovan): opt: avoid copy on final iteration; use destructive update. - r := make([]Value, len(renaming)) - copy(r, renaming) + for i, v := range u.dom.children { + r := renaming + if i < len(u.dom.children)-1 { + // On all but the final iteration, we must make + // a copy to avoid destructive update. + r = make([]Value, len(renaming)) + copy(r, renaming) + } rename(v, r, newPhis) } + } diff --git a/vendor/github.com/golangci/tools/go/ssa/lvalue.go b/vendor/github.com/golangci/tools/go/ssa/lvalue.go index d2226a9b4f7b..eb5d71e188fb 100644 --- a/vendor/github.com/golangci/tools/go/ssa/lvalue.go +++ b/vendor/github.com/golangci/tools/go/ssa/lvalue.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-// +build go1.5 - package ssa // lvalues are the union of addressable expressions and map-index diff --git a/vendor/github.com/golangci/tools/go/ssa/methods.go b/vendor/github.com/golangci/tools/go/ssa/methods.go index 7d1fb42b574f..080dca968ef8 100644 --- a/vendor/github.com/golangci/tools/go/ssa/methods.go +++ b/vendor/github.com/golangci/tools/go/ssa/methods.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.5 - package ssa // This file defines utilities for population of method sets. diff --git a/vendor/github.com/golangci/tools/go/ssa/print.go b/vendor/github.com/golangci/tools/go/ssa/print.go index a7deb8810005..6fd277277c05 100644 --- a/vendor/github.com/golangci/tools/go/ssa/print.go +++ b/vendor/github.com/golangci/tools/go/ssa/print.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.5 - package ssa // This file implements the String() methods for all Value and @@ -85,14 +83,18 @@ func (v *Alloc) String() string { func (v *Phi) String() string { var b bytes.Buffer - b.WriteString("φ [") + b.WriteString("phi [") for i, edge := range v.Edges { if i > 0 { b.WriteString(", ") } // Be robust against malformed CFG. + if v.block == nil { + b.WriteString("??") + continue + } block := -1 - if v.block != nil && i < len(v.block.Preds) { + if i < len(v.block.Preds) { block = v.block.Preds[i].Index } fmt.Fprintf(&b, "%d: ", block) diff --git a/vendor/github.com/golangci/tools/go/ssa/sanity.go b/vendor/github.com/golangci/tools/go/ssa/sanity.go index bd7377ce5bd7..c56b2682c083 100644 --- a/vendor/github.com/golangci/tools/go/ssa/sanity.go +++ b/vendor/github.com/golangci/tools/go/ssa/sanity.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-// +build go1.5 - package ssa // An optional pass for sanity-checking invariants of the SSA representation. @@ -213,7 +211,7 @@ func (s *sanity) checkInstr(idx int, instr Instruction) { // enclosing Function or Package. } -func (s *sanity) checkFinalInstr(idx int, instr Instruction) { +func (s *sanity) checkFinalInstr(instr Instruction) { switch instr := instr.(type) { case *If: if nsuccs := len(s.block.Succs); nsuccs != 2 { @@ -328,7 +326,7 @@ func (s *sanity) checkBlock(b *BasicBlock, index int) { if j < n-1 { s.checkInstr(j, instr) } else { - s.checkFinalInstr(j, instr) + s.checkFinalInstr(instr) } // Check Instruction.Operands. @@ -355,7 +353,9 @@ func (s *sanity) checkBlock(b *BasicBlock, index int) { // Check that Operands that are also Instructions belong to same function. // TODO(adonovan): also check their block dominates block b. if val, ok := val.(Instruction); ok { - if val.Parent() != s.fn { + if val.Block() == nil { + s.errorf("operand %d of %s is an instruction (%s) that belongs to no block", i, instr, val) + } else if val.Parent() != s.fn { s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent()) } } diff --git a/vendor/github.com/golangci/tools/go/ssa/source.go b/vendor/github.com/golangci/tools/go/ssa/source.go index e17e0234658e..6d2223edaa36 100644 --- a/vendor/github.com/golangci/tools/go/ssa/source.go +++ b/vendor/github.com/golangci/tools/go/ssa/source.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.5 - package ssa // This file defines utilities for working with source positions @@ -16,7 +14,6 @@ import ( "go/ast" "go/token" "go/types" - "log" ) // EnclosingFunction returns the function that contains the syntax @@ -126,9 +123,6 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function { // Don't call Program.Method: avoid creating wrappers. 
obj := mset.At(i).Obj().(*types.Func) if obj.Pos() == pos { - if pkg.values[obj] == nil { - log.Println(obj) - } return pkg.values[obj].(*Function) } } diff --git a/vendor/github.com/golangci/tools/go/ssa/ssa.go b/vendor/github.com/golangci/tools/go/ssa/ssa.go index 250d588c74fe..8825e7b5990d 100644 --- a/vendor/github.com/golangci/tools/go/ssa/ssa.go +++ b/vendor/github.com/golangci/tools/go/ssa/ssa.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.5 - package ssa // This package defines a high-level intermediate representation for @@ -77,9 +75,6 @@ type Member interface { } // A Type is a Member of a Package representing a package-level named type. -// -// Type() returns a *types.Named. -// type Type struct { object *types.TypeName pkg *Package @@ -97,7 +92,6 @@ type Type struct { type NamedConst struct { object *types.Const Value *Const - pos token.Pos pkg *Package } @@ -578,8 +572,8 @@ type BinOp struct { register // One of: // ADD SUB MUL QUO REM + - * / % - // AND OR XOR SHL SHR AND_NOT & | ^ << >> &^ - // EQL NEQ LSS LEQ GTR GEQ == != < <= < >= + // AND OR XOR SHL SHR AND_NOT & | ^ << >> &~ + // EQL LSS GTR NEQ LEQ GEQ == != < <= < >= Op token.Token X, Y Value } @@ -819,7 +813,7 @@ type Slice struct { type FieldAddr struct { register X Value // *struct - Field int // field is X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Struct).Field(Field) + Field int // index into X.Type().Deref().(*types.Struct).Fields } // The Field instruction yields the Field of struct X. 
diff --git a/vendor/github.com/golangci/tools/go/ssa/ssautil/load.go b/vendor/github.com/golangci/tools/go/ssa/ssautil/load.go index 6e5a4675e53f..004571eb0b6c 100644 --- a/vendor/github.com/golangci/tools/go/ssa/ssautil/load.go +++ b/vendor/github.com/golangci/tools/go/ssa/ssautil/load.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.5 - package ssautil // This file defines utility functions for constructing programs in SSA form. @@ -13,10 +11,58 @@ import ( "go/token" "go/types" - "golang.org/x/tools/go/loader" "github.com/golangci/tools/go/ssa" + "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" ) +// Packages creates an SSA program for a set of packages loaded from +// source syntax using the golang.org/x/tools/go/packages.Load function. +// It creates and returns an SSA package for each well-typed package in +// the initial list. The resulting list of packages has the same length +// as initial, and contains a nil if SSA could not be constructed for +// the corresponding initial package. +// +// Code for bodies of functions is not built until Build is called +// on the resulting Program. +// +// The mode parameter controls diagnostics and checking during SSA construction. 
+// +func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { + var fset *token.FileSet + if len(initial) > 0 { + fset = initial[0].Fset + } + + prog := ssa.NewProgram(fset, mode) + seen := make(map[*packages.Package]*ssa.Package) + var create func(p *packages.Package) *ssa.Package + create = func(p *packages.Package) *ssa.Package { + ssapkg, ok := seen[p] + if !ok { + if p.Types == nil || p.IllTyped { + // not well typed + seen[p] = nil + return nil + } + + ssapkg = prog.CreatePackage(p.Types, p.Syntax, p.TypesInfo, true) + seen[p] = ssapkg + + for _, imp := range p.Imports { + create(imp) + } + } + return ssapkg + } + + var ssapkgs []*ssa.Package + for _, p := range initial { + ssapkgs = append(ssapkgs, create(p)) + } + return prog, ssapkgs +} + // CreateProgram returns a new program in SSA form, given a program // loaded from source. An SSA package is created for each transitively // error-free package of lprog. diff --git a/vendor/github.com/golangci/tools/go/ssa/ssautil/switch.go b/vendor/github.com/golangci/tools/go/ssa/ssautil/switch.go index ede3a75744bc..7c79719c6aba 100644 --- a/vendor/github.com/golangci/tools/go/ssa/ssautil/switch.go +++ b/vendor/github.com/golangci/tools/go/ssa/ssautil/switch.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.5 - package ssautil // This file implements discovery of switch and type-switch constructs diff --git a/vendor/github.com/golangci/tools/go/ssa/testmain.go b/vendor/github.com/golangci/tools/go/ssa/testmain.go index 2b897246cdb8..ea232ada951f 100644 --- a/vendor/github.com/golangci/tools/go/ssa/testmain.go +++ b/vendor/github.com/golangci/tools/go/ssa/testmain.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-// +build go1.5 - package ssa // CreateTestMainPackage synthesizes a main package that runs all the @@ -216,9 +214,12 @@ import p {{printf "%q" .Pkg.Pkg.Path}} {{if .Go18}} type deps struct{} +func (deps) ImportPath() string { return "" } func (deps) MatchString(pat, str string) (bool, error) { return true, nil } func (deps) StartCPUProfile(io.Writer) error { return nil } +func (deps) StartTestLog(io.Writer) {} func (deps) StopCPUProfile() {} +func (deps) StopTestLog() error { return nil } func (deps) WriteHeapProfile(io.Writer) error { return nil } func (deps) WriteProfileTo(string, io.Writer, int) error { return nil } diff --git a/vendor/github.com/golangci/tools/go/ssa/util.go b/vendor/github.com/golangci/tools/go/ssa/util.go index 317a0130b191..ddb118460969 100644 --- a/vendor/github.com/golangci/tools/go/ssa/util.go +++ b/vendor/github.com/golangci/tools/go/ssa/util.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build go1.5 - package ssa // This file defines a number of miscellaneous utility functions. @@ -60,7 +58,7 @@ func recvType(obj *types.Func) types.Type { // // Exported to ssa/interp. // -// TODO(gri): this is a copy of go/types.defaultType; export that function. +// TODO(adonovan): use go/types.DefaultType after 1.8. // func DefaultType(typ types.Type) types.Type { if t, ok := typ.(*types.Basic); ok { diff --git a/vendor/github.com/golangci/tools/go/ssa/wrappers.go b/vendor/github.com/golangci/tools/go/ssa/wrappers.go index 6ca01ab35f9a..555cd70d9555 100644 --- a/vendor/github.com/golangci/tools/go/ssa/wrappers.go +++ b/vendor/github.com/golangci/tools/go/ssa/wrappers.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-// +build go1.5 - package ssa // This file defines synthesis of Functions that delegate to declared @@ -23,7 +21,6 @@ package ssa import ( "fmt" - "go/types" ) diff --git a/vendor/github.com/stretchr/testify/require/doc.go b/vendor/github.com/stretchr/testify/require/doc.go new file mode 100644 index 000000000000..169de39221c7 --- /dev/null +++ b/vendor/github.com/stretchr/testify/require/doc.go @@ -0,0 +1,28 @@ +// Package require implements the same assertions as the `assert` package but +// stops test execution when a test fails. +// +// Example Usage +// +// The following is a complete example using require in a standard test function: +// import ( +// "testing" +// "github.com/stretchr/testify/require" +// ) +// +// func TestSomething(t *testing.T) { +// +// var a string = "Hello" +// var b string = "Hello" +// +// require.Equal(t, a, b, "The two words should be the same.") +// +// } +// +// Assertions +// +// The `require` package have same global functions as in the `assert` package, +// but instead of returning a boolean result they call `t.FailNow()`. +// +// Every assertion function also takes an optional string message as the final argument, +// allowing custom error messages to be appended to the message the assertion method outputs. +package require diff --git a/vendor/github.com/stretchr/testify/require/forward_requirements.go b/vendor/github.com/stretchr/testify/require/forward_requirements.go new file mode 100644 index 000000000000..ac71d40581b9 --- /dev/null +++ b/vendor/github.com/stretchr/testify/require/forward_requirements.go @@ -0,0 +1,16 @@ +package require + +// Assertions provides assertion methods around the +// TestingT interface. +type Assertions struct { + t TestingT +} + +// New makes a new Assertions object for the specified TestingT. 
+func New(t TestingT) *Assertions { + return &Assertions{ + t: t, + } +} + +//go:generate go run ../_codegen/main.go -output-package=require -template=require_forward.go.tmpl -include-format-funcs diff --git a/vendor/github.com/stretchr/testify/require/require.go b/vendor/github.com/stretchr/testify/require/require.go new file mode 100644 index 000000000000..ac3c30878873 --- /dev/null +++ b/vendor/github.com/stretchr/testify/require/require.go @@ -0,0 +1,867 @@ +/* +* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen +* THIS FILE MUST NOT BE EDITED BY HAND + */ + +package require + +import ( + assert "github.com/stretchr/testify/assert" + http "net/http" + url "net/url" + time "time" +) + +// Condition uses a Comparison to assert a complex condition. +func Condition(t TestingT, comp assert.Comparison, msgAndArgs ...interface{}) { + if !assert.Condition(t, comp, msgAndArgs...) { + t.FailNow() + } +} + +// Conditionf uses a Comparison to assert a complex condition. +func Conditionf(t TestingT, comp assert.Comparison, msg string, args ...interface{}) { + if !assert.Conditionf(t, comp, msg, args...) { + t.FailNow() + } +} + +// Contains asserts that the specified string, list(array, slice...) or map contains the +// specified substring or element. +// +// assert.Contains(t, "Hello World", "World") +// assert.Contains(t, ["Hello", "World"], "World") +// assert.Contains(t, {"Hello": "World"}, "Hello") +func Contains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...interface{}) { + if !assert.Contains(t, s, contains, msgAndArgs...) { + t.FailNow() + } +} + +// Containsf asserts that the specified string, list(array, slice...) or map contains the +// specified substring or element. 
+// +// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted") +// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted") +// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted") +func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) { + if !assert.Containsf(t, s, contains, msg, args...) { + t.FailNow() + } +} + +// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func DirExists(t TestingT, path string, msgAndArgs ...interface{}) { + if !assert.DirExists(t, path, msgAndArgs...) { + t.FailNow() + } +} + +// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func DirExistsf(t TestingT, path string, msg string, args ...interface{}) { + if !assert.DirExistsf(t, path, msg, args...) { + t.FailNow() + } +} + +// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. +// +// assert.ElementsMatch(t, [1, 3, 2, 3], [1, 3, 3, 2]) +func ElementsMatch(t TestingT, listA interface{}, listB interface{}, msgAndArgs ...interface{}) { + if !assert.ElementsMatch(t, listA, listB, msgAndArgs...) { + t.FailNow() + } +} + +// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. 
+// +// assert.ElementsMatchf(t, [1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted") +func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string, args ...interface{}) { + if !assert.ElementsMatchf(t, listA, listB, msg, args...) { + t.FailNow() + } +} + +// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// assert.Empty(t, obj) +func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) { + if !assert.Empty(t, object, msgAndArgs...) { + t.FailNow() + } +} + +// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// assert.Emptyf(t, obj, "error message %s", "formatted") +func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) { + if !assert.Emptyf(t, object, msg, args...) { + t.FailNow() + } +} + +// Equal asserts that two objects are equal. +// +// assert.Equal(t, 123, 123) +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. +func Equal(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + if !assert.Equal(t, expected, actual, msgAndArgs...) { + t.FailNow() + } +} + +// EqualError asserts that a function returned an error (i.e. not `nil`) +// and that it is equal to the provided error. +// +// actualObj, err := SomeFunction() +// assert.EqualError(t, err, expectedErrorString) +func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) { + if !assert.EqualError(t, theError, errString, msgAndArgs...) { + t.FailNow() + } +} + +// EqualErrorf asserts that a function returned an error (i.e. not `nil`) +// and that it is equal to the provided error. 
+// +// actualObj, err := SomeFunction() +// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted") +func EqualErrorf(t TestingT, theError error, errString string, msg string, args ...interface{}) { + if !assert.EqualErrorf(t, theError, errString, msg, args...) { + t.FailNow() + } +} + +// EqualValues asserts that two objects are equal or convertable to the same types +// and equal. +// +// assert.EqualValues(t, uint32(123), int32(123)) +func EqualValues(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + if !assert.EqualValues(t, expected, actual, msgAndArgs...) { + t.FailNow() + } +} + +// EqualValuesf asserts that two objects are equal or convertable to the same types +// and equal. +// +// assert.EqualValuesf(t, uint32(123, "error message %s", "formatted"), int32(123)) +func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { + if !assert.EqualValuesf(t, expected, actual, msg, args...) { + t.FailNow() + } +} + +// Equalf asserts that two objects are equal. +// +// assert.Equalf(t, 123, 123, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. +func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { + if !assert.Equalf(t, expected, actual, msg, args...) { + t.FailNow() + } +} + +// Error asserts that a function returned an error (i.e. not `nil`). +// +// actualObj, err := SomeFunction() +// if assert.Error(t, err) { +// assert.Equal(t, expectedError, err) +// } +func Error(t TestingT, err error, msgAndArgs ...interface{}) { + if !assert.Error(t, err, msgAndArgs...) { + t.FailNow() + } +} + +// Errorf asserts that a function returned an error (i.e. not `nil`). 
+// +// actualObj, err := SomeFunction() +// if assert.Errorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } +func Errorf(t TestingT, err error, msg string, args ...interface{}) { + if !assert.Errorf(t, err, msg, args...) { + t.FailNow() + } +} + +// Exactly asserts that two objects are equal in value and type. +// +// assert.Exactly(t, int32(123), int64(123)) +func Exactly(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + if !assert.Exactly(t, expected, actual, msgAndArgs...) { + t.FailNow() + } +} + +// Exactlyf asserts that two objects are equal in value and type. +// +// assert.Exactlyf(t, int32(123, "error message %s", "formatted"), int64(123)) +func Exactlyf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { + if !assert.Exactlyf(t, expected, actual, msg, args...) { + t.FailNow() + } +} + +// Fail reports a failure through +func Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) { + if !assert.Fail(t, failureMessage, msgAndArgs...) { + t.FailNow() + } +} + +// FailNow fails test +func FailNow(t TestingT, failureMessage string, msgAndArgs ...interface{}) { + if !assert.FailNow(t, failureMessage, msgAndArgs...) { + t.FailNow() + } +} + +// FailNowf fails test +func FailNowf(t TestingT, failureMessage string, msg string, args ...interface{}) { + if !assert.FailNowf(t, failureMessage, msg, args...) { + t.FailNow() + } +} + +// Failf reports a failure through +func Failf(t TestingT, failureMessage string, msg string, args ...interface{}) { + if !assert.Failf(t, failureMessage, msg, args...) { + t.FailNow() + } +} + +// False asserts that the specified value is false. +// +// assert.False(t, myBool) +func False(t TestingT, value bool, msgAndArgs ...interface{}) { + if !assert.False(t, value, msgAndArgs...) { + t.FailNow() + } +} + +// Falsef asserts that the specified value is false. 
+// +// assert.Falsef(t, myBool, "error message %s", "formatted") +func Falsef(t TestingT, value bool, msg string, args ...interface{}) { + if !assert.Falsef(t, value, msg, args...) { + t.FailNow() + } +} + +// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func FileExists(t TestingT, path string, msgAndArgs ...interface{}) { + if !assert.FileExists(t, path, msgAndArgs...) { + t.FailNow() + } +} + +// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func FileExistsf(t TestingT, path string, msg string, args ...interface{}) { + if !assert.FileExistsf(t, path, msg, args...) { + t.FailNow() + } +} + +// HTTPBodyContains asserts that a specified handler returns a +// body that contains a string. +// +// assert.HTTPBodyContains(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky") +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { + if !assert.HTTPBodyContains(t, handler, method, url, values, str, msgAndArgs...) { + t.FailNow() + } +} + +// HTTPBodyContainsf asserts that a specified handler returns a +// body that contains a string. +// +// assert.HTTPBodyContainsf(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { + if !assert.HTTPBodyContainsf(t, handler, method, url, values, str, msg, args...) 
{ + t.FailNow() + } +} + +// HTTPBodyNotContains asserts that a specified handler returns a +// body that does not contain a string. +// +// assert.HTTPBodyNotContains(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky") +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { + if !assert.HTTPBodyNotContains(t, handler, method, url, values, str, msgAndArgs...) { + t.FailNow() + } +} + +// HTTPBodyNotContainsf asserts that a specified handler returns a +// body that does not contain a string. +// +// assert.HTTPBodyNotContainsf(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { + if !assert.HTTPBodyNotContainsf(t, handler, method, url, values, str, msg, args...) { + t.FailNow() + } +} + +// HTTPError asserts that a specified handler returns an error status code. +// +// assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPError(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + if !assert.HTTPError(t, handler, method, url, values, msgAndArgs...) { + t.FailNow() + } +} + +// HTTPErrorf asserts that a specified handler returns an error status code. +// +// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). 
+func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + if !assert.HTTPErrorf(t, handler, method, url, values, msg, args...) { + t.FailNow() + } +} + +// HTTPRedirect asserts that a specified handler returns a redirect status code. +// +// assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPRedirect(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + if !assert.HTTPRedirect(t, handler, method, url, values, msgAndArgs...) { + t.FailNow() + } +} + +// HTTPRedirectf asserts that a specified handler returns a redirect status code. +// +// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). +func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + if !assert.HTTPRedirectf(t, handler, method, url, values, msg, args...) { + t.FailNow() + } +} + +// HTTPSuccess asserts that a specified handler returns a success status code. +// +// assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil) +// +// Returns whether the assertion was successful (true) or not (false). +func HTTPSuccess(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + if !assert.HTTPSuccess(t, handler, method, url, values, msgAndArgs...) { + t.FailNow() + } +} + +// HTTPSuccessf asserts that a specified handler returns a success status code. +// +// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). 
+func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + if !assert.HTTPSuccessf(t, handler, method, url, values, msg, args...) { + t.FailNow() + } +} + +// Implements asserts that an object is implemented by the specified interface. +// +// assert.Implements(t, (*MyInterface)(nil), new(MyObject)) +func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) { + if !assert.Implements(t, interfaceObject, object, msgAndArgs...) { + t.FailNow() + } +} + +// Implementsf asserts that an object is implemented by the specified interface. +// +// assert.Implementsf(t, (*MyInterface, "error message %s", "formatted")(nil), new(MyObject)) +func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) { + if !assert.Implementsf(t, interfaceObject, object, msg, args...) { + t.FailNow() + } +} + +// InDelta asserts that the two numerals are within delta of each other. +// +// assert.InDelta(t, math.Pi, (22 / 7.0), 0.01) +func InDelta(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { + if !assert.InDelta(t, expected, actual, delta, msgAndArgs...) { + t.FailNow() + } +} + +// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func InDeltaMapValues(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { + if !assert.InDeltaMapValues(t, expected, actual, delta, msgAndArgs...) { + t.FailNow() + } +} + +// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func InDeltaMapValuesf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + if !assert.InDeltaMapValuesf(t, expected, actual, delta, msg, args...) 
{ + t.FailNow() + } +} + +// InDeltaSlice is the same as InDelta, except it compares two slices. +func InDeltaSlice(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { + if !assert.InDeltaSlice(t, expected, actual, delta, msgAndArgs...) { + t.FailNow() + } +} + +// InDeltaSlicef is the same as InDelta, except it compares two slices. +func InDeltaSlicef(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + if !assert.InDeltaSlicef(t, expected, actual, delta, msg, args...) { + t.FailNow() + } +} + +// InDeltaf asserts that the two numerals are within delta of each other. +// +// assert.InDeltaf(t, math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01) +func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + if !assert.InDeltaf(t, expected, actual, delta, msg, args...) { + t.FailNow() + } +} + +// InEpsilon asserts that expected and actual have a relative error less than epsilon +func InEpsilon(t TestingT, expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) { + if !assert.InEpsilon(t, expected, actual, epsilon, msgAndArgs...) { + t.FailNow() + } +} + +// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices. +func InEpsilonSlice(t TestingT, expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) { + if !assert.InEpsilonSlice(t, expected, actual, epsilon, msgAndArgs...) { + t.FailNow() + } +} + +// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices. +func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) { + if !assert.InEpsilonSlicef(t, expected, actual, epsilon, msg, args...) 
{ + t.FailNow() + } +} + +// InEpsilonf asserts that expected and actual have a relative error less than epsilon +func InEpsilonf(t TestingT, expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) { + if !assert.InEpsilonf(t, expected, actual, epsilon, msg, args...) { + t.FailNow() + } +} + +// IsType asserts that the specified objects are of the same type. +func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) { + if !assert.IsType(t, expectedType, object, msgAndArgs...) { + t.FailNow() + } +} + +// IsTypef asserts that the specified objects are of the same type. +func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg string, args ...interface{}) { + if !assert.IsTypef(t, expectedType, object, msg, args...) { + t.FailNow() + } +} + +// JSONEq asserts that two JSON strings are equivalent. +// +// assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) +func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) { + if !assert.JSONEq(t, expected, actual, msgAndArgs...) { + t.FailNow() + } +} + +// JSONEqf asserts that two JSON strings are equivalent. +// +// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +func JSONEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) { + if !assert.JSONEqf(t, expected, actual, msg, args...) { + t.FailNow() + } +} + +// Len asserts that the specified object has specific length. +// Len also fails if the object has a type that len() not accept. +// +// assert.Len(t, mySlice, 3) +func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) { + if !assert.Len(t, object, length, msgAndArgs...) { + t.FailNow() + } +} + +// Lenf asserts that the specified object has specific length. +// Lenf also fails if the object has a type that len() not accept. 
+// +// assert.Lenf(t, mySlice, 3, "error message %s", "formatted") +func Lenf(t TestingT, object interface{}, length int, msg string, args ...interface{}) { + if !assert.Lenf(t, object, length, msg, args...) { + t.FailNow() + } +} + +// Nil asserts that the specified object is nil. +// +// assert.Nil(t, err) +func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) { + if !assert.Nil(t, object, msgAndArgs...) { + t.FailNow() + } +} + +// Nilf asserts that the specified object is nil. +// +// assert.Nilf(t, err, "error message %s", "formatted") +func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) { + if !assert.Nilf(t, object, msg, args...) { + t.FailNow() + } +} + +// NoError asserts that a function returned no error (i.e. `nil`). +// +// actualObj, err := SomeFunction() +// if assert.NoError(t, err) { +// assert.Equal(t, expectedObj, actualObj) +// } +func NoError(t TestingT, err error, msgAndArgs ...interface{}) { + if !assert.NoError(t, err, msgAndArgs...) { + t.FailNow() + } +} + +// NoErrorf asserts that a function returned no error (i.e. `nil`). +// +// actualObj, err := SomeFunction() +// if assert.NoErrorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } +func NoErrorf(t TestingT, err error, msg string, args ...interface{}) { + if !assert.NoErrorf(t, err, msg, args...) { + t.FailNow() + } +} + +// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the +// specified substring or element. +// +// assert.NotContains(t, "Hello World", "Earth") +// assert.NotContains(t, ["Hello", "World"], "Earth") +// assert.NotContains(t, {"Hello": "World"}, "Earth") +func NotContains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...interface{}) { + if !assert.NotContains(t, s, contains, msgAndArgs...) { + t.FailNow() + } +} + +// NotContainsf asserts that the specified string, list(array, slice...) 
or map does NOT contain the +// specified substring or element. +// +// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted") +func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) { + if !assert.NotContainsf(t, s, contains, msg, args...) { + t.FailNow() + } +} + +// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// if assert.NotEmpty(t, obj) { +// assert.Equal(t, "two", obj[1]) +// } +func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) { + if !assert.NotEmpty(t, object, msgAndArgs...) { + t.FailNow() + } +} + +// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// if assert.NotEmptyf(t, obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } +func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) { + if !assert.NotEmptyf(t, object, msg, args...) { + t.FailNow() + } +} + +// NotEqual asserts that the specified values are NOT equal. +// +// assert.NotEqual(t, obj1, obj2) +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). +func NotEqual(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + if !assert.NotEqual(t, expected, actual, msgAndArgs...) { + t.FailNow() + } +} + +// NotEqualf asserts that the specified values are NOT equal. +// +// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). 
+func NotEqualf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { + if !assert.NotEqualf(t, expected, actual, msg, args...) { + t.FailNow() + } +} + +// NotNil asserts that the specified object is not nil. +// +// assert.NotNil(t, err) +func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) { + if !assert.NotNil(t, object, msgAndArgs...) { + t.FailNow() + } +} + +// NotNilf asserts that the specified object is not nil. +// +// assert.NotNilf(t, err, "error message %s", "formatted") +func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) { + if !assert.NotNilf(t, object, msg, args...) { + t.FailNow() + } +} + +// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. +// +// assert.NotPanics(t, func(){ RemainCalm() }) +func NotPanics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { + if !assert.NotPanics(t, f, msgAndArgs...) { + t.FailNow() + } +} + +// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. +// +// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted") +func NotPanicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{}) { + if !assert.NotPanicsf(t, f, msg, args...) { + t.FailNow() + } +} + +// NotRegexp asserts that a specified regexp does not match a string. +// +// assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") +// assert.NotRegexp(t, "^start", "it's not starting") +func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) { + if !assert.NotRegexp(t, rx, str, msgAndArgs...) { + t.FailNow() + } +} + +// NotRegexpf asserts that a specified regexp does not match a string. 
+// +// assert.NotRegexpf(t, regexp.MustCompile("starts", "error message %s", "formatted"), "it's starting") +// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted") +func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) { + if !assert.NotRegexpf(t, rx, str, msg, args...) { + t.FailNow() + } +} + +// NotSubset asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) { + if !assert.NotSubset(t, list, subset, msgAndArgs...) { + t.FailNow() + } +} + +// NotSubsetf asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) { + if !assert.NotSubsetf(t, list, subset, msg, args...) { + t.FailNow() + } +} + +// NotZero asserts that i is not the zero value for its type. +func NotZero(t TestingT, i interface{}, msgAndArgs ...interface{}) { + if !assert.NotZero(t, i, msgAndArgs...) { + t.FailNow() + } +} + +// NotZerof asserts that i is not the zero value for its type. +func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) { + if !assert.NotZerof(t, i, msg, args...) { + t.FailNow() + } +} + +// Panics asserts that the code inside the specified PanicTestFunc panics. +// +// assert.Panics(t, func(){ GoCrazy() }) +func Panics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { + if !assert.Panics(t, f, msgAndArgs...) 
{ + t.FailNow() + } +} + +// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. +// +// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() }) +func PanicsWithValue(t TestingT, expected interface{}, f assert.PanicTestFunc, msgAndArgs ...interface{}) { + if !assert.PanicsWithValue(t, expected, f, msgAndArgs...) { + t.FailNow() + } +} + +// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. +// +// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +func PanicsWithValuef(t TestingT, expected interface{}, f assert.PanicTestFunc, msg string, args ...interface{}) { + if !assert.PanicsWithValuef(t, expected, f, msg, args...) { + t.FailNow() + } +} + +// Panicsf asserts that the code inside the specified PanicTestFunc panics. +// +// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted") +func Panicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{}) { + if !assert.Panicsf(t, f, msg, args...) { + t.FailNow() + } +} + +// Regexp asserts that a specified regexp matches a string. +// +// assert.Regexp(t, regexp.MustCompile("start"), "it's starting") +// assert.Regexp(t, "start...$", "it's not starting") +func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) { + if !assert.Regexp(t, rx, str, msgAndArgs...) { + t.FailNow() + } +} + +// Regexpf asserts that a specified regexp matches a string. +// +// assert.Regexpf(t, regexp.MustCompile("start", "error message %s", "formatted"), "it's starting") +// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted") +func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) { + if !assert.Regexpf(t, rx, str, msg, args...) 
{ + t.FailNow() + } +} + +// Subset asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) { + if !assert.Subset(t, list, subset, msgAndArgs...) { + t.FailNow() + } +} + +// Subsetf asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) { + if !assert.Subsetf(t, list, subset, msg, args...) { + t.FailNow() + } +} + +// True asserts that the specified value is true. +// +// assert.True(t, myBool) +func True(t TestingT, value bool, msgAndArgs ...interface{}) { + if !assert.True(t, value, msgAndArgs...) { + t.FailNow() + } +} + +// Truef asserts that the specified value is true. +// +// assert.Truef(t, myBool, "error message %s", "formatted") +func Truef(t TestingT, value bool, msg string, args ...interface{}) { + if !assert.Truef(t, value, msg, args...) { + t.FailNow() + } +} + +// WithinDuration asserts that the two times are within duration delta of each other. +// +// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second) +func WithinDuration(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) { + if !assert.WithinDuration(t, expected, actual, delta, msgAndArgs...) { + t.FailNow() + } +} + +// WithinDurationf asserts that the two times are within duration delta of each other. 
+// +// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") +func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) { + if !assert.WithinDurationf(t, expected, actual, delta, msg, args...) { + t.FailNow() + } +} + +// Zero asserts that i is the zero value for its type. +func Zero(t TestingT, i interface{}, msgAndArgs ...interface{}) { + if !assert.Zero(t, i, msgAndArgs...) { + t.FailNow() + } +} + +// Zerof asserts that i is the zero value for its type. +func Zerof(t TestingT, i interface{}, msg string, args ...interface{}) { + if !assert.Zerof(t, i, msg, args...) { + t.FailNow() + } +} diff --git a/vendor/github.com/stretchr/testify/require/require.go.tmpl b/vendor/github.com/stretchr/testify/require/require.go.tmpl new file mode 100644 index 000000000000..d2c38f6f2864 --- /dev/null +++ b/vendor/github.com/stretchr/testify/require/require.go.tmpl @@ -0,0 +1,6 @@ +{{.Comment}} +func {{.DocInfo.Name}}(t TestingT, {{.Params}}) { + if !assert.{{.DocInfo.Name}}(t, {{.ForwardedParams}}) { + t.FailNow() + } +} diff --git a/vendor/github.com/stretchr/testify/require/require_forward.go b/vendor/github.com/stretchr/testify/require/require_forward.go new file mode 100644 index 000000000000..299ceb95ad74 --- /dev/null +++ b/vendor/github.com/stretchr/testify/require/require_forward.go @@ -0,0 +1,687 @@ +/* +* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen +* THIS FILE MUST NOT BE EDITED BY HAND + */ + +package require + +import ( + assert "github.com/stretchr/testify/assert" + http "net/http" + url "net/url" + time "time" +) + +// Condition uses a Comparison to assert a complex condition. +func (a *Assertions) Condition(comp assert.Comparison, msgAndArgs ...interface{}) { + Condition(a.t, comp, msgAndArgs...) +} + +// Conditionf uses a Comparison to assert a complex condition. 
+func (a *Assertions) Conditionf(comp assert.Comparison, msg string, args ...interface{}) { + Conditionf(a.t, comp, msg, args...) +} + +// Contains asserts that the specified string, list(array, slice...) or map contains the +// specified substring or element. +// +// a.Contains("Hello World", "World") +// a.Contains(["Hello", "World"], "World") +// a.Contains({"Hello": "World"}, "Hello") +func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) { + Contains(a.t, s, contains, msgAndArgs...) +} + +// Containsf asserts that the specified string, list(array, slice...) or map contains the +// specified substring or element. +// +// a.Containsf("Hello World", "World", "error message %s", "formatted") +// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted") +// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted") +func (a *Assertions) Containsf(s interface{}, contains interface{}, msg string, args ...interface{}) { + Containsf(a.t, s, contains, msg, args...) +} + +// DirExists checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func (a *Assertions) DirExists(path string, msgAndArgs ...interface{}) { + DirExists(a.t, path, msgAndArgs...) +} + +// DirExistsf checks whether a directory exists in the given path. It also fails if the path is a file rather a directory or there is an error checking whether it exists. +func (a *Assertions) DirExistsf(path string, msg string, args ...interface{}) { + DirExistsf(a.t, path, msg, args...) +} + +// ElementsMatch asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. 
+// +// a.ElementsMatch([1, 3, 2, 3], [1, 3, 3, 2]) +func (a *Assertions) ElementsMatch(listA interface{}, listB interface{}, msgAndArgs ...interface{}) { + ElementsMatch(a.t, listA, listB, msgAndArgs...) +} + +// ElementsMatchf asserts that the specified listA(array, slice...) is equal to specified +// listB(array, slice...) ignoring the order of the elements. If there are duplicate elements, +// the number of appearances of each of them in both lists should match. +// +// a.ElementsMatchf([1, 3, 2, 3], [1, 3, 3, 2], "error message %s", "formatted") +func (a *Assertions) ElementsMatchf(listA interface{}, listB interface{}, msg string, args ...interface{}) { + ElementsMatchf(a.t, listA, listB, msg, args...) +} + +// Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// a.Empty(obj) +func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) { + Empty(a.t, object, msgAndArgs...) +} + +// Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// a.Emptyf(obj, "error message %s", "formatted") +func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) { + Emptyf(a.t, object, msg, args...) +} + +// Equal asserts that two objects are equal. +// +// a.Equal(123, 123) +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. +func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + Equal(a.t, expected, actual, msgAndArgs...) +} + +// EqualError asserts that a function returned an error (i.e. not `nil`) +// and that it is equal to the provided error. 
+// +// actualObj, err := SomeFunction() +// a.EqualError(err, expectedErrorString) +func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) { + EqualError(a.t, theError, errString, msgAndArgs...) +} + +// EqualErrorf asserts that a function returned an error (i.e. not `nil`) +// and that it is equal to the provided error. +// +// actualObj, err := SomeFunction() +// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted") +func (a *Assertions) EqualErrorf(theError error, errString string, msg string, args ...interface{}) { + EqualErrorf(a.t, theError, errString, msg, args...) +} + +// EqualValues asserts that two objects are equal or convertable to the same types +// and equal. +// +// a.EqualValues(uint32(123), int32(123)) +func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + EqualValues(a.t, expected, actual, msgAndArgs...) +} + +// EqualValuesf asserts that two objects are equal or convertable to the same types +// and equal. +// +// a.EqualValuesf(uint32(123, "error message %s", "formatted"), int32(123)) +func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) { + EqualValuesf(a.t, expected, actual, msg, args...) +} + +// Equalf asserts that two objects are equal. +// +// a.Equalf(123, 123, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). Function equality +// cannot be determined and will always fail. +func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string, args ...interface{}) { + Equalf(a.t, expected, actual, msg, args...) +} + +// Error asserts that a function returned an error (i.e. not `nil`). 
+// +// actualObj, err := SomeFunction() +// if a.Error(err) { +// assert.Equal(t, expectedError, err) +// } +func (a *Assertions) Error(err error, msgAndArgs ...interface{}) { + Error(a.t, err, msgAndArgs...) +} + +// Errorf asserts that a function returned an error (i.e. not `nil`). +// +// actualObj, err := SomeFunction() +// if a.Errorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } +func (a *Assertions) Errorf(err error, msg string, args ...interface{}) { + Errorf(a.t, err, msg, args...) +} + +// Exactly asserts that two objects are equal in value and type. +// +// a.Exactly(int32(123), int64(123)) +func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + Exactly(a.t, expected, actual, msgAndArgs...) +} + +// Exactlyf asserts that two objects are equal in value and type. +// +// a.Exactlyf(int32(123, "error message %s", "formatted"), int64(123)) +func (a *Assertions) Exactlyf(expected interface{}, actual interface{}, msg string, args ...interface{}) { + Exactlyf(a.t, expected, actual, msg, args...) +} + +// Fail reports a failure through +func (a *Assertions) Fail(failureMessage string, msgAndArgs ...interface{}) { + Fail(a.t, failureMessage, msgAndArgs...) +} + +// FailNow fails test +func (a *Assertions) FailNow(failureMessage string, msgAndArgs ...interface{}) { + FailNow(a.t, failureMessage, msgAndArgs...) +} + +// FailNowf fails test +func (a *Assertions) FailNowf(failureMessage string, msg string, args ...interface{}) { + FailNowf(a.t, failureMessage, msg, args...) +} + +// Failf reports a failure through +func (a *Assertions) Failf(failureMessage string, msg string, args ...interface{}) { + Failf(a.t, failureMessage, msg, args...) +} + +// False asserts that the specified value is false. +// +// a.False(myBool) +func (a *Assertions) False(value bool, msgAndArgs ...interface{}) { + False(a.t, value, msgAndArgs...) 
+} + +// Falsef asserts that the specified value is false. +// +// a.Falsef(myBool, "error message %s", "formatted") +func (a *Assertions) Falsef(value bool, msg string, args ...interface{}) { + Falsef(a.t, value, msg, args...) +} + +// FileExists checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func (a *Assertions) FileExists(path string, msgAndArgs ...interface{}) { + FileExists(a.t, path, msgAndArgs...) +} + +// FileExistsf checks whether a file exists in the given path. It also fails if the path points to a directory or there is an error when trying to check the file. +func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) { + FileExistsf(a.t, path, msg, args...) +} + +// HTTPBodyContains asserts that a specified handler returns a +// body that contains a string. +// +// a.HTTPBodyContains(myHandler, "www.google.com", nil, "I'm Feeling Lucky") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { + HTTPBodyContains(a.t, handler, method, url, values, str, msgAndArgs...) +} + +// HTTPBodyContainsf asserts that a specified handler returns a +// body that contains a string. +// +// a.HTTPBodyContainsf(myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { + HTTPBodyContainsf(a.t, handler, method, url, values, str, msg, args...) +} + +// HTTPBodyNotContains asserts that a specified handler returns a +// body that does not contain a string. 
+// +// a.HTTPBodyNotContains(myHandler, "www.google.com", nil, "I'm Feeling Lucky") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { + HTTPBodyNotContains(a.t, handler, method, url, values, str, msgAndArgs...) +} + +// HTTPBodyNotContainsf asserts that a specified handler returns a +// body that does not contain a string. +// +// a.HTTPBodyNotContainsf(myHandler, "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { + HTTPBodyNotContainsf(a.t, handler, method, url, values, str, msg, args...) +} + +// HTTPError asserts that a specified handler returns an error status code. +// +// a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + HTTPError(a.t, handler, method, url, values, msgAndArgs...) +} + +// HTTPErrorf asserts that a specified handler returns an error status code. +// +// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). +func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + HTTPErrorf(a.t, handler, method, url, values, msg, args...) +} + +// HTTPRedirect asserts that a specified handler returns a redirect status code. 
+// +// a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + HTTPRedirect(a.t, handler, method, url, values, msgAndArgs...) +} + +// HTTPRedirectf asserts that a specified handler returns a redirect status code. +// +// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// +// Returns whether the assertion was successful (true, "error message %s", "formatted") or not (false). +func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + HTTPRedirectf(a.t, handler, method, url, values, msg, args...) +} + +// HTTPSuccess asserts that a specified handler returns a success status code. +// +// a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil) +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { + HTTPSuccess(a.t, handler, method, url, values, msgAndArgs...) +} + +// HTTPSuccessf asserts that a specified handler returns a success status code. +// +// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// +// Returns whether the assertion was successful (true) or not (false). +func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { + HTTPSuccessf(a.t, handler, method, url, values, msg, args...) +} + +// Implements asserts that an object is implemented by the specified interface. 
+// +// a.Implements((*MyInterface)(nil), new(MyObject)) +func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) { + Implements(a.t, interfaceObject, object, msgAndArgs...) +} + +// Implementsf asserts that an object is implemented by the specified interface. +// +// a.Implementsf((*MyInterface, "error message %s", "formatted")(nil), new(MyObject)) +func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) { + Implementsf(a.t, interfaceObject, object, msg, args...) +} + +// InDelta asserts that the two numerals are within delta of each other. +// +// a.InDelta(math.Pi, (22 / 7.0), 0.01) +func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { + InDelta(a.t, expected, actual, delta, msgAndArgs...) +} + +// InDeltaMapValues is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func (a *Assertions) InDeltaMapValues(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { + InDeltaMapValues(a.t, expected, actual, delta, msgAndArgs...) +} + +// InDeltaMapValuesf is the same as InDelta, but it compares all values between two maps. Both maps must have exactly the same keys. +func (a *Assertions) InDeltaMapValuesf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + InDeltaMapValuesf(a.t, expected, actual, delta, msg, args...) +} + +// InDeltaSlice is the same as InDelta, except it compares two slices. +func (a *Assertions) InDeltaSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { + InDeltaSlice(a.t, expected, actual, delta, msgAndArgs...) +} + +// InDeltaSlicef is the same as InDelta, except it compares two slices. 
+func (a *Assertions) InDeltaSlicef(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + InDeltaSlicef(a.t, expected, actual, delta, msg, args...) +} + +// InDeltaf asserts that the two numerals are within delta of each other. +// +// a.InDeltaf(math.Pi, (22 / 7.0, "error message %s", "formatted"), 0.01) +func (a *Assertions) InDeltaf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { + InDeltaf(a.t, expected, actual, delta, msg, args...) +} + +// InEpsilon asserts that expected and actual have a relative error less than epsilon +func (a *Assertions) InEpsilon(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) { + InEpsilon(a.t, expected, actual, epsilon, msgAndArgs...) +} + +// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices. +func (a *Assertions) InEpsilonSlice(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) { + InEpsilonSlice(a.t, expected, actual, epsilon, msgAndArgs...) +} + +// InEpsilonSlicef is the same as InEpsilon, except it compares each value from two slices. +func (a *Assertions) InEpsilonSlicef(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) { + InEpsilonSlicef(a.t, expected, actual, epsilon, msg, args...) +} + +// InEpsilonf asserts that expected and actual have a relative error less than epsilon +func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilon float64, msg string, args ...interface{}) { + InEpsilonf(a.t, expected, actual, epsilon, msg, args...) +} + +// IsType asserts that the specified objects are of the same type. +func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) { + IsType(a.t, expectedType, object, msgAndArgs...) +} + +// IsTypef asserts that the specified objects are of the same type. 
+func (a *Assertions) IsTypef(expectedType interface{}, object interface{}, msg string, args ...interface{}) { + IsTypef(a.t, expectedType, object, msg, args...) +} + +// JSONEq asserts that two JSON strings are equivalent. +// +// a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) +func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) { + JSONEq(a.t, expected, actual, msgAndArgs...) +} + +// JSONEqf asserts that two JSON strings are equivalent. +// +// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +func (a *Assertions) JSONEqf(expected string, actual string, msg string, args ...interface{}) { + JSONEqf(a.t, expected, actual, msg, args...) +} + +// Len asserts that the specified object has specific length. +// Len also fails if the object has a type that len() not accept. +// +// a.Len(mySlice, 3) +func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) { + Len(a.t, object, length, msgAndArgs...) +} + +// Lenf asserts that the specified object has specific length. +// Lenf also fails if the object has a type that len() not accept. +// +// a.Lenf(mySlice, 3, "error message %s", "formatted") +func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...interface{}) { + Lenf(a.t, object, length, msg, args...) +} + +// Nil asserts that the specified object is nil. +// +// a.Nil(err) +func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) { + Nil(a.t, object, msgAndArgs...) +} + +// Nilf asserts that the specified object is nil. +// +// a.Nilf(err, "error message %s", "formatted") +func (a *Assertions) Nilf(object interface{}, msg string, args ...interface{}) { + Nilf(a.t, object, msg, args...) +} + +// NoError asserts that a function returned no error (i.e. `nil`). 
+// +// actualObj, err := SomeFunction() +// if a.NoError(err) { +// assert.Equal(t, expectedObj, actualObj) +// } +func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) { + NoError(a.t, err, msgAndArgs...) +} + +// NoErrorf asserts that a function returned no error (i.e. `nil`). +// +// actualObj, err := SomeFunction() +// if a.NoErrorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } +func (a *Assertions) NoErrorf(err error, msg string, args ...interface{}) { + NoErrorf(a.t, err, msg, args...) +} + +// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the +// specified substring or element. +// +// a.NotContains("Hello World", "Earth") +// a.NotContains(["Hello", "World"], "Earth") +// a.NotContains({"Hello": "World"}, "Earth") +func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) { + NotContains(a.t, s, contains, msgAndArgs...) +} + +// NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the +// specified substring or element. +// +// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted") +// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted") +// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted") +func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg string, args ...interface{}) { + NotContainsf(a.t, s, contains, msg, args...) +} + +// NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. +// +// if a.NotEmpty(obj) { +// assert.Equal(t, "two", obj[1]) +// } +func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) { + NotEmpty(a.t, object, msgAndArgs...) +} + +// NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either +// a slice or a channel with len == 0. 
+// +// if a.NotEmptyf(obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } +func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface{}) { + NotEmptyf(a.t, object, msg, args...) +} + +// NotEqual asserts that the specified values are NOT equal. +// +// a.NotEqual(obj1, obj2) +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). +func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + NotEqual(a.t, expected, actual, msgAndArgs...) +} + +// NotEqualf asserts that the specified values are NOT equal. +// +// a.NotEqualf(obj1, obj2, "error message %s", "formatted") +// +// Pointer variable equality is determined based on the equality of the +// referenced values (as opposed to the memory addresses). +func (a *Assertions) NotEqualf(expected interface{}, actual interface{}, msg string, args ...interface{}) { + NotEqualf(a.t, expected, actual, msg, args...) +} + +// NotNil asserts that the specified object is not nil. +// +// a.NotNil(err) +func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) { + NotNil(a.t, object, msgAndArgs...) +} + +// NotNilf asserts that the specified object is not nil. +// +// a.NotNilf(err, "error message %s", "formatted") +func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{}) { + NotNilf(a.t, object, msg, args...) +} + +// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. +// +// a.NotPanics(func(){ RemainCalm() }) +func (a *Assertions) NotPanics(f assert.PanicTestFunc, msgAndArgs ...interface{}) { + NotPanics(a.t, f, msgAndArgs...) +} + +// NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. 
+// +// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted") +func (a *Assertions) NotPanicsf(f assert.PanicTestFunc, msg string, args ...interface{}) { + NotPanicsf(a.t, f, msg, args...) +} + +// NotRegexp asserts that a specified regexp does not match a string. +// +// a.NotRegexp(regexp.MustCompile("starts"), "it's starting") +// a.NotRegexp("^start", "it's not starting") +func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) { + NotRegexp(a.t, rx, str, msgAndArgs...) +} + +// NotRegexpf asserts that a specified regexp does not match a string. +// +// a.NotRegexpf(regexp.MustCompile("starts", "error message %s", "formatted"), "it's starting") +// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted") +func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, args ...interface{}) { + NotRegexpf(a.t, rx, str, msg, args...) +} + +// NotSubset asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) { + NotSubset(a.t, list, subset, msgAndArgs...) +} + +// NotSubsetf asserts that the specified list(array, slice...) contains not all +// elements given in the specified subset(array, slice...). +// +// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) { + NotSubsetf(a.t, list, subset, msg, args...) +} + +// NotZero asserts that i is not the zero value for its type. +func (a *Assertions) NotZero(i interface{}, msgAndArgs ...interface{}) { + NotZero(a.t, i, msgAndArgs...) +} + +// NotZerof asserts that i is not the zero value for its type. 
+func (a *Assertions) NotZerof(i interface{}, msg string, args ...interface{}) { + NotZerof(a.t, i, msg, args...) +} + +// Panics asserts that the code inside the specified PanicTestFunc panics. +// +// a.Panics(func(){ GoCrazy() }) +func (a *Assertions) Panics(f assert.PanicTestFunc, msgAndArgs ...interface{}) { + Panics(a.t, f, msgAndArgs...) +} + +// PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. +// +// a.PanicsWithValue("crazy error", func(){ GoCrazy() }) +func (a *Assertions) PanicsWithValue(expected interface{}, f assert.PanicTestFunc, msgAndArgs ...interface{}) { + PanicsWithValue(a.t, expected, f, msgAndArgs...) +} + +// PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that +// the recovered panic value equals the expected panic value. +// +// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +func (a *Assertions) PanicsWithValuef(expected interface{}, f assert.PanicTestFunc, msg string, args ...interface{}) { + PanicsWithValuef(a.t, expected, f, msg, args...) +} + +// Panicsf asserts that the code inside the specified PanicTestFunc panics. +// +// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted") +func (a *Assertions) Panicsf(f assert.PanicTestFunc, msg string, args ...interface{}) { + Panicsf(a.t, f, msg, args...) +} + +// Regexp asserts that a specified regexp matches a string. +// +// a.Regexp(regexp.MustCompile("start"), "it's starting") +// a.Regexp("start...$", "it's not starting") +func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) { + Regexp(a.t, rx, str, msgAndArgs...) +} + +// Regexpf asserts that a specified regexp matches a string. 
+// +// a.Regexpf(regexp.MustCompile("start", "error message %s", "formatted"), "it's starting") +// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted") +func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args ...interface{}) { + Regexpf(a.t, rx, str, msg, args...) +} + +// Subset asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) { + Subset(a.t, list, subset, msgAndArgs...) +} + +// Subsetf asserts that the specified list(array, slice...) contains all +// elements given in the specified subset(array, slice...). +// +// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) { + Subsetf(a.t, list, subset, msg, args...) +} + +// True asserts that the specified value is true. +// +// a.True(myBool) +func (a *Assertions) True(value bool, msgAndArgs ...interface{}) { + True(a.t, value, msgAndArgs...) +} + +// Truef asserts that the specified value is true. +// +// a.Truef(myBool, "error message %s", "formatted") +func (a *Assertions) Truef(value bool, msg string, args ...interface{}) { + Truef(a.t, value, msg, args...) +} + +// WithinDuration asserts that the two times are within duration delta of each other. +// +// a.WithinDuration(time.Now(), time.Now(), 10*time.Second) +func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) { + WithinDuration(a.t, expected, actual, delta, msgAndArgs...) +} + +// WithinDurationf asserts that the two times are within duration delta of each other. 
+// +// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") +func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) { + WithinDurationf(a.t, expected, actual, delta, msg, args...) +} + +// Zero asserts that i is the zero value for its type. +func (a *Assertions) Zero(i interface{}, msgAndArgs ...interface{}) { + Zero(a.t, i, msgAndArgs...) +} + +// Zerof asserts that i is the zero value for its type. +func (a *Assertions) Zerof(i interface{}, msg string, args ...interface{}) { + Zerof(a.t, i, msg, args...) +} diff --git a/vendor/github.com/stretchr/testify/require/require_forward.go.tmpl b/vendor/github.com/stretchr/testify/require/require_forward.go.tmpl new file mode 100644 index 000000000000..b93569e0a971 --- /dev/null +++ b/vendor/github.com/stretchr/testify/require/require_forward.go.tmpl @@ -0,0 +1,4 @@ +{{.CommentWithoutT "a"}} +func (a *Assertions) {{.DocInfo.Name}}({{.Params}}) { + {{.DocInfo.Name}}(a.t, {{.ForwardedParams}}) +} diff --git a/vendor/github.com/stretchr/testify/require/requirements.go b/vendor/github.com/stretchr/testify/require/requirements.go new file mode 100644 index 000000000000..e404f016d182 --- /dev/null +++ b/vendor/github.com/stretchr/testify/require/requirements.go @@ -0,0 +1,9 @@ +package require + +// TestingT is an interface wrapper around *testing.T +type TestingT interface { + Errorf(format string, args ...interface{}) + FailNow() +} + +//go:generate go run ../_codegen/main.go -output-package=require -template=require.go.tmpl -include-format-funcs diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go index 6a9821ae4bc8..9f6504914043 100644 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go @@ -209,7 +209,7 @@ func (p *exporter) obj(obj types.Object) { 
p.value(obj.Val()) case *types.TypeName: - if isAlias(obj) { + if obj.IsAlias() { p.tag(aliasTag) p.pos(obj) p.qualifiedName(obj) diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go index 47dd46136296..9cf186605f6e 100644 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go @@ -128,42 +128,69 @@ func ImportData(packages map[string]*types.Package, filename, id string, data io // the corresponding package object to the packages map, and returns the object. // The packages map must contain all packages already imported. // -func Import(packages map[string]*types.Package, path, srcDir string) (pkg *types.Package, err error) { - filename, id := FindPkg(path, srcDir) - if filename == "" { +func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) { + var rc io.ReadCloser + var filename, id string + if lookup != nil { + // With custom lookup specified, assume that caller has + // converted path to a canonical import path for use in the map. if path == "unsafe" { return types.Unsafe, nil } - err = fmt.Errorf("can't find import: %q", id) - return - } + id = path - // no need to re-import if the package was imported completely before - if pkg = packages[id]; pkg != nil && pkg.Complete() { - return - } + // No need to re-import if the package was imported completely before. 
+ if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + f, err := lookup(path) + if err != nil { + return nil, err + } + rc = f + } else { + filename, id = FindPkg(path, srcDir) + if filename == "" { + if path == "unsafe" { + return types.Unsafe, nil + } + return nil, fmt.Errorf("can't find import: %q", id) + } - // open file - f, err := os.Open(filename) - if err != nil { - return - } - defer func() { - f.Close() + // no need to re-import if the package was imported completely before + if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + + // open file + f, err := os.Open(filename) if err != nil { - // add file name to error - err = fmt.Errorf("reading export data: %s: %v", filename, err) + return nil, err } - }() + defer func() { + if err != nil { + // add file name to error + err = fmt.Errorf("%s: %v", filename, err) + } + }() + rc = f + } + defer rc.Close() var hdr string - buf := bufio.NewReader(f) + buf := bufio.NewReader(rc) if hdr, err = FindExportData(buf); err != nil { return } switch hdr { case "$$\n": + // Work-around if we don't have a filename; happens only if lookup != nil. + // Either way, the filename is only needed for importer error messages, so + // this is fine. + if filename == "" { + filename = path + } return ImportData(packages, filename, id, buf) case "$$B\n": diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias18.go b/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias18.go deleted file mode 100644 index 225ffeedfa4a..000000000000 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias18.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// +build !go1.9 - -package gcimporter - -import "go/types" - -func isAlias(obj *types.TypeName) bool { - return false // there are no type aliases before Go 1.9 -} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias19.go b/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias19.go deleted file mode 100644 index c2025d84a952..000000000000 --- a/vendor/golang.org/x/tools/go/internal/gcimporter/isAlias19.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.9 - -package gcimporter - -import "go/types" - -func isAlias(obj *types.TypeName) bool { - return obj.IsAlias() -} diff --git a/vendor/golang.org/x/tools/go/packages/doc.go b/vendor/golang.org/x/tools/go/packages/doc.go new file mode 100644 index 000000000000..f9dd1b023791 --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/doc.go @@ -0,0 +1,241 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package packages loads Go packages for inspection and analysis. + +Note: Though this package is ready for widespread use, we may make minor +breaking changes if absolutely necessary. Any such change will be +announced on golang-tools@ at least one week before it is committed. No +more breaking changes will be made after December 1, 2018. + +The Load function takes as input a list of patterns and return a list of Package +structs describing individual packages matched by those patterns. +The LoadMode controls the amount of detail in the loaded packages. + +Load passes most patterns directly to the underlying build tool, +but all patterns with the prefix "query=", where query is a +non-empty string of letters from [a-z], are reserved and may be +interpreted as query operators. 
+ +Only two query operators are currently supported, "file" and "pattern". + +The query "file=path/to/file.go" matches the package or packages enclosing +the Go source file path/to/file.go. For example "file=~/go/src/fmt/print.go" +might returns the packages "fmt" and "fmt [fmt.test]". + +The query "pattern=string" causes "string" to be passed directly to +the underlying build tool. In most cases this is unnecessary, +but an application can use Load("pattern=" + x) as an escaping mechanism +to ensure that x is not interpreted as a query operator if it contains '='. + +A third query "name=identifier" will be added soon. +It will match packages whose package declaration contains the specified identifier. +For example, "name=rand" would match the packages "math/rand" and "crypto/rand", +and "name=main" would match all executables. + +All other query operators are reserved for future use and currently +cause Load to report an error. + +The Package struct provides basic information about the package, including + + - ID, a unique identifier for the package in the returned set; + - GoFiles, the names of the package's Go source files; + - Imports, a map from source import strings to the Packages they name; + - Types, the type information for the package's exported symbols; + - Syntax, the parsed syntax trees for the package's source code; and + - TypeInfo, the result of a complete type-check of the package syntax trees. + +(See the documentation for type Package for the complete list of fields +and more detailed descriptions.) + +For example, + + Load(nil, "bytes", "unicode...") + +returns four Package structs describing the standard library packages +bytes, unicode, unicode/utf16, and unicode/utf8. Note that one pattern +can match multiple packages and that a package might be matched by +multiple patterns: in general it is not possible to determine which +packages correspond to which patterns. 
+ +Note that the list returned by Load contains only the packages matched +by the patterns. Their dependencies can be found by walking the import +graph using the Imports fields. + +The Load function can be configured by passing a pointer to a Config as +the first argument. A nil Config is equivalent to the zero Config, which +causes Load to run in LoadFiles mode, collecting minimal information. +See the documentation for type Config for details. + +As noted earlier, the Config.Mode controls the amount of detail +reported about the loaded packages, with each mode returning all the data of the +previous mode with some extra added. See the documentation for type LoadMode +for details. + +Most tools should pass their command-line arguments (after any flags) +uninterpreted to the loader, so that the loader can interpret them +according to the conventions of the underlying build system. +See the Example function for typical usage. + +*/ +package packages // import "golang.org/x/tools/go/packages" + +/* + +Motivation and design considerations + +The new package's design solves problems addressed by two existing +packages: go/build, which locates and describes packages, and +golang.org/x/tools/go/loader, which loads, parses and type-checks them. +The go/build.Package structure encodes too much of the 'go build' way +of organizing projects, leaving us in need of a data type that describes a +package of Go source code independent of the underlying build system. +We wanted something that works equally well with go build and vgo, and +also other build systems such as Bazel and Blaze, making it possible to +construct analysis tools that work in all these environments. +Tools such as errcheck and staticcheck were essentially unavailable to +the Go community at Google, and some of Google's internal tools for Go +are unavailable externally. 
+This new package provides a uniform way to obtain package metadata by +querying each of these build systems, optionally supporting their +preferred command-line notations for packages, so that tools integrate +neatly with users' build environments. The Metadata query function +executes an external query tool appropriate to the current workspace. + +Loading packages always returns the complete import graph "all the way down", +even if all you want is information about a single package, because the query +mechanisms of all the build systems we currently support ({go,vgo} list, and +blaze/bazel aspect-based query) cannot provide detailed information +about one package without visiting all its dependencies too, so there is +no additional asymptotic cost to providing transitive information. +(This property might not be true of a hypothetical 5th build system.) + +In calls to TypeCheck, all initial packages, and any package that +transitively depends on one of them, must be loaded from source. +Consider A->B->C->D->E: if A,C are initial, A,B,C must be loaded from +source; D may be loaded from export data, and E may not be loaded at all +(though it's possible that D's export data mentions it, so a +types.Package may be created for it and exposed.) + +The old loader had a feature to suppress type-checking of function +bodies on a per-package basis, primarily intended to reduce the work of +obtaining type information for imported packages. Now that imports are +satisfied by export data, the optimization no longer seems necessary. + +Despite some early attempts, the old loader did not exploit export data, +instead always using the equivalent of WholeProgram mode. This was due +to the complexity of mixing source and export data packages (now +resolved by the upward traversal mentioned above), and because export data +files were nearly always missing or stale. 
Now that 'go build' supports +caching, all the underlying build systems can guarantee to produce +export data in a reasonable (amortized) time. + +Test "main" packages synthesized by the build system are now reported as +first-class packages, avoiding the need for clients (such as go/ssa) to +reinvent this generation logic. + +One way in which go/packages is simpler than the old loader is in its +treatment of in-package tests. In-package tests are packages that +consist of all the files of the library under test, plus the test files. +The old loader constructed in-package tests by a two-phase process of +mutation called "augmentation": first it would construct and type check +all the ordinary library packages and type-check the packages that +depend on them; then it would add more (test) files to the package and +type-check again. This two-phase approach had four major problems: +1) in processing the tests, the loader modified the library package, + leaving no way for a client application to see both the test + package and the library package; one would mutate into the other. +2) because test files can declare additional methods on types defined in + the library portion of the package, the dispatch of method calls in + the library portion was affected by the presence of the test files. + This should have been a clue that the packages were logically + different. +3) this model of "augmentation" assumed at most one in-package test + per library package, which is true of projects using 'go build', + but not other build systems. +4) because of the two-phase nature of test processing, all packages that + import the library package had to be processed before augmentation, + forcing a "one-shot" API and preventing the client from calling Load + in several times in sequence as is now possible in WholeProgram mode. + (TypeCheck mode has a similar one-shot restriction for a different reason.) + +Early drafts of this package supported "multi-shot" operation. 
+Although it allowed clients to make a sequence of calls (or concurrent +calls) to Load, building up the graph of Packages incrementally, +it was of marginal value: it complicated the API +(since it allowed some options to vary across calls but not others), +it complicated the implementation, +it cannot be made to work in Types mode, as explained above, +and it was less efficient than making one combined call (when this is possible). +Among the clients we have inspected, none made multiple calls to load +but could not be easily and satisfactorily modified to make only a single call. +However, applications changes may be required. +For example, the ssadump command loads the user-specified packages +and in addition the runtime package. It is tempting to simply append +"runtime" to the user-provided list, but that does not work if the user +specified an ad-hoc package such as [a.go b.go]. +Instead, ssadump no longer requests the runtime package, +but seeks it among the dependencies of the user-specified packages, +and emits an error if it is not found. + +Overlays: the ParseFile hook in the API permits clients to vary the way +in which ASTs are obtained from filenames; the default implementation is +based on parser.ParseFile. This features enables editor-integrated tools +that analyze the contents of modified but unsaved buffers: rather than +read from the file system, a tool can read from an archive of modified +buffers provided by the editor. +This approach has its limits. Because package metadata is obtained by +fork/execing an external query command for each build system, we can +fake only the file contents seen by the parser, type-checker, and +application, but not by the metadata query, so, for example: +- additional imports in the fake file will not be described by the + metadata, so the type checker will fail to load imports that create + new dependencies. 
+- in TypeCheck mode, because export data is produced by the query + command, it will not reflect the fake file contents. +- this mechanism cannot add files to a package without first saving them. + +Questions & Tasks + +- Add GOARCH/GOOS? + They are not portable concepts, but could be made portable. + Our goal has been to allow users to express themselves using the conventions + of the underlying build system: if the build system honors GOARCH + during a build and during a metadata query, then so should + applications built atop that query mechanism. + Conversely, if the target architecture of the build is determined by + command-line flags, the application can pass the relevant + flags through to the build system using a command such as: + myapp -query_flag="--cpu=amd64" -query_flag="--os=darwin" + However, this approach is low-level, unwieldy, and non-portable. + GOOS and GOARCH seem important enough to warrant a dedicated option. + +- How should we handle partial failures such as a mixture of good and + malformed patterns, existing and non-existent packages, successful and + failed builds, import failures, import cycles, and so on, in a call to + Load? + +- Support bazel, blaze, and go1.10 list, not just go1.11 list. + +- Handle (and test) various partial success cases, e.g. + a mixture of good packages and: + invalid patterns + nonexistent packages + empty packages + packages with malformed package or import declarations + unreadable files + import cycles + other parse errors + type errors + Make sure we record errors at the correct place in the graph. + +- Missing packages among initial arguments are not reported. + Return bogus packages for them, like golist does. + +- "undeclared name" errors (for example) are reported out of source file + order. I suspect this is due to the breadth-first resolution now used + by go/types. Is that a bug? Discuss with gri. 
+ +*/ diff --git a/vendor/golang.org/x/tools/go/packages/external.go b/vendor/golang.org/x/tools/go/packages/external.go new file mode 100644 index 000000000000..53cc080d9d7b --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/external.go @@ -0,0 +1,68 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file enables an external tool to intercept package requests. +// If the tool is present then its results are used in preference to +// the go list command. + +package packages + +import ( + "bytes" + "encoding/json" + "fmt" + "os/exec" + "strings" +) + +// findExternalTool returns the file path of a tool that supplies +// the build system package structure, or "" if not found." +// If GOPACKAGESDRIVER is set in the environment findExternalTool returns its +// value, otherwise it searches for a binary named gopackagesdriver on the PATH. +func findExternalDriver(cfg *Config) driver { + const toolPrefix = "GOPACKAGESDRIVER=" + tool := "" + for _, env := range cfg.Env { + if val := strings.TrimPrefix(env, toolPrefix); val != env { + tool = val + } + } + if tool != "" && tool == "off" { + return nil + } + if tool == "" { + var err error + tool, err = exec.LookPath("gopackagesdriver") + if err != nil { + return nil + } + } + return func(cfg *Config, words ...string) (*driverResponse, error) { + buf := new(bytes.Buffer) + fullargs := []string{ + "list", + fmt.Sprintf("-test=%t", cfg.Tests), + fmt.Sprintf("-export=%t", usesExportData(cfg)), + fmt.Sprintf("-deps=%t", cfg.Mode >= LoadImports), + } + for _, f := range cfg.BuildFlags { + fullargs = append(fullargs, fmt.Sprintf("-buildflag=%v", f)) + } + fullargs = append(fullargs, "--") + fullargs = append(fullargs, words...) + cmd := exec.CommandContext(cfg.Context, tool, fullargs...) 
+ cmd.Env = cfg.Env + cmd.Dir = cfg.Dir + cmd.Stdout = buf + cmd.Stderr = new(bytes.Buffer) + if err := cmd.Run(); err != nil { + return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr) + } + var response driverResponse + if err := json.Unmarshal(buf.Bytes(), &response); err != nil { + return nil, err + } + return &response, nil + } +} diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go new file mode 100644 index 000000000000..37a6b8b13b9d --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/golist.go @@ -0,0 +1,624 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "bytes" + "encoding/json" + "fmt" + "golang.org/x/tools/internal/gopathwalk" + "golang.org/x/tools/internal/semver" + "io/ioutil" + "log" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" + "sync" +) + +// A goTooOldError reports that the go command +// found by exec.LookPath is too old to use the new go list behavior. +type goTooOldError struct { + error +} + +// goListDriver uses the go list command to interpret the patterns and produce +// the build system package structure. +// See driver for more details. +func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) { + // Determine files requested in contains patterns + var containFiles []string + var packagesNamed []string + restPatterns := make([]string, 0, len(patterns)) + // Extract file= and other [querytype]= patterns. Report an error if querytype + // doesn't exist. 
+extractQueries: + for _, pattern := range patterns { + eqidx := strings.Index(pattern, "=") + if eqidx < 0 { + restPatterns = append(restPatterns, pattern) + } else { + query, value := pattern[:eqidx], pattern[eqidx+len("="):] + switch query { + case "file": + containFiles = append(containFiles, value) + case "pattern": + restPatterns = append(restPatterns, value) + case "name": + packagesNamed = append(packagesNamed, value) + case "": // not a reserved query + restPatterns = append(restPatterns, pattern) + default: + for _, rune := range query { + if rune < 'a' || rune > 'z' { // not a reserved query + restPatterns = append(restPatterns, pattern) + continue extractQueries + } + } + // Reject all other patterns containing "=" + return nil, fmt.Errorf("invalid query type %q in query pattern %q", query, pattern) + } + } + } + patterns = restPatterns + // Look for the deprecated contains: syntax. + // TODO(matloob): delete this around mid-October 2018. + restPatterns = restPatterns[:0] + for _, pattern := range patterns { + if strings.HasPrefix(pattern, "contains:") { + containFile := strings.TrimPrefix(pattern, "contains:") + containFiles = append(containFiles, containFile) + } else { + restPatterns = append(restPatterns, pattern) + } + } + containFiles = absJoin(cfg.Dir, containFiles) + + // TODO(matloob): Remove the definition of listfunc and just use golistPackages once go1.12 is released. + var listfunc driver + listfunc = func(cfg *Config, words ...string) (*driverResponse, error) { + response, err := golistDriverCurrent(cfg, words...) + if _, ok := err.(goTooOldError); ok { + listfunc = golistDriverFallback + return listfunc(cfg, words...) + } + listfunc = golistDriverCurrent + return response, err + } + + var response *driverResponse + var err error + + // see if we have any patterns to pass through to go list. + if len(restPatterns) > 0 { + response, err = listfunc(cfg, restPatterns...) 
+ if err != nil { + return nil, err + } + } else { + response = &driverResponse{} + } + + if len(containFiles) == 0 && len(packagesNamed) == 0 { + return response, nil + } + + seenPkgs := make(map[string]*Package) // for deduplication. different containing queries could produce same packages + for _, pkg := range response.Packages { + seenPkgs[pkg.ID] = pkg + } + addPkg := func(p *Package) { + if _, ok := seenPkgs[p.ID]; ok { + return + } + seenPkgs[p.ID] = p + response.Packages = append(response.Packages, p) + } + + containsResults, err := runContainsQueries(cfg, listfunc, addPkg, containFiles) + if err != nil { + return nil, err + } + response.Roots = append(response.Roots, containsResults...) + + namedResults, err := runNamedQueries(cfg, listfunc, addPkg, packagesNamed) + if err != nil { + return nil, err + } + response.Roots = append(response.Roots, namedResults...) + + return response, nil +} + +func runContainsQueries(cfg *Config, driver driver, addPkg func(*Package), queries []string) ([]string, error) { + var results []string + for _, query := range queries { + // TODO(matloob): Do only one query per directory. + fdir := filepath.Dir(query) + cfg.Dir = fdir + dirResponse, err := driver(cfg, ".") + if err != nil { + return nil, err + } + isRoot := make(map[string]bool, len(dirResponse.Roots)) + for _, root := range dirResponse.Roots { + isRoot[root] = true + } + for _, pkg := range dirResponse.Packages { + // Add any new packages to the main set + // We don't bother to filter packages that will be dropped by the changes of roots, + // that will happen anyway during graph construction outside this function. + // Over-reporting packages is not a problem. 
+ addPkg(pkg) + // if the package was not a root one, it cannot have the file + if !isRoot[pkg.ID] { + continue + } + for _, pkgFile := range pkg.GoFiles { + if filepath.Base(query) == filepath.Base(pkgFile) { + results = append(results, pkg.ID) + break + } + } + } + } + return results, nil +} + +// modCacheRegexp splits a path in a module cache into module, module version, and package. +var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`) + +func runNamedQueries(cfg *Config, driver driver, addPkg func(*Package), queries []string) ([]string, error) { + // Determine which directories are relevant to scan. + roots, modulesEnabled, err := roots(cfg) + if err != nil { + return nil, err + } + + // Scan the selected directories. Simple matches, from GOPATH/GOROOT + // or the local module, can simply be "go list"ed. Matches from the + // module cache need special treatment. + var matchesMu sync.Mutex + var simpleMatches, modCacheMatches []string + add := func(root gopathwalk.Root, dir string) { + // Walk calls this concurrently; protect the result slices. + matchesMu.Lock() + defer matchesMu.Unlock() + + path := dir[len(root.Path)+1:] + if pathMatchesQueries(path, queries) { + switch root.Type { + case gopathwalk.RootModuleCache: + modCacheMatches = append(modCacheMatches, path) + case gopathwalk.RootCurrentModule: + // We'd need to read go.mod to find the full + // import path. Relative's easier. + rel, err := filepath.Rel(cfg.Dir, dir) + if err != nil { + // This ought to be impossible, since + // we found dir in the current module. 
+ panic(err) + } + simpleMatches = append(simpleMatches, "./"+rel) + case gopathwalk.RootGOPATH, gopathwalk.RootGOROOT: + simpleMatches = append(simpleMatches, path) + } + } + } + gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modulesEnabled}) + + var results []string + addResponse := func(r *driverResponse) { + for _, pkg := range r.Packages { + addPkg(pkg) + for _, name := range queries { + if pkg.Name == name { + results = append(results, pkg.ID) + break + } + } + } + } + + if len(simpleMatches) != 0 { + resp, err := driver(cfg, simpleMatches...) + if err != nil { + return nil, err + } + addResponse(resp) + } + + // Module cache matches are tricky. We want to avoid downloading new + // versions of things, so we need to use the ones present in the cache. + // go list doesn't accept version specifiers, so we have to write out a + // temporary module, and do the list in that module. + if len(modCacheMatches) != 0 { + // Collect all the matches, deduplicating by major version + // and preferring the newest. + type modInfo struct { + mod string + major string + } + mods := make(map[modInfo]string) + var imports []string + for _, modPath := range modCacheMatches { + matches := modCacheRegexp.FindStringSubmatch(modPath) + mod, ver := filepath.ToSlash(matches[1]), matches[2] + importPath := filepath.ToSlash(filepath.Join(matches[1], matches[3])) + + major := semver.Major(ver) + if prevVer, ok := mods[modInfo{mod, major}]; !ok || semver.Compare(ver, prevVer) > 0 { + mods[modInfo{mod, major}] = ver + } + + imports = append(imports, importPath) + } + + // Build the temporary module. 
+ var gomod bytes.Buffer + gomod.WriteString("module modquery\nrequire (\n") + for mod, version := range mods { + gomod.WriteString("\t" + mod.mod + " " + version + "\n") + } + gomod.WriteString(")\n") + + tmpCfg := *cfg + var err error + tmpCfg.Dir, err = ioutil.TempDir("", "gopackages-modquery") + if err != nil { + return nil, err + } + defer os.RemoveAll(tmpCfg.Dir) + + if err := ioutil.WriteFile(filepath.Join(tmpCfg.Dir, "go.mod"), gomod.Bytes(), 0777); err != nil { + return nil, fmt.Errorf("writing go.mod for module cache query: %v", err) + } + + // Run the query, using the import paths calculated from the matches above. + resp, err := driver(&tmpCfg, imports...) + if err != nil { + return nil, fmt.Errorf("querying module cache matches: %v", err) + } + addResponse(resp) + } + + return results, nil +} + +// roots selects the appropriate paths to walk based on the passed-in configuration, +// particularly the environment and the presence of a go.mod in cfg.Dir's parents. +func roots(cfg *Config) ([]gopathwalk.Root, bool, error) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + cmd := exec.CommandContext(cfg.Context, "go", "env", "GOROOT", "GOPATH", "GOMOD") + cmd.Stdout = stdout + cmd.Stderr = stderr + cmd.Dir = cfg.Dir + cmd.Env = cfg.Env + if err := cmd.Run(); err != nil { + return nil, false, fmt.Errorf("running go env: %v (stderr: %q)", err, stderr.Bytes()) + } + + fields := strings.Split(string(stdout.Bytes()), "\n") + if len(fields) != 4 || len(fields[3]) != 0 { + return nil, false, fmt.Errorf("go env returned unexpected output: %q (stderr: %q)", stdout.Bytes(), stderr.Bytes()) + } + goroot, gopath, gomod := fields[0], filepath.SplitList(fields[1]), fields[2] + modsEnabled := gomod != "" + + var roots []gopathwalk.Root + // Always add GOROOT. + roots = append(roots, gopathwalk.Root{filepath.Join(goroot, "/src"), gopathwalk.RootGOROOT}) + // If modules are enabled, scan the module dir. 
+ if modsEnabled { + roots = append(roots, gopathwalk.Root{filepath.Dir(gomod), gopathwalk.RootCurrentModule}) + } + // Add either GOPATH/src or GOPATH/pkg/mod, depending on module mode. + for _, p := range gopath { + if modsEnabled { + roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache}) + } else { + roots = append(roots, gopathwalk.Root{filepath.Join(p, "/src"), gopathwalk.RootGOPATH}) + } + } + + return roots, modsEnabled, nil +} + +// These functions were copied from goimports. See further documentation there. + +// pathMatchesQueries is adapted from pkgIsCandidate. +// TODO: is it reasonable to do Contains here, rather than an exact match on a path component? +func pathMatchesQueries(path string, queries []string) bool { + lastTwo := lastTwoComponents(path) + for _, query := range queries { + if strings.Contains(lastTwo, query) { + return true + } + if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(query) { + lastTwo = lowerASCIIAndRemoveHyphen(lastTwo) + if strings.Contains(lastTwo, query) { + return true + } + } + } + return false +} + +// lastTwoComponents returns at most the last two path components +// of v, using either / or \ as the path separator. +func lastTwoComponents(v string) string { + nslash := 0 + for i := len(v) - 1; i >= 0; i-- { + if v[i] == '/' || v[i] == '\\' { + nslash++ + if nslash == 2 { + return v[i:] + } + } + } + return v +} + +func hasHyphenOrUpperASCII(s string) bool { + for i := 0; i < len(s); i++ { + b := s[i] + if b == '-' || ('A' <= b && b <= 'Z') { + return true + } + } + return false +} + +func lowerASCIIAndRemoveHyphen(s string) (ret string) { + buf := make([]byte, 0, len(s)) + for i := 0; i < len(s); i++ { + b := s[i] + switch { + case b == '-': + continue + case 'A' <= b && b <= 'Z': + buf = append(buf, b+('a'-'A')) + default: + buf = append(buf, b) + } + } + return string(buf) +} + +// Fields must match go list; +// see $GOROOT/src/cmd/go/internal/load/pkg.go. 
+type jsonPackage struct { + ImportPath string + Dir string + Name string + Export string + GoFiles []string + CompiledGoFiles []string + CFiles []string + CgoFiles []string + CXXFiles []string + MFiles []string + HFiles []string + FFiles []string + SFiles []string + SwigFiles []string + SwigCXXFiles []string + SysoFiles []string + Imports []string + ImportMap map[string]string + Deps []string + TestGoFiles []string + TestImports []string + XTestGoFiles []string + XTestImports []string + ForTest string // q in a "p [q.test]" package, else "" + DepOnly bool + + Error *jsonPackageError +} + +type jsonPackageError struct { + ImportStack []string + Pos string + Err string +} + +func otherFiles(p *jsonPackage) [][]string { + return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles} +} + +// golistDriverCurrent uses the "go list" command to expand the +// pattern words and return metadata for the specified packages. +// dir may be "" and env may be nil, as per os/exec.Command. +func golistDriverCurrent(cfg *Config, words ...string) (*driverResponse, error) { + // go list uses the following identifiers in ImportPath and Imports: + // + // "p" -- importable package or main (command) + // "q.test" -- q's test executable + // "p [q.test]" -- variant of p as built for q's test executable + // "q_test [q.test]" -- q's external test package + // + // The packages p that are built differently for a test q.test + // are q itself, plus any helpers used by the external test q_test, + // typically including "testing" and all its dependencies. + + // Run "go list" for complete + // information on the specified packages. + buf, err := golist(cfg, golistargs(cfg, words)) + if err != nil { + return nil, err + } + // Decode the JSON and convert it to Package form. 
+ var response driverResponse + for dec := json.NewDecoder(buf); dec.More(); { + p := new(jsonPackage) + if err := dec.Decode(p); err != nil { + return nil, fmt.Errorf("JSON decoding failed: %v", err) + } + + if p.ImportPath == "" { + // The documentation for go list says that “[e]rroneous packages will have + // a non-empty ImportPath”. If for some reason it comes back empty, we + // prefer to error out rather than silently discarding data or handing + // back a package without any way to refer to it. + if p.Error != nil { + return nil, Error{ + Pos: p.Error.Pos, + Msg: p.Error.Err, + } + } + return nil, fmt.Errorf("package missing import path: %+v", p) + } + + pkg := &Package{ + Name: p.Name, + ID: p.ImportPath, + GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles), + CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles), + OtherFiles: absJoin(p.Dir, otherFiles(p)...), + } + + // Extract the PkgPath from the package's ID. + if i := strings.IndexByte(pkg.ID, ' '); i >= 0 { + pkg.PkgPath = pkg.ID[:i] + } else { + pkg.PkgPath = pkg.ID + } + + if pkg.PkgPath == "unsafe" { + pkg.GoFiles = nil // ignore fake unsafe.go file + } + + // Assume go list emits only absolute paths for Dir. + if p.Dir != "" && !filepath.IsAbs(p.Dir) { + log.Fatalf("internal error: go list returned non-absolute Package.Dir: %s", p.Dir) + } + + if p.Export != "" && !filepath.IsAbs(p.Export) { + pkg.ExportFile = filepath.Join(p.Dir, p.Export) + } else { + pkg.ExportFile = p.Export + } + + // imports + // + // Imports contains the IDs of all imported packages. + // ImportsMap records (path, ID) only where they differ. 
+ ids := make(map[string]bool) + for _, id := range p.Imports { + ids[id] = true + } + pkg.Imports = make(map[string]*Package) + for path, id := range p.ImportMap { + pkg.Imports[path] = &Package{ID: id} // non-identity import + delete(ids, id) + } + for id := range ids { + if id == "C" { + continue + } + + pkg.Imports[id] = &Package{ID: id} // identity import + } + if !p.DepOnly { + response.Roots = append(response.Roots, pkg.ID) + } + + // TODO(matloob): Temporary hack since CompiledGoFiles isn't always set. + if len(pkg.CompiledGoFiles) == 0 { + pkg.CompiledGoFiles = pkg.GoFiles + } + + if p.Error != nil { + pkg.Errors = append(pkg.Errors, Error{ + Pos: p.Error.Pos, + Msg: p.Error.Err, + }) + } + + response.Packages = append(response.Packages, pkg) + } + + return &response, nil +} + +// absJoin absolutizes and flattens the lists of files. +func absJoin(dir string, fileses ...[]string) (res []string) { + for _, files := range fileses { + for _, file := range files { + if !filepath.IsAbs(file) { + file = filepath.Join(dir, file) + } + res = append(res, file) + } + } + return res +} + +func golistargs(cfg *Config, words []string) []string { + fullargs := []string{ + "list", "-e", "-json", "-compiled", + fmt.Sprintf("-test=%t", cfg.Tests), + fmt.Sprintf("-export=%t", usesExportData(cfg)), + fmt.Sprintf("-deps=%t", cfg.Mode >= LoadImports), + } + fullargs = append(fullargs, cfg.BuildFlags...) + fullargs = append(fullargs, "--") + fullargs = append(fullargs, words...) + return fullargs +} + +// golist returns the JSON-encoded result of a "go list args..." query. +func golist(cfg *Config, args []string) (*bytes.Buffer, error) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + cmd := exec.CommandContext(cfg.Context, "go", args...) 
+ cmd.Env = cfg.Env + cmd.Dir = cfg.Dir + cmd.Stdout = stdout + cmd.Stderr = stderr + if err := cmd.Run(); err != nil { + exitErr, ok := err.(*exec.ExitError) + if !ok { + // Catastrophic error: + // - executable not found + // - context cancellation + return nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err) + } + + // Old go list? + if strings.Contains(fmt.Sprint(cmd.Stderr), "flag provided but not defined") { + return nil, goTooOldError{fmt.Errorf("unsupported version of go list: %s: %s", exitErr, cmd.Stderr)} + } + + // Export mode entails a build. + // If that build fails, errors appear on stderr + // (despite the -e flag) and the Export field is blank. + // Do not fail in that case. + if !usesExportData(cfg) { + return nil, fmt.Errorf("go %v: %s: %s", args, exitErr, cmd.Stderr) + } + } + + // As of writing, go list -export prints some non-fatal compilation + // errors to stderr, even with -e set. We would prefer that it put + // them in the Package.Error JSON (see http://golang.org/issue/26319). + // In the meantime, there's nowhere good to put them, but they can + // be useful for debugging. Print them if $GOPACKAGESPRINTGOLISTERRORS + // is set. + if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" { + fmt.Fprintf(os.Stderr, "go %v stderr: <<\n%s\n>>\n", args, stderr) + } + + // debugging + if false { + fmt.Fprintln(os.Stderr, stdout) + } + + return stdout, nil +} diff --git a/vendor/golang.org/x/tools/go/packages/golist_fallback.go b/vendor/golang.org/x/tools/go/packages/golist_fallback.go new file mode 100644 index 000000000000..79b120711e89 --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/golist_fallback.go @@ -0,0 +1,457 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package packages + +import ( + "encoding/json" + "fmt" + "go/build" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "sort" + "strings" + + "golang.org/x/tools/go/internal/cgo" +) + +// TODO(matloob): Delete this file once Go 1.12 is released. + +// This file provides backwards compatibility support for +// loading for versions of Go earlier than 1.10.4. This support is meant to +// assist with migration to the Package API until there's +// widespread adoption of these newer Go versions. +// This support will be removed once Go 1.12 is released +// in Q1 2019. + +func golistDriverFallback(cfg *Config, words ...string) (*driverResponse, error) { + // Turn absolute paths into GOROOT and GOPATH-relative paths to provide to go list. + // This will have surprising behavior if GOROOT or GOPATH contain multiple packages with the same + // path and a user provides an absolute path to a directory that's shadowed by an earlier + // directory in GOROOT or GOPATH with the same package path. + words = cleanAbsPaths(cfg, words) + + original, deps, err := getDeps(cfg, words...) 
+ if err != nil { + return nil, err + } + + var tmpdir string // used for generated cgo files + var needsTestVariant []struct { + pkg, xtestPkg *Package + } + + var response driverResponse + allPkgs := make(map[string]bool) + addPackage := func(p *jsonPackage) { + id := p.ImportPath + + if allPkgs[id] { + return + } + allPkgs[id] = true + + isRoot := original[id] != nil + pkgpath := id + + if pkgpath == "unsafe" { + p.GoFiles = nil // ignore fake unsafe.go file + } + + importMap := func(importlist []string) map[string]*Package { + importMap := make(map[string]*Package) + for _, id := range importlist { + + if id == "C" { + for _, path := range []string{"unsafe", "syscall", "runtime/cgo"} { + if pkgpath != path && importMap[path] == nil { + importMap[path] = &Package{ID: path} + } + } + continue + } + importMap[vendorlessPath(id)] = &Package{ID: id} + } + return importMap + } + compiledGoFiles := absJoin(p.Dir, p.GoFiles) + // Use a function to simplify control flow. It's just a bunch of gotos. + var cgoErrors []error + var outdir string + getOutdir := func() (string, error) { + if outdir != "" { + return outdir, nil + } + if tmpdir == "" { + if tmpdir, err = ioutil.TempDir("", "gopackages"); err != nil { + return "", err + } + } + // Add a "go-build" component to the path to make the tests think the files are in the cache. + // This allows the same test to test the pre- and post-Go 1.11 go list logic because the Go 1.11 + // go list generates test mains in the cache, and the test code knows not to rely on paths in the + // cache to stay stable. + outdir = filepath.Join(tmpdir, "go-build", strings.Replace(p.ImportPath, "/", "_", -1)) + if err := os.MkdirAll(outdir, 0755); err != nil { + outdir = "" + return "", err + } + return outdir, nil + } + processCgo := func() bool { + // Suppress any cgo errors. Any relevant errors will show up in typechecking. + // TODO(matloob): Skip running cgo if Mode < LoadTypes. 
+ outdir, err := getOutdir() + if err != nil { + cgoErrors = append(cgoErrors, err) + return false + } + files, _, err := runCgo(p.Dir, outdir, cfg.Env) + if err != nil { + cgoErrors = append(cgoErrors, err) + return false + } + compiledGoFiles = append(compiledGoFiles, files...) + return true + } + if len(p.CgoFiles) == 0 || !processCgo() { + compiledGoFiles = append(compiledGoFiles, absJoin(p.Dir, p.CgoFiles)...) // Punt to typechecker. + } + if isRoot { + response.Roots = append(response.Roots, id) + } + pkg := &Package{ + ID: id, + Name: p.Name, + GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles), + CompiledGoFiles: compiledGoFiles, + OtherFiles: absJoin(p.Dir, otherFiles(p)...), + PkgPath: pkgpath, + Imports: importMap(p.Imports), + // TODO(matloob): set errors on the Package to cgoErrors + } + if p.Error != nil { + pkg.Errors = append(pkg.Errors, Error{ + Pos: p.Error.Pos, + Msg: p.Error.Err, + }) + } + response.Packages = append(response.Packages, pkg) + if cfg.Tests && isRoot { + testID := fmt.Sprintf("%s [%s.test]", id, id) + if len(p.TestGoFiles) > 0 || len(p.XTestGoFiles) > 0 { + response.Roots = append(response.Roots, testID) + testPkg := &Package{ + ID: testID, + Name: p.Name, + GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles, p.TestGoFiles), + CompiledGoFiles: append(compiledGoFiles, absJoin(p.Dir, p.TestGoFiles)...), + OtherFiles: absJoin(p.Dir, otherFiles(p)...), + PkgPath: pkgpath, + Imports: importMap(append(p.Imports, p.TestImports...)), + // TODO(matloob): set errors on the Package to cgoErrors + } + response.Packages = append(response.Packages, testPkg) + var xtestPkg *Package + if len(p.XTestGoFiles) > 0 { + xtestID := fmt.Sprintf("%s_test [%s.test]", id, id) + response.Roots = append(response.Roots, xtestID) + // Generate test variants for all packages q where a path exists + // such that xtestPkg -> ... -> q -> ... 
-> p (where p is the package under test) + // and rewrite all import map entries of p to point to testPkg (the test variant of + // p), and of each q to point to the test variant of that q. + xtestPkg = &Package{ + ID: xtestID, + Name: p.Name + "_test", + GoFiles: absJoin(p.Dir, p.XTestGoFiles), + CompiledGoFiles: absJoin(p.Dir, p.XTestGoFiles), + PkgPath: pkgpath + "_test", + Imports: importMap(p.XTestImports), + } + // Add to list of packages we need to rewrite imports for to refer to test variants. + // We may need to create a test variant of a package that hasn't been loaded yet, so + // the test variants need to be created later. + needsTestVariant = append(needsTestVariant, struct{ pkg, xtestPkg *Package }{pkg, xtestPkg}) + response.Packages = append(response.Packages, xtestPkg) + } + // testmain package + testmainID := id + ".test" + response.Roots = append(response.Roots, testmainID) + imports := map[string]*Package{} + imports[testPkg.PkgPath] = &Package{ID: testPkg.ID} + if xtestPkg != nil { + imports[xtestPkg.PkgPath] = &Package{ID: xtestPkg.ID} + } + testmainPkg := &Package{ + ID: testmainID, + Name: "main", + PkgPath: testmainID, + Imports: imports, + } + response.Packages = append(response.Packages, testmainPkg) + outdir, err := getOutdir() + if err != nil { + testmainPkg.Errors = append(testmainPkg.Errors, Error{ + Pos: "-", + Msg: fmt.Sprintf("failed to generate testmain: %v", err), + Kind: ListError, + }) + return + } + testmain := filepath.Join(outdir, "testmain.go") + extraimports, extradeps, err := generateTestmain(testmain, testPkg, xtestPkg) + if err != nil { + testmainPkg.Errors = append(testmainPkg.Errors, Error{ + Pos: "-", + Msg: fmt.Sprintf("failed to generate testmain: %v", err), + Kind: ListError, + }) + } + deps = append(deps, extradeps...) 
+ for _, imp := range extraimports { // testing, testing/internal/testdeps, and maybe os + imports[imp] = &Package{ID: imp} + } + testmainPkg.GoFiles = []string{testmain} + testmainPkg.CompiledGoFiles = []string{testmain} + } + } + } + + for _, pkg := range original { + addPackage(pkg) + } + if cfg.Mode < LoadImports || len(deps) == 0 { + return &response, nil + } + + buf, err := golist(cfg, golistArgsFallback(cfg, deps)) + if err != nil { + return nil, err + } + + // Decode the JSON and convert it to Package form. + for dec := json.NewDecoder(buf); dec.More(); { + p := new(jsonPackage) + if err := dec.Decode(p); err != nil { + return nil, fmt.Errorf("JSON decoding failed: %v", err) + } + + addPackage(p) + } + + for _, v := range needsTestVariant { + createTestVariants(&response, v.pkg, v.xtestPkg) + } + + // TODO(matloob): Is this the right ordering? + sort.SliceStable(response.Packages, func(i, j int) bool { + return response.Packages[i].PkgPath < response.Packages[j].PkgPath + }) + + return &response, nil +} + +func createTestVariants(response *driverResponse, pkgUnderTest, xtestPkg *Package) { + allPkgs := make(map[string]*Package) + for _, pkg := range response.Packages { + allPkgs[pkg.ID] = pkg + } + needsTestVariant := make(map[string]bool) + needsTestVariant[pkgUnderTest.ID] = true + var needsVariantRec func(p *Package) bool + needsVariantRec = func(p *Package) bool { + if needsTestVariant[p.ID] { + return true + } + for _, imp := range p.Imports { + if needsVariantRec(allPkgs[imp.ID]) { + // Don't break because we want to make sure all dependencies + // have been processed, and all required test variants of our dependencies + // exist. + needsTestVariant[p.ID] = true + } + } + if !needsTestVariant[p.ID] { + return false + } + // Create a clone of the package. It will share the same strings and lists of source files, + // but that's okay. It's only necessary for the Imports map to have a separate identity. 
+ testVariant := *p + testVariant.ID = fmt.Sprintf("%s [%s.test]", p.ID, pkgUnderTest.ID) + testVariant.Imports = make(map[string]*Package) + for imp, pkg := range p.Imports { + testVariant.Imports[imp] = pkg + if needsTestVariant[pkg.ID] { + testVariant.Imports[imp] = &Package{ID: fmt.Sprintf("%s [%s.test]", pkg.ID, pkgUnderTest.ID)} + } + } + response.Packages = append(response.Packages, &testVariant) + return needsTestVariant[p.ID] + } + // finally, update the xtest package's imports + for imp, pkg := range xtestPkg.Imports { + if allPkgs[pkg.ID] == nil { + fmt.Printf("for %s: package %s doesn't exist\n", xtestPkg.ID, pkg.ID) + } + if needsVariantRec(allPkgs[pkg.ID]) { + xtestPkg.Imports[imp] = &Package{ID: fmt.Sprintf("%s [%s.test]", pkg.ID, pkgUnderTest.ID)} + } + } +} + +// cleanAbsPaths replaces all absolute paths with GOPATH- and GOROOT-relative +// paths. If an absolute path is not GOPATH- or GOROOT- relative, it is left as an +// absolute path so an error can be returned later. +func cleanAbsPaths(cfg *Config, words []string) []string { + var searchpaths []string + var cleaned = make([]string, len(words)) + for i := range cleaned { + cleaned[i] = words[i] + // Ignore relative directory paths (they must already be goroot-relative) and Go source files + // (absolute source files are already allowed for ad-hoc packages). + // TODO(matloob): Can there be non-.go files in ad-hoc packages. + if !filepath.IsAbs(cleaned[i]) || strings.HasSuffix(cleaned[i], ".go") { + continue + } + // otherwise, it's an absolute path. Search GOPATH and GOROOT to find it. 
+ if searchpaths == nil { + cmd := exec.Command("go", "env", "GOPATH", "GOROOT") + cmd.Env = cfg.Env + out, err := cmd.Output() + if err != nil { + searchpaths = []string{} + continue // suppress the error, it will show up again when running go list + } + lines := strings.Split(string(out), "\n") + if len(lines) != 3 || lines[0] == "" || lines[1] == "" || lines[2] != "" { + continue // suppress error + } + // first line is GOPATH + for _, path := range filepath.SplitList(lines[0]) { + searchpaths = append(searchpaths, filepath.Join(path, "src")) + } + // second line is GOROOT + searchpaths = append(searchpaths, filepath.Join(lines[1], "src")) + } + for _, sp := range searchpaths { + if strings.HasPrefix(cleaned[i], sp) { + cleaned[i] = strings.TrimPrefix(cleaned[i], sp) + cleaned[i] = strings.TrimLeft(cleaned[i], string(filepath.Separator)) + } + } + } + return cleaned +} + +// vendorlessPath returns the devendorized version of the import path ipath. +// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b". +// Copied from golang.org/x/tools/imports/fix.go. +func vendorlessPath(ipath string) string { + // Devendorize for use in import statement. + if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 { + return ipath[i+len("/vendor/"):] + } + if strings.HasPrefix(ipath, "vendor/") { + return ipath[len("vendor/"):] + } + return ipath +} + +// getDeps runs an initial go list to determine all the dependency packages. +func getDeps(cfg *Config, words ...string) (originalSet map[string]*jsonPackage, deps []string, err error) { + buf, err := golist(cfg, golistArgsFallback(cfg, words)) + if err != nil { + return nil, nil, err + } + + depsSet := make(map[string]bool) + originalSet = make(map[string]*jsonPackage) + var testImports []string + + // Extract deps from the JSON. 
+ for dec := json.NewDecoder(buf); dec.More(); { + p := new(jsonPackage) + if err := dec.Decode(p); err != nil { + return nil, nil, fmt.Errorf("JSON decoding failed: %v", err) + } + + originalSet[p.ImportPath] = p + for _, dep := range p.Deps { + depsSet[dep] = true + } + if cfg.Tests { + // collect the additional imports of the test packages. + pkgTestImports := append(p.TestImports, p.XTestImports...) + for _, imp := range pkgTestImports { + if depsSet[imp] { + continue + } + depsSet[imp] = true + testImports = append(testImports, imp) + } + } + } + // Get the deps of the packages imported by tests. + if len(testImports) > 0 { + buf, err = golist(cfg, golistArgsFallback(cfg, testImports)) + if err != nil { + return nil, nil, err + } + // Extract deps from the JSON. + for dec := json.NewDecoder(buf); dec.More(); { + p := new(jsonPackage) + if err := dec.Decode(p); err != nil { + return nil, nil, fmt.Errorf("JSON decoding failed: %v", err) + } + for _, dep := range p.Deps { + depsSet[dep] = true + } + } + } + + for orig := range originalSet { + delete(depsSet, orig) + } + + deps = make([]string, 0, len(depsSet)) + for dep := range depsSet { + deps = append(deps, dep) + } + sort.Strings(deps) // ensure output is deterministic + return originalSet, deps, nil +} + +func golistArgsFallback(cfg *Config, words []string) []string { + fullargs := []string{"list", "-e", "-json"} + fullargs = append(fullargs, cfg.BuildFlags...) + fullargs = append(fullargs, "--") + fullargs = append(fullargs, words...) + return fullargs +} + +func runCgo(pkgdir, tmpdir string, env []string) (files, displayfiles []string, err error) { + // Use go/build to open cgo files and determine the cgo flags, etc, from them. + // This is tricky so it's best to avoid reimplementing as much as we can, and + // we plan to delete this support once Go 1.12 is released anyways. + // TODO(matloob): This isn't completely correct because we're using the Default + // context. 
Perhaps we should more accurately fill in the context. + bp, err := build.ImportDir(pkgdir, build.ImportMode(0)) + if err != nil { + return nil, nil, err + } + for _, ev := range env { + if v := strings.TrimPrefix(ev, "CGO_CPPFLAGS"); v != ev { + bp.CgoCPPFLAGS = append(bp.CgoCPPFLAGS, strings.Fields(v)...) + } else if v := strings.TrimPrefix(ev, "CGO_CFLAGS"); v != ev { + bp.CgoCFLAGS = append(bp.CgoCFLAGS, strings.Fields(v)...) + } else if v := strings.TrimPrefix(ev, "CGO_CXXFLAGS"); v != ev { + bp.CgoCXXFLAGS = append(bp.CgoCXXFLAGS, strings.Fields(v)...) + } else if v := strings.TrimPrefix(ev, "CGO_LDFLAGS"); v != ev { + bp.CgoLDFLAGS = append(bp.CgoLDFLAGS, strings.Fields(v)...) + } + } + return cgo.Run(bp, pkgdir, tmpdir, true) +} diff --git a/vendor/golang.org/x/tools/go/packages/golist_fallback_testmain.go b/vendor/golang.org/x/tools/go/packages/golist_fallback_testmain.go new file mode 100644 index 000000000000..128e00e25aa0 --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/golist_fallback_testmain.go @@ -0,0 +1,318 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is largely based on the Go 1.10-era cmd/go/internal/test/test.go +// testmain generation code. + +package packages + +import ( + "errors" + "fmt" + "go/ast" + "go/doc" + "go/parser" + "go/token" + "os" + "sort" + "strings" + "text/template" + "unicode" + "unicode/utf8" +) + +// TODO(matloob): Delete this file once Go 1.12 is released. + +// This file complements golist_fallback.go by providing +// support for generating testmains. 
+ +func generateTestmain(out string, testPkg, xtestPkg *Package) (extraimports, extradeps []string, err error) { + testFuncs, err := loadTestFuncs(testPkg, xtestPkg) + if err != nil { + return nil, nil, err + } + extraimports = []string{"testing", "testing/internal/testdeps"} + if testFuncs.TestMain == nil { + extraimports = append(extraimports, "os") + } + // Transitive dependencies of ("testing", "testing/internal/testdeps"). + // os is part of the transitive closure so it and its transitive dependencies are + // included regardless of whether it's imported in the template below. + extradeps = []string{ + "errors", + "internal/cpu", + "unsafe", + "internal/bytealg", + "internal/race", + "runtime/internal/atomic", + "runtime/internal/sys", + "runtime", + "sync/atomic", + "sync", + "io", + "unicode", + "unicode/utf8", + "bytes", + "math", + "syscall", + "time", + "internal/poll", + "internal/syscall/unix", + "internal/testlog", + "os", + "math/bits", + "strconv", + "reflect", + "fmt", + "sort", + "strings", + "flag", + "runtime/debug", + "context", + "runtime/trace", + "testing", + "bufio", + "regexp/syntax", + "regexp", + "compress/flate", + "encoding/binary", + "hash", + "hash/crc32", + "compress/gzip", + "path/filepath", + "io/ioutil", + "text/tabwriter", + "runtime/pprof", + "testing/internal/testdeps", + } + return extraimports, extradeps, writeTestmain(out, testFuncs) +} + +// The following is adapted from the cmd/go testmain generation code. + +// isTestFunc tells whether fn has the type of a testing function. arg +// specifies the parameter type we look for: B, M or T. 
+func isTestFunc(fn *ast.FuncDecl, arg string) bool { + if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 || + fn.Type.Params.List == nil || + len(fn.Type.Params.List) != 1 || + len(fn.Type.Params.List[0].Names) > 1 { + return false + } + ptr, ok := fn.Type.Params.List[0].Type.(*ast.StarExpr) + if !ok { + return false + } + // We can't easily check that the type is *testing.M + // because we don't know how testing has been imported, + // but at least check that it's *M or *something.M. + // Same applies for B and T. + if name, ok := ptr.X.(*ast.Ident); ok && name.Name == arg { + return true + } + if sel, ok := ptr.X.(*ast.SelectorExpr); ok && sel.Sel.Name == arg { + return true + } + return false +} + +// isTest tells whether name looks like a test (or benchmark, according to prefix). +// It is a Test (say) if there is a character after Test that is not a lower-case letter. +// We don't want TesticularCancer. +func isTest(name, prefix string) bool { + if !strings.HasPrefix(name, prefix) { + return false + } + if len(name) == len(prefix) { // "Test" is ok + return true + } + rune, _ := utf8.DecodeRuneInString(name[len(prefix):]) + return !unicode.IsLower(rune) +} + +// loadTestFuncs returns the testFuncs describing the tests that will be run. +func loadTestFuncs(ptest, pxtest *Package) (*testFuncs, error) { + t := &testFuncs{ + TestPackage: ptest, + XTestPackage: pxtest, + } + for _, file := range ptest.GoFiles { + if !strings.HasSuffix(file, "_test.go") { + continue + } + if err := t.load(file, "_test", &t.ImportTest, &t.NeedTest); err != nil { + return nil, err + } + } + if pxtest != nil { + for _, file := range pxtest.GoFiles { + if err := t.load(file, "_xtest", &t.ImportXtest, &t.NeedXtest); err != nil { + return nil, err + } + } + } + return t, nil +} + +// writeTestmain writes the _testmain.go file for t to the file named out. 
+func writeTestmain(out string, t *testFuncs) error { + f, err := os.Create(out) + if err != nil { + return err + } + defer f.Close() + + if err := testmainTmpl.Execute(f, t); err != nil { + return err + } + + return nil +} + +type testFuncs struct { + Tests []testFunc + Benchmarks []testFunc + Examples []testFunc + TestMain *testFunc + TestPackage *Package + XTestPackage *Package + ImportTest bool + NeedTest bool + ImportXtest bool + NeedXtest bool +} + +// Tested returns the name of the package being tested. +func (t *testFuncs) Tested() string { + return t.TestPackage.Name +} + +type testFunc struct { + Package string // imported package name (_test or _xtest) + Name string // function name + Output string // output, for examples + Unordered bool // output is allowed to be unordered. +} + +func (t *testFuncs) load(filename, pkg string, doImport, seen *bool) error { + var fset = token.NewFileSet() + + f, err := parser.ParseFile(fset, filename, nil, parser.ParseComments) + if err != nil { + return errors.New("failed to parse test file " + filename) + } + for _, d := range f.Decls { + n, ok := d.(*ast.FuncDecl) + if !ok { + continue + } + if n.Recv != nil { + continue + } + name := n.Name.String() + switch { + case name == "TestMain": + if isTestFunc(n, "T") { + t.Tests = append(t.Tests, testFunc{pkg, name, "", false}) + *doImport, *seen = true, true + continue + } + err := checkTestFunc(fset, n, "M") + if err != nil { + return err + } + if t.TestMain != nil { + return errors.New("multiple definitions of TestMain") + } + t.TestMain = &testFunc{pkg, name, "", false} + *doImport, *seen = true, true + case isTest(name, "Test"): + err := checkTestFunc(fset, n, "T") + if err != nil { + return err + } + t.Tests = append(t.Tests, testFunc{pkg, name, "", false}) + *doImport, *seen = true, true + case isTest(name, "Benchmark"): + err := checkTestFunc(fset, n, "B") + if err != nil { + return err + } + t.Benchmarks = append(t.Benchmarks, testFunc{pkg, name, "", false}) + 
*doImport, *seen = true, true + } + } + ex := doc.Examples(f) + sort.Slice(ex, func(i, j int) bool { return ex[i].Order < ex[j].Order }) + for _, e := range ex { + *doImport = true // import test file whether executed or not + if e.Output == "" && !e.EmptyOutput { + // Don't run examples with no output. + continue + } + t.Examples = append(t.Examples, testFunc{pkg, "Example" + e.Name, e.Output, e.Unordered}) + *seen = true + } + return nil +} + +func checkTestFunc(fset *token.FileSet, fn *ast.FuncDecl, arg string) error { + if !isTestFunc(fn, arg) { + name := fn.Name.String() + pos := fset.Position(fn.Pos()) + return fmt.Errorf("%s: wrong signature for %s, must be: func %s(%s *testing.%s)", pos, name, name, strings.ToLower(arg), arg) + } + return nil +} + +var testmainTmpl = template.Must(template.New("main").Parse(` +package main + +import ( +{{if not .TestMain}} + "os" +{{end}} + "testing" + "testing/internal/testdeps" + +{{if .ImportTest}} + {{if .NeedTest}}_test{{else}}_{{end}} {{.TestPackage.PkgPath | printf "%q"}} +{{end}} +{{if .ImportXtest}} + {{if .NeedXtest}}_xtest{{else}}_{{end}} {{.XTestPackage.PkgPath | printf "%q"}} +{{end}} +) + +var tests = []testing.InternalTest{ +{{range .Tests}} + {"{{.Name}}", {{.Package}}.{{.Name}}}, +{{end}} +} + +var benchmarks = []testing.InternalBenchmark{ +{{range .Benchmarks}} + {"{{.Name}}", {{.Package}}.{{.Name}}}, +{{end}} +} + +var examples = []testing.InternalExample{ +{{range .Examples}} + {"{{.Name}}", {{.Package}}.{{.Name}}, {{.Output | printf "%q"}}, {{.Unordered}}}, +{{end}} +} + +func init() { + testdeps.ImportPath = {{.TestPackage.PkgPath | printf "%q"}} +} + +func main() { + m := testing.MainStart(testdeps.TestDeps{}, tests, benchmarks, examples) +{{with .TestMain}} + {{.Package}}.{{.Name}}(m) +{{else}} + os.Exit(m.Run()) +{{end}} +} + +`)) diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go new file mode 100644 index 000000000000..84a3dbb10c46 
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/packages.go
@@ -0,0 +1,935 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package packages
+
+// See doc.go for package documentation and implementation notes.
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "go/parser"
+ "go/scanner"
+ "go/token"
+ "go/types"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "sync"
+
+ "golang.org/x/tools/go/gcexportdata"
+)
+
+// A LoadMode specifies the amount of detail to return when loading.
+// Higher-numbered modes cause Load to return more information,
+// but may be slower. Load may return more information than requested.
+type LoadMode int
+
+const (
+ // LoadFiles finds the packages and computes their source file lists.
+ // Package fields: ID, Name, Errors, GoFiles, and OtherFiles.
+ LoadFiles LoadMode = iota
+
+ // LoadImports adds import information for each package
+ // and its dependencies.
+ // Package fields added: Imports.
+ LoadImports
+
+ // LoadTypes adds type information for package-level
+ // declarations in the packages matching the patterns.
+ // Package fields added: Types, Fset, and IllTyped.
+ // This mode uses type information provided by the build system when
+ // possible, and may fill in the ExportFile field.
+ LoadTypes
+
+ // LoadSyntax adds typed syntax trees for the packages matching the patterns.
+ // Package fields added: Syntax, and TypesInfo, for direct pattern matches only.
+ LoadSyntax
+
+ // LoadAllSyntax adds typed syntax trees for the packages matching the patterns
+ // and all dependencies.
+ // Package fields added: Types, Fset, Illtyped, Syntax, and TypesInfo,
+ // for all packages in the import graph.
+ LoadAllSyntax
+)
+
+// A Config specifies details about how packages should be loaded.
+// The zero value is a valid configuration.
+// Calls to Load do not modify this struct.
+type Config struct {
+ // Mode controls the level of information returned for each package.
+ Mode LoadMode
+
+ // Context specifies the context for the load operation.
+ // If the context is cancelled, the loader may stop early
+ // and return an ErrCancelled error.
+ // If Context is nil, the load cannot be cancelled.
+ Context context.Context
+
+ // Dir is the directory in which to run the build system's query tool
+ // that provides information about the packages.
+ // If Dir is empty, the tool is run in the current directory.
+ Dir string
+
+ // Env is the environment to use when invoking the build system's query tool.
+ // If Env is nil, the current environment is used.
+ // As in os/exec's Cmd, only the last value in the slice for
+ // each environment key is used. To specify the setting of only
+ // a few variables, append to the current environment, as in:
+ //
+ // opt.Env = append(os.Environ(), "GOOS=plan9", "GOARCH=386")
+ //
+ Env []string
+
+ // BuildFlags is a list of command-line flags to be passed through to
+ // the build system's query tool.
+ BuildFlags []string
+
+ // Fset provides source position information for syntax trees and types.
+ // If Fset is nil, the loader will create a new FileSet.
+ Fset *token.FileSet
+
+ // ParseFile is called to read and parse each file
+ // when preparing a package's type-checked syntax tree.
+ // It must be safe to call ParseFile simultaneously from multiple goroutines.
+ // If ParseFile is nil, the loader will use parser.ParseFile.
+ //
+ // ParseFile should parse the source from src and use filename only for
+ // recording position information.
+ //
+ // An application may supply a custom implementation of ParseFile
+ // to change the effective file contents or the behavior of the parser,
+ // or to modify the syntax tree. For example, selectively eliminating
+ // unwanted function bodies can significantly accelerate type checking.
+ ParseFile func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) + + // If Tests is set, the loader includes not just the packages + // matching a particular pattern but also any related test packages, + // including test-only variants of the package and the test executable. + // + // For example, when using the go command, loading "fmt" with Tests=true + // returns four packages, with IDs "fmt" (the standard package), + // "fmt [fmt.test]" (the package as compiled for the test), + // "fmt_test" (the test functions from source files in package fmt_test), + // and "fmt.test" (the test binary). + // + // In build systems with explicit names for tests, + // setting Tests may have no effect. + Tests bool + + // Overlay provides a mapping of absolute file paths to file contents. + // If the file with the given path already exists, the parser will use the + // alternative file contents provided by the map. + // + // The Package.Imports map may not include packages that are imported only + // by the alternative file contents provided by Overlay. This may cause + // type-checking to fail. + Overlay map[string][]byte +} + +// driver is the type for functions that query the build system for the +// packages named by the patterns. +type driver func(cfg *Config, patterns ...string) (*driverResponse, error) + +// driverResponse contains the results for a driver query. +type driverResponse struct { + // Roots is the set of package IDs that make up the root packages. + // We have to encode this separately because when we encode a single package + // we cannot know if it is one of the roots as that requires knowledge of the + // graph it is part of. + Roots []string `json:",omitempty"` + + // Packages is the full set of packages in the graph. + // The packages are not connected into a graph. + // The Imports if populated will be stubs that only have their ID set. 
+ // Imports will be connected and then type and syntax information added in a + // later pass (see refine). + Packages []*Package +} + +// Load loads and returns the Go packages named by the given patterns. +// +// Config specifies loading options; +// nil behaves the same as an empty Config. +// +// Load returns an error if any of the patterns was invalid +// as defined by the underlying build system. +// It may return an empty list of packages without an error, +// for instance for an empty expansion of a valid wildcard. +// Errors associated with a particular package are recorded in the +// corresponding Package's Errors list, and do not cause Load to +// return an error. Clients may need to handle such errors before +// proceeding with further analysis. The PrintErrors function is +// provided for convenient display of all errors. +func Load(cfg *Config, patterns ...string) ([]*Package, error) { + l := newLoader(cfg) + response, err := defaultDriver(&l.Config, patterns...) + if err != nil { + return nil, err + } + return l.refine(response.Roots, response.Packages...) +} + +// defaultDriver is a driver that looks for an external driver binary, and if +// it does not find it falls back to the built in go list driver. +func defaultDriver(cfg *Config, patterns ...string) (*driverResponse, error) { + driver := findExternalDriver(cfg) + if driver == nil { + driver = goListDriver + } + return driver(cfg, patterns...) +} + +// A Package describes a loaded Go package. +type Package struct { + // ID is a unique identifier for a package, + // in a syntax provided by the underlying build system. + // + // Because the syntax varies based on the build system, + // clients should treat IDs as opaque and not attempt to + // interpret them. + ID string + + // Name is the package name as it appears in the package source code. + Name string + + // PkgPath is the package path as used by the go/types package. 
+ PkgPath string + + // Errors contains any errors encountered querying the metadata + // of the package, or while parsing or type-checking its files. + Errors []Error + + // GoFiles lists the absolute file paths of the package's Go source files. + GoFiles []string + + // CompiledGoFiles lists the absolute file paths of the package's source + // files that were presented to the compiler. + // This may differ from GoFiles if files are processed before compilation. + CompiledGoFiles []string + + // OtherFiles lists the absolute file paths of the package's non-Go source files, + // including assembly, C, C++, Fortran, Objective-C, SWIG, and so on. + OtherFiles []string + + // ExportFile is the absolute path to a file containing type + // information for the package as provided by the build system. + ExportFile string + + // Imports maps import paths appearing in the package's Go source files + // to corresponding loaded Packages. + Imports map[string]*Package + + // Types provides type information for the package. + // Modes LoadTypes and above set this field for packages matching the + // patterns; type information for dependencies may be missing or incomplete. + // Mode LoadAllSyntax sets this field for all packages, including dependencies. + Types *types.Package + + // Fset provides position information for Types, TypesInfo, and Syntax. + // It is set only when Types is set. + Fset *token.FileSet + + // IllTyped indicates whether the package or any dependency contains errors. + // It is set only when Types is set. + IllTyped bool + + // Syntax is the package's syntax trees, for the files listed in CompiledGoFiles. + // + // Mode LoadSyntax sets this field for packages matching the patterns. + // Mode LoadAllSyntax sets this field for all packages, including dependencies. + Syntax []*ast.File + + // TypesInfo provides type information about the package's syntax trees. + // It is set only when Syntax is set. 
+ TypesInfo *types.Info +} + +// An Error describes a problem with a package's metadata, syntax, or types. +type Error struct { + Pos string // "file:line:col" or "file:line" or "" or "-" + Msg string + Kind ErrorKind +} + +// ErrorKind describes the source of the error, allowing the user to +// differentiate between errors generated by the driver, the parser, or the +// type-checker. +type ErrorKind int + +const ( + UnknownError ErrorKind = iota + ListError + ParseError + TypeError +) + +func (err Error) Error() string { + pos := err.Pos + if pos == "" { + pos = "-" // like token.Position{}.String() + } + return pos + ": " + err.Msg +} + +// flatPackage is the JSON form of Package +// It drops all the type and syntax fields, and transforms the Imports +// +// TODO(adonovan): identify this struct with Package, effectively +// publishing the JSON protocol. +type flatPackage struct { + ID string + Name string `json:",omitempty"` + PkgPath string `json:",omitempty"` + Errors []Error `json:",omitempty"` + GoFiles []string `json:",omitempty"` + CompiledGoFiles []string `json:",omitempty"` + OtherFiles []string `json:",omitempty"` + ExportFile string `json:",omitempty"` + Imports map[string]string `json:",omitempty"` +} + +// MarshalJSON returns the Package in its JSON form. +// For the most part, the structure fields are written out unmodified, and +// the type and syntax fields are skipped. +// The imports are written out as just a map of path to package id. +// The errors are written using a custom type that tries to preserve the +// structure of error types we know about. +// +// This method exists to enable support for additional build systems. It is +// not intended for use by clients of the API and we may change the format. 
+func (p *Package) MarshalJSON() ([]byte, error) { + flat := &flatPackage{ + ID: p.ID, + Name: p.Name, + PkgPath: p.PkgPath, + Errors: p.Errors, + GoFiles: p.GoFiles, + CompiledGoFiles: p.CompiledGoFiles, + OtherFiles: p.OtherFiles, + ExportFile: p.ExportFile, + } + if len(p.Imports) > 0 { + flat.Imports = make(map[string]string, len(p.Imports)) + for path, ipkg := range p.Imports { + flat.Imports[path] = ipkg.ID + } + } + return json.Marshal(flat) +} + +// UnmarshalJSON reads in a Package from its JSON format. +// See MarshalJSON for details about the format accepted. +func (p *Package) UnmarshalJSON(b []byte) error { + flat := &flatPackage{} + if err := json.Unmarshal(b, &flat); err != nil { + return err + } + *p = Package{ + ID: flat.ID, + Name: flat.Name, + PkgPath: flat.PkgPath, + Errors: flat.Errors, + GoFiles: flat.GoFiles, + CompiledGoFiles: flat.CompiledGoFiles, + OtherFiles: flat.OtherFiles, + ExportFile: flat.ExportFile, + } + if len(flat.Imports) > 0 { + p.Imports = make(map[string]*Package, len(flat.Imports)) + for path, id := range flat.Imports { + p.Imports[path] = &Package{ID: id} + } + } + return nil +} + +func (p *Package) String() string { return p.ID } + +// loaderPackage augments Package with state used during the loading phase +type loaderPackage struct { + *Package + importErrors map[string]error // maps each bad import to its error + loadOnce sync.Once + color uint8 // for cycle detection + needsrc bool // load from source (Mode >= LoadTypes) + needtypes bool // type information is either requested or depended on + initial bool // package was matched by a pattern +} + +// loader holds the working state of a single call to load. 
+type loader struct { + pkgs map[string]*loaderPackage + Config + exportMu sync.Mutex // enforces mutual exclusion of exportdata operations +} + +func newLoader(cfg *Config) *loader { + ld := &loader{} + if cfg != nil { + ld.Config = *cfg + } + if ld.Config.Env == nil { + ld.Config.Env = os.Environ() + } + if ld.Context == nil { + ld.Context = context.Background() + } + if ld.Dir == "" { + if dir, err := os.Getwd(); err == nil { + ld.Dir = dir + } + } + + if ld.Mode >= LoadTypes { + if ld.Fset == nil { + ld.Fset = token.NewFileSet() + } + + // ParseFile is required even in LoadTypes mode + // because we load source if export data is missing. + if ld.ParseFile == nil { + ld.ParseFile = func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) { + var isrc interface{} + if src != nil { + isrc = src + } + const mode = parser.AllErrors | parser.ParseComments + return parser.ParseFile(fset, filename, isrc, mode) + } + } + } + return ld +} + +// refine connects the supplied packages into a graph and then adds type and +// and syntax information as requested by the LoadMode. +func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) { + isRoot := make(map[string]bool, len(roots)) + for _, root := range roots { + isRoot[root] = true + } + ld.pkgs = make(map[string]*loaderPackage) + // first pass, fixup and build the map and roots + var initial []*loaderPackage + for _, pkg := range list { + lpkg := &loaderPackage{ + Package: pkg, + needtypes: ld.Mode >= LoadAllSyntax || + ld.Mode >= LoadTypes && isRoot[pkg.ID], + needsrc: ld.Mode >= LoadAllSyntax || + ld.Mode >= LoadSyntax && isRoot[pkg.ID] || + pkg.ExportFile == "" && pkg.PkgPath != "unsafe", + } + ld.pkgs[lpkg.ID] = lpkg + if isRoot[lpkg.ID] { + initial = append(initial, lpkg) + lpkg.initial = true + } + } + + // Materialize the import graph. 
+ + const ( + white = 0 // new + grey = 1 // in progress + black = 2 // complete + ) + + // visit traverses the import graph, depth-first, + // and materializes the graph as Packages.Imports. + // + // Valid imports are saved in the Packages.Import map. + // Invalid imports (cycles and missing nodes) are saved in the importErrors map. + // Thus, even in the presence of both kinds of errors, the Import graph remains a DAG. + // + // visit returns whether the package needs src or has a transitive + // dependency on a package that does. These are the only packages + // for which we load source code. + var stack []*loaderPackage + var visit func(lpkg *loaderPackage) bool + var srcPkgs []*loaderPackage + visit = func(lpkg *loaderPackage) bool { + switch lpkg.color { + case black: + return lpkg.needsrc + case grey: + panic("internal error: grey node") + } + lpkg.color = grey + stack = append(stack, lpkg) // push + stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports + lpkg.Imports = make(map[string]*Package, len(stubs)) + for importPath, ipkg := range stubs { + var importErr error + imp := ld.pkgs[ipkg.ID] + if imp == nil { + // (includes package "C" when DisableCgo) + importErr = fmt.Errorf("missing package: %q", ipkg.ID) + } else if imp.color == grey { + importErr = fmt.Errorf("import cycle: %s", stack) + } + if importErr != nil { + if lpkg.importErrors == nil { + lpkg.importErrors = make(map[string]error) + } + lpkg.importErrors[importPath] = importErr + continue + } + + if visit(imp) { + lpkg.needsrc = true + } + lpkg.Imports[importPath] = imp.Package + } + if lpkg.needsrc { + srcPkgs = append(srcPkgs, lpkg) + } + stack = stack[:len(stack)-1] // pop + lpkg.color = black + + return lpkg.needsrc + } + + if ld.Mode < LoadImports { + //we do this to drop the stub import packages that we are not even going to try to resolve + for _, lpkg := range initial { + lpkg.Imports = nil + } + } else { + // For each initial package, create its import 
DAG. + for _, lpkg := range initial { + visit(lpkg) + } + } + for _, lpkg := range srcPkgs { + // Complete type information is required for the + // immediate dependencies of each source package. + for _, ipkg := range lpkg.Imports { + imp := ld.pkgs[ipkg.ID] + imp.needtypes = true + } + } + // Load type data if needed, starting at + // the initial packages (roots of the import DAG). + if ld.Mode >= LoadTypes { + var wg sync.WaitGroup + for _, lpkg := range initial { + wg.Add(1) + go func(lpkg *loaderPackage) { + ld.loadRecursive(lpkg) + wg.Done() + }(lpkg) + } + wg.Wait() + } + + result := make([]*Package, len(initial)) + for i, lpkg := range initial { + result[i] = lpkg.Package + } + return result, nil +} + +// loadRecursive loads the specified package and its dependencies, +// recursively, in parallel, in topological order. +// It is atomic and idempotent. +// Precondition: ld.Mode >= LoadTypes. +func (ld *loader) loadRecursive(lpkg *loaderPackage) { + lpkg.loadOnce.Do(func() { + // Load the direct dependencies, in parallel. + var wg sync.WaitGroup + for _, ipkg := range lpkg.Imports { + imp := ld.pkgs[ipkg.ID] + wg.Add(1) + go func(imp *loaderPackage) { + ld.loadRecursive(imp) + wg.Done() + }(imp) + } + wg.Wait() + + ld.loadPackage(lpkg) + }) +} + +// loadPackage loads the specified package. +// It must be called only once per Package, +// after immediate dependencies are loaded. +// Precondition: ld.Mode >= LoadTypes. +func (ld *loader) loadPackage(lpkg *loaderPackage) { + if lpkg.PkgPath == "unsafe" { + // Fill in the blanks to avoid surprises. + lpkg.Types = types.Unsafe + lpkg.Fset = ld.Fset + lpkg.Syntax = []*ast.File{} + lpkg.TypesInfo = new(types.Info) + return + } + + // Call NewPackage directly with explicit name. + // This avoids skew between golist and go/types when the files' + // package declarations are inconsistent. 
+ lpkg.Types = types.NewPackage(lpkg.PkgPath, lpkg.Name) + lpkg.Fset = ld.Fset + + // Subtle: we populate all Types fields with an empty Package + // before loading export data so that export data processing + // never has to create a types.Package for an indirect dependency, + // which would then require that such created packages be explicitly + // inserted back into the Import graph as a final step after export data loading. + // The Diamond test exercises this case. + if !lpkg.needtypes { + return + } + if !lpkg.needsrc { + ld.loadFromExportData(lpkg) + return // not a source package, don't get syntax trees + } + + appendError := func(err error) { + // Convert various error types into the one true Error. + var errs []Error + switch err := err.(type) { + case Error: + // from driver + errs = append(errs, err) + + case *os.PathError: + // from parser + errs = append(errs, Error{ + Pos: err.Path + ":1", + Msg: err.Err.Error(), + Kind: ParseError, + }) + + case scanner.ErrorList: + // from parser + for _, err := range err { + errs = append(errs, Error{ + Pos: err.Pos.String(), + Msg: err.Msg, + Kind: ParseError, + }) + } + + case types.Error: + // from type checker + errs = append(errs, Error{ + Pos: err.Fset.Position(err.Pos).String(), + Msg: err.Msg, + Kind: TypeError, + }) + + default: + // unexpected impoverished error from parser? + errs = append(errs, Error{ + Pos: "-", + Msg: err.Error(), + Kind: UnknownError, + }) + + // If you see this error message, please file a bug. + log.Printf("internal error: error %q (%T) without position", err, err) + } + + lpkg.Errors = append(lpkg.Errors, errs...) 
+ } + + files, errs := ld.parseFiles(lpkg.CompiledGoFiles) + for _, err := range errs { + appendError(err) + } + + lpkg.Syntax = files + + lpkg.TypesInfo = &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + + importer := importerFunc(func(path string) (*types.Package, error) { + if path == "unsafe" { + return types.Unsafe, nil + } + + // The imports map is keyed by import path. + ipkg := lpkg.Imports[path] + if ipkg == nil { + if err := lpkg.importErrors[path]; err != nil { + return nil, err + } + // There was skew between the metadata and the + // import declarations, likely due to an edit + // race, or because the ParseFile feature was + // used to supply alternative file contents. + return nil, fmt.Errorf("no metadata for %s", path) + } + + if ipkg.Types != nil && ipkg.Types.Complete() { + return ipkg.Types, nil + } + log.Fatalf("internal error: nil Pkg importing %q from %q", path, lpkg) + panic("unreachable") + }) + + // This is only an approximation. + // TODO(adonovan): derive Sizes from the underlying build system. + goarch := runtime.GOARCH + const goarchPrefix = "GOARCH=" + for _, e := range ld.Config.Env { + if strings.HasPrefix(e, goarchPrefix) { + goarch = e[len(goarchPrefix):] + } + } + sizes := types.SizesFor("gc", goarch) + + // type-check + tc := &types.Config{ + Importer: importer, + + // Type-check bodies of functions only in non-initial packages. + // Example: for import graph A->B->C and initial packages {A,C}, + // we can ignore function bodies in B. 
+ IgnoreFuncBodies: ld.Mode < LoadAllSyntax && !lpkg.initial, + + Error: appendError, + Sizes: sizes, + } + types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax) + + lpkg.importErrors = nil // no longer needed + + // If !Cgo, the type-checker uses FakeImportC mode, so + // it doesn't invoke the importer for import "C", + // nor report an error for the import, + // or for any undefined C.f reference. + // We must detect this explicitly and correctly + // mark the package as IllTyped (by reporting an error). + // TODO(adonovan): if these errors are annoying, + // we could just set IllTyped quietly. + if tc.FakeImportC { + outer: + for _, f := range lpkg.Syntax { + for _, imp := range f.Imports { + if imp.Path.Value == `"C"` { + err := types.Error{Fset: ld.Fset, Pos: imp.Pos(), Msg: `import "C" ignored`} + appendError(err) + break outer + } + } + } + } + + // Record accumulated errors. + illTyped := len(lpkg.Errors) > 0 + if !illTyped { + for _, imp := range lpkg.Imports { + if imp.IllTyped { + illTyped = true + break + } + } + } + lpkg.IllTyped = illTyped +} + +// An importFunc is an implementation of the single-method +// types.Importer interface based on a function value. +type importerFunc func(path string) (*types.Package, error) + +func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } + +// We use a counting semaphore to limit +// the number of parallel I/O calls per process. +var ioLimit = make(chan bool, 20) + +// parseFiles reads and parses the Go source files and returns the ASTs +// of the ones that could be at least partially parsed, along with a +// list of I/O and parse errors encountered. +// +// Because files are scanned in parallel, the token.Pos +// positions of the resulting ast.Files are not ordered. 
+// +func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) { + var wg sync.WaitGroup + n := len(filenames) + parsed := make([]*ast.File, n) + errors := make([]error, n) + for i, file := range filenames { + wg.Add(1) + go func(i int, filename string) { + ioLimit <- true // wait + // ParseFile may return both an AST and an error. + var src []byte + for f, contents := range ld.Config.Overlay { + if sameFile(f, filename) { + src = contents + } + } + var err error + if src == nil { + src, err = ioutil.ReadFile(filename) + } + if err != nil { + parsed[i], errors[i] = nil, err + } else { + parsed[i], errors[i] = ld.ParseFile(ld.Fset, filename, src) + } + <-ioLimit // signal + wg.Done() + }(i, file) + } + wg.Wait() + + // Eliminate nils, preserving order. + var o int + for _, f := range parsed { + if f != nil { + parsed[o] = f + o++ + } + } + parsed = parsed[:o] + + o = 0 + for _, err := range errors { + if err != nil { + errors[o] = err + o++ + } + } + errors = errors[:o] + + return parsed, errors +} + +// sameFile returns true if x and y have the same basename and denote +// the same file. +// +func sameFile(x, y string) bool { + if filepath.Base(x) == filepath.Base(y) { // (optimisation) + if xi, err := os.Stat(x); err == nil { + if yi, err := os.Stat(y); err == nil { + return os.SameFile(xi, yi) + } + } + } + return false +} + +// loadFromExportData returns type information for the specified +// package, loading it from an export data file on the first request. +func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error) { + if lpkg.PkgPath == "" { + log.Fatalf("internal error: Package %s has no PkgPath", lpkg) + } + + // Because gcexportdata.Read has the potential to create or + // modify the types.Package for each node in the transitive + // closure of dependencies of lpkg, all exportdata operations + // must be sequential. (Finer-grained locking would require + // changes to the gcexportdata API.) 
+ // + // The exportMu lock guards the Package.Pkg field and the + // types.Package it points to, for each Package in the graph. + // + // Not all accesses to Package.Pkg need to be protected by exportMu: + // graph ordering ensures that direct dependencies of source + // packages are fully loaded before the importer reads their Pkg field. + ld.exportMu.Lock() + defer ld.exportMu.Unlock() + + if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() { + return tpkg, nil // cache hit + } + + lpkg.IllTyped = true // fail safe + + if lpkg.ExportFile == "" { + // Errors while building export data will have been printed to stderr. + return nil, fmt.Errorf("no export data file") + } + f, err := os.Open(lpkg.ExportFile) + if err != nil { + return nil, err + } + defer f.Close() + + // Read gc export data. + // + // We don't currently support gccgo export data because all + // underlying workspaces use the gc toolchain. (Even build + // systems that support gccgo don't use it for workspace + // queries.) + r, err := gcexportdata.NewReader(f) + if err != nil { + return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err) + } + + // Build the view. + // + // The gcexportdata machinery has no concept of package ID. + // It identifies packages by their PkgPath, which although not + // globally unique is unique within the scope of one invocation + // of the linker, type-checker, or gcexportdata. + // + // So, we must build a PkgPath-keyed view of the global + // (conceptually ID-keyed) cache of packages and pass it to + // gcexportdata. The view must contain every existing + // package that might possibly be mentioned by the + // current package---its transitive closure. + // + // In loadPackage, we unconditionally create a types.Package for + // each dependency so that export data loading does not + // create new ones. 
+ // + // TODO(adonovan): it would be simpler and more efficient + // if the export data machinery invoked a callback to + // get-or-create a package instead of a map. + // + view := make(map[string]*types.Package) // view seen by gcexportdata + seen := make(map[*loaderPackage]bool) // all visited packages + var visit func(pkgs map[string]*Package) + visit = func(pkgs map[string]*Package) { + for _, p := range pkgs { + lpkg := ld.pkgs[p.ID] + if !seen[lpkg] { + seen[lpkg] = true + view[lpkg.PkgPath] = lpkg.Types + visit(lpkg.Imports) + } + } + } + visit(lpkg.Imports) + + viewLen := len(view) + 1 // adding the self package + // Parse the export data. + // (May modify incomplete packages in view but not create new ones.) + tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath) + if err != nil { + return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err) + } + if viewLen != len(view) { + log.Fatalf("Unexpected package creation during export data loading") + } + + lpkg.Types = tpkg + lpkg.IllTyped = false + + return tpkg, nil +} + +func usesExportData(cfg *Config) bool { + return LoadTypes <= cfg.Mode && cfg.Mode < LoadAllSyntax +} diff --git a/vendor/golang.org/x/tools/go/packages/visit.go b/vendor/golang.org/x/tools/go/packages/visit.go new file mode 100644 index 000000000000..c1a4b28ca03d --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/visit.go @@ -0,0 +1,55 @@ +package packages + +import ( + "fmt" + "os" + "sort" +) + +// Visit visits all the packages in the import graph whose roots are +// pkgs, calling the optional pre function the first time each package +// is encountered (preorder), and the optional post function after a +// package's dependencies have been visited (postorder). +// The boolean result of pre(pkg) determines whether +// the imports of package pkg are visited. 
+func Visit(pkgs []*Package, pre func(*Package) bool, post func(*Package)) { + seen := make(map[*Package]bool) + var visit func(*Package) + visit = func(pkg *Package) { + if !seen[pkg] { + seen[pkg] = true + + if pre == nil || pre(pkg) { + paths := make([]string, 0, len(pkg.Imports)) + for path := range pkg.Imports { + paths = append(paths, path) + } + sort.Strings(paths) // for determinism + for _, path := range paths { + visit(pkg.Imports[path]) + } + } + + if post != nil { + post(pkg) + } + } + } + for _, pkg := range pkgs { + visit(pkg) + } +} + +// PrintErrors prints to os.Stderr the accumulated errors of all +// packages in the import graph rooted at pkgs, dependencies first. +// PrintErrors returns the number of errors printed. +func PrintErrors(pkgs []*Package) int { + var n int + Visit(pkgs, nil, func(pkg *Package) { + for _, err := range pkg.Errors { + fmt.Fprintln(os.Stderr, err) + n++ + } + }) + return n +} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/callee.go b/vendor/golang.org/x/tools/go/types/typeutil/callee.go index c079a499823f..38f596daf9e2 100644 --- a/vendor/golang.org/x/tools/go/types/typeutil/callee.go +++ b/vendor/golang.org/x/tools/go/types/typeutil/callee.go @@ -7,13 +7,15 @@ package typeutil import ( "go/ast" "go/types" + + "golang.org/x/tools/go/ast/astutil" ) -// StaticCallee returns the target (function or method) of a static -// function call, if any. It returns nil for calls to builtin. -func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { +// Callee returns the named target of a function call, if any: +// a function, method, builtin, or variable. 
+func Callee(info *types.Info, call *ast.CallExpr) types.Object { var obj types.Object - switch fun := call.Fun.(type) { + switch fun := astutil.Unparen(call.Fun).(type) { case *ast.Ident: obj = info.Uses[fun] // type, var, builtin, or declared func case *ast.SelectorExpr: @@ -23,7 +25,16 @@ func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { obj = info.Uses[fun.Sel] // qualified identifier? } } - if f, ok := obj.(*types.Func); ok && !interfaceMethod(f) { + if _, ok := obj.(*types.TypeName); ok { + return nil // T(x) is a conversion, not a call + } + return obj +} + +// StaticCallee returns the target (function or method) of a static +// function call, if any. It returns nil for calls to builtins. +func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { + if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) { return f } return nil diff --git a/vendor/golang.org/x/tools/imports/fix.go b/vendor/golang.org/x/tools/imports/fix.go index 75d37f894e24..1e3bd0958d9c 100644 --- a/vendor/golang.org/x/tools/imports/fix.go +++ b/vendor/golang.org/x/tools/imports/fix.go @@ -526,21 +526,21 @@ func scanGoDirs() map[string]*pkg { result := make(map[string]*pkg) var mu sync.Mutex - add := func(srcDir, dir string) { + add := func(root gopathwalk.Root, dir string) { mu.Lock() defer mu.Unlock() if _, dup := result[dir]; dup { return } - importpath := filepath.ToSlash(dir[len(srcDir)+len("/"):]) + importpath := filepath.ToSlash(dir[len(root.Path)+len("/"):]) result[dir] = &pkg{ importPath: importpath, importPathShort: VendorlessPath(importpath), dir: dir, } } - gopathwalk.Walk(add, gopathwalk.Options{Debug: Debug, ModulesEnabled: false}) + gopathwalk.Walk(gopathwalk.SrcDirsRoots(), add, gopathwalk.Options{Debug: Debug, ModulesEnabled: false}) return result } diff --git a/vendor/golang.org/x/tools/internal/gopathwalk/walk.go b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go index cbca5b0bbfbc..dc085fc16084 100644 --- 
a/vendor/golang.org/x/tools/internal/gopathwalk/walk.go +++ b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go @@ -25,57 +25,105 @@ type Options struct { ModulesEnabled bool // Search module caches. Also disables legacy goimports ignore rules. } +// RootType indicates the type of a Root. +type RootType int + +const ( + RootUnknown RootType = iota + RootGOROOT + RootGOPATH + RootCurrentModule + RootModuleCache +) + +// A Root is a starting point for a Walk. +type Root struct { + Path string + Type RootType +} + +// SrcDirsRoots returns the roots from build.Default.SrcDirs(). Not modules-compatible. +func SrcDirsRoots() []Root { + var roots []Root + roots = append(roots, Root{filepath.Join(build.Default.GOROOT, "src"), RootGOROOT}) + for _, p := range filepath.SplitList(build.Default.GOPATH) { + roots = append(roots, Root{filepath.Join(p, "src"), RootGOPATH}) + } + return roots +} + // Walk walks Go source directories ($GOROOT, $GOPATH, etc) to find packages. // For each package found, add will be called (concurrently) with the absolute // paths of the containing source directory and the package directory. -func Walk(add func(srcDir string, dir string), opts Options) { - for _, srcDir := range build.Default.SrcDirs() { - walkDir(srcDir, add, opts) +// add will be called concurrently. 
+func Walk(roots []Root, add func(root Root, dir string), opts Options) { + for _, root := range roots { + walkDir(root, add, opts) } } -func walkDir(srcDir string, add func(string, string), opts Options) { +func walkDir(root Root, add func(Root, string), opts Options) { + if _, err := os.Stat(root.Path); os.IsNotExist(err) { + if opts.Debug { + log.Printf("skipping nonexistant directory: %v", root.Path) + } + return + } if opts.Debug { - log.Printf("scanning %s", srcDir) + log.Printf("scanning %s", root.Path) } w := &walker{ - srcDir: srcDir, - srcV: filepath.Join(srcDir, "v"), - srcMod: filepath.Join(srcDir, "mod"), - add: add, - opts: opts, + root: root, + add: add, + opts: opts, } w.init() - if err := fastwalk.Walk(srcDir, w.walk); err != nil { - log.Printf("goimports: scanning directory %v: %v", srcDir, err) + if err := fastwalk.Walk(root.Path, w.walk); err != nil { + log.Printf("gopathwalk: scanning directory %v: %v", root.Path, err) } if opts.Debug { - defer log.Printf("scanned %s", srcDir) + log.Printf("scanned %s", root.Path) } } // walker is the callback for fastwalk.Walk. type walker struct { - srcDir string // The source directory to scan. - srcV, srcMod string // vgo-style module cache dirs. Optional. - add func(string, string) // The callback that will be invoked for every possible Go package dir. - opts Options // Options passed to Walk by the user. + root Root // The source directory to scan. + add func(Root, string) // The callback that will be invoked for every possible Go package dir. + opts Options // Options passed to Walk by the user. ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files. } // init initializes the walker based on its Options. 
func (w *walker) init() { - if !w.opts.ModulesEnabled { - w.ignoredDirs = w.getIgnoredDirs(w.srcDir) + var ignoredPaths []string + if w.root.Type == RootModuleCache { + ignoredPaths = []string{"cache"} + } + if !w.opts.ModulesEnabled && w.root.Type == RootGOPATH { + ignoredPaths = w.getIgnoredDirs(w.root.Path) + ignoredPaths = append(ignoredPaths, "v", "mod") + } + + for _, p := range ignoredPaths { + full := filepath.Join(w.root.Path, p) + if fi, err := os.Stat(full); err == nil { + w.ignoredDirs = append(w.ignoredDirs, fi) + if w.opts.Debug { + log.Printf("Directory added to ignore list: %s", full) + } + } else if w.opts.Debug { + log.Printf("Error statting ignored directory: %v", err) + } } } // getIgnoredDirs reads an optional config file at /.goimportsignore // of relative directories to ignore when scanning for go files. // The provided path is one of the $GOPATH entries with "src" appended. -func (w *walker) getIgnoredDirs(path string) []os.FileInfo { +func (w *walker) getIgnoredDirs(path string) []string { file := filepath.Join(path, ".goimportsignore") slurp, err := ioutil.ReadFile(file) if w.opts.Debug { @@ -89,22 +137,14 @@ func (w *walker) getIgnoredDirs(path string) []os.FileInfo { return nil } - var ignoredDirs []os.FileInfo + var ignoredDirs []string bs := bufio.NewScanner(bytes.NewReader(slurp)) for bs.Scan() { line := strings.TrimSpace(bs.Text()) if line == "" || strings.HasPrefix(line, "#") { continue } - full := filepath.Join(path, line) - if fi, err := os.Stat(full); err == nil { - ignoredDirs = append(ignoredDirs, fi) - if w.opts.Debug { - log.Printf("Directory added to ignore list: %s", full) - } - } else if w.opts.Debug { - log.Printf("Error statting entry in .goimportsignore: %v", err) - } + ignoredDirs = append(ignoredDirs, line) } return ignoredDirs } @@ -119,12 +159,9 @@ func (w *walker) shouldSkipDir(fi os.FileInfo) bool { } func (w *walker) walk(path string, typ os.FileMode) error { - if !w.opts.ModulesEnabled && (path == w.srcV || path 
== w.srcMod) { - return filepath.SkipDir - } dir := filepath.Dir(path) if typ.IsRegular() { - if dir == w.srcDir { + if dir == w.root.Path { // Doesn't make sense to have regular files // directly in your $GOPATH/src or $GOROOT/src. return fastwalk.SkipFiles @@ -133,7 +170,7 @@ func (w *walker) walk(path string, typ os.FileMode) error { return nil } - w.add(w.srcDir, dir) + w.add(w.root, dir) return fastwalk.SkipFiles } if typ == os.ModeDir { diff --git a/vendor/golang.org/x/tools/internal/semver/semver.go b/vendor/golang.org/x/tools/internal/semver/semver.go new file mode 100644 index 000000000000..4af7118e55d2 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/semver/semver.go @@ -0,0 +1,388 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package semver implements comparison of semantic version strings. +// In this package, semantic version strings must begin with a leading "v", +// as in "v1.0.0". +// +// The general form of a semantic version string accepted by this package is +// +// vMAJOR[.MINOR[.PATCH[-PRERELEASE][+BUILD]]] +// +// where square brackets indicate optional parts of the syntax; +// MAJOR, MINOR, and PATCH are decimal integers without extra leading zeros; +// PRERELEASE and BUILD are each a series of non-empty dot-separated identifiers +// using only alphanumeric characters and hyphens; and +// all-numeric PRERELEASE identifiers must not have leading zeros. +// +// This package follows Semantic Versioning 2.0.0 (see semver.org) +// with two exceptions. First, it requires the "v" prefix. Second, it recognizes +// vMAJOR and vMAJOR.MINOR (with no prerelease or build suffixes) +// as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0. +package semver + +// parsed returns the parsed form of a semantic version string. 
+type parsed struct { + major string + minor string + patch string + short string + prerelease string + build string + err string +} + +// IsValid reports whether v is a valid semantic version string. +func IsValid(v string) bool { + _, ok := parse(v) + return ok +} + +// Canonical returns the canonical formatting of the semantic version v. +// It fills in any missing .MINOR or .PATCH and discards build metadata. +// Two semantic versions compare equal only if their canonical formattings +// are identical strings. +// The canonical invalid semantic version is the empty string. +func Canonical(v string) string { + p, ok := parse(v) + if !ok { + return "" + } + if p.build != "" { + return v[:len(v)-len(p.build)] + } + if p.short != "" { + return v + p.short + } + return v +} + +// Major returns the major version prefix of the semantic version v. +// For example, Major("v2.1.0") == "v2". +// If v is an invalid semantic version string, Major returns the empty string. +func Major(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return v[:1+len(pv.major)] +} + +// MajorMinor returns the major.minor version prefix of the semantic version v. +// For example, MajorMinor("v2.1.0") == "v2.1". +// If v is an invalid semantic version string, MajorMinor returns the empty string. +func MajorMinor(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + i := 1 + len(pv.major) + if j := i + 1 + len(pv.minor); j <= len(v) && v[i] == '.' && v[i+1:j] == pv.minor { + return v[:j] + } + return v[:i] + "." + pv.minor +} + +// Prerelease returns the prerelease suffix of the semantic version v. +// For example, Prerelease("v2.1.0-pre+meta") == "-pre". +// If v is an invalid semantic version string, Prerelease returns the empty string. +func Prerelease(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return pv.prerelease +} + +// Build returns the build suffix of the semantic version v. +// For example, Build("v2.1.0+meta") == "+meta". 
+// If v is an invalid semantic version string, Build returns the empty string. +func Build(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return pv.build +} + +// Compare returns an integer comparing two versions according to +// according to semantic version precedence. +// The result will be 0 if v == w, -1 if v < w, or +1 if v > w. +// +// An invalid semantic version string is considered less than a valid one. +// All invalid semantic version strings compare equal to each other. +func Compare(v, w string) int { + pv, ok1 := parse(v) + pw, ok2 := parse(w) + if !ok1 && !ok2 { + return 0 + } + if !ok1 { + return -1 + } + if !ok2 { + return +1 + } + if c := compareInt(pv.major, pw.major); c != 0 { + return c + } + if c := compareInt(pv.minor, pw.minor); c != 0 { + return c + } + if c := compareInt(pv.patch, pw.patch); c != 0 { + return c + } + return comparePrerelease(pv.prerelease, pw.prerelease) +} + +// Max canonicalizes its arguments and then returns the version string +// that compares greater. +func Max(v, w string) string { + v = Canonical(v) + w = Canonical(w) + if Compare(v, w) > 0 { + return v + } + return w +} + +func parse(v string) (p parsed, ok bool) { + if v == "" || v[0] != 'v' { + p.err = "missing v prefix" + return + } + p.major, v, ok = parseInt(v[1:]) + if !ok { + p.err = "bad major version" + return + } + if v == "" { + p.minor = "0" + p.patch = "0" + p.short = ".0.0" + return + } + if v[0] != '.' { + p.err = "bad minor prefix" + ok = false + return + } + p.minor, v, ok = parseInt(v[1:]) + if !ok { + p.err = "bad minor version" + return + } + if v == "" { + p.patch = "0" + p.short = ".0" + return + } + if v[0] != '.' 
{ + p.err = "bad patch prefix" + ok = false + return + } + p.patch, v, ok = parseInt(v[1:]) + if !ok { + p.err = "bad patch version" + return + } + if len(v) > 0 && v[0] == '-' { + p.prerelease, v, ok = parsePrerelease(v) + if !ok { + p.err = "bad prerelease" + return + } + } + if len(v) > 0 && v[0] == '+' { + p.build, v, ok = parseBuild(v) + if !ok { + p.err = "bad build" + return + } + } + if v != "" { + p.err = "junk on end" + ok = false + return + } + ok = true + return +} + +func parseInt(v string) (t, rest string, ok bool) { + if v == "" { + return + } + if v[0] < '0' || '9' < v[0] { + return + } + i := 1 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + if v[0] == '0' && i != 1 { + return + } + return v[:i], v[i:], true +} + +func parsePrerelease(v string) (t, rest string, ok bool) { + // "A pre-release version MAY be denoted by appending a hyphen and + // a series of dot separated identifiers immediately following the patch version. + // Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-]. + // Identifiers MUST NOT be empty. Numeric identifiers MUST NOT include leading zeroes." + if v == "" || v[0] != '-' { + return + } + i := 1 + start := 1 + for i < len(v) && v[i] != '+' { + if !isIdentChar(v[i]) && v[i] != '.' { + return + } + if v[i] == '.' { + if start == i || isBadNum(v[start:i]) { + return + } + start = i + 1 + } + i++ + } + if start == i || isBadNum(v[start:i]) { + return + } + return v[:i], v[i:], true +} + +func parseBuild(v string) (t, rest string, ok bool) { + if v == "" || v[0] != '+' { + return + } + i := 1 + start := 1 + for i < len(v) { + if !isIdentChar(v[i]) { + return + } + if v[i] == '.' 
{ + if start == i { + return + } + start = i + 1 + } + i++ + } + if start == i { + return + } + return v[:i], v[i:], true +} + +func isIdentChar(c byte) bool { + return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '-' +} + +func isBadNum(v string) bool { + i := 0 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + return i == len(v) && i > 1 && v[0] == '0' +} + +func isNum(v string) bool { + i := 0 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + return i == len(v) +} + +func compareInt(x, y string) int { + if x == y { + return 0 + } + if len(x) < len(y) { + return -1 + } + if len(x) > len(y) { + return +1 + } + if x < y { + return -1 + } else { + return +1 + } +} + +func comparePrerelease(x, y string) int { + // "When major, minor, and patch are equal, a pre-release version has + // lower precedence than a normal version. + // Example: 1.0.0-alpha < 1.0.0. + // Precedence for two pre-release versions with the same major, minor, + // and patch version MUST be determined by comparing each dot separated + // identifier from left to right until a difference is found as follows: + // identifiers consisting of only digits are compared numerically and + // identifiers with letters or hyphens are compared lexically in ASCII + // sort order. Numeric identifiers always have lower precedence than + // non-numeric identifiers. A larger set of pre-release fields has a + // higher precedence than a smaller set, if all of the preceding + // identifiers are equal. + // Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < + // 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0." + if x == y { + return 0 + } + if x == "" { + return +1 + } + if y == "" { + return -1 + } + for x != "" && y != "" { + x = x[1:] // skip - or . + y = y[1:] // skip - or . 
+ var dx, dy string + dx, x = nextIdent(x) + dy, y = nextIdent(y) + if dx != dy { + ix := isNum(dx) + iy := isNum(dy) + if ix != iy { + if ix { + return -1 + } else { + return +1 + } + } + if ix { + if len(dx) < len(dy) { + return -1 + } + if len(dx) > len(dy) { + return +1 + } + } + if dx < dy { + return -1 + } else { + return +1 + } + } + } + if x == "" { + return -1 + } else { + return +1 + } +} + +func nextIdent(x string) (dx, rest string) { + i := 0 + for i < len(x) && x[i] != '.' { + i++ + } + return x[:i], x[i:] +}