diff --git a/cmd/gosimple/README.md b/cmd/gosimple/README.md deleted file mode 100644 index efaaeeeec..000000000 --- a/cmd/gosimple/README.md +++ /dev/null @@ -1 +0,0 @@ -**Deprecated: gosimple has been merged into the staticcheck tool.** diff --git a/cmd/gosimple/gosimple.go b/cmd/gosimple/gosimple.go deleted file mode 100644 index c6fc7bd2d..000000000 --- a/cmd/gosimple/gosimple.go +++ /dev/null @@ -1,20 +0,0 @@ -// gosimple detects code that could be rewritten in a simpler way. -package main // import "honnef.co/go/tools/cmd/gosimple" -import ( - "fmt" - "os" - - "honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/lintutil" - "honnef.co/go/tools/simple" -) - -func main() { - fmt.Fprintln(os.Stderr, "Gosimple has been deprecated. Please use staticcheck instead.") - fs := lintutil.FlagSet("gosimple") - gen := fs.Bool("generated", false, "Check generated code") - fs.Parse(os.Args[1:]) - c := simple.NewChecker() - c.CheckGenerated = *gen - lintutil.ProcessFlagSet([]lint.Checker{c}, fs) -} diff --git a/cmd/megacheck/README.md b/cmd/megacheck/README.md deleted file mode 100644 index 509762886..000000000 --- a/cmd/megacheck/README.md +++ /dev/null @@ -1 +0,0 @@ -**Deprecated: megacheck has been merged into the staticcheck tool.** diff --git a/cmd/megacheck/megacheck.go b/cmd/megacheck/megacheck.go deleted file mode 100644 index 309e73c4d..000000000 --- a/cmd/megacheck/megacheck.go +++ /dev/null @@ -1,93 +0,0 @@ -// megacheck runs staticcheck, gosimple and unused. -package main // import "honnef.co/go/tools/cmd/megacheck" - -import ( - "fmt" - "os" - - "honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/lintutil" - "honnef.co/go/tools/simple" - "honnef.co/go/tools/staticcheck" - "honnef.co/go/tools/unused" -) - -func main() { - fmt.Fprintln(os.Stderr, "Megacheck has been deprecated. 
Please use staticcheck instead.") - - var flags struct { - staticcheck struct { - enabled bool - generated bool - } - gosimple struct { - enabled bool - generated bool - } - unused struct { - enabled bool - constants bool - fields bool - functions bool - types bool - variables bool - wholeProgram bool - reflection bool - } - } - fs := lintutil.FlagSet("megacheck") - fs.BoolVar(&flags.gosimple.enabled, - "simple.enabled", true, "Deprecated: use -checks instead") - fs.BoolVar(&flags.gosimple.generated, - "simple.generated", false, "Check generated code") - - fs.BoolVar(&flags.staticcheck.enabled, - "staticcheck.enabled", true, "Deprecated: use -checks instead") - fs.BoolVar(&flags.staticcheck.generated, - "staticcheck.generated", false, "Check generated code (only applies to a subset of checks)") - - fs.BoolVar(&flags.unused.enabled, - "unused.enabled", true, "Deprecated: use -checks instead") - fs.BoolVar(&flags.unused.constants, - "unused.consts", true, "Report unused constants") - fs.BoolVar(&flags.unused.fields, - "unused.fields", true, "Report unused fields") - fs.BoolVar(&flags.unused.functions, - "unused.funcs", true, "Report unused functions and methods") - fs.BoolVar(&flags.unused.types, - "unused.types", true, "Report unused types") - fs.BoolVar(&flags.unused.variables, - "unused.vars", true, "Report unused variables") - fs.BoolVar(&flags.unused.wholeProgram, - "unused.exported", false, "Treat arguments as a program and report unused exported identifiers") - fs.BoolVar(&flags.unused.reflection, - "unused.reflect", true, "Consider identifiers as used when it's likely they'll be accessed via reflection") - - fs.Bool("simple.exit-non-zero", true, "Deprecated: use -fail instead") - fs.Bool("staticcheck.exit-non-zero", true, "Deprecated: use -fail instead") - fs.Bool("unused.exit-non-zero", true, "Deprecated: use -fail instead") - - fs.Parse(os.Args[1:]) - - var checkers []lint.Checker - - if flags.staticcheck.enabled { - sac := staticcheck.NewChecker() - 
sac.CheckGenerated = flags.staticcheck.generated - checkers = append(checkers, sac) - } - - if flags.gosimple.enabled { - sc := simple.NewChecker() - sc.CheckGenerated = flags.gosimple.generated - checkers = append(checkers, sc) - } - - if flags.unused.enabled { - uc := &unused.Checker{} - uc.WholeProgram = flags.unused.wholeProgram - checkers = append(checkers, uc) - } - - lintutil.ProcessFlagSet(checkers, fs) -} diff --git a/cmd/staticcheck/staticcheck.go b/cmd/staticcheck/staticcheck.go index 6f381850d..24a369e24 100644 --- a/cmd/staticcheck/staticcheck.go +++ b/cmd/staticcheck/staticcheck.go @@ -4,6 +4,7 @@ package main // import "honnef.co/go/tools/cmd/staticcheck" import ( "os" + "golang.org/x/tools/go/analysis" "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil" "honnef.co/go/tools/simple" @@ -16,12 +17,18 @@ func main() { fs := lintutil.FlagSet("staticcheck") fs.Parse(os.Args[1:]) - checkers := []lint.Checker{ - simple.NewChecker(), - staticcheck.NewChecker(), - stylecheck.NewChecker(), - &unused.Checker{}, + var cs []*analysis.Analyzer + for _, v := range simple.Analyzers { + cs = append(cs, v) } + for _, v := range staticcheck.Analyzers { + cs = append(cs, v) + } + for _, v := range stylecheck.Analyzers { + cs = append(cs, v) + } + + cums := []lint.CumulativeChecker{unused.NewChecker()} - lintutil.ProcessFlagSet(checkers, fs) + lintutil.ProcessFlagSet(cs, cums, fs) } diff --git a/cmd/unused/README.md b/cmd/unused/README.md deleted file mode 100644 index ddab38dc0..000000000 --- a/cmd/unused/README.md +++ /dev/null @@ -1 +0,0 @@ -**Deprecated: unused has been merged into the staticcheck tool.** diff --git a/cmd/unused/main.go b/cmd/unused/main.go deleted file mode 100644 index 77b953bbb..000000000 --- a/cmd/unused/main.go +++ /dev/null @@ -1,57 +0,0 @@ -// unused reports unused identifiers (types, functions, ...) in your -// code. 
-package main // import "honnef.co/go/tools/cmd/unused" - -import ( - "fmt" - "log" - "os" - - "honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/lintutil" - "honnef.co/go/tools/unused" -) - -var ( - fConstants bool - fFields bool - fFunctions bool - fTypes bool - fVariables bool - fDebug string - fWholeProgram bool - fReflection bool -) - -func newChecker() *unused.Checker { - checker := &unused.Checker{} - if fDebug != "" { - debug, err := os.Create(fDebug) - if err != nil { - log.Fatal("couldn't open debug file:", err) - } - checker.Debug = debug - } - - checker.WholeProgram = fWholeProgram - return checker -} - -func main() { - fmt.Fprintln(os.Stderr, "Unused has been deprecated. Please use staticcheck instead.") - log.SetFlags(0) - - fs := lintutil.FlagSet("unused") - fs.BoolVar(&fConstants, "consts", true, "Report unused constants") - fs.BoolVar(&fFields, "fields", true, "Report unused fields") - fs.BoolVar(&fFunctions, "funcs", true, "Report unused functions and methods") - fs.BoolVar(&fTypes, "types", true, "Report unused types") - fs.BoolVar(&fVariables, "vars", true, "Report unused variables") - fs.StringVar(&fDebug, "debug", "", "Write a debug graph to `file`. 
Existing files will be overwritten.") - fs.BoolVar(&fWholeProgram, "exported", false, "Treat arguments as a program and report unused exported identifiers") - fs.BoolVar(&fReflection, "reflect", true, "Consider identifiers as used when it's likely they'll be accessed via reflection") - fs.Parse(os.Args[1:]) - - c := newChecker() - lintutil.ProcessFlagSet([]lint.Checker{c}, fs) -} diff --git a/config/config.go b/config/config.go index cfde5d51a..4ac006b9a 100644 --- a/config/config.go +++ b/config/config.go @@ -3,10 +3,34 @@ package config import ( "os" "path/filepath" + "reflect" "github.com/BurntSushi/toml" + "golang.org/x/tools/go/analysis" ) +var Analyzer = &analysis.Analyzer{ + Name: "config", + Doc: "loads configuration for the current package tree", + Run: func(pass *analysis.Pass) (interface{}, error) { + if len(pass.Files) == 0 { + cfg := DefaultConfig + return &cfg, nil + } + // FIXME(dh): this may yield the wrong path for generated files in the build cache + path := pass.Fset.PositionFor(pass.Files[0].Pos(), true).Filename + dir := filepath.Dir(path) + cfg, err := Load(dir) + return &cfg, err + }, + RunDespiteErrors: true, + ResultType: reflect.TypeOf((*Config)(nil)), +} + +func For(pass *analysis.Pass) *Config { + return pass.ResultOf[Analyzer].(*Config) +} + func mergeLists(a, b []string) []string { out := make([]string, 0, len(a)+len(b)) for _, el := range b { @@ -73,7 +97,7 @@ type Config struct { HTTPStatusCodeWhitelist []string `toml:"http_status_code_whitelist"` } -var defaultConfig = Config{ +var DefaultConfig = Config{ Checks: []string{"all", "-ST1000", "-ST1003", "-ST1016"}, Initialisms: []string{ "ACL", "API", "ASCII", "CPU", "CSS", "DNS", @@ -120,7 +144,7 @@ func parseConfigs(dir string) ([]Config, error) { } dir = ndir } - out = append(out, defaultConfig) + out = append(out, DefaultConfig) if len(out) < 2 { return out, nil } diff --git a/functions/concrete.go b/functions/concrete.go deleted file mode 100644 index 932acd03e..000000000 --- 
a/functions/concrete.go +++ /dev/null @@ -1,56 +0,0 @@ -package functions - -import ( - "go/token" - "go/types" - - "honnef.co/go/tools/ssa" -) - -func concreteReturnTypes(fn *ssa.Function) []*types.Tuple { - res := fn.Signature.Results() - if res == nil { - return nil - } - ifaces := make([]bool, res.Len()) - any := false - for i := 0; i < res.Len(); i++ { - _, ifaces[i] = res.At(i).Type().Underlying().(*types.Interface) - any = any || ifaces[i] - } - if !any { - return []*types.Tuple{res} - } - var out []*types.Tuple - for _, block := range fn.Blocks { - if len(block.Instrs) == 0 { - continue - } - ret, ok := block.Instrs[len(block.Instrs)-1].(*ssa.Return) - if !ok { - continue - } - vars := make([]*types.Var, res.Len()) - for i, v := range ret.Results { - var typ types.Type - if !ifaces[i] { - typ = res.At(i).Type() - } else if mi, ok := v.(*ssa.MakeInterface); ok { - // TODO(dh): if mi.X is a function call that returns - // an interface, call concreteReturnTypes on that - // function (or, really, go through Descriptions, - // avoid infinite recursion etc, just like nil error - // detection) - - // TODO(dh): support Phi nodes - typ = mi.X.Type() - } else { - typ = res.At(i).Type() - } - vars[i] = types.NewParam(token.NoPos, nil, "", typ) - } - out = append(out, types.NewTuple(vars...)) - } - // TODO(dh): deduplicate out - return out -} diff --git a/functions/functions.go b/functions/functions.go deleted file mode 100644 index 839404129..000000000 --- a/functions/functions.go +++ /dev/null @@ -1,150 +0,0 @@ -package functions - -import ( - "go/types" - "sync" - - "honnef.co/go/tools/callgraph" - "honnef.co/go/tools/callgraph/static" - "honnef.co/go/tools/ssa" - "honnef.co/go/tools/staticcheck/vrp" -) - -var stdlibDescs = map[string]Description{ - "errors.New": {Pure: true}, - - "fmt.Errorf": {Pure: true}, - "fmt.Sprintf": {Pure: true}, - "fmt.Sprint": {Pure: true}, - - "sort.Reverse": {Pure: true}, - - "strings.Map": {Pure: true}, - "strings.Repeat": {Pure: 
true}, - "strings.Replace": {Pure: true}, - "strings.Title": {Pure: true}, - "strings.ToLower": {Pure: true}, - "strings.ToLowerSpecial": {Pure: true}, - "strings.ToTitle": {Pure: true}, - "strings.ToTitleSpecial": {Pure: true}, - "strings.ToUpper": {Pure: true}, - "strings.ToUpperSpecial": {Pure: true}, - "strings.Trim": {Pure: true}, - "strings.TrimFunc": {Pure: true}, - "strings.TrimLeft": {Pure: true}, - "strings.TrimLeftFunc": {Pure: true}, - "strings.TrimPrefix": {Pure: true}, - "strings.TrimRight": {Pure: true}, - "strings.TrimRightFunc": {Pure: true}, - "strings.TrimSpace": {Pure: true}, - "strings.TrimSuffix": {Pure: true}, - - "(*net/http.Request).WithContext": {Pure: true}, - - "math/rand.Read": {NilError: true}, - "(*math/rand.Rand).Read": {NilError: true}, -} - -type Description struct { - // The function is known to be pure - Pure bool - // The function is known to be a stub - Stub bool - // The function is known to never return (panics notwithstanding) - Infinite bool - // Variable ranges - Ranges vrp.Ranges - Loops []Loop - // Function returns an error as its last argument, but it is - // always nil - NilError bool - ConcreteReturnTypes []*types.Tuple -} - -type descriptionEntry struct { - ready chan struct{} - result Description -} - -type Descriptions struct { - CallGraph *callgraph.Graph - mu sync.Mutex - cache map[*ssa.Function]*descriptionEntry -} - -func NewDescriptions(prog *ssa.Program) *Descriptions { - return &Descriptions{ - CallGraph: static.CallGraph(prog), - cache: map[*ssa.Function]*descriptionEntry{}, - } -} - -func (d *Descriptions) Get(fn *ssa.Function) Description { - d.mu.Lock() - fd := d.cache[fn] - if fd == nil { - fd = &descriptionEntry{ - ready: make(chan struct{}), - } - d.cache[fn] = fd - d.mu.Unlock() - - { - fd.result = stdlibDescs[fn.RelString(nil)] - fd.result.Pure = fd.result.Pure || d.IsPure(fn) - fd.result.Stub = fd.result.Stub || d.IsStub(fn) - fd.result.Infinite = fd.result.Infinite || !terminates(fn) - 
fd.result.Ranges = vrp.BuildGraph(fn).Solve() - fd.result.Loops = findLoops(fn) - fd.result.NilError = fd.result.NilError || IsNilError(fn) - fd.result.ConcreteReturnTypes = concreteReturnTypes(fn) - } - - close(fd.ready) - } else { - d.mu.Unlock() - <-fd.ready - } - return fd.result -} - -func IsNilError(fn *ssa.Function) bool { - // TODO(dh): This is very simplistic, as we only look for constant - // nil returns. A more advanced approach would work transitively. - // An even more advanced approach would be context-aware and - // determine nil errors based on inputs (e.g. io.WriteString to a - // bytes.Buffer will always return nil, but an io.WriteString to - // an os.File might not). Similarly, an os.File opened for reading - // won't error on Close, but other files will. - res := fn.Signature.Results() - if res.Len() == 0 { - return false - } - last := res.At(res.Len() - 1) - if types.TypeString(last.Type(), nil) != "error" { - return false - } - - if fn.Blocks == nil { - return false - } - for _, block := range fn.Blocks { - if len(block.Instrs) == 0 { - continue - } - ins := block.Instrs[len(block.Instrs)-1] - ret, ok := ins.(*ssa.Return) - if !ok { - continue - } - v := ret.Results[len(ret.Results)-1] - c, ok := v.(*ssa.Const) - if !ok { - return false - } - if !c.IsNil() { - return false - } - } - return true -} diff --git a/functions/loops.go b/functions/loops.go index 63011cf3e..92514af9a 100644 --- a/functions/loops.go +++ b/functions/loops.go @@ -4,7 +4,7 @@ import "honnef.co/go/tools/ssa" type Loop map[*ssa.BasicBlock]bool -func findLoops(fn *ssa.Function) []Loop { +func FindLoops(fn *ssa.Function) []Loop { if fn.Blocks == nil { return nil } diff --git a/functions/pure.go b/functions/pure.go index 7028eb8c6..cf914e3bb 100644 --- a/functions/pure.go +++ b/functions/pure.go @@ -1,10 +1,6 @@ package functions import ( - "go/token" - "go/types" - - "honnef.co/go/tools/callgraph" "honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" ) @@ -13,7 +9,7 @@ 
import ( // considered a stub if it has no instructions or exactly one // instruction, which must be either returning only constant values or // a panic. -func (d *Descriptions) IsStub(fn *ssa.Function) bool { +func IsStub(fn *ssa.Function) bool { if len(fn.Blocks) == 0 { return true } @@ -39,85 +35,3 @@ func (d *Descriptions) IsStub(fn *ssa.Function) bool { return false } } - -func (d *Descriptions) IsPure(fn *ssa.Function) bool { - if fn.Signature.Results().Len() == 0 { - // A function with no return values is empty or is doing some - // work we cannot see (for example because of build tags); - // don't consider it pure. - return false - } - - for _, param := range fn.Params { - if _, ok := param.Type().Underlying().(*types.Basic); !ok { - return false - } - } - - if fn.Blocks == nil { - return false - } - checkCall := func(common *ssa.CallCommon) bool { - if common.IsInvoke() { - return false - } - builtin, ok := common.Value.(*ssa.Builtin) - if !ok { - if common.StaticCallee() != fn { - if common.StaticCallee() == nil { - return false - } - // TODO(dh): ideally, IsPure wouldn't be responsible - // for avoiding infinite recursion, but - // FunctionDescriptions would be. 
- node := d.CallGraph.CreateNode(common.StaticCallee()) - if callgraph.PathSearch(node, func(other *callgraph.Node) bool { - return other.Func == fn - }) != nil { - return false - } - if !d.Get(common.StaticCallee()).Pure { - return false - } - } - } else { - switch builtin.Name() { - case "len", "cap", "make", "new": - default: - return false - } - } - return true - } - for _, b := range fn.Blocks { - for _, ins := range b.Instrs { - switch ins := ins.(type) { - case *ssa.Call: - if !checkCall(ins.Common()) { - return false - } - case *ssa.Defer: - if !checkCall(&ins.Call) { - return false - } - case *ssa.Select: - return false - case *ssa.Send: - return false - case *ssa.Go: - return false - case *ssa.Panic: - return false - case *ssa.Store: - return false - case *ssa.FieldAddr: - return false - case *ssa.UnOp: - if ins.Op == token.MUL || ins.Op == token.AND { - return false - } - } - } - } - return true -} diff --git a/functions/terminates.go b/functions/terminates.go index 65f9e16dc..3e9c3a23f 100644 --- a/functions/terminates.go +++ b/functions/terminates.go @@ -2,10 +2,10 @@ package functions import "honnef.co/go/tools/ssa" -// terminates reports whether fn is supposed to return, that is if it +// Terminates reports whether fn is supposed to return, that is if it // has at least one theoretic path that returns from the function. // Explicit panics do not count as terminating. -func terminates(fn *ssa.Function) bool { +func Terminates(fn *ssa.Function) bool { if fn.Blocks == nil { // assuming that a function terminates is the conservative // choice diff --git a/internal/cache/cache.go b/internal/cache/cache.go new file mode 100644 index 000000000..508877ce7 --- /dev/null +++ b/internal/cache/cache.go @@ -0,0 +1,473 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cache implements a build artifact cache. 
+// +// This package is a slightly modified fork of Go's +// cmd/go/internal/cache package. +package cache + +import ( + "bytes" + "crypto/sha256" + "encoding/hex" + "errors" + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + "honnef.co/go/tools/internal/renameio" +) + +// An ActionID is a cache action key, the hash of a complete description of a +// repeatable computation (command line, environment variables, +// input file contents, executable contents). +type ActionID [HashSize]byte + +// An OutputID is a cache output key, the hash of an output of a computation. +type OutputID [HashSize]byte + +// A Cache is a package cache, backed by a file system directory tree. +type Cache struct { + dir string + now func() time.Time +} + +// Open opens and returns the cache in the given directory. +// +// It is safe for multiple processes on a single machine to use the +// same cache directory in a local file system simultaneously. +// They will coordinate using operating system file locks and may +// duplicate effort but will not corrupt the cache. +// +// However, it is NOT safe for multiple processes on different machines +// to share a cache directory (for example, if the directory were stored +// in a network file system). File locking is notoriously unreliable in +// network file systems and may not suffice to protect the cache. +// +func Open(dir string) (*Cache, error) { + info, err := os.Stat(dir) + if err != nil { + return nil, err + } + if !info.IsDir() { + return nil, &os.PathError{Op: "open", Path: dir, Err: fmt.Errorf("not a directory")} + } + for i := 0; i < 256; i++ { + name := filepath.Join(dir, fmt.Sprintf("%02x", i)) + if err := os.MkdirAll(name, 0777); err != nil { + return nil, err + } + } + c := &Cache{ + dir: dir, + now: time.Now, + } + return c, nil +} + +// fileName returns the name of the file corresponding to the given id. 
+func (c *Cache) fileName(id [HashSize]byte, key string) string { + return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key) +} + +var errMissing = errors.New("cache entry not found") + +const ( + // action entry file is "v1 \n" + hexSize = HashSize * 2 + entrySize = 2 + 1 + hexSize + 1 + hexSize + 1 + 20 + 1 + 20 + 1 +) + +// verify controls whether to run the cache in verify mode. +// In verify mode, the cache always returns errMissing from Get +// but then double-checks in Put that the data being written +// exactly matches any existing entry. This provides an easy +// way to detect program behavior that would have been different +// had the cache entry been returned from Get. +// +// verify is enabled by setting the environment variable +// GODEBUG=gocacheverify=1. +var verify = false + +// DebugTest is set when GODEBUG=gocachetest=1 is in the environment. +var DebugTest = false + +func init() { initEnv() } + +func initEnv() { + verify = false + debugHash = false + debug := strings.Split(os.Getenv("GODEBUG"), ",") + for _, f := range debug { + if f == "gocacheverify=1" { + verify = true + } + if f == "gocachehash=1" { + debugHash = true + } + if f == "gocachetest=1" { + DebugTest = true + } + } +} + +// Get looks up the action ID in the cache, +// returning the corresponding output ID and file size, if any. +// Note that finding an output ID does not guarantee that the +// saved file for that output ID is still available. +func (c *Cache) Get(id ActionID) (Entry, error) { + if verify { + return Entry{}, errMissing + } + return c.get(id) +} + +type Entry struct { + OutputID OutputID + Size int64 + Time time.Time +} + +// get is Get but does not respect verify mode, so that Put can use it. 
+func (c *Cache) get(id ActionID) (Entry, error) { + missing := func() (Entry, error) { + return Entry{}, errMissing + } + f, err := os.Open(c.fileName(id, "a")) + if err != nil { + return missing() + } + defer f.Close() + entry := make([]byte, entrySize+1) // +1 to detect whether f is too long + if n, err := io.ReadFull(f, entry); n != entrySize || err != io.ErrUnexpectedEOF { + return missing() + } + if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' { + return missing() + } + eid, entry := entry[3:3+hexSize], entry[3+hexSize:] + eout, entry := entry[1:1+hexSize], entry[1+hexSize:] + esize, entry := entry[1:1+20], entry[1+20:] + etime, entry := entry[1:1+20], entry[1+20:] + var buf [HashSize]byte + if _, err := hex.Decode(buf[:], eid); err != nil || buf != id { + return missing() + } + if _, err := hex.Decode(buf[:], eout); err != nil { + return missing() + } + i := 0 + for i < len(esize) && esize[i] == ' ' { + i++ + } + size, err := strconv.ParseInt(string(esize[i:]), 10, 64) + if err != nil || size < 0 { + return missing() + } + i = 0 + for i < len(etime) && etime[i] == ' ' { + i++ + } + tm, err := strconv.ParseInt(string(etime[i:]), 10, 64) + if err != nil || size < 0 { + return missing() + } + + c.used(c.fileName(id, "a")) + + return Entry{buf, size, time.Unix(0, tm)}, nil +} + +// GetFile looks up the action ID in the cache and returns +// the name of the corresponding data file. +func (c *Cache) GetFile(id ActionID) (file string, entry Entry, err error) { + entry, err = c.Get(id) + if err != nil { + return "", Entry{}, err + } + file = c.OutputFile(entry.OutputID) + info, err := os.Stat(file) + if err != nil || info.Size() != entry.Size { + return "", Entry{}, errMissing + } + return file, entry, nil +} + +// GetBytes looks up the action ID in the cache and returns +// the corresponding output bytes. 
+// GetBytes should only be used for data that can be expected to fit in memory. +func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) { + entry, err := c.Get(id) + if err != nil { + return nil, entry, err + } + data, _ := ioutil.ReadFile(c.OutputFile(entry.OutputID)) + if sha256.Sum256(data) != entry.OutputID { + return nil, entry, errMissing + } + return data, entry, nil +} + +// OutputFile returns the name of the cache file storing output with the given OutputID. +func (c *Cache) OutputFile(out OutputID) string { + file := c.fileName(out, "d") + c.used(file) + return file +} + +// Time constants for cache expiration. +// +// We set the mtime on a cache file on each use, but at most one per mtimeInterval (1 hour), +// to avoid causing many unnecessary inode updates. The mtimes therefore +// roughly reflect "time of last use" but may in fact be older by at most an hour. +// +// We scan the cache for entries to delete at most once per trimInterval (1 day). +// +// When we do scan the cache, we delete entries that have not been used for +// at least trimLimit (5 days). Statistics gathered from a month of usage by +// Go developers found that essentially all reuse of cached entries happened +// within 5 days of the previous reuse. See golang.org/issue/22990. +const ( + mtimeInterval = 1 * time.Hour + trimInterval = 24 * time.Hour + trimLimit = 5 * 24 * time.Hour +) + +// used makes a best-effort attempt to update mtime on file, +// so that mtime reflects cache access time. +// +// Because the reflection only needs to be approximate, +// and to reduce the amount of disk activity caused by using +// cache entries, used only updates the mtime if the current +// mtime is more than an hour old. This heuristic eliminates +// nearly all of the mtime updates that would otherwise happen, +// while still keeping the mtimes useful for cache trimming. 
+func (c *Cache) used(file string) { + info, err := os.Stat(file) + if err == nil && c.now().Sub(info.ModTime()) < mtimeInterval { + return + } + os.Chtimes(file, c.now(), c.now()) +} + +// Trim removes old cache entries that are likely not to be reused. +func (c *Cache) Trim() { + now := c.now() + + // We maintain in dir/trim.txt the time of the last completed cache trim. + // If the cache has been trimmed recently enough, do nothing. + // This is the common case. + data, _ := ioutil.ReadFile(filepath.Join(c.dir, "trim.txt")) + t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64) + if err == nil && now.Sub(time.Unix(t, 0)) < trimInterval { + return + } + + // Trim each of the 256 subdirectories. + // We subtract an additional mtimeInterval + // to account for the imprecision of our "last used" mtimes. + cutoff := now.Add(-trimLimit - mtimeInterval) + for i := 0; i < 256; i++ { + subdir := filepath.Join(c.dir, fmt.Sprintf("%02x", i)) + c.trimSubdir(subdir, cutoff) + } + + // Ignore errors from here: if we don't write the complete timestamp, the + // cache will appear older than it is, and we'll trim it again next time. + renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix()))) +} + +// trimSubdir trims a single cache subdirectory. +func (c *Cache) trimSubdir(subdir string, cutoff time.Time) { + // Read all directory entries from subdir before removing + // any files, in case removing files invalidates the file offset + // in the directory scan. Also, ignore error from f.Readdirnames, + // because we don't care about reporting the error and we still + // want to process any entries found before the error. + f, err := os.Open(subdir) + if err != nil { + return + } + names, _ := f.Readdirnames(-1) + f.Close() + + for _, name := range names { + // Remove only cache entries (xxxx-a and xxxx-d). 
+ if !strings.HasSuffix(name, "-a") && !strings.HasSuffix(name, "-d") { + continue + } + entry := filepath.Join(subdir, name) + info, err := os.Stat(entry) + if err == nil && info.ModTime().Before(cutoff) { + os.Remove(entry) + } + } +} + +// putIndexEntry adds an entry to the cache recording that executing the action +// with the given id produces an output with the given output id (hash) and size. +func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify bool) error { + // Note: We expect that for one reason or another it may happen + // that repeating an action produces a different output hash + // (for example, if the output contains a time stamp or temp dir name). + // While not ideal, this is also not a correctness problem, so we + // don't make a big deal about it. In particular, we leave the action + // cache entries writable specifically so that they can be overwritten. + // + // Setting GODEBUG=gocacheverify=1 does make a big deal: + // in verify mode we are double-checking that the cache entries + // are entirely reproducible. As just noted, this may be unrealistic + // in some cases but the check is also useful for shaking out real bugs. + entry := []byte(fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano())) + if verify && allowVerify { + old, err := c.get(id) + if err == nil && (old.OutputID != out || old.Size != size) { + // panic to show stack trace, so we can see what code is generating this cache entry. + msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d", id, reverseHash(id), out, size, old.OutputID, old.Size) + panic(msg) + } + } + file := c.fileName(id, "a") + if err := ioutil.WriteFile(file, entry, 0666); err != nil { + // TODO(bcmills): This Remove potentially races with another go command writing to file. + // Can we eliminate it? 
+ os.Remove(file) + return err + } + os.Chtimes(file, c.now(), c.now()) // mainly for tests + + return nil +} + +// Put stores the given output in the cache as the output for the action ID. +// It may read file twice. The content of file must not change between the two passes. +func (c *Cache) Put(id ActionID, file io.ReadSeeker) (OutputID, int64, error) { + return c.put(id, file, true) +} + +// PutNoVerify is like Put but disables the verify check +// when GODEBUG=goverifycache=1 is set. +// It is meant for data that is OK to cache but that we expect to vary slightly from run to run, +// like test output containing times and the like. +func (c *Cache) PutNoVerify(id ActionID, file io.ReadSeeker) (OutputID, int64, error) { + return c.put(id, file, false) +} + +func (c *Cache) put(id ActionID, file io.ReadSeeker, allowVerify bool) (OutputID, int64, error) { + // Compute output ID. + h := sha256.New() + if _, err := file.Seek(0, 0); err != nil { + return OutputID{}, 0, err + } + size, err := io.Copy(h, file) + if err != nil { + return OutputID{}, 0, err + } + var out OutputID + h.Sum(out[:0]) + + // Copy to cached output file (if not already present). + if err := c.copyFile(file, out, size); err != nil { + return out, size, err + } + + // Add to cache index. + return out, size, c.putIndexEntry(id, out, size, allowVerify) +} + +// PutBytes stores the given bytes in the cache as the output for the action ID. +func (c *Cache) PutBytes(id ActionID, data []byte) error { + _, _, err := c.Put(id, bytes.NewReader(data)) + return err +} + +// copyFile copies file into the cache, expecting it to have the given +// output ID and size, if that file is not present already. +func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error { + name := c.fileName(out, "d") + info, err := os.Stat(name) + if err == nil && info.Size() == size { + // Check hash. 
+ if f, err := os.Open(name); err == nil { + h := sha256.New() + io.Copy(h, f) + f.Close() + var out2 OutputID + h.Sum(out2[:0]) + if out == out2 { + return nil + } + } + // Hash did not match. Fall through and rewrite file. + } + + // Copy file to cache directory. + mode := os.O_RDWR | os.O_CREATE + if err == nil && info.Size() > size { // shouldn't happen but fix in case + mode |= os.O_TRUNC + } + f, err := os.OpenFile(name, mode, 0666) + if err != nil { + return err + } + defer f.Close() + if size == 0 { + // File now exists with correct size. + // Only one possible zero-length file, so contents are OK too. + // Early return here makes sure there's a "last byte" for code below. + return nil + } + + // From here on, if any of the I/O writing the file fails, + // we make a best-effort attempt to truncate the file f + // before returning, to avoid leaving bad bytes in the file. + + // Copy file to f, but also into h to double-check hash. + if _, err := file.Seek(0, 0); err != nil { + f.Truncate(0) + return err + } + h := sha256.New() + w := io.MultiWriter(f, h) + if _, err := io.CopyN(w, file, size-1); err != nil { + f.Truncate(0) + return err + } + // Check last byte before writing it; writing it will make the size match + // what other processes expect to find and might cause them to start + // using the file. + buf := make([]byte, 1) + if _, err := file.Read(buf); err != nil { + f.Truncate(0) + return err + } + h.Write(buf) + sum := h.Sum(nil) + if !bytes.Equal(sum, out[:]) { + f.Truncate(0) + return fmt.Errorf("file content changed underfoot") + } + + // Commit cache file entry. + if _, err := f.Write(buf); err != nil { + f.Truncate(0) + return err + } + if err := f.Close(); err != nil { + // Data might not have been written, + // but file may look like it is the right size. + // To be extra careful, remove cached file. 
+ os.Remove(name) + return err + } + os.Chtimes(name, c.now(), c.now()) // mainly for tests + + return nil +} diff --git a/internal/cache/cache_test.go b/internal/cache/cache_test.go new file mode 100644 index 000000000..7229bc4ce --- /dev/null +++ b/internal/cache/cache_test.go @@ -0,0 +1,270 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "bytes" + "encoding/binary" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "testing" + "time" +) + +func init() { + verify = false // even if GODEBUG is set +} + +func TestBasic(t *testing.T) { + dir, err := ioutil.TempDir("", "cachetest-") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(dir) + _, err = Open(filepath.Join(dir, "notexist")) + if err == nil { + t.Fatal(`Open("tmp/notexist") succeeded, want failure`) + } + + cdir := filepath.Join(dir, "c1") + if err := os.Mkdir(cdir, 0777); err != nil { + t.Fatal(err) + } + + c1, err := Open(cdir) + if err != nil { + t.Fatalf("Open(c1) (create): %v", err) + } + if err := c1.putIndexEntry(dummyID(1), dummyID(12), 13, true); err != nil { + t.Fatalf("addIndexEntry: %v", err) + } + if err := c1.putIndexEntry(dummyID(1), dummyID(2), 3, true); err != nil { // overwrite entry + t.Fatalf("addIndexEntry: %v", err) + } + if entry, err := c1.Get(dummyID(1)); err != nil || entry.OutputID != dummyID(2) || entry.Size != 3 { + t.Fatalf("c1.Get(1) = %x, %v, %v, want %x, %v, nil", entry.OutputID, entry.Size, err, dummyID(2), 3) + } + + c2, err := Open(cdir) + if err != nil { + t.Fatalf("Open(c2) (reuse): %v", err) + } + if entry, err := c2.Get(dummyID(1)); err != nil || entry.OutputID != dummyID(2) || entry.Size != 3 { + t.Fatalf("c2.Get(1) = %x, %v, %v, want %x, %v, nil", entry.OutputID, entry.Size, err, dummyID(2), 3) + } + if err := c2.putIndexEntry(dummyID(2), dummyID(3), 4, true); err != nil { + t.Fatalf("addIndexEntry: %v", err) + } + 
if entry, err := c1.Get(dummyID(2)); err != nil || entry.OutputID != dummyID(3) || entry.Size != 4 { + t.Fatalf("c1.Get(2) = %x, %v, %v, want %x, %v, nil", entry.OutputID, entry.Size, err, dummyID(3), 4) + } +} + +func TestGrowth(t *testing.T) { + dir, err := ioutil.TempDir("", "cachetest-") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(dir) + + c, err := Open(dir) + if err != nil { + t.Fatalf("Open: %v", err) + } + + n := 10000 + if testing.Short() { + n = 1000 + } + + for i := 0; i < n; i++ { + if err := c.putIndexEntry(dummyID(i), dummyID(i*99), int64(i)*101, true); err != nil { + t.Fatalf("addIndexEntry: %v", err) + } + id := ActionID(dummyID(i)) + entry, err := c.Get(id) + if err != nil { + t.Fatalf("Get(%x): %v", id, err) + } + if entry.OutputID != dummyID(i*99) || entry.Size != int64(i)*101 { + t.Errorf("Get(%x) = %x, %d, want %x, %d", id, entry.OutputID, entry.Size, dummyID(i*99), int64(i)*101) + } + } + for i := 0; i < n; i++ { + id := ActionID(dummyID(i)) + entry, err := c.Get(id) + if err != nil { + t.Fatalf("Get2(%x): %v", id, err) + } + if entry.OutputID != dummyID(i*99) || entry.Size != int64(i)*101 { + t.Errorf("Get2(%x) = %x, %d, want %x, %d", id, entry.OutputID, entry.Size, dummyID(i*99), int64(i)*101) + } + } +} + +func TestVerifyPanic(t *testing.T) { + os.Setenv("GODEBUG", "gocacheverify=1") + initEnv() + defer func() { + os.Unsetenv("GODEBUG") + verify = false + }() + + if !verify { + t.Fatal("initEnv did not set verify") + } + + dir, err := ioutil.TempDir("", "cachetest-") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(dir) + + c, err := Open(dir) + if err != nil { + t.Fatalf("Open: %v", err) + } + + id := ActionID(dummyID(1)) + if err := c.PutBytes(id, []byte("abc")); err != nil { + t.Fatal(err) + } + + defer func() { + if err := recover(); err != nil { + t.Log(err) + return + } + }() + c.PutBytes(id, []byte("def")) + t.Fatal("mismatched Put did not panic in verify mode") +} + +func dummyID(x int) [HashSize]byte { + var 
out [HashSize]byte + binary.LittleEndian.PutUint64(out[:], uint64(x)) + return out +} + +func TestCacheTrim(t *testing.T) { + dir, err := ioutil.TempDir("", "cachetest-") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(dir) + + c, err := Open(dir) + if err != nil { + t.Fatalf("Open: %v", err) + } + const start = 1000000000 + now := int64(start) + c.now = func() time.Time { return time.Unix(now, 0) } + + checkTime := func(name string, mtime int64) { + t.Helper() + file := filepath.Join(c.dir, name[:2], name) + info, err := os.Stat(file) + if err != nil { + t.Fatal(err) + } + if info.ModTime().Unix() != mtime { + t.Fatalf("%s mtime = %d, want %d", name, info.ModTime().Unix(), mtime) + } + } + + id := ActionID(dummyID(1)) + c.PutBytes(id, []byte("abc")) + entry, _ := c.Get(id) + c.PutBytes(ActionID(dummyID(2)), []byte("def")) + mtime := now + checkTime(fmt.Sprintf("%x-a", id), mtime) + checkTime(fmt.Sprintf("%x-d", entry.OutputID), mtime) + + // Get should not change recent mtimes. + now = start + 10 + c.Get(id) + checkTime(fmt.Sprintf("%x-a", id), mtime) + checkTime(fmt.Sprintf("%x-d", entry.OutputID), mtime) + + // Get should change distant mtimes. + now = start + 5000 + mtime2 := now + if _, err := c.Get(id); err != nil { + t.Fatal(err) + } + c.OutputFile(entry.OutputID) + checkTime(fmt.Sprintf("%x-a", id), mtime2) + checkTime(fmt.Sprintf("%x-d", entry.OutputID), mtime2) + + // Trim should leave everything alone: it's all too new. + c.Trim() + if _, err := c.Get(id); err != nil { + t.Fatal(err) + } + c.OutputFile(entry.OutputID) + data, err := ioutil.ReadFile(filepath.Join(dir, "trim.txt")) + if err != nil { + t.Fatal(err) + } + checkTime(fmt.Sprintf("%x-a", dummyID(2)), start) + + // Trim less than a day later should not do any work at all. 
+ now = start + 80000 + c.Trim() + if _, err := c.Get(id); err != nil { + t.Fatal(err) + } + c.OutputFile(entry.OutputID) + data2, err := ioutil.ReadFile(filepath.Join(dir, "trim.txt")) + if err != nil { + t.Fatal(err) + } + if !bytes.Equal(data, data2) { + t.Fatalf("second trim did work: %q -> %q", data, data2) + } + + // Fast forward and do another trim just before the 5 day cutoff. + // Note that because of usedQuantum the cutoff is actually 5 days + 1 hour. + // We used c.Get(id) just now, so 5 days later it should still be kept. + // On the other hand almost a full day has gone by since we wrote dummyID(2) + // and we haven't looked at it since, so 5 days later it should be gone. + now += 5 * 86400 + checkTime(fmt.Sprintf("%x-a", dummyID(2)), start) + c.Trim() + if _, err := c.Get(id); err != nil { + t.Fatal(err) + } + c.OutputFile(entry.OutputID) + mtime3 := now + if _, err := c.Get(dummyID(2)); err == nil { // haven't done a Get for this since original write above + t.Fatalf("Trim did not remove dummyID(2)") + } + + // The c.Get(id) refreshed id's mtime again. + // Check that another 5 days later it is still not gone, + // but check by using checkTime, which doesn't bring mtime forward. + now += 5 * 86400 + c.Trim() + checkTime(fmt.Sprintf("%x-a", id), mtime3) + checkTime(fmt.Sprintf("%x-d", entry.OutputID), mtime3) + + // Half a day later Trim should still be a no-op, because there was a Trim recently. + // Even though the entry for id is now old enough to be trimmed, + // it gets a reprieve until the time comes for a new Trim scan. + now += 86400 / 2 + c.Trim() + checkTime(fmt.Sprintf("%x-a", id), mtime3) + checkTime(fmt.Sprintf("%x-d", entry.OutputID), mtime3) + + // Another half a day later, Trim should actually run, and it should remove id. 
+ now += 86400/2 + 1 + c.Trim() + if _, err := c.Get(dummyID(1)); err == nil { + t.Fatal("Trim did not remove dummyID(1)") + } +} diff --git a/internal/cache/default.go b/internal/cache/default.go new file mode 100644 index 000000000..3034f76a5 --- /dev/null +++ b/internal/cache/default.go @@ -0,0 +1,85 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "fmt" + "io/ioutil" + "log" + "os" + "path/filepath" + "sync" +) + +// Default returns the default cache to use. +func Default() (*Cache, error) { + defaultOnce.Do(initDefaultCache) + return defaultCache, defaultDirErr +} + +var ( + defaultOnce sync.Once + defaultCache *Cache +) + +// cacheREADME is a message stored in a README in the cache directory. +// Because the cache lives outside the normal Go trees, we leave the +// README as a courtesy to explain where it came from. +const cacheREADME = `This directory holds cached build artifacts from staticcheck. +` + +// initDefaultCache does the work of finding the default cache +// the first time Default is called. +func initDefaultCache() { + dir := DefaultDir() + if err := os.MkdirAll(dir, 0777); err != nil { + log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) + } + if _, err := os.Stat(filepath.Join(dir, "README")); err != nil { + // Best effort. + ioutil.WriteFile(filepath.Join(dir, "README"), []byte(cacheREADME), 0666) + } + + c, err := Open(dir) + if err != nil { + log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) + } + defaultCache = c +} + +var ( + defaultDirOnce sync.Once + defaultDir string + defaultDirErr error +) + +// DefaultDir returns the effective STATICCHECK_CACHE setting. +func DefaultDir() string { + // Save the result of the first call to DefaultDir for later use in + // initDefaultCache. 
cmd/go/main.go explicitly sets GOCACHE so that + // subprocesses will inherit it, but that means initDefaultCache can't + // otherwise distinguish between an explicit "off" and a UserCacheDir error. + + defaultDirOnce.Do(func() { + defaultDir = os.Getenv("STATICCHECK_CACHE") + if filepath.IsAbs(defaultDir) { + return + } + if defaultDir != "" { + defaultDirErr = fmt.Errorf("STATICCHECK_CACHE is not an absolute path") + return + } + + // Compute default location. + dir, err := os.UserCacheDir() + if err != nil { + defaultDirErr = fmt.Errorf("STATICCHECK_CACHE is not defined and %v", err) + return + } + defaultDir = filepath.Join(dir, "staticcheck") + }) + + return defaultDir +} diff --git a/internal/cache/hash.go b/internal/cache/hash.go new file mode 100644 index 000000000..a53543ec5 --- /dev/null +++ b/internal/cache/hash.go @@ -0,0 +1,176 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "bytes" + "crypto/sha256" + "fmt" + "hash" + "io" + "os" + "sync" +) + +var debugHash = false // set when GODEBUG=gocachehash=1 + +// HashSize is the number of bytes in a hash. +const HashSize = 32 + +// A Hash provides access to the canonical hash function used to index the cache. +// The current implementation uses salted SHA256, but clients must not assume this. +type Hash struct { + h hash.Hash + name string // for debugging + buf *bytes.Buffer // for verify +} + +// hashSalt is a salt string added to the beginning of every hash +// created by NewHash. Using the Staticcheck version makes sure that different +// versions of the command do not address the same cache +// entries, so that a bug in one version does not affect the execution +// of other versions. This salt will result in additional ActionID files +// in the cache, but not additional copies of the large output files, +// which are still addressed by unsalted SHA256. 
+var hashSalt []byte + +func SetSalt(b []byte) { + hashSalt = b +} + +// Subkey returns an action ID corresponding to mixing a parent +// action ID with a string description of the subkey. +func Subkey(parent ActionID, desc string) ActionID { + h := sha256.New() + h.Write([]byte("subkey:")) + h.Write(parent[:]) + h.Write([]byte(desc)) + var out ActionID + h.Sum(out[:0]) + if debugHash { + fmt.Fprintf(os.Stderr, "HASH subkey %x %q = %x\n", parent, desc, out) + } + if verify { + hashDebug.Lock() + hashDebug.m[out] = fmt.Sprintf("subkey %x %q", parent, desc) + hashDebug.Unlock() + } + return out +} + +// NewHash returns a new Hash. +// The caller is expected to Write data to it and then call Sum. +func NewHash(name string) *Hash { + h := &Hash{h: sha256.New(), name: name} + if debugHash { + fmt.Fprintf(os.Stderr, "HASH[%s]\n", h.name) + } + h.Write(hashSalt) + if verify { + h.buf = new(bytes.Buffer) + } + return h +} + +// Write writes data to the running hash. +func (h *Hash) Write(b []byte) (int, error) { + if debugHash { + fmt.Fprintf(os.Stderr, "HASH[%s]: %q\n", h.name, b) + } + if h.buf != nil { + h.buf.Write(b) + } + return h.h.Write(b) +} + +// Sum returns the hash of the data written previously. +func (h *Hash) Sum() [HashSize]byte { + var out [HashSize]byte + h.h.Sum(out[:0]) + if debugHash { + fmt.Fprintf(os.Stderr, "HASH[%s]: %x\n", h.name, out) + } + if h.buf != nil { + hashDebug.Lock() + if hashDebug.m == nil { + hashDebug.m = make(map[[HashSize]byte]string) + } + hashDebug.m[out] = h.buf.String() + hashDebug.Unlock() + } + return out +} + +// In GODEBUG=gocacheverify=1 mode, +// hashDebug holds the input to every computed hash ID, +// so that we can work backward from the ID involved in a +// cache entry mismatch to a description of what should be there. +var hashDebug struct { + sync.Mutex + m map[[HashSize]byte]string +} + +// reverseHash returns the input used to compute the hash id. 
+func reverseHash(id [HashSize]byte) string { + hashDebug.Lock() + s := hashDebug.m[id] + hashDebug.Unlock() + return s +} + +var hashFileCache struct { + sync.Mutex + m map[string][HashSize]byte +} + +// FileHash returns the hash of the named file. +// It caches repeated lookups for a given file, +// and the cache entry for a file can be initialized +// using SetFileHash. +// The hash used by FileHash is not the same as +// the hash used by NewHash. +func FileHash(file string) ([HashSize]byte, error) { + hashFileCache.Lock() + out, ok := hashFileCache.m[file] + hashFileCache.Unlock() + + if ok { + return out, nil + } + + h := sha256.New() + f, err := os.Open(file) + if err != nil { + if debugHash { + fmt.Fprintf(os.Stderr, "HASH %s: %v\n", file, err) + } + return [HashSize]byte{}, err + } + _, err = io.Copy(h, f) + f.Close() + if err != nil { + if debugHash { + fmt.Fprintf(os.Stderr, "HASH %s: %v\n", file, err) + } + return [HashSize]byte{}, err + } + h.Sum(out[:0]) + if debugHash { + fmt.Fprintf(os.Stderr, "HASH %s: %x\n", file, out) + } + + SetFileHash(file, out) + return out, nil +} + +// SetFileHash sets the hash returned by FileHash for file. +func SetFileHash(file string, sum [HashSize]byte) { + hashFileCache.Lock() + if hashFileCache.m == nil { + hashFileCache.m = make(map[string][HashSize]byte) + } + hashFileCache.m[file] = sum + hashFileCache.Unlock() +} diff --git a/internal/cache/hash_test.go b/internal/cache/hash_test.go new file mode 100644 index 000000000..3bf714303 --- /dev/null +++ b/internal/cache/hash_test.go @@ -0,0 +1,52 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cache + +import ( + "fmt" + "io/ioutil" + "os" + "testing" +) + +func TestHash(t *testing.T) { + oldSalt := hashSalt + hashSalt = nil + defer func() { + hashSalt = oldSalt + }() + + h := NewHash("alice") + h.Write([]byte("hello world")) + sum := fmt.Sprintf("%x", h.Sum()) + want := "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9" + if sum != want { + t.Errorf("hash(hello world) = %v, want %v", sum, want) + } +} + +func TestHashFile(t *testing.T) { + f, err := ioutil.TempFile("", "cmd-go-test-") + if err != nil { + t.Fatal(err) + } + name := f.Name() + fmt.Fprintf(f, "hello world") + defer os.Remove(name) + if err := f.Close(); err != nil { + t.Fatal(err) + } + + var h ActionID // make sure hash result is assignable to ActionID + h, err = FileHash(name) + if err != nil { + t.Fatal(err) + } + sum := fmt.Sprintf("%x", h) + want := "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9" + if sum != want { + t.Errorf("hash(hello world) = %v, want %v", sum, want) + } +} diff --git a/internal/passes/buildssa/buildssa.go b/internal/passes/buildssa/buildssa.go new file mode 100644 index 000000000..fde918d12 --- /dev/null +++ b/internal/passes/buildssa/buildssa.go @@ -0,0 +1,116 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package buildssa defines an Analyzer that constructs the SSA +// representation of an error-free package and returns the set of all +// functions within it. It does not report any diagnostics itself but +// may be used as an input to other analyzers. +// +// THIS INTERFACE IS EXPERIMENTAL AND MAY BE SUBJECT TO INCOMPATIBLE CHANGE. 
+package buildssa + +import ( + "go/ast" + "go/types" + "reflect" + + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/ssa" +) + +var Analyzer = &analysis.Analyzer{ + Name: "buildssa", + Doc: "build SSA-form IR for later passes", + Run: run, + ResultType: reflect.TypeOf(new(SSA)), +} + +// SSA provides SSA-form intermediate representation for all the +// non-blank source functions in the current package. +type SSA struct { + Pkg *ssa.Package + SrcFuncs []*ssa.Function +} + +func run(pass *analysis.Pass) (interface{}, error) { + // Plundered from ssautil.BuildPackage. + + // We must create a new Program for each Package because the + // analysis API provides no place to hang a Program shared by + // all Packages. Consequently, SSA Packages and Functions do not + // have a canonical representation across an analysis session of + // multiple packages. This is unlikely to be a problem in + // practice because the analysis API essentially forces all + // packages to be analysed independently, so any given call to + // Analysis.Run on a package will see only SSA objects belonging + // to a single Program. + + mode := ssa.GlobalDebug + + prog := ssa.NewProgram(pass.Fset, mode) + + // Create SSA packages for all imports. + // Order is not significant. + created := make(map[*types.Package]bool) + var createAll func(pkgs []*types.Package) + createAll = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !created[p] { + created[p] = true + prog.CreatePackage(p, nil, nil, true) + createAll(p.Imports()) + } + } + } + createAll(pass.Pkg.Imports()) + + // Create and build the primary package. + ssapkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false) + ssapkg.Build() + + // Compute list of source functions, including literals, + // in source order. 
+ var funcs []*ssa.Function + var addAnons func(f *ssa.Function) + addAnons = func(f *ssa.Function) { + funcs = append(funcs, f) + for _, anon := range f.AnonFuncs { + addAnons(anon) + } + } + addAnons(ssapkg.Members["init"].(*ssa.Function)) + for _, f := range pass.Files { + for _, decl := range f.Decls { + if fdecl, ok := decl.(*ast.FuncDecl); ok { + + // SSA will not build a Function + // for a FuncDecl named blank. + // That's arguably too strict but + // relaxing it would break uniqueness of + // names of package members. + if fdecl.Name.Name == "_" { + continue + } + + // (init functions have distinct Func + // objects named "init" and distinct + // ssa.Functions named "init#1", ...) + + fn := pass.TypesInfo.Defs[fdecl.Name].(*types.Func) + if fn == nil { + panic(fn) + } + + f := ssapkg.Prog.FuncValue(fn) + if f == nil { + panic(fn) + } + + addAnons(f) + } + } + } + + return &SSA{Pkg: ssapkg, SrcFuncs: funcs}, nil +} diff --git a/internal/passes/buildssa/buildssa_test.go b/internal/passes/buildssa/buildssa_test.go new file mode 100644 index 000000000..0e0435d26 --- /dev/null +++ b/internal/passes/buildssa/buildssa_test.go @@ -0,0 +1,29 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package buildssa_test + +import ( + "fmt" + "os" + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "honnef.co/go/tools/internal/passes/buildssa" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + result := analysistest.Run(t, testdata, buildssa.Analyzer, "a")[0].Result + + ssainfo := result.(*buildssa.SSA) + got := fmt.Sprint(ssainfo.SrcFuncs) + want := `[a.init a.Fib (a.T).fib]` + if got != want { + t.Errorf("SSA.SrcFuncs = %s, want %s", got, want) + for _, f := range ssainfo.SrcFuncs { + f.WriteTo(os.Stderr) + } + } +} diff --git a/internal/passes/buildssa/testdata/src/a/a.go b/internal/passes/buildssa/testdata/src/a/a.go new file mode 100644 index 000000000..ddb13dacb --- /dev/null +++ b/internal/passes/buildssa/testdata/src/a/a.go @@ -0,0 +1,16 @@ +package a + +func Fib(x int) int { + if x < 2 { + return x + } + return Fib(x-1) + Fib(x-2) +} + +type T int + +func (T) fib(x int) int { return Fib(x) } + +func _() { + print("hi") +} diff --git a/internal/renameio/renameio.go b/internal/renameio/renameio.go new file mode 100644 index 000000000..3f3f1708f --- /dev/null +++ b/internal/renameio/renameio.go @@ -0,0 +1,83 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package renameio writes files atomically by renaming temporary files. +package renameio + +import ( + "bytes" + "io" + "io/ioutil" + "os" + "path/filepath" + "runtime" + "strings" + "time" +) + +const patternSuffix = "*.tmp" + +// Pattern returns a glob pattern that matches the unrenamed temporary files +// created when writing to filename. +func Pattern(filename string) string { + return filepath.Join(filepath.Dir(filename), filepath.Base(filename)+patternSuffix) +} + +// WriteFile is like ioutil.WriteFile, but first writes data to an arbitrary +// file in the same directory as filename, then renames it atomically to the +// final name. 
+// +// That ensures that the final location, if it exists, is always a complete file. +func WriteFile(filename string, data []byte) (err error) { + return WriteToFile(filename, bytes.NewReader(data)) +} + +// WriteToFile is a variant of WriteFile that accepts the data as an io.Reader +// instead of a slice. +func WriteToFile(filename string, data io.Reader) (err error) { + f, err := ioutil.TempFile(filepath.Dir(filename), filepath.Base(filename)+patternSuffix) + if err != nil { + return err + } + defer func() { + // Only call os.Remove on f.Name() if we failed to rename it: otherwise, + // some other process may have created a new file with the same name after + // that. + if err != nil { + f.Close() + os.Remove(f.Name()) + } + }() + + if _, err := io.Copy(f, data); err != nil { + return err + } + // Sync the file before renaming it: otherwise, after a crash the reader may + // observe a 0-length file instead of the actual contents. + // See https://golang.org/issue/22397#issuecomment-380831736. + if err := f.Sync(); err != nil { + return err + } + if err := f.Close(); err != nil { + return err + } + + var start time.Time + for { + err := os.Rename(f.Name(), filename) + if err == nil || runtime.GOOS != "windows" || !strings.HasSuffix(err.Error(), "Access is denied.") { + return err + } + + // Windows seems to occasionally trigger spurious "Access is denied" errors + // here (see golang.org/issue/31247). We're not sure why. It's probably + // worth a little extra latency to avoid propagating the spurious errors. 
+ if start.IsZero() { + start = time.Now() + } else if time.Since(start) >= 500*time.Millisecond { + return err + } + time.Sleep(5 * time.Millisecond) + } +} diff --git a/internal/sharedcheck/lint.go b/internal/sharedcheck/lint.go index cf797fb1b..affee6607 100644 --- a/internal/sharedcheck/lint.go +++ b/internal/sharedcheck/lint.go @@ -4,13 +4,14 @@ import ( "go/ast" "go/types" - "honnef.co/go/tools/lint" + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/internal/passes/buildssa" . "honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" ) -func CheckRangeStringRunes(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { fn := func(node ast.Node) bool { rng, ok := node.(*ast.RangeStmt) if !ok || !IsBlank(rng.Key) { @@ -59,10 +60,11 @@ func CheckRangeStringRunes(j *lint.Job) { return true } - j.Errorf(rng, "should range over string, not []rune(string)") + pass.Reportf(rng.Pos(), "should range over string, not []rune(string)") return true } Inspect(ssafn.Syntax(), fn) } + return nil, nil } diff --git a/lint/analysis.go b/lint/analysis.go new file mode 100644 index 000000000..6e914e02a --- /dev/null +++ b/lint/analysis.go @@ -0,0 +1,39 @@ +package lint + +import ( + "go/ast" + "go/token" + "reflect" + + "golang.org/x/tools/go/analysis" +) + +var IsGeneratedAnalyzer = &analysis.Analyzer{ + Name: "isgenerated", + Doc: "annotate file names that have been code generated", + Run: func(pass *analysis.Pass) (interface{}, error) { + m := map[string]bool{} + for _, f := range pass.Files { + path := pass.Fset.PositionFor(f.Pos(), false).Filename + m[path] = isGenerated(path) + } + return m, nil + }, + RunDespiteErrors: true, + ResultType: reflect.TypeOf(map[string]bool{}), +} + +var TokenFileAnalyzer = &analysis.Analyzer{ + Name: "tokenfileanalyzer", + Doc: "creates a mapping of *token.File to *ast.File", 
+ Run: func(pass *analysis.Pass) (interface{}, error) { + m := map[*token.File]*ast.File{} + for _, af := range pass.Files { + tf := pass.Fset.File(af.Pos()) + m[tf] = af + } + return m, nil + }, + RunDespiteErrors: true, + ResultType: reflect.TypeOf(map[*token.File]*ast.File{}), +} diff --git a/lint/lint.go b/lint/lint.go index c4d9ff671..72f69ba83 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -4,36 +4,20 @@ package lint // import "honnef.co/go/tools/lint" import ( "bytes" "fmt" - "go/ast" + "go/scanner" "go/token" "go/types" - "io" - "os" "path/filepath" - "runtime" "sort" "strings" "sync" - "time" "unicode" - "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" "honnef.co/go/tools/config" - "honnef.co/go/tools/ssa" - "honnef.co/go/tools/ssa/ssautil" ) -type Job struct { - Pkg *Pkg - GoVersion int - - check Check - problems []Problem - - duration time.Duration -} - type Ignore interface { Match(p Problem) bool } @@ -42,17 +26,18 @@ type LineIgnore struct { File string Line int Checks []string - matched bool - pos token.Pos + Matched bool + Pos token.Pos } func (li *LineIgnore) Match(p Problem) bool { - if p.Position.Filename != li.File || p.Position.Line != li.Line { + pos := p.Pos + if pos.Filename != li.File || pos.Line != li.Line { return false } for _, c := range li.Checks { if m, _ := filepath.Match(c, p.Check); m { - li.matched = true + li.Matched = true return true } } @@ -61,7 +46,7 @@ func (li *LineIgnore) Match(p Problem) bool { func (li *LineIgnore) String() string { matched := "not matched" - if li.matched { + if li.Matched { matched = "matched" } return fmt.Sprintf("%s:%d %s (%s)", li.File, li.Line, strings.Join(li.Checks, ", "), matched) @@ -73,7 +58,7 @@ type FileIgnore struct { } func (fi *FileIgnore) Match(p Problem) bool { - if p.Position.Filename != fi.File { + if p.Pos.Filename != fi.File { return false } for _, c := range fi.Checks { @@ -84,43 +69,6 @@ func (fi *FileIgnore) Match(p 
Problem) bool { return false } -type GlobIgnore struct { - Pattern string - Checks []string -} - -func (gi *GlobIgnore) Match(p Problem) bool { - if gi.Pattern != "*" { - pkgpath := p.Package.Types.Path() - if strings.HasSuffix(pkgpath, "_test") { - pkgpath = pkgpath[:len(pkgpath)-len("_test")] - } - name := filepath.Join(pkgpath, filepath.Base(p.Position.Filename)) - if m, _ := filepath.Match(gi.Pattern, name); !m { - return false - } - } - for _, c := range gi.Checks { - if m, _ := filepath.Match(c, p.Check); m { - return true - } - } - return false -} - -type Program struct { - SSA *ssa.Program - InitialPackages []*Pkg - AllPackages []*packages.Package - AllFunctions []*ssa.Function -} - -func (prog *Program) Fset() *token.FileSet { - return prog.InitialPackages[0].Fset -} - -type Func func(*Job) - type Severity uint8 const ( @@ -131,367 +79,177 @@ const ( // Problem represents a problem in some source code. type Problem struct { - Position token.Position // position in source file - Text string // the prose that describes the problem + Pos token.Position + Message string Check string - Package *Pkg Severity Severity } func (p *Problem) String() string { - if p.Check == "" { - return p.Text - } - return fmt.Sprintf("%s (%s)", p.Text, p.Check) -} - -type Checker interface { - Name() string - Prefix() string - Init(*Program) - Checks() []Check -} - -type Check struct { - Fn Func - ID string - FilterGenerated bool - Doc string + return fmt.Sprintf("%s (%s)", p.Message, p.Check) } // A Linter lints Go source code. type Linter struct { - Checkers []Checker - Ignores []Ignore - GoVersion int - ReturnIgnored bool - Config config.Config - - MaxConcurrentJobs int - PrintStats bool - - automaticIgnores []Ignore -} - -func (l *Linter) ignore(p Problem) bool { - ignored := false - for _, ig := range l.automaticIgnores { - // We cannot short-circuit these, as we want to record, for - // each ignore, whether it matched or not. 
- if ig.Match(p) { - ignored = true - } - } - if ignored { - // no need to execute other ignores if we've already had a - // match. - return true - } - for _, ig := range l.Ignores { - // We can short-circuit here, as we aren't tracking any - // information. - if ig.Match(p) { - return true - } - } - - return false -} - -func (j *Job) File(node Positioner) *ast.File { - return j.Pkg.tokenFileMap[j.Pkg.Fset.File(node.Pos())] -} - -func parseDirective(s string) (cmd string, args []string) { - if !strings.HasPrefix(s, "//lint:") { - return "", nil - } - s = strings.TrimPrefix(s, "//lint:") - fields := strings.Split(s, " ") - return fields[0], fields[1:] -} - -type PerfStats struct { - PackageLoading time.Duration - SSABuild time.Duration - OtherInitWork time.Duration - CheckerInits map[string]time.Duration - Jobs []JobStat + Checkers []*analysis.Analyzer + CumulativeCheckers []CumulativeChecker + GoVersion int + Config config.Config } -type JobStat struct { - Job string - Duration time.Duration +type CumulativeChecker interface { + Analyzer() *analysis.Analyzer + Result() []types.Object + ProblemObject(*token.FileSet, types.Object) Problem } -func (stats *PerfStats) Print(w io.Writer) { - fmt.Fprintln(w, "Package loading:", stats.PackageLoading) - fmt.Fprintln(w, "SSA build:", stats.SSABuild) - fmt.Fprintln(w, "Other init work:", stats.OtherInitWork) - - fmt.Fprintln(w, "Checker inits:") - for checker, d := range stats.CheckerInits { - fmt.Fprintf(w, "\t%s: %s\n", checker, d) +func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error) { + var analyzers []*analysis.Analyzer + analyzers = append(analyzers, l.Checkers...) 
+ for _, cum := range l.CumulativeCheckers { + analyzers = append(analyzers, cum.Analyzer()) } - fmt.Fprintln(w) - fmt.Fprintln(w, "Jobs:") - sort.Slice(stats.Jobs, func(i, j int) bool { - return stats.Jobs[i].Duration < stats.Jobs[j].Duration - }) - var total time.Duration - for _, job := range stats.Jobs { - fmt.Fprintf(w, "\t%s: %s\n", job.Job, job.Duration) - total += job.Duration + r, err := NewRunner() + if err != nil { + return nil, err } - fmt.Fprintf(w, "\tTotal: %s\n", total) -} -func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { - allPkgs := allPackages(initial) - t := time.Now() - ssaprog, _ := ssautil.Packages(allPkgs, ssa.GlobalDebug) - ssaprog.Build() - if stats != nil { - stats.SSABuild = time.Since(t) + pkgs, err := r.Run(cfg, patterns, analyzers) + if err != nil { + return nil, err } - runtime.GC() - - t = time.Now() - pkgMap := map[*ssa.Package]*Pkg{} - var pkgs []*Pkg - for _, pkg := range initial { - ssapkg := ssaprog.Package(pkg.Types) - var cfg config.Config - if len(pkg.GoFiles) != 0 { - path := pkg.GoFiles[0] - dir := filepath.Dir(path) - var err error - // OPT(dh): we're rebuilding the entire config tree for - // each package. for example, if we check a/b/c and - // a/b/c/d, we'll process a, a/b, a/b/c, a, a/b, a/b/c, - // a/b/c/d – we should cache configs per package and only - // load the new levels. - cfg, err = config.Load(dir) - if err != nil { - // FIXME(dh): we couldn't load the config, what are we - // supposed to do? 
probably tell the user somehow - } - cfg = cfg.Merge(l.Config) - } - - pkg := &Pkg{ - SSA: ssapkg, - Package: pkg, - Config: cfg, - Generated: map[string]bool{}, - tokenFileMap: map[*token.File]*ast.File{}, - } - pkg.Inspector = inspector.New(pkg.Syntax) - for _, f := range pkg.Syntax { - tf := pkg.Fset.File(f.Pos()) - pkg.tokenFileMap[tf] = f - path := DisplayPosition(pkg.Fset, f.Pos()).Filename - pkg.Generated[path] = isGenerated(path) - } - pkgMap[ssapkg] = pkg - pkgs = append(pkgs, pkg) - } - - prog := &Program{ - SSA: ssaprog, - InitialPackages: pkgs, - AllPackages: allPkgs, - } - - for fn := range ssautil.AllFunctions(ssaprog) { - prog.AllFunctions = append(prog.AllFunctions, fn) - if fn.Pkg == nil { - continue - } - if pkg, ok := pkgMap[fn.Pkg]; ok { - pkg.InitialFunctions = append(pkg.InitialFunctions, fn) - } - } + tpkgToPkg := map[*types.Package]*Package{} + for _, pkg := range pkgs { + tpkgToPkg[pkg.Types] = pkg - var out []Problem - l.automaticIgnores = nil - for _, pkg := range initial { - for _, f := range pkg.Syntax { - found := false - commentLoop: - for _, cg := range f.Comments { - for _, c := range cg.List { - if strings.Contains(c.Text, "//lint:") { - found = true - break commentLoop - } + for _, err := range pkg.errs { + switch err := err.(type) { + case types.Error: + p := Problem{ + Pos: err.Fset.PositionFor(err.Pos, false), + Message: err.Msg, + Severity: Error, + Check: "compile", } - } - if !found { - continue - } - cm := ast.NewCommentMap(pkg.Fset, f, f.Comments) - for node, cgs := range cm { - for _, cg := range cgs { - for _, c := range cg.List { - if !strings.HasPrefix(c.Text, "//lint:") { - continue - } - cmd, args := parseDirective(c.Text) - switch cmd { - case "ignore", "file-ignore": - if len(args) < 2 { - // FIXME(dh): this causes duplicated warnings when using megacheck - p := Problem{ - Position: DisplayPosition(prog.Fset(), c.Pos()), - Text: "malformed linter directive; missing the required reason field?", - Check: "", - 
Package: nil, - } - out = append(out, p) - continue - } - default: - // unknown directive, ignore - continue - } - checks := strings.Split(args[0], ",") - pos := DisplayPosition(prog.Fset(), node.Pos()) - var ig Ignore - switch cmd { - case "ignore": - ig = &LineIgnore{ - File: pos.Filename, - Line: pos.Line, - Checks: checks, - pos: c.Pos(), - } - case "file-ignore": - ig = &FileIgnore{ - File: pos.Filename, - Checks: checks, - } - } - l.automaticIgnores = append(l.automaticIgnores, ig) + pkg.problems = append(pkg.problems, p) + case packages.Error: + p := Problem{ + Pos: parsePos(err.Pos), + Message: err.Msg, + Severity: Error, + Check: "compile", + } + pkg.problems = append(pkg.problems, p) + case scanner.ErrorList: + for _, err := range err { + p := Problem{ + Pos: err.Pos, + Message: err.Msg, + Severity: Error, + Check: "compile", } + pkg.problems = append(pkg.problems, p) + } + case error: + p := Problem{ + Pos: token.Position{}, + Message: err.Error(), + Severity: Error, + Check: "compile", } + pkg.problems = append(pkg.problems, p) } } } - if stats != nil { - stats.OtherInitWork = time.Since(t) - } - - for _, checker := range l.Checkers { - t := time.Now() - checker.Init(prog) - if stats != nil { - stats.CheckerInits[checker.Name()] = time.Since(t) + var problems []Problem + for _, cum := range l.CumulativeCheckers { + for _, res := range cum.Result() { + pkg := tpkgToPkg[res.Pkg()] + allowedChecks := FilterChecks(analyzers, pkg.cfg.Merge(l.Config).Checks) + if allowedChecks[cum.Analyzer().Name] { + pos := DisplayPosition(pkg.Fset, res.Pos()) + if pkg.gen[pos.Filename] { + continue + } + p := cum.ProblemObject(pkg.Fset, res) + problems = append(problems, p) + } } } - var jobs []*Job - var allChecks []string - - var wg sync.WaitGroup - for _, checker := range l.Checkers { - for _, check := range checker.Checks() { - allChecks = append(allChecks, check.ID) - if check.Fn == nil { - continue + for _, pkg := range pkgs { + for _, ig := range pkg.ignores { + for 
i := range pkg.problems { + p := &pkg.problems[i] + if ig.Match(*p) { + p.Severity = Ignored + } } - for _, pkg := range pkgs { - j := &Job{ - Pkg: pkg, - check: check, - GoVersion: l.GoVersion, + for i := range problems { + p := &problems[i] + if ig.Match(*p) { + p.Severity = Ignored } - jobs = append(jobs, j) - wg.Add(1) - go func(check Check, j *Job) { - t := time.Now() - check.Fn(j) - j.duration = time.Since(t) - wg.Done() - }(check, j) } } - } - wg.Wait() - - for _, j := range jobs { - if stats != nil { - stats.Jobs = append(stats.Jobs, JobStat{j.check.ID, j.duration}) - } - for _, p := range j.problems { - if p.Package == nil { - panic(fmt.Sprintf("internal error: problem at position %s has nil package", p.Position)) + if pkg.cfg == nil { + // The package failed to load, otherwise we would have a + // valid config. Pass through all errors. + problems = append(problems, pkg.problems...) + } else { + for _, p := range pkg.problems { + allowedChecks := FilterChecks(analyzers, pkg.cfg.Merge(l.Config).Checks) + allowedChecks["compile"] = true + if allowedChecks[p.Check] { + problems = append(problems, p) + } } - allowedChecks := FilterChecks(allChecks, p.Package.Config.Checks) + } - if l.ignore(p) { - p.Severity = Ignored + for _, ig := range pkg.ignores { + ig, ok := ig.(*LineIgnore) + if !ok { + continue } - // TODO(dh): support globs in check white/blacklist - // OPT(dh): this approach doesn't actually disable checks, - // it just discards their results. For the moment, that's - // fine. None of our checks are super expensive. In the - // future, we may want to provide opt-in expensive - // analysis, which shouldn't run at all. It may be easiest - // to implement this in the individual checks. 
- if (l.ReturnIgnored || p.Severity != Ignored) && allowedChecks[p.Check] { - out = append(out, p) + if ig.Matched { + continue } - } - } - for _, ig := range l.automaticIgnores { - ig, ok := ig.(*LineIgnore) - if !ok { - continue - } - if ig.matched { - continue - } - - couldveMatched := false - for _, pkg := range pkgs { - for _, f := range pkg.tokenFileMap { - if prog.Fset().Position(f.Pos()).Filename != ig.File { + couldveMatched := false + allowedChecks := FilterChecks(analyzers, pkg.cfg.Merge(l.Config).Checks) + for _, c := range ig.Checks { + if !allowedChecks[c] { continue } - allowedChecks := FilterChecks(allChecks, pkg.Config.Checks) - for _, c := range ig.Checks { - if !allowedChecks[c] { - continue - } - couldveMatched = true - break - } + couldveMatched = true break } - } - if !couldveMatched { - // The ignored checks were disabled for the containing package. - // Don't flag the ignore for not having matched. - continue - } - p := Problem{ - Position: DisplayPosition(prog.Fset(), ig.pos), - Text: "this linter directive didn't match anything; should it be removed?", - Check: "", - Package: nil, + if !couldveMatched { + // The ignored checks were disabled for the containing package. + // Don't flag the ignore for not having matched. 
+ continue + } + p := Problem{ + Pos: DisplayPosition(pkg.Fset, ig.Pos), + Message: "this linter directive didn't match anything; should it be removed?", + Check: "", + } + problems = append(problems, p) } - out = append(out, p) } - sort.Slice(out, func(i int, j int) bool { - pi, pj := out[i].Position, out[j].Position + if len(problems) == 0 { + return nil, nil + } + + sort.Slice(problems, func(i, j int) bool { + pi := problems[i].Pos + pj := problems[j].Pos if pi.Filename != pj.Filename { return pi.Filename < pj.Filename @@ -503,32 +261,22 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { return pi.Column < pj.Column } - return out[i].Text < out[j].Text + return problems[i].Message < problems[j].Message }) - if l.PrintStats && stats != nil { - stats.Print(os.Stderr) - } - - if len(out) < 2 { - return out - } - - uniq := make([]Problem, 0, len(out)) - uniq = append(uniq, out[0]) - prev := out[0] - for _, p := range out[1:] { - if prev.Position == p.Position && prev.Text == p.Text { - continue + var out []Problem + out = append(out, problems[0]) + for i, p := range problems[1:] { + // We may encounter duplicate problems because one file + // can be part of many packages. 
+ if problems[i] != p { + out = append(out, p) } - prev = p - uniq = append(uniq, p) } - - return uniq + return out, nil } -func FilterChecks(allChecks []string, checks []string) map[string]bool { +func FilterChecks(allChecks []*analysis.Analyzer, checks []string) map[string]bool { // OPT(dh): this entire computation could be cached per package allowedChecks := map[string]bool{} @@ -541,7 +289,7 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool { if check == "*" || check == "all" { // Match all for _, c := range allChecks { - allowedChecks[c] = b + allowedChecks[c.Name] = b } } else if strings.HasSuffix(check, "*") { // Glob @@ -549,17 +297,17 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool { isCat := strings.IndexFunc(prefix, func(r rune) bool { return unicode.IsNumber(r) }) == -1 for _, c := range allChecks { - idx := strings.IndexFunc(c, func(r rune) bool { return unicode.IsNumber(r) }) + idx := strings.IndexFunc(c.Name, func(r rune) bool { return unicode.IsNumber(r) }) if isCat { // Glob is S*, which should match S1000 but not SA1000 - cat := c[:idx] + cat := c.Name[:idx] if prefix == cat { - allowedChecks[c] = b + allowedChecks[c.Name] = b } } else { // Glob is S1* - if strings.HasPrefix(c, prefix) { - allowedChecks[c] = b + if strings.HasPrefix(c.Name, prefix) { + allowedChecks[c.Name] = b } } } @@ -571,19 +319,6 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool { return allowedChecks } -// Pkg represents a package being linted. 
-type Pkg struct { - SSA *ssa.Package - InitialFunctions []*ssa.Function - *packages.Package - Config config.Config - Inspector *inspector.Inspector - // TODO(dh): this map should probably map from *ast.File, not string - Generated map[string]bool - - tokenFileMap map[*token.File]*ast.File -} - type Positioner interface { Pos() token.Pos } @@ -602,34 +337,6 @@ func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position { return pos } -func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem { - pos := DisplayPosition(j.Pkg.Fset, n.Pos()) - if j.Pkg.Generated[pos.Filename] && j.check.FilterGenerated { - return nil - } - problem := Problem{ - Position: pos, - Text: fmt.Sprintf(format, args...), - Check: j.check.ID, - Package: j.Pkg, - } - j.problems = append(j.problems, problem) - return &j.problems[len(j.problems)-1] -} - -func allPackages(pkgs []*packages.Package) []*packages.Package { - var out []*packages.Package - packages.Visit( - pkgs, - func(pkg *packages.Package) bool { - out = append(out, pkg) - return true - }, - nil, - ) - return out -} - var bufferPool = &sync.Pool{ New: func() interface{} { buf := bytes.NewBuffer(nil) @@ -670,10 +377,24 @@ func writePackage(buf *bytes.Buffer, pkg *types.Package) { if pkg == nil { return } - var s string - s = pkg.Path() + s := pkg.Path() if s != "" { buf.WriteString(s) buf.WriteByte('.') } } + +type StringSliceVar []string + +func (v StringSliceVar) String() string { + return strings.Join(v, ",") +} + +func (v *StringSliceVar) Set(s string) error { + *v = StringSliceVar(strings.Split(s, ",")) + return nil +} + +func (v *StringSliceVar) Get() interface{} { + return []string(*v) +} diff --git a/lint/lint_test.go b/lint/lint_test.go deleted file mode 100644 index 437e75015..000000000 --- a/lint/lint_test.go +++ /dev/null @@ -1,34 +0,0 @@ -package lint_test - -import ( - "testing" - - . 
"honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/testutil" -) - -type testChecker struct{} - -func (testChecker) Name() string { return "stylecheck" } -func (testChecker) Prefix() string { return "TEST" } -func (testChecker) Init(prog *Program) {} - -func (testChecker) Checks() []Check { - return []Check{ - {ID: "TEST1000", FilterGenerated: false, Fn: testLint}, - } -} - -func testLint(j *Job) { - // Flag all functions - for _, fn := range j.Pkg.InitialFunctions { - if fn.Synthetic == "" { - j.Errorf(fn, "This is a test problem") - } - } -} - -func TestAll(t *testing.T) { - c := testChecker{} - testutil.TestAll(t, c, "") -} diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index ab6800ab9..31cf2eeea 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -4,6 +4,7 @@ package lintdsl import ( "bytes" + "flag" "fmt" "go/ast" "go/constant" @@ -12,6 +13,7 @@ import ( "go/types" "strings" + "golang.org/x/tools/go/analysis" "honnef.co/go/tools/lint" "honnef.co/go/tools/ssa" ) @@ -71,16 +73,6 @@ func IsPointerLike(T types.Type) bool { return false } -func IsGenerated(f *ast.File) bool { - comments := f.Comments - if len(comments) > 0 { - comment := comments[0].Text() - return strings.Contains(comment, "Code generated by") || - strings.Contains(comment, "DO NOT EDIT") - } - return false -} - func IsIdent(expr ast.Expr, ident string) bool { id, ok := expr.(*ast.Ident) return ok && id.Name == ident @@ -103,26 +95,26 @@ func IsZero(expr ast.Expr) bool { return IsIntLiteral(expr, "0") } -func IsOfType(j *lint.Job, expr ast.Expr, name string) bool { - return IsType(j.Pkg.TypesInfo.TypeOf(expr), name) +func IsOfType(pass *analysis.Pass, expr ast.Expr, name string) bool { + return IsType(pass.TypesInfo.TypeOf(expr), name) } -func IsInTest(j *lint.Job, node lint.Positioner) bool { +func IsInTest(pass *analysis.Pass, node lint.Positioner) bool { // FIXME(dh): this doesn't work for global variables with // initializers - f := 
j.Pkg.Fset.File(node.Pos()) + f := pass.Fset.File(node.Pos()) return f != nil && strings.HasSuffix(f.Name(), "_test.go") } -func IsInMain(j *lint.Job, node lint.Positioner) bool { +func IsInMain(pass *analysis.Pass, node lint.Positioner) bool { if node, ok := node.(packager); ok { return node.Package().Pkg.Name() == "main" } - return j.Pkg.Types.Name() == "main" + return pass.Pkg.Name() == "main" } -func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string { - info := j.Pkg.TypesInfo +func SelectorName(pass *analysis.Pass, expr *ast.SelectorExpr) string { + info := pass.TypesInfo sel := info.Selections[expr] if sel == nil { if x, ok := expr.X.(*ast.Ident); ok { @@ -138,16 +130,16 @@ func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string { return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) } -func IsNil(j *lint.Job, expr ast.Expr) bool { - return j.Pkg.TypesInfo.Types[expr].IsNil() +func IsNil(pass *analysis.Pass, expr ast.Expr) bool { + return pass.TypesInfo.Types[expr].IsNil() } -func BoolConst(j *lint.Job, expr ast.Expr) bool { - val := j.Pkg.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() +func BoolConst(pass *analysis.Pass, expr ast.Expr) bool { + val := pass.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() return constant.BoolVal(val) } -func IsBoolConst(j *lint.Job, expr ast.Expr) bool { +func IsBoolConst(pass *analysis.Pass, expr ast.Expr) bool { // We explicitly don't support typed bools because more often than // not, custom bool types are used as binary enums and the // explicit comparison is desired. 
@@ -156,7 +148,7 @@ func IsBoolConst(j *lint.Job, expr ast.Expr) bool { if !ok { return false } - obj := j.Pkg.TypesInfo.ObjectOf(ident) + obj := pass.TypesInfo.ObjectOf(ident) c, ok := obj.(*types.Const) if !ok { return false @@ -171,8 +163,8 @@ func IsBoolConst(j *lint.Job, expr ast.Expr) bool { return true } -func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) { - tv := j.Pkg.TypesInfo.Types[expr] +func ExprToInt(pass *analysis.Pass, expr ast.Expr) (int64, bool) { + tv := pass.TypesInfo.Types[expr] if tv.Value == nil { return 0, false } @@ -182,8 +174,8 @@ func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) { return constant.Int64Val(tv.Value) } -func ExprToString(j *lint.Job, expr ast.Expr) (string, bool) { - val := j.Pkg.TypesInfo.Types[expr].Value +func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) { + val := pass.TypesInfo.Types[expr].Value if val == nil { return "", false } @@ -212,20 +204,21 @@ func DereferenceR(T types.Type) types.Type { return T } -func IsGoVersion(j *lint.Job, minor int) bool { - return j.GoVersion >= minor +func IsGoVersion(pass *analysis.Pass, minor int) bool { + version := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter).Get().(int) + return version >= minor } -func CallNameAST(j *lint.Job, call *ast.CallExpr) string { +func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string { switch fun := call.Fun.(type) { case *ast.SelectorExpr: - fn, ok := j.Pkg.TypesInfo.ObjectOf(fun.Sel).(*types.Func) + fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func) if !ok { return "" } return lint.FuncName(fn) case *ast.Ident: - obj := j.Pkg.TypesInfo.ObjectOf(fun) + obj := pass.TypesInfo.ObjectOf(fun) switch obj := obj.(type) { case *types.Func: return lint.FuncName(obj) @@ -239,35 +232,35 @@ func CallNameAST(j *lint.Job, call *ast.CallExpr) string { } } -func IsCallToAST(j *lint.Job, node ast.Node, name string) bool { +func IsCallToAST(pass *analysis.Pass, node ast.Node, name string) bool { call, ok := 
node.(*ast.CallExpr) if !ok { return false } - return CallNameAST(j, call) == name + return CallNameAST(pass, call) == name } -func IsCallToAnyAST(j *lint.Job, node ast.Node, names ...string) bool { +func IsCallToAnyAST(pass *analysis.Pass, node ast.Node, names ...string) bool { for _, name := range names { - if IsCallToAST(j, node, name) { + if IsCallToAST(pass, node, name) { return true } } return false } -func Render(j *lint.Job, x interface{}) string { +func Render(pass *analysis.Pass, x interface{}) string { var buf bytes.Buffer - if err := printer.Fprint(&buf, j.Pkg.Fset, x); err != nil { + if err := printer.Fprint(&buf, pass.Fset, x); err != nil { panic(err) } return buf.String() } -func RenderArgs(j *lint.Job, args []ast.Expr) string { +func RenderArgs(pass *analysis.Pass, args []ast.Expr) string { var ss []string for _, arg := range args { - ss = append(ss, Render(j, arg)) + ss = append(ss, Render(pass, arg)) } return strings.Join(ss, ", ") } @@ -359,3 +352,26 @@ func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Fiel } return out } + +func File(pass *analysis.Pass, node lint.Positioner) *ast.File { + pass.Fset.PositionFor(node.Pos(), true) + m := pass.ResultOf[lint.TokenFileAnalyzer].(map[*token.File]*ast.File) + return m[pass.Fset.File(node.Pos())] +} + +// IsGenerated reports whether pos is in a generated file, It ignores +// //line directives. +func IsGenerated(pass *analysis.Pass, pos token.Pos) bool { + file := pass.Fset.PositionFor(pos, false).Filename + m := pass.ResultOf[lint.IsGeneratedAnalyzer].(map[string]bool) + return m[file] +} + +func ReportfFG(pass *analysis.Pass, pos token.Pos, f string, args ...interface{}) { + file := lint.DisplayPosition(pass.Fset, pos).Filename + m := pass.ResultOf[lint.IsGeneratedAnalyzer].(map[string]bool) + if m[file] { + return + } + pass.Reportf(pos, f, args...) 
+} diff --git a/lint/lintutil/format/format.go b/lint/lintutil/format/format.go index 23aa132de..e4c3fd315 100644 --- a/lint/lintutil/format/format.go +++ b/lint/lintutil/format/format.go @@ -51,7 +51,7 @@ type Text struct { } func (o Text) Format(p lint.Problem) { - fmt.Fprintf(o.W, "%v: %s\n", relativePositionString(p.Position), p.String()) + fmt.Fprintf(o.W, "%v: %s\n", relativePositionString(p.Pos), p.String()) } type JSON struct { @@ -85,11 +85,11 @@ func (o JSON) Format(p lint.Problem) { Code: p.Check, Severity: severity(p.Severity), Location: location{ - File: p.Position.Filename, - Line: p.Position.Line, - Column: p.Position.Column, + File: p.Pos.Filename, + Line: p.Pos.Line, + Column: p.Pos.Column, }, - Message: p.Text, + Message: p.Message, } _ = json.NewEncoder(o.W).Encode(jp) } @@ -102,20 +102,21 @@ type Stylish struct { } func (o *Stylish) Format(p lint.Problem) { - if p.Position.Filename == "" { - p.Position.Filename = "-" + pos := p.Pos + if pos.Filename == "" { + pos.Filename = "-" } - if p.Position.Filename != o.prevFile { + if pos.Filename != o.prevFile { if o.prevFile != "" { o.tw.Flush() fmt.Fprintln(o.W) } - fmt.Fprintln(o.W, p.Position.Filename) - o.prevFile = p.Position.Filename + fmt.Fprintln(o.W, pos.Filename) + o.prevFile = pos.Filename o.tw = tabwriter.NewWriter(o.W, 0, 4, 2, ' ', 0) } - fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", p.Position.Line, p.Position.Column, p.Check, p.Text) + fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", pos.Line, pos.Column, p.Check, p.Message) } func (o *Stylish) Stats(total, errors, warnings int) { diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 701711831..ab8942589 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -8,65 +8,49 @@ package lintutil // import "honnef.co/go/tools/lint/lintutil" import ( + "crypto/sha256" "errors" "flag" "fmt" "go/build" "go/token" + "io" "log" "os" "regexp" "runtime" - "runtime/debug" "runtime/pprof" "strconv" "strings" - "time" "honnef.co/go/tools/config" 
+ "honnef.co/go/tools/internal/cache" "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil/format" "honnef.co/go/tools/version" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" ) -func usage(name string, flags *flag.FlagSet) func() { - return func() { - fmt.Fprintf(os.Stderr, "Usage of %s:\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] # runs on package in current directory\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] packages\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] directory\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] files... # must be a single package\n", name) - fmt.Fprintf(os.Stderr, "Flags:\n") - flags.PrintDefaults() - } -} - -func parseIgnore(s string) ([]lint.Ignore, error) { - var out []lint.Ignore - if len(s) == 0 { - return nil, nil - } - for _, part := range strings.Fields(s) { - p := strings.Split(part, ":") - if len(p) != 2 { - return nil, errors.New("malformed ignore string") - } - path := p[0] - checks := strings.Split(p[1], ",") - out = append(out, &lint.GlobIgnore{Pattern: path, Checks: checks}) +func NewVersionFlag() flag.Getter { + tags := build.Default.ReleaseTags + v := tags[len(tags)-1][2:] + version := new(VersionFlag) + if err := version.Set(v); err != nil { + panic(fmt.Sprintf("internal error: %s", err)) } - return out, nil + return version } -type versionFlag int +type VersionFlag int -func (v *versionFlag) String() string { +func (v *VersionFlag) String() string { return fmt.Sprintf("1.%d", *v) + } -func (v *versionFlag) Set(s string) error { +func (v *VersionFlag) Set(s string) error { if len(s) < 3 { return errors.New("invalid Go version") } @@ -77,14 +61,26 @@ func (v *versionFlag) Set(s string) error { return errors.New("invalid Go version") } i, err := strconv.Atoi(s[2:]) - *v = versionFlag(i) + *v = VersionFlag(i) return err } -func (v *versionFlag) Get() interface{} { +func (v *VersionFlag) Get() interface{} { return int(*v) } +func usage(name string, flags *flag.FlagSet) func() { + 
return func() { + fmt.Fprintf(os.Stderr, "Usage of %s:\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] # runs on package in current directory\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] packages\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] directory\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] files... # must be a single package\n", name) + fmt.Fprintf(os.Stderr, "Flags:\n") + flags.PrintDefaults() + } +} + type list []string func (list *list) String() string { @@ -112,8 +108,6 @@ func FlagSet(name string) *flag.FlagSet { flags.String("f", "text", "Output `format` (valid choices are 'stylish', 'text' and 'json')") flags.String("explain", "", "Print description of `check`") - flags.Int("debug.max-concurrent-jobs", 0, "Number of jobs to run concurrently") - flags.Bool("debug.print-stats", false, "Print debug statistics") flags.String("debug.cpuprofile", "", "Write CPU profile to `file`") flags.String("debug.memprofile", "", "Write memory profile to `file`") @@ -124,7 +118,7 @@ func FlagSet(name string) *flag.FlagSet { tags := build.Default.ReleaseTags v := tags[len(tags)-1][2:] - version := new(versionFlag) + version := new(VersionFlag) if err := version.Set(v); err != nil { panic(fmt.Sprintf("internal error: %s", err)) } @@ -133,22 +127,16 @@ func FlagSet(name string) *flag.FlagSet { return flags } -func findCheck(cs []lint.Checker, check string) (lint.Check, bool) { +func findCheck(cs []*analysis.Analyzer, check string) (*analysis.Analyzer, bool) { for _, c := range cs { - for _, cc := range c.Checks() { - if cc.ID == check { - return cc, true - } + if c.Name == check { + return c, true } } - return lint.Check{}, false + return nil, false } -func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { - if _, ok := os.LookupEnv("GOGC"); !ok { - debug.SetGCPercent(50) - } - +func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *flag.FlagSet) { tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) ignore := 
fs.Lookup("ignore").Value.(flag.Getter).Get().(string) tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) @@ -158,8 +146,6 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool) explain := fs.Lookup("explain").Value.(flag.Getter).Get().(string) - maxConcurrentJobs := fs.Lookup("debug.max-concurrent-jobs").Value.(flag.Getter).Get().(int) - printStats := fs.Lookup("debug.print-stats").Value.(flag.Getter).Get().(bool) cpuProfile := fs.Lookup("debug.cpuprofile").Value.(flag.Getter).Get().(string) memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string) @@ -194,7 +180,12 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { } if explain != "" { - check, ok := findCheck(cs, explain) + var haystack []*analysis.Analyzer + haystack = append(haystack, cs...) + for _, cum := range cums { + haystack = append(haystack, cum.Analyzer()) + } + check, ok := findCheck(haystack, explain) if !ok { fmt.Fprintln(os.Stderr, "Couldn't find check", explain) exit(1) @@ -207,16 +198,12 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { exit(0) } - ps, err := Lint(cs, fs.Args(), &Options{ - Tags: strings.Fields(tags), - LintTests: tests, - Ignores: ignore, - GoVersion: goVersion, - ReturnIgnored: showIgnored, - Config: cfg, - - MaxConcurrentJobs: maxConcurrentJobs, - PrintStats: printStats, + ps, err := Lint(cs, cums, fs.Args(), &Options{ + Tags: strings.Fields(tags), + LintTests: tests, + Ignores: ignore, + GoVersion: goVersion, + Config: cfg, }) if err != nil { fmt.Fprintln(os.Stderr, err) @@ -243,15 +230,19 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { ) fail := *fs.Lookup("fail").Value.(*list) - var allChecks []string - for _, p := range ps { - allChecks = append(allChecks, p.Check) + analyzers := make([]*analysis.Analyzer, len(cs), len(cs)+len(cums)) + copy(analyzers, cs) + for _, cum := range cums { + analyzers = append(analyzers, 
cum.Analyzer()) } - - shouldExit := lint.FilterChecks(allChecks, fail) + shouldExit := lint.FilterChecks(analyzers, fail) + shouldExit["compile"] = true total = len(ps) for _, p := range ps { + if p.Severity == lint.Ignored && !showIgnored { + continue + } if shouldExit[p.Check] { errors++ } else { @@ -271,75 +262,54 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { type Options struct { Config config.Config - Tags []string - LintTests bool - Ignores string - GoVersion int - ReturnIgnored bool - - MaxConcurrentJobs int - PrintStats bool + Tags []string + LintTests bool + Ignores string + GoVersion int } -func Lint(cs []lint.Checker, paths []string, opt *Options) ([]lint.Problem, error) { - stats := lint.PerfStats{ - CheckerInits: map[string]time.Duration{}, +func computeSalt() ([]byte, error) { + if version.Version != "devel" { + return []byte(version.Version), nil } - - if opt == nil { - opt = &Options{} + p, err := os.Executable() + if err != nil { + return nil, err } - ignores, err := parseIgnore(opt.Ignores) + f, err := os.Open(p) if err != nil { return nil, err } - - conf := &packages.Config{ - Mode: packages.LoadAllSyntax, - Tests: opt.LintTests, - BuildFlags: []string{ - "-tags=" + strings.Join(opt.Tags, " "), - }, + defer f.Close() + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return nil, err } + return h.Sum(nil), nil +} - t := time.Now() - if len(paths) == 0 { - paths = []string{"."} - } - pkgs, err := packages.Load(conf, paths...) +func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string, opt *Options) ([]lint.Problem, error) { + salt, err := computeSalt() if err != nil { - return nil, err - } - stats.PackageLoading = time.Since(t) - runtime.GC() - - var problems []lint.Problem - workingPkgs := make([]*packages.Package, 0, len(pkgs)) - for _, pkg := range pkgs { - if pkg.IllTyped { - problems = append(problems, compileErrors(pkg)...) 
- } else { - workingPkgs = append(workingPkgs, pkg) - } + return nil, fmt.Errorf("could not compute salt for cache: %s", err) } + cache.SetSalt(salt) - if len(workingPkgs) == 0 { - return problems, nil + if opt == nil { + opt = &Options{} } l := &lint.Linter{ - Checkers: cs, - Ignores: ignores, - GoVersion: opt.GoVersion, - ReturnIgnored: opt.ReturnIgnored, - Config: opt.Config, - - MaxConcurrentJobs: opt.MaxConcurrentJobs, - PrintStats: opt.PrintStats, + Checkers: cs, + CumulativeCheckers: cums, + GoVersion: opt.GoVersion, + Config: opt.Config, } - problems = append(problems, l.Lint(workingPkgs, &stats)...) - - return problems, nil + cfg := &packages.Config{} + if opt.LintTests { + cfg.Tests = true + } + return l.Lint(cfg, paths) } var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`) @@ -361,34 +331,3 @@ func parsePos(pos string) token.Position { Column: col, } } - -func compileErrors(pkg *packages.Package) []lint.Problem { - if !pkg.IllTyped { - return nil - } - if len(pkg.Errors) == 0 { - // transitively ill-typed - var ps []lint.Problem - for _, imp := range pkg.Imports { - ps = append(ps, compileErrors(imp)...) 
- } - return ps - } - var ps []lint.Problem - for _, err := range pkg.Errors { - p := lint.Problem{ - Position: parsePos(err.Pos), - Text: err.Msg, - Check: "compile", - } - ps = append(ps, p) - } - return ps -} - -func ProcessArgs(name string, cs []lint.Checker, args []string) { - flags := FlagSet(name) - flags.Parse(args) - - ProcessFlagSet(cs, flags) -} diff --git a/lint/runner.go b/lint/runner.go new file mode 100644 index 000000000..332c805e9 --- /dev/null +++ b/lint/runner.go @@ -0,0 +1,659 @@ +package lint + +import ( + "bytes" + "encoding/gob" + "encoding/hex" + "fmt" + "go/ast" + "go/token" + "go/types" + "reflect" + "regexp" + "runtime" + "sort" + "strconv" + "strings" + "sync" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/types/objectpath" + "honnef.co/go/tools/config" + "honnef.co/go/tools/internal/cache" + "honnef.co/go/tools/loader" +) + +type Package struct { + *packages.Package + Imports map[string]*Package + initial bool + fromSource bool + hash string + + resultsMu sync.Mutex + results map[*analysis.Analyzer]*result + + cfg *config.Config + gen map[string]bool + problems []Problem + ignores []Ignore + errs []error +} + +type result struct { + v interface{} + err error + ready chan struct{} +} + +type buildResult struct { + done chan struct{} +} + +type Runner struct { + ld loader.Loader + cache *cache.Cache + + factsMu sync.RWMutex + facts map[types.Object][]analysis.Fact + pkgFacts map[*types.Package][]analysis.Fact + + builtMu sync.Mutex + built map[*Package]*buildResult +} + +func (r *Runner) importObjectFact(obj types.Object, fact analysis.Fact) bool { + r.factsMu.RLock() + defer r.factsMu.RUnlock() + // OPT(dh): consider looking for the fact in the analysisAction + // first, to avoid lock contention + for _, f := range r.facts[obj] { + if reflect.TypeOf(f) == reflect.TypeOf(fact) { + reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) + return true + } + } + return false +} + +func (r 
*Runner) importPackageFact(pkg *types.Package, fact analysis.Fact) bool { + r.factsMu.RLock() + defer r.factsMu.RUnlock() + for _, f := range r.pkgFacts[pkg] { + if reflect.TypeOf(f) == reflect.TypeOf(fact) { + reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) + return true + } + } + return false +} + +func (r *Runner) exportObjectFact(ac *analysisAction, obj types.Object, fact analysis.Fact) { + r.factsMu.Lock() + r.facts[obj] = append(r.facts[obj], fact) + r.factsMu.Unlock() + path, err := objectpath.For(obj) + if err == nil { + ac.newFacts = append(ac.newFacts, Fact{string(path), fact}) + } +} + +func (r *Runner) exportPackageFact(ac *analysisAction, fact analysis.Fact) { + r.factsMu.Lock() + r.pkgFacts[ac.pkg.Types] = append(r.pkgFacts[ac.pkg.Types], fact) + r.factsMu.Unlock() + ac.newFacts = append(ac.newFacts, Fact{"", fact}) +} + +type Fact struct { + Path string + Fact analysis.Fact +} + +type analysisAction struct { + analyzer *analysis.Analyzer + pkg *Package + newFacts []Fact + problems []Problem +} + +func (ac *analysisAction) report(pass *analysis.Pass, d analysis.Diagnostic) { + p := Problem{ + Pos: DisplayPosition(pass.Fset, d.Pos), + Message: d.Message, + Check: pass.Analyzer.Name, + } + ac.problems = append(ac.problems, p) +} + +func (r *Runner) runAnalysis(ac *analysisAction) (ret interface{}, err error) { + ac.pkg.resultsMu.Lock() + res := ac.pkg.results[ac.analyzer] + if res != nil { + ac.pkg.resultsMu.Unlock() + <-res.ready + return res.v, res.err + } else { + res = &result{ + ready: make(chan struct{}), + } + ac.pkg.results[ac.analyzer] = res + ac.pkg.resultsMu.Unlock() + + defer func() { + res.v = ret + res.err = err + close(res.ready) + }() + + // Package may be a dependency or a package the user requested + // Facts for a dependency may be cached or not + // Diagnostics for a user package may be cached or not (not yet) + // When we have to analyze a package, we have to analyze it with all dependencies. 
+ + pass := new(analysis.Pass) + *pass = analysis.Pass{ + Analyzer: ac.analyzer, + Fset: ac.pkg.Fset, + Files: ac.pkg.Syntax, + // type information may be nil or may be populated. if it is + // nil, it will get populated later. + Pkg: ac.pkg.Types, + TypesInfo: ac.pkg.TypesInfo, + TypesSizes: ac.pkg.TypesSizes, + ResultOf: map[*analysis.Analyzer]interface{}{}, + ImportObjectFact: r.importObjectFact, + ImportPackageFact: r.importPackageFact, + ExportObjectFact: func(obj types.Object, fact analysis.Fact) { + r.exportObjectFact(ac, obj, fact) + }, + ExportPackageFact: func(fact analysis.Fact) { + r.exportPackageFact(ac, fact) + }, + Report: func(d analysis.Diagnostic) { + ac.report(pass, d) + }, + } + + if !ac.pkg.initial { + // Don't report problems in dependencies + pass.Report = func(analysis.Diagnostic) {} + } + return r.runAnalysisUser(pass, ac) + } +} + +func (r *Runner) loadCachedFacts(a *analysis.Analyzer, pkg *Package) ([]Fact, bool) { + if len(a.FactTypes) == 0 { + return nil, true + } + + var facts []Fact + // Look in the cache for facts + aID, err := passActionID(pkg, a) + if err != nil { + return nil, false + } + aID = cache.Subkey(aID, "facts") + b, _, err := r.cache.GetBytes(aID) + if err != nil { + // No cached facts, analyse this package like a user-provided one, but ignore diagnostics + return nil, false + } + + if err := gob.NewDecoder(bytes.NewReader(b)).Decode(&facts); err != nil { + // Cached facts are broken, analyse this package like a user-provided one, but ignore diagnostics + return nil, false + } + return facts, true +} + +func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (interface{}, error) { + if !ac.pkg.fromSource { + panic(fmt.Sprintf("internal error: %s was not loaded from source", ac.pkg)) + } + + // User-provided package, analyse it + // First analyze it with dependencies + var req []*analysis.Analyzer + req = append(req, ac.analyzer.Requires...) 
 + if pass.Analyzer != IsGeneratedAnalyzer && pass.Analyzer != config.Analyzer { + // Ensure all packages have the generated map and config. This is + // required by internals of the runner. Analyses that themselves + // make use of either have an explicit dependency so that other + // runners work correctly, too. + req = append(req, IsGeneratedAnalyzer, config.Analyzer) + } + for _, req := range req { + acReq := &analysisAction{analyzer: req, pkg: ac.pkg} + ret, err := r.runAnalysis(acReq) + if err != nil { + // We couldn't run a dependency, no point in going on + return nil, err + } + + pass.ResultOf[req] = ret + } + + // Then with this analyzer + ret, err := ac.analyzer.Run(pass) + if err != nil { + return nil, err + } + + // Persist facts to cache + if len(ac.analyzer.FactTypes) > 0 { + buf := &bytes.Buffer{} + if err := gob.NewEncoder(buf).Encode(ac.newFacts); err != nil { + return nil, err + } + aID, err := passActionID(ac.pkg, ac.analyzer) + if err != nil { + return nil, err + } + aID = cache.Subkey(aID, "facts") + if err := r.cache.PutBytes(aID, buf.Bytes()); err != nil { + return nil, err + } + } + + return ret, nil +} + +func NewRunner() (*Runner, error) { + cache, err := cache.Default() + if err != nil { + return nil, err + } + + return &Runner{ + cache: cache, + facts: map[types.Object][]analysis.Fact{}, + pkgFacts: map[*types.Package][]analysis.Fact{}, + built: map[*Package]*buildResult{}, + }, nil +} + +func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analysis.Analyzer) ([]*Package, error) { + for _, a := range analyzers { + for _, f := range a.FactTypes { + gob.Register(f) + } + } + + var dcfg packages.Config + if cfg != nil { + dcfg = *cfg + } + loaded, err := r.ld.Graph(dcfg, patterns...) 
+ if err != nil { + return nil, err + } + + m := map[*packages.Package]*Package{} + packages.Visit(loaded, nil, func(l *packages.Package) { + m[l] = &Package{ + Package: l, + Imports: map[string]*Package{}, + results: map[*analysis.Analyzer]*result{}, + } + for _, err := range l.Errors { + m[l].errs = append(m[l].errs, err) + } + for k, v := range l.Imports { + m[l].Imports[k] = m[v] + } + + m[l].hash, err = packageHash(m[l]) + if err != nil { + m[l].errs = append(m[l].errs, err) + } + }) + pkgs := make([]*Package, len(loaded)) + for i, l := range loaded { + pkgs[i] = m[l] + pkgs[i].initial = true + } + + var wg sync.WaitGroup + wg.Add(len(pkgs)) + // OPT(dh): The ideal number of parallel jobs depends on the shape + // of the graph. We may risk having one goroutine doing work and + // all other goroutines being blocked on its completion. At the + // same time, Go dependency graphs aren't always very amiable + // towards parallelism. For example, on the standard library, we + // only achieve about 400% CPU usage (out of a possible 800% on + // this machine), and only 2x scaling. 
+ sem := make(chan struct{}, runtime.GOMAXPROCS(-1)) + for _, pkg := range pkgs { + pkg := pkg + sem <- struct{}{} + go func() { + r.processPkg(pkg, analyzers) + <-sem + wg.Done() + }() + } + wg.Wait() + + return pkgs, nil +} + +var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`) + +func parsePos(pos string) token.Position { + if pos == "-" || pos == "" { + return token.Position{} + } + parts := posRe.FindStringSubmatch(pos) + if parts == nil { + panic(fmt.Sprintf("internal error: malformed position %q", pos)) + } + file := parts[1] + line, _ := strconv.Atoi(parts[2]) + col, _ := strconv.Atoi(parts[3]) + return token.Position{ + Filename: file, + Line: line, + Column: col, + } +} + +func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error { + if pkg.Types != nil { + panic(fmt.Sprintf("internal error: %s has already been loaded", pkg.Package)) + } + // Load type information + if pkg.initial { + // Load package from source + pkg.fromSource = true + return r.ld.LoadFromSource(pkg.Package) + } + + var allFacts []Fact + failed := false + for _, a := range analyzers { + if len(a.FactTypes) > 0 { + facts, ok := r.loadCachedFacts(a, pkg) + if !ok { + failed = true + break + } + allFacts = append(allFacts, facts...) + } + } + + if failed { + pkg.fromSource = true + return r.ld.LoadFromSource(pkg.Package) + } + + // Load package from export data + if err := r.ld.LoadFromExport(pkg.Package); err != nil { + // We asked Go to give us up to date export data, yet + // we can't load it. There must be something wrong. + // + // Attempt loading from source. This should fail (because + // otherwise there would be export data); we just want to + // get the compile errors. If loading from source succeeds + // we discard the result, anyway. Otherwise we'll fail + // when trying to reload from export data later. 
+ pkg.fromSource = true + if err := r.ld.LoadFromSource(pkg.Package); err != nil { + return err + } + // Make sure this package can't be imported successfully + pkg.Package.Errors = append(pkg.Package.Errors, packages.Error{ + Pos: "-", + Msg: fmt.Sprintf("could not load export data: %s", err), + Kind: packages.ParseError, + }) + return fmt.Errorf("could not load export data: %s", err) + } + + for _, f := range allFacts { + if f.Path == "" { + // This is a package fact + r.factsMu.Lock() + r.pkgFacts[pkg.Types] = append(r.pkgFacts[pkg.Types], f.Fact) + r.factsMu.Unlock() + continue + } + obj, err := objectpath.Object(pkg.Types, objectpath.Path(f.Path)) + if err != nil { + // Be lenient about these errors. For example, when + // analysing io/ioutil from source, we may get a fact + // for methods on the devNull type, and objectpath + // will happily create a path for them. However, when + // we later load io/ioutil from export data, the path + // no longer resolves. + // + // If an exported type embeds the unexported type, + // then (part of) the unexported type will become part + // of the type information and our path will resolve + // again. + continue + } + r.factsMu.Lock() + r.facts[obj] = append(r.facts[obj], f.Fact) + r.factsMu.Unlock() + } + return nil +} + +func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { + r.builtMu.Lock() + res := r.built[pkg] + if res != nil { + r.builtMu.Unlock() + <-res.done + return + } + + res = &buildResult{done: make(chan struct{})} + r.built[pkg] = res + r.builtMu.Unlock() + + defer func() { + // Clear information we no longer need. Make sure to do this + // when returning from processPkg so that we clear + // dependencies, not just initial packages. 
+ pkg.TypesInfo = nil + pkg.Syntax = nil + pkg.results = nil + close(res.done) + }() + + if len(pkg.errs) != 0 { + return + } + + for _, imp := range pkg.Imports { + r.processPkg(imp, analyzers) + if len(imp.errs) > 0 { + var s string + for _, err := range imp.errs { + s += "\n\t" + err.Error() + } + pkg.errs = append(pkg.errs, fmt.Errorf("could not analyze dependency %s of %s: %s", imp, pkg, s)) + return + } + } + if pkg.PkgPath == "unsafe" { + pkg.Types = types.Unsafe + return + } + + if err := r.loadPkg(pkg, analyzers); err != nil { + pkg.errs = append(pkg.errs, err) + return + } + + if !pkg.fromSource { + // Nothing left to do for the package. + return + } + + // Run analyses on initial packages and those missing facts + var wg sync.WaitGroup + wg.Add(len(analyzers)) + errs := make([]error, len(analyzers)) + var acs []*analysisAction + for i, a := range analyzers { + i := i + a := a + ac := &analysisAction{analyzer: a, pkg: pkg} + acs = append(acs, ac) + go func() { + defer wg.Done() + // Only initial packages and packages with missing + // facts will have been loaded from source. + if pkg.initial || len(a.FactTypes) > 0 { + if _, err := r.runAnalysis(ac); err != nil { + errs[i] = fmt.Errorf("error running analyzer %s on %s: %s", a, pkg, err) + return + } + } + }() + } + wg.Wait() + for _, err := range errs { + if err != nil { + pkg.errs = append(pkg.errs, err) + } + } + + // We can't process ignores at this point because `unused` needs + // to see more than one package to make its decision. + ignores, problems := parseDirectives(pkg.Package) + pkg.ignores = append(pkg.ignores, ignores...) + pkg.problems = append(pkg.problems, problems...) + for _, ac := range acs { + pkg.problems = append(pkg.problems, ac.problems...) 
+ } + pkg.cfg = pkg.results[config.Analyzer].v.(*config.Config) + pkg.gen = pkg.results[IsGeneratedAnalyzer].v.(map[string]bool) + + // In a previous version of the code, we would throw away all type + // information and reload it from export data. That was + // nonsensical. The *types.Package doesn't keep any information + // live that export data wouldn't also. We only need to discard + // the AST and the TypesInfo maps; that happens after we return + // from processPkg. +} + +func parseDirective(s string) (cmd string, args []string) { + if !strings.HasPrefix(s, "//lint:") { + return "", nil + } + s = strings.TrimPrefix(s, "//lint:") + fields := strings.Split(s, " ") + return fields[0], fields[1:] +} + +func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) { + var ignores []Ignore + var problems []Problem + + for _, f := range pkg.Syntax { + found := false + commentLoop: + for _, cg := range f.Comments { + for _, c := range cg.List { + if strings.Contains(c.Text, "//lint:") { + found = true + break commentLoop + } + } + } + if !found { + continue + } + cm := ast.NewCommentMap(pkg.Fset, f, f.Comments) + for node, cgs := range cm { + for _, cg := range cgs { + for _, c := range cg.List { + if !strings.HasPrefix(c.Text, "//lint:") { + continue + } + cmd, args := parseDirective(c.Text) + switch cmd { + case "ignore", "file-ignore": + if len(args) < 2 { + // FIXME(dh): this causes duplicated warnings when using megacheck + p := Problem{ + Pos: DisplayPosition(pkg.Fset, c.Pos()), + Message: "malformed linter directive; missing the required reason field?", + Severity: Error, + Check: "", + } + problems = append(problems, p) + continue + } + default: + // unknown directive, ignore + continue + } + checks := strings.Split(args[0], ",") + pos := DisplayPosition(pkg.Fset, node.Pos()) + var ig Ignore + switch cmd { + case "ignore": + ig = &LineIgnore{ + File: pos.Filename, + Line: pos.Line, + Checks: checks, + Pos: c.Pos(), + } + case "file-ignore": + ig = 
&FileIgnore{ + File: pos.Filename, + Checks: checks, + } + } + ignores = append(ignores, ig) + } + } + } + } + + return ignores, problems +} + +func packageHash(pkg *Package) (string, error) { + key := cache.NewHash("package hash") + fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) + for _, f := range pkg.CompiledGoFiles { + h, err := cache.FileHash(f) + if err != nil { + return "", err + } + fmt.Fprintf(key, "file %s %x\n", f, h) + } + imps := make([]*Package, 0, len(pkg.Imports)) + for _, v := range pkg.Imports { + imps = append(imps, v) + } + sort.Slice(imps, func(i, j int) bool { + return imps[i].PkgPath < imps[j].PkgPath + }) + for _, dep := range imps { + if dep.PkgPath == "unsafe" { + continue + } + + fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, dep.hash) + } + h := key.Sum() + return hex.EncodeToString(h[:]), nil +} + +func passActionID(pkg *Package, analyzer *analysis.Analyzer) (cache.ActionID, error) { + key := cache.NewHash("action ID") + fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) + fmt.Fprintf(key, "pkghash %s\n", pkg.hash) + fmt.Fprintf(key, "analyzer %s\n", analyzer.Name) + + return key.Sum(), nil +} diff --git a/lint/testdata/src/Test/line-ignores.go b/lint/testdata/src/Test/line-ignores.go index 77660e69c..6479f4ae4 100644 --- a/lint/testdata/src/Test/line-ignores.go +++ b/lint/testdata/src/Test/line-ignores.go @@ -3,20 +3,17 @@ package pkg // the line directive should not affect the line ignores //line random-file:1 -func fn1() {} // MATCH "test problem" +func fn1() {} // want `test problem` //lint:ignore TEST1000 This should be ignored, because ... 
//lint:ignore XXX1000 Testing that multiple linter directives work correctly func fn2() {} -//lint:ignore TEST1000 -func fn3() {} // MATCH "test problem" +//lint:ignore TEST1000 // want `malformed linter directive` +func fn3() {} // want `test problem` //lint:ignore TEST1000 ignore func fn4() { - //lint:ignore TEST1000 ignore + //lint:ignore TEST1000 ignore // want `this linter directive didn't match anything` var _ int } - -// MATCH:12 "malformed linter directive" -// MATCH:17 "this linter directive didn't match anything" diff --git a/lint/testutil/util.go b/lint/testutil/util.go deleted file mode 100644 index f3b046041..000000000 --- a/lint/testutil/util.go +++ /dev/null @@ -1,261 +0,0 @@ -// Copyright (c) 2013 The Go Authors. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file or at -// https://developers.google.com/open-source/licenses/bsd. - -// Package testutil provides helpers for testing staticcheck. -package testutil // import "honnef.co/go/tools/lint/testutil" - -import ( - "fmt" - "go/parser" - "go/token" - "io/ioutil" - "os" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - "testing" - - "golang.org/x/tools/go/packages" - "honnef.co/go/tools/config" - "honnef.co/go/tools/lint" -) - -func TestAll(t *testing.T, c lint.Checker, dir string) { - testPackages(t, c, dir) -} - -func testPackages(t *testing.T, c lint.Checker, dir string) { - gopath := filepath.Join("testdata", dir) - gopath, err := filepath.Abs(gopath) - if err != nil { - t.Fatal(err) - } - fis, err := ioutil.ReadDir(filepath.Join(gopath, "src")) - if err != nil { - if os.IsNotExist(err) { - // no packages to test - return - } - t.Fatal("couldn't get test packages:", err) - } - - var paths []string - for _, fi := range fis { - if strings.HasSuffix(fi.Name(), ".disabled") { - continue - } - paths = append(paths, fi.Name()) - } - - conf := &packages.Config{ - Mode: packages.LoadAllSyntax, - Tests: true, - 
Env: append(os.Environ(), "GOPATH="+gopath), - } - - pkgs, err := packages.Load(conf, paths...) - if err != nil { - t.Error("Error loading packages:", err) - return - } - - versions := map[int][]*packages.Package{} - for _, pkg := range pkgs { - path := strings.TrimSuffix(pkg.Types.Path(), ".test") - parts := strings.Split(path, "_") - - version := 0 - if len(parts) > 1 { - part := parts[len(parts)-1] - if len(part) >= 4 && strings.HasPrefix(part, "go1") { - v, err := strconv.Atoi(part[len("go1"):]) - if err != nil { - continue - } - version = v - } - } - versions[version] = append(versions[version], pkg) - } - - for version, pkgs := range versions { - sources := map[string][]byte{} - var files []string - - for _, pkg := range pkgs { - files = append(files, pkg.GoFiles...) - for _, fi := range pkg.GoFiles { - src, err := ioutil.ReadFile(fi) - if err != nil { - t.Fatal(err) - } - sources[fi] = src - } - } - - sort.Strings(files) - filesUniq := make([]string, 0, len(files)) - if len(files) < 2 { - filesUniq = files - } else { - filesUniq = append(filesUniq, files[0]) - prev := files[0] - for _, f := range files[1:] { - if f == prev { - continue - } - prev = f - filesUniq = append(filesUniq, f) - } - } - - lintGoVersion(t, c, version, pkgs, filesUniq, sources) - } -} - -func lintGoVersion( - t *testing.T, - c lint.Checker, - version int, - pkgs []*packages.Package, - files []string, - sources map[string][]byte, -) { - l := &lint.Linter{Checkers: []lint.Checker{c}, GoVersion: version, Config: config.Config{Checks: []string{"all"}}} - problems := l.Lint(pkgs, nil) - - for _, fi := range files { - src := sources[fi] - - ins := parseInstructions(t, fi, src) - - for _, in := range ins { - ok := false - for i, p := range problems { - if p.Position.Line != in.Line || p.Position.Filename != fi { - continue - } - if in.Match.MatchString(p.Text) { - // remove this problem from ps - copy(problems[i:], problems[i+1:]) - problems = problems[:len(problems)-1] - - ok = true - break 
- } - } - if !ok { - t.Errorf("Lint failed at %s:%d; /%v/ did not match", fi, in.Line, in.Match) - } - } - } - for _, p := range problems { - t.Errorf("Unexpected problem at %s: %v", p.Position, p.Text) - } -} - -type instruction struct { - Line int // the line number this applies to - Match *regexp.Regexp // what pattern to match - Replacement string // what the suggested replacement line should be -} - -// parseInstructions parses instructions from the comments in a Go source file. -// It returns nil if none were parsed. -func parseInstructions(t *testing.T, filename string, src []byte) []instruction { - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, filename, src, parser.ParseComments) - if err != nil { - t.Fatalf("Test file %v does not parse: %v", filename, err) - } - var ins []instruction - for _, cg := range f.Comments { - ln := fset.PositionFor(cg.Pos(), false).Line - raw := cg.Text() - for _, line := range strings.Split(raw, "\n") { - if line == "" || strings.HasPrefix(line, "#") { - continue - } - if line == "OK" && ins == nil { - // so our return value will be non-nil - ins = make([]instruction, 0) - continue - } - if !strings.Contains(line, "MATCH") { - continue - } - rx, err := extractPattern(line) - if err != nil { - t.Fatalf("At %v:%d: %v", filename, ln, err) - } - matchLine := ln - if i := strings.Index(line, "MATCH:"); i >= 0 { - // This is a match for a different line. 
- lns := strings.TrimPrefix(line[i:], "MATCH:") - lns = lns[:strings.Index(lns, " ")] - matchLine, err = strconv.Atoi(lns) - if err != nil { - t.Fatalf("Bad match line number %q at %v:%d: %v", lns, filename, ln, err) - } - } - var repl string - if r, ok := extractReplacement(line); ok { - repl = r - } - ins = append(ins, instruction{ - Line: matchLine, - Match: rx, - Replacement: repl, - }) - } - } - return ins -} - -func extractPattern(line string) (*regexp.Regexp, error) { - n := strings.Index(line, " ") - if n == -1 { - return nil, fmt.Errorf("malformed match instruction %q", line) - } - line = line[n+1:] - var pat string - switch line[0] { - case '/': - a, b := strings.Index(line, "/"), strings.LastIndex(line, "/") - if a == -1 || a == b { - return nil, fmt.Errorf("malformed match instruction %q", line) - } - pat = line[a+1 : b] - case '"': - a, b := strings.Index(line, `"`), strings.LastIndex(line, `"`) - if a == -1 || a == b { - return nil, fmt.Errorf("malformed match instruction %q", line) - } - pat = regexp.QuoteMeta(line[a+1 : b]) - default: - return nil, fmt.Errorf("malformed match instruction %q", line) - } - - rx, err := regexp.Compile(pat) - if err != nil { - return nil, fmt.Errorf("bad match pattern %q: %v", pat, err) - } - return rx, nil -} - -func extractReplacement(line string) (string, bool) { - // Look for this: / -> ` - // (the end of a match and start of a backtick string), - // and then the closing backtick. 
- const start = "/ -> `" - a, b := strings.Index(line, start), strings.LastIndex(line, "`") - if a < 0 || a > b { - return "", false - } - return line[a+len(start) : b], true -} diff --git a/loader/loader.go b/loader/loader.go new file mode 100644 index 000000000..9c6885d48 --- /dev/null +++ b/loader/loader.go @@ -0,0 +1,197 @@ +package loader + +import ( + "fmt" + "go/ast" + "go/parser" + "go/scanner" + "go/token" + "go/types" + "log" + "os" + "sync" + + "golang.org/x/tools/go/gcexportdata" + "golang.org/x/tools/go/packages" +) + +type Loader struct { + exportMu sync.RWMutex +} + +// Graph resolves patterns and returns packages with all the +// information required to later load type information, and optionally +// syntax trees. +// +// The provided config can set any setting with the exception of Mode. +func (ld *Loader) Graph(cfg packages.Config, patterns ...string) ([]*packages.Package, error) { + cfg.Mode = packages.NeedName | packages.NeedImports | packages.NeedDeps | packages.NeedExportsFile | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedTypesSizes + pkgs, err := packages.Load(&cfg, patterns...) + if err != nil { + return nil, err + } + fset := token.NewFileSet() + packages.Visit(pkgs, nil, func(pkg *packages.Package) { + pkg.Fset = fset + }) + return pkgs, nil +} + +// LoadFromExport loads a package from export data. All of its +// dependencies must have been loaded already. 
+func (ld *Loader) LoadFromExport(pkg *packages.Package) error { + ld.exportMu.Lock() + defer ld.exportMu.Unlock() + + pkg.IllTyped = true + for path, pkg := range pkg.Imports { + if pkg.Types == nil { + return fmt.Errorf("dependency %q hasn't been loaded yet", path) + } + } + if pkg.ExportFile == "" { + return fmt.Errorf("no export data for %q", pkg.ID) + } + f, err := os.Open(pkg.ExportFile) + if err != nil { + return err + } + defer f.Close() + + r, err := gcexportdata.NewReader(f) + if err != nil { + return err + } + + view := make(map[string]*types.Package) // view seen by gcexportdata + seen := make(map[*packages.Package]bool) // all visited packages + var visit func(pkgs map[string]*packages.Package) + visit = func(pkgs map[string]*packages.Package) { + for _, pkg := range pkgs { + if !seen[pkg] { + seen[pkg] = true + view[pkg.PkgPath] = pkg.Types + visit(pkg.Imports) + } + } + } + visit(pkg.Imports) + tpkg, err := gcexportdata.Read(r, pkg.Fset, view, pkg.PkgPath) + if err != nil { + return err + } + pkg.Types = tpkg + pkg.IllTyped = false + return nil +} + +// LoadFromSource loads a package from source. All of its dependencies +// must have been loaded already. +func (ld *Loader) LoadFromSource(pkg *packages.Package) error { + ld.exportMu.RLock() + defer ld.exportMu.RUnlock() + + pkg.IllTyped = true + pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) + + // OPT(dh): many packages have few files, much fewer than there + // are CPU cores. Additionally, parsing each individual file is + // very fast. A naive parallel implementation of this loop won't + // be faster, and tends to be slower due to extra scheduling, + // bookkeeping and potentially false sharing of cache lines. + pkg.Syntax = make([]*ast.File, len(pkg.CompiledGoFiles)) + for i, file := range pkg.CompiledGoFiles { + f, err := parser.ParseFile(pkg.Fset, file, nil, parser.ParseComments) + if err != nil { + pkg.Errors = append(pkg.Errors, convertError(err)...) 
+ return err + } + pkg.Syntax[i] = f + } + pkg.TypesInfo = &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + + importer := func(path string) (*types.Package, error) { + if path == "unsafe" { + return types.Unsafe, nil + } + imp := pkg.Imports[path] + if imp == nil { + return nil, nil + } + if len(imp.Errors) > 0 { + return nil, imp.Errors[0] + } + return imp.Types, nil + } + tc := &types.Config{ + Importer: importerFunc(importer), + Error: func(err error) { + pkg.Errors = append(pkg.Errors, convertError(err)...) + }, + } + err := types.NewChecker(tc, pkg.Fset, pkg.Types, pkg.TypesInfo).Files(pkg.Syntax) + if err != nil { + return err + } + pkg.IllTyped = false + return nil +} + +func convertError(err error) []packages.Error { + var errs []packages.Error + // taken from go/packages + switch err := err.(type) { + case packages.Error: + // from driver + errs = append(errs, err) + + case *os.PathError: + // from parser + errs = append(errs, packages.Error{ + Pos: err.Path + ":1", + Msg: err.Err.Error(), + Kind: packages.ParseError, + }) + + case scanner.ErrorList: + // from parser + for _, err := range err { + errs = append(errs, packages.Error{ + Pos: err.Pos.String(), + Msg: err.Msg, + Kind: packages.ParseError, + }) + } + + case types.Error: + // from type checker + errs = append(errs, packages.Error{ + Pos: err.Fset.Position(err.Pos).String(), + Msg: err.Msg, + Kind: packages.TypeError, + }) + + default: + // unexpected impoverished error from parser? + errs = append(errs, packages.Error{ + Pos: "-", + Msg: err.Error(), + Kind: packages.UnknownError, + }) + + // If you see this error message, please file a bug. 
+ log.Printf("internal error: error %q (%T) without position", err, err) + } + return errs +} + +type importerFunc func(path string) (*types.Package, error) + +func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } diff --git a/simple/analysis.go b/simple/analysis.go new file mode 100644 index 000000000..9449e2e74 --- /dev/null +++ b/simple/analysis.go @@ -0,0 +1,223 @@ +package simple + +import ( + "flag" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "honnef.co/go/tools/internal/passes/buildssa" + "honnef.co/go/tools/lint" + "honnef.co/go/tools/lint/lintutil" +) + +func newFlagSet() flag.FlagSet { + fs := flag.NewFlagSet("", flag.PanicOnError) + fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version") + return *fs +} + +var Analyzers = map[string]*analysis.Analyzer{ + "S1000": { + Name: "S1000", + Run: LintSingleCaseSelect, + Doc: docS1000, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1001": { + Name: "S1001", + Run: LintLoopCopy, + Doc: docS1001, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1002": { + Name: "S1002", + Run: LintIfBoolCmp, + Doc: docS1002, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1003": { + Name: "S1003", + Run: LintStringsContains, + Doc: docS1003, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1004": { + Name: "S1004", + Run: LintBytesCompare, + Doc: docS1004, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1005": { + Name: "S1005", + Run: LintUnnecessaryBlank, + Doc: docS1005, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1006": { + Name: "S1006", + Run: LintForTrue, + Doc: docS1006, + 
Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1007": { + Name: "S1007", + Run: LintRegexpRaw, + Doc: docS1007, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1008": { + Name: "S1008", + Run: LintIfReturn, + Doc: docS1008, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1009": { + Name: "S1009", + Run: LintRedundantNilCheckWithLen, + Doc: docS1009, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1010": { + Name: "S1010", + Run: LintSlicing, + Doc: docS1010, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1011": { + Name: "S1011", + Run: LintLoopAppend, + Doc: docS1011, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1012": { + Name: "S1012", + Run: LintTimeSince, + Doc: docS1012, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1016": { + Name: "S1016", + Run: LintSimplerStructConversion, + Doc: docS1016, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1017": { + Name: "S1017", + Run: LintTrim, + Doc: docS1017, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1018": { + Name: "S1018", + Run: LintLoopSlide, + Doc: docS1018, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1019": { + Name: "S1019", + Run: LintMakeLenCap, + Doc: docS1019, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1020": { + Name: "S1020", + Run: LintAssertNotNil, + Doc: docS1020, + Requires: []*analysis.Analyzer{inspect.Analyzer, 
lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1021": { + Name: "S1021", + Run: LintDeclareAssign, + Doc: docS1021, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1023": { + Name: "S1023", + Run: LintRedundantBreak, + Doc: docS1023, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1024": { + Name: "S1024", + Run: LintTimeUntil, + Doc: docS1024, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1025": { + Name: "S1025", + Run: LintRedundantSprintf, + Doc: docS1025, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1028": { + Name: "S1028", + Run: LintErrorsNewSprintf, + Doc: docS1028, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1029": { + Name: "S1029", + Run: LintRangeStringRunes, + Doc: docS1029, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "S1030": { + Name: "S1030", + Run: LintBytesBufferConversions, + Doc: docS1030, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1031": { + Name: "S1031", + Run: LintNilCheckAroundRange, + Doc: docS1031, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1032": { + Name: "S1032", + Run: LintSortHelpers, + Doc: docS1032, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1033": { + Name: "S1033", + Run: LintGuardedDelete, + Doc: docS1033, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1034": { + Name: "S1034", + Run: LintSimplifyTypeSwitch, + Doc: docS1034, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + 
Flags: newFlagSet(), + }, +} diff --git a/simple/doc.go b/simple/doc.go index ea437da56..b84b59d8b 100644 --- a/simple/doc.go +++ b/simple/doc.go @@ -424,3 +424,15 @@ sort.Strings(x) Available since 2019.1 ` + +var docS1033 = `Unnecessary guard around call to delete + +Available since: + Unreleased +` + +var docS1034 = `Use result of type assertion to simplify cases + +Available since: + Unreleased +` diff --git a/simple/lint.go b/simple/lint.go index db805770c..183adfc9f 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -8,67 +8,19 @@ import ( "go/token" "go/types" "reflect" + "sort" "strings" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" . "honnef.co/go/tools/arg" "honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/lint" . "honnef.co/go/tools/lint/lintdsl" - - "golang.org/x/tools/go/types/typeutil" ) -type Checker struct { - CheckGenerated bool - MS *typeutil.MethodSetCache -} - -func NewChecker() *Checker { - return &Checker{ - MS: &typeutil.MethodSetCache{}, - } -} - -func (*Checker) Name() string { return "gosimple" } -func (*Checker) Prefix() string { return "S" } - -func (c *Checker) Init(prog *lint.Program) {} - -func (c *Checker) Checks() []lint.Check { - return []lint.Check{ - {ID: "S1000", FilterGenerated: true, Fn: c.LintSingleCaseSelect, Doc: docS1000}, - {ID: "S1001", FilterGenerated: true, Fn: c.LintLoopCopy, Doc: docS1001}, - {ID: "S1002", FilterGenerated: true, Fn: c.LintIfBoolCmp, Doc: docS1002}, - {ID: "S1003", FilterGenerated: true, Fn: c.LintStringsContains, Doc: docS1003}, - {ID: "S1004", FilterGenerated: true, Fn: c.LintBytesCompare, Doc: docS1004}, - {ID: "S1005", FilterGenerated: true, Fn: c.LintUnnecessaryBlank, Doc: docS1005}, - {ID: "S1006", FilterGenerated: true, Fn: c.LintForTrue, Doc: docS1006}, - {ID: "S1007", FilterGenerated: true, Fn: c.LintRegexpRaw, Doc: docS1007}, - {ID: "S1008", FilterGenerated: true, Fn: c.LintIfReturn, Doc: docS1008}, - 
{ID: "S1009", FilterGenerated: true, Fn: c.LintRedundantNilCheckWithLen, Doc: docS1009}, - {ID: "S1010", FilterGenerated: true, Fn: c.LintSlicing, Doc: docS1010}, - {ID: "S1011", FilterGenerated: true, Fn: c.LintLoopAppend, Doc: docS1011}, - {ID: "S1012", FilterGenerated: true, Fn: c.LintTimeSince, Doc: docS1012}, - {ID: "S1016", FilterGenerated: true, Fn: c.LintSimplerStructConversion, Doc: docS1016}, - {ID: "S1017", FilterGenerated: true, Fn: c.LintTrim, Doc: docS1017}, - {ID: "S1018", FilterGenerated: true, Fn: c.LintLoopSlide, Doc: docS1018}, - {ID: "S1019", FilterGenerated: true, Fn: c.LintMakeLenCap, Doc: docS1019}, - {ID: "S1020", FilterGenerated: true, Fn: c.LintAssertNotNil, Doc: docS1020}, - {ID: "S1021", FilterGenerated: true, Fn: c.LintDeclareAssign, Doc: docS1021}, - {ID: "S1023", FilterGenerated: true, Fn: c.LintRedundantBreak, Doc: docS1023}, - {ID: "S1024", FilterGenerated: true, Fn: c.LintTimeUntil, Doc: docS1024}, - {ID: "S1025", FilterGenerated: true, Fn: c.LintRedundantSprintf, Doc: docS1025}, - {ID: "S1028", FilterGenerated: true, Fn: c.LintErrorsNewSprintf, Doc: docS1028}, - {ID: "S1029", FilterGenerated: false, Fn: c.LintRangeStringRunes, Doc: docS1029}, - {ID: "S1030", FilterGenerated: true, Fn: c.LintBytesBufferConversions, Doc: docS1030}, - {ID: "S1031", FilterGenerated: true, Fn: c.LintNilCheckAroundRange, Doc: docS1031}, - {ID: "S1032", FilterGenerated: true, Fn: c.LintSortHelpers, Doc: docS1032}, - {ID: "S1033", FilterGenerated: true, Fn: c.LintGuardedDelete, Doc: ``}, - {ID: "S1034", FilterGenerated: true, Fn: c.LintSimplifyTypeSwitch, Doc: ``}, - } -} - -func (c *Checker) LintSingleCaseSelect(j *lint.Job) { +func LintSingleCaseSelect(pass *analysis.Pass) (interface{}, error) { isSingleSelect := func(node ast.Node) bool { v, ok := node.(*ast.SelectStmt) if !ok { @@ -92,7 +44,7 @@ func (c *Checker) LintSingleCaseSelect(j *lint.Job) { return } seen[v.Body.List[0]] = struct{}{} - j.Errorf(node, "should use for range instead of for { 
select {} }") + ReportfFG(pass, node.Pos(), "should use for range instead of for { select {} }") case *ast.SelectStmt: if _, ok := seen[v]; ok { return @@ -100,13 +52,14 @@ func (c *Checker) LintSingleCaseSelect(j *lint.Job) { if !isSingleSelect(v) { return } - j.Errorf(node, "should use a simple channel send/receive instead of select with a single case") + ReportfFG(pass, node.Pos(), "should use a simple channel send/receive instead of select with a single case") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintLoopCopy(j *lint.Job) { +func LintLoopCopy(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.RangeStmt) @@ -128,7 +81,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { return } - if _, ok := j.Pkg.TypesInfo.TypeOf(lhs.X).(*types.Slice); !ok { + if _, ok := pass.TypesInfo.TypeOf(lhs.X).(*types.Slice); !ok { return } lidx, ok := lhs.Index.(*ast.Ident) @@ -139,16 +92,16 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return } - if j.Pkg.TypesInfo.TypeOf(lhs) == nil || j.Pkg.TypesInfo.TypeOf(stmt.Rhs[0]) == nil { + if pass.TypesInfo.TypeOf(lhs) == nil || pass.TypesInfo.TypeOf(stmt.Rhs[0]) == nil { return } - if j.Pkg.TypesInfo.ObjectOf(lidx) != j.Pkg.TypesInfo.ObjectOf(key) { + if pass.TypesInfo.ObjectOf(lidx) != pass.TypesInfo.ObjectOf(key) { return } - if !types.Identical(j.Pkg.TypesInfo.TypeOf(lhs), j.Pkg.TypesInfo.TypeOf(stmt.Rhs[0])) { + if !types.Identical(pass.TypesInfo.TypeOf(lhs), pass.TypesInfo.TypeOf(stmt.Rhs[0])) { return } - if _, ok := j.Pkg.TypesInfo.TypeOf(loop.X).(*types.Slice); !ok { + if _, ok := pass.TypesInfo.TypeOf(loop.X).(*types.Slice); !ok { return } @@ -162,7 +115,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return } - if j.Pkg.TypesInfo.ObjectOf(ridx) != 
j.Pkg.TypesInfo.ObjectOf(key) { + if pass.TypesInfo.ObjectOf(ridx) != pass.TypesInfo.ObjectOf(key) { return } } else if rhs, ok := stmt.Rhs[0].(*ast.Ident); ok { @@ -170,38 +123,39 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return } - if j.Pkg.TypesInfo.ObjectOf(rhs) != j.Pkg.TypesInfo.ObjectOf(value) { + if pass.TypesInfo.ObjectOf(rhs) != pass.TypesInfo.ObjectOf(value) { return } } else { return } - j.Errorf(loop, "should use copy() instead of a loop") + ReportfFG(pass, loop.Pos(), "should use copy() instead of a loop") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintIfBoolCmp(j *lint.Job) { +func LintIfBoolCmp(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { expr := node.(*ast.BinaryExpr) if expr.Op != token.EQL && expr.Op != token.NEQ { return } - x := IsBoolConst(j, expr.X) - y := IsBoolConst(j, expr.Y) + x := IsBoolConst(pass, expr.X) + y := IsBoolConst(pass, expr.Y) if !x && !y { return } var other ast.Expr var val bool if x { - val = BoolConst(j, expr.X) + val = BoolConst(pass, expr.X) other = expr.Y } else { - val = BoolConst(j, expr.Y) + val = BoolConst(pass, expr.Y) other = expr.X } - basic, ok := j.Pkg.TypesInfo.TypeOf(other).Underlying().(*types.Basic) + basic, ok := pass.TypesInfo.TypeOf(other).Underlying().(*types.Basic) if !ok || basic.Kind() != types.Bool { return } @@ -209,21 +163,22 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) { if (expr.Op == token.EQL && !val) || (expr.Op == token.NEQ && val) { op = "!" } - r := op + Render(j, other) + r := op + Render(pass, other) l1 := len(r) r = strings.TrimLeft(r, "!") if (l1-len(r))%2 == 1 { r = "!" 
+ r } - if IsInTest(j, node) { + if IsInTest(pass, node) { return } - j.Errorf(expr, "should omit comparison to bool constant, can be simplified to %s", r) + ReportfFG(pass, expr.Pos(), "should omit comparison to bool constant, can be simplified to %s", r) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintBytesBufferConversions(j *lint.Job) { +func LintBytesBufferConversions(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) if len(call.Args) != 1 { @@ -239,18 +194,19 @@ func (c *Checker) LintBytesBufferConversions(j *lint.Job) { return } - typ := j.Pkg.TypesInfo.TypeOf(call.Fun) - if typ == types.Universe.Lookup("string").Type() && IsCallToAST(j, call.Args[0], "(*bytes.Buffer).Bytes") { - j.Errorf(call, "should use %v.String() instead of %v", Render(j, sel.X), Render(j, call)) - } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && IsCallToAST(j, call.Args[0], "(*bytes.Buffer).String") { - j.Errorf(call, "should use %v.Bytes() instead of %v", Render(j, sel.X), Render(j, call)) + typ := pass.TypesInfo.TypeOf(call.Fun) + if typ == types.Universe.Lookup("string").Type() && IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).Bytes") { + ReportfFG(pass, call.Pos(), "should use %v.String() instead of %v", Render(pass, sel.X), Render(pass, call)) + } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).String") { + ReportfFG(pass, call.Pos(), "should use %v.Bytes() instead of %v", Render(pass, sel.X), Render(pass, call)) } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c 
*Checker) LintStringsContains(j *lint.Job) { +func LintStringsContains(pass *analysis.Pass) (interface{}, error) { // map of value to token to bool value allowed := map[int64]map[token.Token]bool{ -1: {token.GTR: true, token.NEQ: true, token.EQL: false}, @@ -264,7 +220,7 @@ func (c *Checker) LintStringsContains(j *lint.Job) { return } - value, ok := ExprToInt(j, expr.Y) + value, ok := ExprToInt(pass, expr.Y) if !ok { return } @@ -310,12 +266,13 @@ func (c *Checker) LintStringsContains(j *lint.Job) { if !b { prefix = "!" } - j.Errorf(node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(j, call.Args)) + ReportfFG(pass, node.Pos(), "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(pass, call.Args)) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintBytesCompare(j *lint.Job) { +func LintBytesCompare(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { expr := node.(*ast.BinaryExpr) if expr.Op != token.NEQ && expr.Op != token.EQL { @@ -325,42 +282,44 @@ func (c *Checker) LintBytesCompare(j *lint.Job) { if !ok { return } - if !IsCallToAST(j, call, "bytes.Compare") { + if !IsCallToAST(pass, call, "bytes.Compare") { return } - value, ok := ExprToInt(j, expr.Y) + value, ok := ExprToInt(pass, expr.Y) if !ok || value != 0 { return } - args := RenderArgs(j, call.Args) + args := RenderArgs(pass, call.Args) prefix := "" if expr.Op == token.NEQ { prefix = "!" 
} - j.Errorf(node, "should use %sbytes.Equal(%s) instead", prefix, args) + ReportfFG(pass, node.Pos(), "should use %sbytes.Equal(%s) instead", prefix, args) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintForTrue(j *lint.Job) { +func LintForTrue(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.ForStmt) if loop.Init != nil || loop.Post != nil { return } - if !IsBoolConst(j, loop.Cond) || !BoolConst(j, loop.Cond) { + if !IsBoolConst(pass, loop.Cond) || !BoolConst(pass, loop.Cond) { return } - j.Errorf(loop, "should use for {} instead of for true {}") + ReportfFG(pass, loop.Pos(), "should use for {} instead of for true {}") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintRegexpRaw(j *lint.Job) { +func LintRegexpRaw(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAST(j, call, "regexp.MustCompile") && - !IsCallToAST(j, call, "regexp.Compile") { + if !IsCallToAST(pass, call, "regexp.MustCompile") && + !IsCallToAST(pass, call, "regexp.Compile") { return } sel, ok := call.Fun.(*ast.SelectorExpr) @@ -408,12 +367,13 @@ func (c *Checker) LintRegexpRaw(j *lint.Job) { } } - j.Errorf(call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) + ReportfFG(pass, call.Pos(), "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintIfReturn(j *lint.Job) { +func 
LintIfReturn(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { block := node.(*ast.BlockStmt) l := len(block.List) @@ -455,7 +415,7 @@ func (c *Checker) LintIfReturn(j *lint.Job) { if len(ret1.Results) != 1 { return } - if !IsBoolConst(j, ret1.Results[0]) { + if !IsBoolConst(pass, ret1.Results[0]) { return } @@ -466,12 +426,13 @@ func (c *Checker) LintIfReturn(j *lint.Job) { if len(ret2.Results) != 1 { return } - if !IsBoolConst(j, ret2.Results[0]) { + if !IsBoolConst(pass, ret2.Results[0]) { return } - j.Errorf(n1, "should use 'return ' instead of 'if { return }; return '") + ReportfFG(pass, n1.Pos(), "should use 'return ' instead of 'if { return }; return '") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + return nil, nil } // LintRedundantNilCheckWithLen checks for the following reduntant nil-checks: @@ -482,7 +443,7 @@ func (c *Checker) LintIfReturn(j *lint.Job) { // if x != nil && len(x) > N {} // if x != nil && len(x) >= N {} (where N != 0) // -func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { +func LintRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { isConstZero := func(expr ast.Expr) (isConst bool, isZero bool) { _, ok := expr.(*ast.BasicLit) if ok { @@ -492,7 +453,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { if !ok { return false, false } - c, ok := j.Pkg.TypesInfo.ObjectOf(id).(*types.Const) + c, ok := pass.TypesInfo.ObjectOf(id).(*types.Const) if !ok { return false, false } @@ -522,7 +483,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { if !ok { return } - if !IsNil(j, x.Y) { + if !IsNil(pass, x.Y) { return } @@ -585,7 +546,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { // finally check that xx type is one of array, slice, map or chan // this is to prevent false positive in case if xx is a pointer to an array var 
nilType string - switch j.Pkg.TypesInfo.TypeOf(xx).(type) { + switch pass.TypesInfo.TypeOf(xx).(type) { case *types.Slice: nilType = "nil slices" case *types.Map: @@ -595,12 +556,13 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { default: return } - j.Errorf(expr, "should omit nil check; len() for %s is defined as zero", nilType) + ReportfFG(pass, expr.Pos(), "should omit nil check; len() for %s is defined as zero", nilType) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintSlicing(j *lint.Job) { +func LintSlicing(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { n := node.(*ast.SliceExpr) if n.Max != nil { @@ -618,26 +580,27 @@ func (c *Checker) LintSlicing(j *lint.Job) { if !ok || fun.Name != "len" { return } - if _, ok := j.Pkg.TypesInfo.ObjectOf(fun).(*types.Builtin); !ok { + if _, ok := pass.TypesInfo.ObjectOf(fun).(*types.Builtin); !ok { return } arg, ok := call.Args[Arg("len.v")].(*ast.Ident) if !ok || arg.Obj != s.Obj { return } - j.Errorf(n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]") + ReportfFG(pass, n.Pos(), "should omit second index in slice, s[a:len(s)] is identical to s[a:]") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.SliceExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.SliceExpr)(nil)}, fn) + return nil, nil } -func refersTo(j *lint.Job, expr ast.Expr, ident *ast.Ident) bool { +func refersTo(pass *analysis.Pass, expr ast.Expr, ident *ast.Ident) bool { found := false fn := func(node ast.Node) bool { ident2, ok := node.(*ast.Ident) if !ok { return true } - if j.Pkg.TypesInfo.ObjectOf(ident) == j.Pkg.TypesInfo.ObjectOf(ident2) { + if pass.TypesInfo.ObjectOf(ident) == pass.TypesInfo.ObjectOf(ident2) { found = true return false } @@ -647,7 +610,7 @@ func refersTo(j 
*lint.Job, expr ast.Expr, ident *ast.Ident) bool { return found } -func (c *Checker) LintLoopAppend(j *lint.Job) { +func LintLoopAppend(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.RangeStmt) if !IsBlank(loop.Key) { @@ -667,7 +630,7 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 { return } - if refersTo(j, stmt.Lhs[0], val) { + if refersTo(pass, stmt.Lhs[0], val) { return } call, ok := stmt.Rhs[0].(*ast.CallExpr) @@ -681,14 +644,14 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { if !ok { return } - obj := j.Pkg.TypesInfo.ObjectOf(fun) + obj := pass.TypesInfo.ObjectOf(fun) fn, ok := obj.(*types.Builtin) if !ok || fn.Name() != "append" { return } - src := j.Pkg.TypesInfo.TypeOf(loop.X) - dst := j.Pkg.TypesInfo.TypeOf(call.Args[Arg("append.slice")]) + src := pass.TypesInfo.TypeOf(loop.X) + dst := pass.TypesInfo.TypeOf(call.Args[Arg("append.slice")]) // TODO(dominikh) remove nil check once Go issue #15173 has // been fixed if src == nil { @@ -698,7 +661,7 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { return } - if Render(j, stmt.Lhs[0]) != Render(j, call.Args[Arg("append.slice")]) { + if Render(pass, stmt.Lhs[0]) != Render(pass, call.Args[Arg("append.slice")]) { return } @@ -706,51 +669,54 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { if !ok { return } - if j.Pkg.TypesInfo.ObjectOf(val) != j.Pkg.TypesInfo.ObjectOf(el) { + if pass.TypesInfo.ObjectOf(val) != pass.TypesInfo.ObjectOf(el) { return } - j.Errorf(loop, "should replace loop with %s = append(%s, %s...)", - Render(j, stmt.Lhs[0]), Render(j, call.Args[Arg("append.slice")]), Render(j, loop.X)) + ReportfFG(pass, loop.Pos(), "should replace loop with %s = append(%s, %s...)", + Render(pass, stmt.Lhs[0]), Render(pass, call.Args[Arg("append.slice")]), Render(pass, loop.X)) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + 
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintTimeSince(j *lint.Job) { +func LintTimeSince(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { return } - if !IsCallToAST(j, sel.X, "time.Now") { + if !IsCallToAST(pass, sel.X, "time.Now") { return } if sel.Sel.Name != "Sub" { return } - j.Errorf(call, "should use time.Since instead of time.Now().Sub") + ReportfFG(pass, call.Pos(), "should use time.Since instead of time.Now().Sub") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintTimeUntil(j *lint.Job) { - if !IsGoVersion(j, 8) { - return +func LintTimeUntil(pass *analysis.Pass) (interface{}, error) { + if !IsGoVersion(pass, 8) { + return nil, nil } fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAST(j, call, "(time.Time).Sub") { + if !IsCallToAST(pass, call, "(time.Time).Sub") { return } - if !IsCallToAST(j, call.Args[Arg("(time.Time).Sub.u")], "time.Now") { + if !IsCallToAST(pass, call.Args[Arg("(time.Time).Sub.u")], "time.Now") { return } - j.Errorf(call, "should use time.Until instead of t.Sub(time.Now())") + ReportfFG(pass, call.Pos(), "should use time.Until instead of t.Sub(time.Now())") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { +func LintUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) { fn1 := func(node ast.Node) { assign := node.(*ast.AssignStmt) if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 { @@ -763,7 +729,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { 
case *ast.IndexExpr: // The type-checker should make sure that it's a map, but // let's be safe. - if _, ok := j.Pkg.TypesInfo.TypeOf(rhs.X).Underlying().(*types.Map); !ok { + if _, ok := pass.TypesInfo.TypeOf(rhs.X).Underlying().(*types.Map); !ok { return } case *ast.UnaryExpr: @@ -775,7 +741,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } cp := *assign cp.Lhs = cp.Lhs[0:1] - j.Errorf(assign, "should write %s instead of %s", Render(j, &cp), Render(j, assign)) + ReportfFG(pass, assign.Pos(), "should write %s instead of %s", Render(pass, &cp), Render(pass, assign)) } fn2 := func(node ast.Node) { @@ -795,7 +761,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { if expr.Op != token.ARROW { continue } - j.Errorf(lh, "'_ = <-ch' can be simplified to '<-ch'") + ReportfFG(pass, lh.Pos(), "'_ = <-ch' can be simplified to '<-ch'") } } @@ -804,22 +770,23 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { // for x, _ if !IsBlank(rs.Key) && IsBlank(rs.Value) { - j.Errorf(rs.Value, "should omit value from range; this loop is equivalent to `for %s %s range ...`", Render(j, rs.Key), rs.Tok) + ReportfFG(pass, rs.Value.Pos(), "should omit value from range; this loop is equivalent to `for %s %s range ...`", Render(pass, rs.Key), rs.Tok) } // for _, _ || for _ if IsBlank(rs.Key) && (IsBlank(rs.Value) || rs.Value == nil) { - j.Errorf(rs.Key, "should omit values from range; this loop is equivalent to `for range ...`") + ReportfFG(pass, rs.Key.Pos(), "should omit values from range; this loop is equivalent to `for range ...`") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn1) - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn2) - if IsGoVersion(j, 4) { - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn3) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn1) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, 
fn2) + if IsGoVersion(pass, 4) { + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn3) } + return nil, nil } -func (c *Checker) LintSimplerStructConversion(j *lint.Job) { +func LintSimplerStructConversion(pass *analysis.Pass) (interface{}, error) { var skip ast.Node fn := func(node ast.Node) { // Do not suggest type conversion between pointers @@ -838,7 +805,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if !ok { return } - typ1, _ := j.Pkg.TypesInfo.TypeOf(lit.Type).(*types.Named) + typ1, _ := pass.TypesInfo.TypeOf(lit.Type).(*types.Named) if typ1 == nil { return } @@ -858,7 +825,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if !ok { return nil, nil, false } - typ := j.Pkg.TypesInfo.TypeOf(sel.X) + typ := pass.TypesInfo.TypeOf(sel.X) return typ, ident, typ != nil } if len(lit.Elts) == 0 { @@ -926,7 +893,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if typ1 == typ2 { return } - if IsGoVersion(j, 8) { + if IsGoVersion(pass, 8) { if !types.IdenticalIgnoreTags(s1, s2) { return } @@ -935,13 +902,14 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { return } } - j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal", + ReportfFG(pass, node.Pos(), "should convert %s (type %s) to %s instead of using struct literal", ident.Name, typ2.Obj().Name(), typ1.Obj().Name()) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn) + return nil, nil } -func (c *Checker) LintTrim(j *lint.Job) { +func LintTrim(pass *analysis.Pass) (interface{}, error) { sameNonDynamic := func(node1, node2 ast.Node) bool { if reflect.TypeOf(node1) != reflect.TypeOf(node2) { return false @@ -951,9 +919,9 @@ func (c *Checker) LintTrim(j *lint.Job) { case *ast.Ident: return node1.Obj == 
node2.(*ast.Ident).Obj case *ast.SelectorExpr: - return Render(j, node1) == Render(j, node2) + return Render(pass, node1) == Render(pass, node2) case *ast.IndexExpr: - return Render(j, node1) == Render(j, node2) + return Render(pass, node1) == Render(pass, node2) } return false } @@ -991,22 +959,22 @@ func (c *Checker) LintTrim(j *lint.Job) { return } switch { - case IsCallToAST(j, condCall, "strings.HasPrefix"): + case IsCallToAST(pass, condCall, "strings.HasPrefix"): pkg = "strings" fun = "HasPrefix" - case IsCallToAST(j, condCall, "strings.HasSuffix"): + case IsCallToAST(pass, condCall, "strings.HasSuffix"): pkg = "strings" fun = "HasSuffix" - case IsCallToAST(j, condCall, "strings.Contains"): + case IsCallToAST(pass, condCall, "strings.Contains"): pkg = "strings" fun = "Contains" - case IsCallToAST(j, condCall, "bytes.HasPrefix"): + case IsCallToAST(pass, condCall, "bytes.HasPrefix"): pkg = "bytes" fun = "HasPrefix" - case IsCallToAST(j, condCall, "bytes.HasSuffix"): + case IsCallToAST(pass, condCall, "bytes.HasSuffix"): pkg = "bytes" fun = "HasSuffix" - case IsCallToAST(j, condCall, "bytes.Contains"): + case IsCallToAST(pass, condCall, "bytes.Contains"): pkg = "bytes" fun = "Contains" default: @@ -1032,13 +1000,13 @@ func (c *Checker) LintTrim(j *lint.Job) { if len(rhs.Args) < 2 || !sameNonDynamic(condCall.Args[0], rhs.Args[0]) || !sameNonDynamic(condCall.Args[1], rhs.Args[1]) { return } - if IsCallToAST(j, condCall, "strings.HasPrefix") && IsCallToAST(j, rhs, "strings.TrimPrefix") || - IsCallToAST(j, condCall, "strings.HasSuffix") && IsCallToAST(j, rhs, "strings.TrimSuffix") || - IsCallToAST(j, condCall, "strings.Contains") && IsCallToAST(j, rhs, "strings.Replace") || - IsCallToAST(j, condCall, "bytes.HasPrefix") && IsCallToAST(j, rhs, "bytes.TrimPrefix") || - IsCallToAST(j, condCall, "bytes.HasSuffix") && IsCallToAST(j, rhs, "bytes.TrimSuffix") || - IsCallToAST(j, condCall, "bytes.Contains") && IsCallToAST(j, rhs, "bytes.Replace") { - j.Errorf(ifstmt, 
"should replace this if statement with an unconditional %s", CallNameAST(j, rhs)) + if IsCallToAST(pass, condCall, "strings.HasPrefix") && IsCallToAST(pass, rhs, "strings.TrimPrefix") || + IsCallToAST(pass, condCall, "strings.HasSuffix") && IsCallToAST(pass, rhs, "strings.TrimSuffix") || + IsCallToAST(pass, condCall, "strings.Contains") && IsCallToAST(pass, rhs, "strings.Replace") || + IsCallToAST(pass, condCall, "bytes.HasPrefix") && IsCallToAST(pass, rhs, "bytes.TrimPrefix") || + IsCallToAST(pass, condCall, "bytes.HasSuffix") && IsCallToAST(pass, rhs, "bytes.TrimSuffix") || + IsCallToAST(pass, condCall, "bytes.Contains") && IsCallToAST(pass, rhs, "bytes.Replace") { + ReportfFG(pass, ifstmt.Pos(), "should replace this if statement with an unconditional %s", CallNameAST(pass, rhs)) } return case *ast.SliceExpr: @@ -1063,7 +1031,7 @@ func (c *Checker) LintTrim(j *lint.Job) { index = slice.Low case "HasSuffix": if slice.Low != nil { - n, ok := ExprToInt(j, slice.Low) + n, ok := ExprToInt(pass, slice.Low) if !ok || n != 0 { return } @@ -1092,8 +1060,8 @@ func (c *Checker) LintTrim(j *lint.Job) { if !ok { return } - s1, ok1 := ExprToString(j, lit) - s2, ok2 := ExprToString(j, condCall.Args[1]) + s1, ok1 := ExprToString(pass, lit) + s2, ok2 := ExprToString(pass, condCall.Args[1]) if !ok1 || !ok2 || s1 != s2 { return } @@ -1109,8 +1077,8 @@ func (c *Checker) LintTrim(j *lint.Job) { if pkg != "strings" { return } - string, ok1 := ExprToString(j, condCall.Args[1]) - int, ok2 := ExprToInt(j, slice.Low) + string, ok1 := ExprToString(pass, condCall.Args[1]) + int, ok2 := ExprToInt(pass, slice.Low) if !ok1 || !ok2 || int != int64(len(string)) { return } @@ -1136,13 +1104,14 @@ func (c *Checker) LintTrim(j *lint.Job) { case "HasSuffix": replacement = "TrimSuffix" } - j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) + ReportfFG(pass, ifstmt.Pos(), "should replace this if statement with an unconditional %s.%s", pkg, replacement) } 
} - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintLoopSlide(j *lint.Job) { +func LintLoopSlide(pass *analysis.Pass) (interface{}, error) { // TODO(dh): detect bs[i+offset] in addition to bs[offset+i] // TODO(dh): consider merging this function with LintLoopCopy // TODO(dh): detect length that is an expression, not a variable name @@ -1176,7 +1145,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return } postvar, ok := post.X.(*ast.Ident) - if !ok || j.Pkg.TypesInfo.ObjectOf(postvar) != j.Pkg.TypesInfo.ObjectOf(initvar) { + if !ok || pass.TypesInfo.ObjectOf(postvar) != pass.TypesInfo.ObjectOf(initvar) { return } bin, ok := loop.Cond.(*ast.BinaryExpr) @@ -1184,7 +1153,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return } binx, ok := bin.X.(*ast.Ident) - if !ok || j.Pkg.TypesInfo.ObjectOf(binx) != j.Pkg.TypesInfo.ObjectOf(initvar) { + if !ok || pass.TypesInfo.ObjectOf(binx) != pass.TypesInfo.ObjectOf(initvar) { return } biny, ok := bin.Y.(*ast.Ident) @@ -1213,8 +1182,8 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { if !ok { return } - obj1 := j.Pkg.TypesInfo.ObjectOf(bs1) - obj2 := j.Pkg.TypesInfo.ObjectOf(bs2) + obj1 := pass.TypesInfo.ObjectOf(bs1) + obj2 := pass.TypesInfo.ObjectOf(bs2) if obj1 != obj2 { return } @@ -1223,7 +1192,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { } index1, ok := lhs.Index.(*ast.Ident) - if !ok || j.Pkg.TypesInfo.ObjectOf(index1) != j.Pkg.TypesInfo.ObjectOf(initvar) { + if !ok || pass.TypesInfo.ObjectOf(index1) != pass.TypesInfo.ObjectOf(initvar) { return } index2, ok := rhs.Index.(*ast.BinaryExpr) @@ -1235,16 +1204,17 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return } add2, ok := index2.Y.(*ast.Ident) - if !ok || j.Pkg.TypesInfo.ObjectOf(add2) != j.Pkg.TypesInfo.ObjectOf(initvar) { + if !ok || pass.TypesInfo.ObjectOf(add2) != 
pass.TypesInfo.ObjectOf(initvar) { return } - j.Errorf(loop, "should use copy(%s[:%s], %s[%s:]) instead", Render(j, bs1), Render(j, biny), Render(j, bs1), Render(j, add1)) + ReportfFG(pass, loop.Pos(), "should use copy(%s[:%s], %s[%s:]) instead", Render(pass, bs1), Render(pass, biny), Render(pass, bs1), Render(pass, add1)) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintMakeLenCap(j *lint.Job) { +func LintMakeLenCap(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) if fn, ok := call.Fun.(*ast.Ident); !ok || fn.Name != "make" { @@ -1254,25 +1224,26 @@ func (c *Checker) LintMakeLenCap(j *lint.Job) { switch len(call.Args) { case 2: // make(T, len) - if _, ok := j.Pkg.TypesInfo.TypeOf(call.Args[Arg("make.t")]).Underlying().(*types.Slice); ok { + if _, ok := pass.TypesInfo.TypeOf(call.Args[Arg("make.t")]).Underlying().(*types.Slice); ok { break } if IsZero(call.Args[Arg("make.size[0]")]) { - j.Errorf(call.Args[Arg("make.size[0]")], "should use make(%s) instead", Render(j, call.Args[Arg("make.t")])) + ReportfFG(pass, call.Args[Arg("make.size[0]")].Pos(), "should use make(%s) instead", Render(pass, call.Args[Arg("make.t")])) } case 3: // make(T, len, cap) - if Render(j, call.Args[Arg("make.size[0]")]) == Render(j, call.Args[Arg("make.size[1]")]) { - j.Errorf(call.Args[Arg("make.size[0]")], + if Render(pass, call.Args[Arg("make.size[0]")]) == Render(pass, call.Args[Arg("make.size[1]")]) { + ReportfFG(pass, call.Args[Arg("make.size[0]")].Pos(), "should use make(%s, %s) instead", - Render(j, call.Args[Arg("make.t")]), Render(j, call.Args[Arg("make.size[0]")])) + Render(pass, call.Args[Arg("make.t")]), Render(pass, call.Args[Arg("make.size[0]")])) } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + 
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintAssertNotNil(j *lint.Job) { +func LintAssertNotNil(pass *analysis.Pass) (interface{}, error) { isNilCheck := func(ident *ast.Ident, expr ast.Expr) bool { xbinop, ok := expr.(*ast.BinaryExpr) if !ok || xbinop.Op != token.NEQ { @@ -1282,7 +1253,7 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { if !ok || xident.Obj != ident.Obj { return false } - if !IsNil(j, xbinop.Y) { + if !IsNil(pass, xbinop.Y) { return false } return true @@ -1320,7 +1291,7 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { !(isNilCheck(assertIdent, binop.Y) && isOKCheck(assignIdent, binop.X)) { return } - j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) + ReportfFG(pass, ifstmt.Pos(), "when %s is true, %s can't be nil", Render(pass, assignIdent), Render(pass, assertIdent)) } fn2 := func(node ast.Node) { // Check that outer ifstmt is an 'if x != nil {}' @@ -1345,7 +1316,7 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { if !ok { return } - if !IsNil(j, binop.Y) { + if !IsNil(pass, binop.Y) { return } @@ -1376,13 +1347,14 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { if !isOKCheck(assignIdent, ifstmt.Cond) { return } - j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) + ReportfFG(pass, ifstmt.Pos(), "when %s is true, %s can't be nil", Render(pass, assignIdent), Render(pass, assertIdent)) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn1) - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn2) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn1) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn2) + return nil, nil } -func (c *Checker) LintDeclareAssign(j *lint.Job) { +func LintDeclareAssign(pass *analysis.Pass) 
(interface{}, error) { hasMultipleAssignments := func(root ast.Node, ident *ast.Ident) bool { num := 0 ast.Inspect(root, func(node ast.Node) bool { @@ -1440,20 +1412,21 @@ func (c *Checker) LintDeclareAssign(j *lint.Job) { continue } - if refersTo(j, assign.Rhs[0], ident) { + if refersTo(pass, assign.Rhs[0], ident) { continue } if hasMultipleAssignments(block, ident) { continue } - j.Errorf(decl, "should merge variable declaration with assignment on next line") + ReportfFG(pass, decl.Pos(), "should merge variable declaration with assignment on next line") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintRedundantBreak(j *lint.Job) { +func LintRedundantBreak(pass *analysis.Pass) (interface{}, error) { fn1 := func(node ast.Node) { clause := node.(*ast.CaseClause) if len(clause.Body) < 2 { @@ -1463,7 +1436,7 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { if !ok || branch.Tok != token.BREAK || branch.Label != nil { return } - j.Errorf(branch, "redundant break statement") + ReportfFG(pass, branch.Pos(), "redundant break statement") } fn2 := func(node ast.Node) { var ret *ast.FieldList @@ -1490,10 +1463,11 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { } // we don't need to check rst.Results as we already // checked x.Type.Results to be nil. 
- j.Errorf(rst, "redundant return statement") + ReportfFG(pass, rst.Pos(), "redundant return statement") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CaseClause)(nil)}, fn1) - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CaseClause)(nil)}, fn1) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) + return nil, nil } func isStringer(T types.Type) bool { @@ -1520,56 +1494,58 @@ func isStringer(T types.Type) bool { return true } -func (c *Checker) LintRedundantSprintf(j *lint.Job) { +func LintRedundantSprintf(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAST(j, call, "fmt.Sprintf") { + if !IsCallToAST(pass, call, "fmt.Sprintf") { return } if len(call.Args) != 2 { return } - if s, ok := ExprToString(j, call.Args[Arg("fmt.Sprintf.format")]); !ok || s != "%s" { + if s, ok := ExprToString(pass, call.Args[Arg("fmt.Sprintf.format")]); !ok || s != "%s" { return } arg := call.Args[Arg("fmt.Sprintf.a[0]")] - typ := j.Pkg.TypesInfo.TypeOf(arg) + typ := pass.TypesInfo.TypeOf(arg) if isStringer(typ) { - j.Errorf(call, "should use String() instead of fmt.Sprintf") + pass.Reportf(call.Pos(), "should use String() instead of fmt.Sprintf") return } if typ.Underlying() == types.Universe.Lookup("string").Type() { if typ == types.Universe.Lookup("string").Type() { - j.Errorf(call, "the argument is already a string, there's no need to use fmt.Sprintf") + ReportfFG(pass, call.Pos(), "the argument is already a string, there's no need to use fmt.Sprintf") } else { - j.Errorf(call, "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf") + ReportfFG(pass, call.Pos(), "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf") } } } - 
j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { +func LintErrorsNewSprintf(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if !IsCallToAST(j, node, "errors.New") { + if !IsCallToAST(pass, node, "errors.New") { return } call := node.(*ast.CallExpr) - if !IsCallToAST(j, call.Args[Arg("errors.New.text")], "fmt.Sprintf") { + if !IsCallToAST(pass, call.Args[Arg("errors.New.text")], "fmt.Sprintf") { return } - j.Errorf(node, "should use fmt.Errorf(...) instead of errors.New(fmt.Sprintf(...))") + ReportfFG(pass, node.Pos(), "should use fmt.Errorf(...) instead of errors.New(fmt.Sprintf(...))") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintRangeStringRunes(j *lint.Job) { - sharedcheck.CheckRangeStringRunes(j) +func LintRangeStringRunes(pass *analysis.Pass) (interface{}, error) { + return sharedcheck.CheckRangeStringRunes(pass) } -func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { +func LintNilCheckAroundRange(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { ifstmt := node.(*ast.IfStmt) cond, ok := ifstmt.Cond.(*ast.BinaryExpr) @@ -1577,7 +1553,7 @@ func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { return } - if cond.Op != token.NEQ || !IsNil(j, cond.Y) || len(ifstmt.Body.List) != 1 { + if cond.Op != token.NEQ || !IsNil(pass, cond.Y) || len(ifstmt.Body.List) != 1 { return } @@ -1596,15 +1572,16 @@ func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { if ifXIdent.Obj != rangeXIdent.Obj { return } - switch j.Pkg.TypesInfo.TypeOf(rangeXIdent).(type) { + switch pass.TypesInfo.TypeOf(rangeXIdent).(type) { case *types.Slice, *types.Map: - j.Errorf(node, 
"unnecessary nil check around range") + ReportfFG(pass, node.Pos(), "unnecessary nil check around range") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + return nil, nil } -func isPermissibleSort(j *lint.Job, node ast.Node) bool { +func isPermissibleSort(pass *analysis.Pass, node ast.Node) bool { call := node.(*ast.CallExpr) typeconv, ok := call.Args[0].(*ast.CallExpr) if !ok { @@ -1615,7 +1592,7 @@ func isPermissibleSort(j *lint.Job, node ast.Node) bool { if !ok { return true } - name := SelectorName(j, sel) + name := SelectorName(pass, sel) switch name { case "sort.IntSlice", "sort.Float64Slice", "sort.StringSlice": default: @@ -1625,7 +1602,12 @@ func isPermissibleSort(j *lint.Job, node ast.Node) bool { return false } -func (c *Checker) LintSortHelpers(j *lint.Job) { +func LintSortHelpers(pass *analysis.Pass) (interface{}, error) { + type Error struct { + node lint.Positioner + msg string + } + var allErrors []Error fn := func(node ast.Node) { var body *ast.BlockStmt switch node := node.(type) { @@ -1640,27 +1622,23 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { return } - type Error struct { - node lint.Positioner - msg string - } var errors []Error permissible := false fnSorts := func(node ast.Node) bool { if permissible { return false } - if !IsCallToAST(j, node, "sort.Sort") { + if !IsCallToAST(pass, node, "sort.Sort") { return true } - if isPermissibleSort(j, node) { + if isPermissibleSort(pass, node) { permissible = true return false } call := node.(*ast.CallExpr) typeconv := call.Args[Arg("sort.Sort.data")].(*ast.CallExpr) sel := typeconv.Fun.(*ast.SelectorExpr) - name := SelectorName(j, sel) + name := SelectorName(pass, sel) switch name { case "sort.IntSlice": @@ -1677,15 +1655,24 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { if permissible { return } - for _, err := range errors { - j.Errorf(err.node, "%s", err.msg) - } - 
return + allErrors = append(allErrors, errors...) + } + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn) + sort.Slice(allErrors, func(i, j int) bool { + return allErrors[i].node.Pos() < allErrors[j].node.Pos() + }) + var prev token.Pos + for _, err := range allErrors { + if err.node.Pos() == prev { + continue + } + prev = err.node.Pos() + ReportfFG(pass, err.node.Pos(), "%s", err.msg) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn) + return nil, nil } -func (c *Checker) LintGuardedDelete(j *lint.Job) { +func LintGuardedDelete(pass *analysis.Pass) (interface{}, error) { isCommaOkMapIndex := func(stmt ast.Stmt) (b *ast.Ident, m ast.Expr, key ast.Expr, ok bool) { // Has to be of the form `_, = [] @@ -1707,7 +1694,7 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { if !ok { return nil, nil, nil, false } - if _, ok := j.Pkg.TypesInfo.TypeOf(index.X).(*types.Map); !ok { + if _, ok := pass.TypesInfo.TypeOf(index.X).(*types.Map); !ok { return nil, nil, nil, false } key = index.Index @@ -1729,25 +1716,26 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { if !ok { return } - if !IsCallToAST(j, call, "delete") { + if !IsCallToAST(pass, call, "delete") { return } b, m, key, ok := isCommaOkMapIndex(stmt.Init) if !ok { return } - if cond, ok := stmt.Cond.(*ast.Ident); !ok || j.Pkg.TypesInfo.ObjectOf(cond) != j.Pkg.TypesInfo.ObjectOf(b) { + if cond, ok := stmt.Cond.(*ast.Ident); !ok || pass.TypesInfo.ObjectOf(cond) != pass.TypesInfo.ObjectOf(b) { return } - if Render(j, call.Args[0]) != Render(j, m) || Render(j, call.Args[1]) != Render(j, key) { + if Render(pass, call.Args[0]) != Render(pass, m) || Render(pass, call.Args[1]) != Render(pass, key) { return } - j.Errorf(stmt, "unnecessary guard around call to delete") + ReportfFG(pass, stmt.Pos(), "unnecessary guard around call to delete") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + 
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { +func LintSimplifyTypeSwitch(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { stmt := node.(*ast.TypeSwitchStmt) if stmt.Init != nil { @@ -1764,7 +1752,7 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { if !ok { return } - x := j.Pkg.TypesInfo.ObjectOf(ident) + x := pass.TypesInfo.ObjectOf(ident) var allOffenders []ast.Node for _, clause := range stmt.Body.List { clause := clause.(*ast.CaseClause) @@ -1783,12 +1771,12 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { hasUnrelatedAssertion = true return false } - if j.Pkg.TypesInfo.ObjectOf(ident) != x { + if pass.TypesInfo.ObjectOf(ident) != x { hasUnrelatedAssertion = true return false } - if !types.Identical(j.Pkg.TypesInfo.TypeOf(clause.List[0]), j.Pkg.TypesInfo.TypeOf(assert2.Type)) { + if !types.Identical(pass.TypesInfo.TypeOf(clause.List[0]), pass.TypesInfo.TypeOf(assert2.Type)) { hasUnrelatedAssertion = true return false } @@ -1807,11 +1795,12 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { if len(allOffenders) != 0 { at := "" for _, offender := range allOffenders { - pos := lint.DisplayPosition(j.Pkg.Fset, offender.Pos()) + pos := lint.DisplayPosition(pass.Fset, offender.Pos()) at += "\n\t" + pos.String() } - j.Errorf(expr, "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(j, ident), Render(j, ident), at) + ReportfFG(pass, expr.Pos(), "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(pass, ident), Render(pass, ident), at) } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, 
fn) + return nil, nil } diff --git a/simple/lint_test.go b/simple/lint_test.go index 4a746768f..f8fa32e5b 100644 --- a/simple/lint_test.go +++ b/simple/lint_test.go @@ -3,9 +3,71 @@ package simple import ( "testing" - "honnef.co/go/tools/lint/testutil" + "golang.org/x/tools/go/analysis/analysistest" ) func TestAll(t *testing.T) { - testutil.TestAll(t, NewChecker(), "") + checks := map[string][]struct { + dir string + version string + }{ + "S1000": {{dir: "single-case-select"}}, + "S1001": {{dir: "copy"}}, + "S1002": {{dir: "bool-cmp"}}, + "S1003": {{dir: "contains"}}, + "S1004": {{dir: "compare"}}, + "S1005": { + {dir: "LintBlankOK"}, + {dir: "receive-blank"}, + {dir: "range_go13", version: "1.3"}, + {dir: "range_go14", version: "1.4"}, + }, + "S1006": { + {dir: "for-true"}, + {dir: "generated"}, + }, + "S1007": {{dir: "regexp-raw"}}, + "S1008": {{dir: "if-return"}}, + "S1009": {{dir: "nil-len"}}, + "S1010": {{dir: "slicing"}}, + "S1011": {{dir: "loop-append"}}, + "S1012": {{dir: "time-since"}}, + "S1016": { + {dir: "convert"}, + {dir: "convert_go17", version: "1.7"}, + {dir: "convert_go18", version: "1.8"}, + }, + "S1017": {{dir: "trim"}}, + "S1018": {{dir: "LintLoopSlide"}}, + "S1019": {{dir: "LintMakeLenCap"}}, + "S1020": {{dir: "LintAssertNotNil"}}, + "S1021": {{dir: "LintDeclareAssign"}}, + "S1023": { + {dir: "LintRedundantBreak"}, + {dir: "LintRedundantReturn"}, + }, + "S1024": { + {dir: "LimeTimeUntil_go17", version: "1.7"}, + {dir: "LimeTimeUntil_go18", version: "1.8"}, + }, + "S1025": {{dir: "LintRedundantSprintf"}}, + "S1028": {{dir: "LintErrorsNewSprintf"}}, + "S1029": {{dir: "LintRangeStringRunes"}}, + "S1030": {{dir: "LintBytesBufferConversions"}}, + "S1031": {{dir: "LintNilCheckAroundRange"}}, + "S1032": {{dir: "LintSortHelpers"}}, + "S1033": {{dir: "LintGuardedDelete"}}, + "S1034": {{dir: "LintSimplifyTypeSwitch"}}, + } + for check, dirs := range checks { + a := Analyzers[check] + for _, dir := range dirs { + if dir.version != "" { + if err := 
a.Flags.Lookup("go").Value.Set(dir.version); err != nil { + t.Fatal(err) + } + } + analysistest.Run(t, analysistest.TestData(), a, dir.dir) + } + } } diff --git a/simple/testdata/src/LimeTimeUntil_go18/LimeTimeUntil_go18.go b/simple/testdata/src/LimeTimeUntil_go18/LimeTimeUntil_go18.go index 7f32d7b27..359b9db68 100644 --- a/simple/testdata/src/LimeTimeUntil_go18/LimeTimeUntil_go18.go +++ b/simple/testdata/src/LimeTimeUntil_go18/LimeTimeUntil_go18.go @@ -3,7 +3,7 @@ package pkg import "time" func fn(t time.Time) { - t.Sub(time.Now()) // MATCH "time.Until" + t.Sub(time.Now()) // want `time\.Until` t.Sub(t) t2 := time.Now() t.Sub(t2) diff --git a/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go b/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go index f4b6b50c0..f15d0842e 100644 --- a/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go +++ b/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go @@ -1,9 +1,9 @@ package pkg func fn(i interface{}, x interface{}) { - if _, ok := i.(string); ok && i != nil { // MATCH "when ok is true, i can't be nil" + if _, ok := i.(string); ok && i != nil { // want `when ok is true, i can't be nil` } - if _, ok := i.(string); i != nil && ok { // MATCH "when ok is true, i can't be nil" + if _, ok := i.(string); i != nil && ok { // want `when ok is true, i can't be nil` } if _, ok := i.(string); i != nil || ok { } @@ -12,7 +12,7 @@ func fn(i interface{}, x interface{}) { if _, ok := i.(string); i == nil && ok { } if i != nil { - if _, ok := i.(string); ok { // MATCH "when ok is true, i can't be nil" + if _, ok := i.(string); ok { // want `when ok is true, i can't be nil` } } if i != nil { diff --git a/simple/testdata/src/LintBlankOK/LintBlankOK.go b/simple/testdata/src/LintBlankOK/LintBlankOK.go index bb3c76760..351328674 100644 --- a/simple/testdata/src/LintBlankOK/LintBlankOK.go +++ b/simple/testdata/src/LintBlankOK/LintBlankOK.go @@ -5,8 +5,8 @@ func fn() { var ch chan int var fn func() (int, bool) - x, _ := m[0] 
// MATCH "should write x := m[0] instead of x, _ := m[0]" - x, _ = <-ch // MATCH "should write x = <-ch instead of x, _ = <-ch" + x, _ := m[0] // want `should write x := m\[0\] instead of x, _ := m\[0\]` + x, _ = <-ch // want `should write x = <-ch instead of x, _ = <-ch` x, _ = fn() _ = x } diff --git a/simple/testdata/src/LintBytesBufferConversions/LintBytesBufferConversions.go b/simple/testdata/src/LintBytesBufferConversions/LintBytesBufferConversions.go index a21e403f9..de5d1b7da 100644 --- a/simple/testdata/src/LintBytesBufferConversions/LintBytesBufferConversions.go +++ b/simple/testdata/src/LintBytesBufferConversions/LintBytesBufferConversions.go @@ -6,12 +6,12 @@ import ( func fn() { buf := bytes.NewBufferString("str") - _ = string(buf.Bytes()) // MATCH "should use buf.String() instead of string(buf.Bytes())" - _ = []byte(buf.String()) // MATCH "should use buf.Bytes() instead of []byte(buf.String())" + _ = string(buf.Bytes()) // want `should use buf\.String\(\) instead of string\(buf\.Bytes\(\)\)` + _ = []byte(buf.String()) // want `should use buf\.Bytes\(\) instead of \[\]byte\(buf\.String\(\)\)` m := map[string]*bytes.Buffer{"key": buf} - _ = string(m["key"].Bytes()) // MATCH "should use m["key"].String() instead of string(m["key"].Bytes())" - _ = []byte(m["key"].String()) // MATCH "should use m["key"].Bytes() instead of []byte(m["key"].String())" + _ = string(m["key"].Bytes()) // want `should use m\["key"\]\.String\(\) instead of string\(m\["key"\]\.Bytes\(\)\)` + _ = []byte(m["key"].String()) // want `should use m\["key"\]\.Bytes\(\) instead of \[\]byte\(m\["key"\]\.String\(\)\)` string := func(_ interface{}) interface{} { return nil diff --git a/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go b/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go index 424a3668f..66cdd6c9e 100644 --- a/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go +++ b/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go @@ -1,16 +1,16 @@ package 
pkg func fn() { - var x int // MATCH "should merge variable declaration with assignment on next line" + var x int // want `should merge variable declaration with assignment on next line` x = 1 _ = x - var y interface{} // MATCH "should merge variable declaration with assignment on next line" + var y interface{} // want `should merge variable declaration with assignment on next line` y = 1 _ = y if true { - var x string // MATCH "should merge variable declaration with assignment on next line" + var x string // want `should merge variable declaration with assignment on next line` x = "" _ = x } diff --git a/simple/testdata/src/LintErrorsNewSprintf/LintErrorsNewSprintf.go b/simple/testdata/src/LintErrorsNewSprintf/LintErrorsNewSprintf.go index d49cd418e..c05d4f519 100644 --- a/simple/testdata/src/LintErrorsNewSprintf/LintErrorsNewSprintf.go +++ b/simple/testdata/src/LintErrorsNewSprintf/LintErrorsNewSprintf.go @@ -8,5 +8,5 @@ import ( func fn() { _ = fmt.Errorf("%d", 0) _ = errors.New("") - _ = errors.New(fmt.Sprintf("%d", 0)) // MATCH "should use fmt.Errorf" + _ = errors.New(fmt.Sprintf("%d", 0)) // want `should use fmt\.Errorf` } diff --git a/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go b/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go index 2a4f34332..020a80e04 100644 --- a/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go +++ b/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go @@ -2,7 +2,7 @@ package pkg func fn(m map[int]int) { - if _, ok := m[0]; ok { // MATCH "unnecessary guard" + if _, ok := m[0]; ok { // want `unnecessary guard` delete(m, 0) } if _, ok := m[0]; !ok { @@ -17,7 +17,7 @@ func fn(m map[int]int) { } var key int - if _, ok := m[key]; ok { // MATCH "unnecessary guard" + if _, ok := m[key]; ok { // want `unnecessary guard` delete(m, key) } if _, ok := m[key]; ok { diff --git a/simple/testdata/src/LintLoopSlide/LintLoopSlide.go b/simple/testdata/src/LintLoopSlide/LintLoopSlide.go index f3d95a48a..edb9c567a 
100644 --- a/simple/testdata/src/LintLoopSlide/LintLoopSlide.go +++ b/simple/testdata/src/LintLoopSlide/LintLoopSlide.go @@ -5,7 +5,7 @@ func fn() { var bs []int var offset int - for i := 0; i < n; i++ { // MATCH "should use copy(bs[:n], bs[offset:]) instead" + for i := 0; i < n; i++ { // want `should use copy\(bs\[:n\], bs\[offset:\]\) instead` bs[i] = bs[offset+i] } diff --git a/simple/testdata/src/LintMakeLenCap/LintMakeLenCap.go b/simple/testdata/src/LintMakeLenCap/LintMakeLenCap.go index 918d8bb04..3c108a08c 100644 --- a/simple/testdata/src/LintMakeLenCap/LintMakeLenCap.go +++ b/simple/testdata/src/LintMakeLenCap/LintMakeLenCap.go @@ -9,11 +9,11 @@ func fn() { _ = make([]int, 0) // length is mandatory for slices, don't suggest removal _ = make(s, 0) // length is mandatory for slices, don't suggest removal _ = make(chan int, c) // constant of 0 may be due to debugging, math or platform-specific code - _ = make(chan int, 0) // MATCH "should use make(chan int) instead" - _ = make(ch, 0) // MATCH "should use make(ch) instead" - _ = make(map[int]int, 0) // MATCH "should use make(map[int]int) instead" - _ = make([]int, 1, 1) // MATCH "should use make([]int, 1) instead" - _ = make([]int, x, x) // MATCH "should use make([]int, x) instead" + _ = make(chan int, 0) // want `should use make\(chan int\) instead` + _ = make(ch, 0) // want `should use make\(ch\) instead` + _ = make(map[int]int, 0) // want `should use make\(map\[int\]int\) instead` + _ = make([]int, 1, 1) // want `should use make\(\[\]int, 1\) instead` + _ = make([]int, x, x) // want `should use make\(\[\]int, x\) instead` _ = make([]int, 1, 2) _ = make([]int, x, y) } diff --git a/simple/testdata/src/LintNilCheckAroundRange/LintNilCheckAroundRange.go b/simple/testdata/src/LintNilCheckAroundRange/LintNilCheckAroundRange.go index d18915261..045d5d0ae 100644 --- a/simple/testdata/src/LintNilCheckAroundRange/LintNilCheckAroundRange.go +++ b/simple/testdata/src/LintNilCheckAroundRange/LintNilCheckAroundRange.go @@ 
-18,14 +18,14 @@ func main() { } } - if str != nil { // MATCH /unnecessary nil check around range/ + if str != nil { // want `unnecessary nil check around range` for _, s := range str { s = s + "A" } } var nilMap map[string]int - if nilMap != nil { // MATCH /unnecessary nil check around range/ + if nilMap != nil { // want `unnecessary nil check around range` for key, value := range nilMap { nilMap[key] = value + 1 } diff --git a/simple/testdata/src/LintRangeStringRunes/LintRangeStringRunes.go b/simple/testdata/src/LintRangeStringRunes/LintRangeStringRunes.go index 59d349ac0..b6761a187 100644 --- a/simple/testdata/src/LintRangeStringRunes/LintRangeStringRunes.go +++ b/simple/testdata/src/LintRangeStringRunes/LintRangeStringRunes.go @@ -5,7 +5,7 @@ func fn(s string) { println(r) } - for _, r := range []rune(s) { // MATCH "should range over string" + for _, r := range []rune(s) { // want `should range over string` println(r) } @@ -15,7 +15,7 @@ func fn(s string) { } x := []rune(s) - for _, r := range x { // MATCH "should range over string" + for _, r := range x { // want `should range over string` println(r) } diff --git a/simple/testdata/src/LintRedundantBreak/LintRedundantBreak.go b/simple/testdata/src/LintRedundantBreak/LintRedundantBreak.go index 1fa78295b..c7d42f79d 100644 --- a/simple/testdata/src/LintRedundantBreak/LintRedundantBreak.go +++ b/simple/testdata/src/LintRedundantBreak/LintRedundantBreak.go @@ -4,7 +4,7 @@ func fn(x int) { switch x { case 1: println() - break // MATCH /redundant break/ + break // want `redundant break` case 2: println() case 3: diff --git a/simple/testdata/src/LintRedundantReturn/LintRedundantReturn.go b/simple/testdata/src/LintRedundantReturn/LintRedundantReturn.go index 90325af17..410eea20a 100644 --- a/simple/testdata/src/LintRedundantReturn/LintRedundantReturn.go +++ b/simple/testdata/src/LintRedundantReturn/LintRedundantReturn.go @@ -1,11 +1,11 @@ package pkg func fn1() { - return // MATCH /redundant return/ + return // want 
`redundant return` } func fn2(a int) { - return // MATCH /redundant return/ + return // want `redundant return` } func fn3() int { @@ -30,11 +30,11 @@ func fn6() { func fn7() { return println("foo") - return // MATCH /redundant return/ + return // want `redundant return` } func fn8() { _ = func() { - return // MATCH /redundant return/ + return // want `redundant return` } } diff --git a/simple/testdata/src/LintRedundantSprintf/LintRedundantSprintf.go b/simple/testdata/src/LintRedundantSprintf/LintRedundantSprintf.go index 7cd9dffd5..d2c43d820 100644 --- a/simple/testdata/src/LintRedundantSprintf/LintRedundantSprintf.go +++ b/simple/testdata/src/LintRedundantSprintf/LintRedundantSprintf.go @@ -21,14 +21,14 @@ func fn() { var t4 T4 var t5 T5 var t6 T6 - _ = fmt.Sprintf("%s", "test") // MATCH "is already a string" - _ = fmt.Sprintf("%s", t1) // MATCH "is a string" - _ = fmt.Sprintf("%s", t2) // MATCH "is a string" - _ = fmt.Sprintf("%s", t3) // MATCH "should use String() instead of fmt.Sprintf" - _ = fmt.Sprintf("%s", t3.String()) // MATCH "is already a string" + _ = fmt.Sprintf("%s", "test") // want `is already a string` + _ = fmt.Sprintf("%s", t1) // want `is a string` + _ = fmt.Sprintf("%s", t2) // want `is a string` + _ = fmt.Sprintf("%s", t3) // want `should use String\(\) instead of fmt\.Sprintf` + _ = fmt.Sprintf("%s", t3.String()) // want `is already a string` _ = fmt.Sprintf("%s", t4) _ = fmt.Sprintf("%s", t5) _ = fmt.Sprintf("%s %s", t1, t2) _ = fmt.Sprintf("%v", t1) - _ = fmt.Sprintf("%s", t6) // MATCH "should use String() instead of fmt.Sprintf" + _ = fmt.Sprintf("%s", t6) // want `should use String\(\) instead of fmt\.Sprintf` } diff --git a/simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go b/simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go index 275a388bb..892f8447e 100644 --- a/simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go +++ 
b/simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go @@ -14,7 +14,7 @@ func fn(x, y interface{}) { case int: fmt.Println(x.(int), y.(int)) } - switch x.(type) { // MATCH "assigning the result of this type assertion" + switch x.(type) { // want `assigning the result of this type assertion` case int: fmt.Println(x.(int)) } diff --git a/simple/testdata/src/LintSortHelpers/LintSortHelpers.go b/simple/testdata/src/LintSortHelpers/LintSortHelpers.go index 442df89a7..fb7722932 100644 --- a/simple/testdata/src/LintSortHelpers/LintSortHelpers.go +++ b/simple/testdata/src/LintSortHelpers/LintSortHelpers.go @@ -10,17 +10,17 @@ func (s MyIntSlice) Swap(i, j int) {} func fn1() { var a []int - sort.Sort(sort.IntSlice(a)) // MATCH "sort.Ints" + sort.Sort(sort.IntSlice(a)) // want `sort\.Ints` } func fn2() { var b []float64 - sort.Sort(sort.Float64Slice(b)) // MATCH "sort.Float64s" + sort.Sort(sort.Float64Slice(b)) // want `sort\.Float64s` } func fn3() { var c []string - sort.Sort(sort.StringSlice(c)) // MATCH "sort.Strings" + sort.Sort(sort.StringSlice(c)) // want `sort\.Strings` } func fn4() { @@ -49,18 +49,18 @@ func fn7() { func fn8() { var a []int - sort.Sort(sort.IntSlice(a)) // MATCH "sort.Ints" - sort.Sort(sort.IntSlice(a)) // MATCH "sort.Ints" + sort.Sort(sort.IntSlice(a)) // want `sort\.Ints` + sort.Sort(sort.IntSlice(a)) // want `sort\.Ints` } func fn9() { func() { var a []int - sort.Sort(sort.IntSlice(a)) // MATCH "sort.Ints" + sort.Sort(sort.IntSlice(a)) // want `sort\.Ints` }() } func fn10() { var a MyIntSlice - sort.Sort(sort.IntSlice(a)) // MATCH "sort.Ints" + sort.Sort(sort.IntSlice(a)) // want `sort\.Ints` } diff --git a/simple/testdata/src/bool-cmp/bool-cmp.go b/simple/testdata/src/bool-cmp/bool-cmp.go index 83224a956..0c73bc45f 100644 --- a/simple/testdata/src/bool-cmp/bool-cmp.go +++ b/simple/testdata/src/bool-cmp/bool-cmp.go @@ -9,42 +9,42 @@ func fn() { const t T = false if x == t { } - if fn1() == true { // MATCH "simplified to fn1()" + if 
fn1() == true { // want `simplified to fn1\(\)` } - if fn1() != true { // MATCH "simplified to !fn1()" + if fn1() != true { // want `simplified to !fn1\(\)` } - if fn1() == false { // MATCH "simplified to !fn1()" + if fn1() == false { // want `simplified to !fn1\(\)` } - if fn1() != false { // MATCH "simplified to fn1()" + if fn1() != false { // want `simplified to fn1\(\)` } - if fn1() && (fn1() || fn1()) || (fn1() && fn1()) == true { // MATCH "simplified to (fn1() && fn1())" + if fn1() && (fn1() || fn1()) || (fn1() && fn1()) == true { // want `simplified to \(fn1\(\) && fn1\(\)\)` } - if (fn1() && fn2()) == false { // MATCH "simplified to !(fn1() && fn2())" + if (fn1() && fn2()) == false { // want `simplified to !\(fn1\(\) && fn2\(\)\)` } var y bool - for y != true { // MATCH /simplified to !y/ + for y != true { // want `simplified to !y` } - if !y == true { // MATCH /simplified to !y/ + if !y == true { // want `simplified to !y` } - if !y == false { // MATCH /simplified to y/ + if !y == false { // want `simplified to y` } - if !y != true { // MATCH /simplified to y/ + if !y != true { // want `simplified to y` } - if !y != false { // MATCH /simplified to !y/ + if !y != false { // want `simplified to !y` } - if !!y == false { // MATCH /simplified to !y/ + if !!y == false { // want `simplified to !y` } - if !!!y == false { // MATCH /simplified to y/ + if !!!y == false { // want `simplified to y` } - if !!y == true { // MATCH /simplified to y/ + if !!y == true { // want `simplified to y` } - if !!!y == true { // MATCH /simplified to !y/ + if !!!y == true { // want `simplified to !y` } - if !!y != true { // MATCH /simplified to !y/ + if !!y != true { // want `simplified to !y` } - if !!!y != true { // MATCH /simplified to y/ + if !!!y != true { // want `simplified to y` } if !y == !false { // not matched because we expect true/false on one side, not !false } diff --git a/simple/testdata/src/compare/compare.go b/simple/testdata/src/compare/compare.go index 
18d4b17c9..435191635 100644 --- a/simple/testdata/src/compare/compare.go +++ b/simple/testdata/src/compare/compare.go @@ -3,8 +3,8 @@ package pkg import "bytes" func fn() { - _ = bytes.Compare(nil, nil) == 0 // MATCH / bytes.Equal/ - _ = bytes.Compare(nil, nil) != 0 // MATCH /!bytes.Equal/ + _ = bytes.Compare(nil, nil) == 0 // want ` bytes.Equal` + _ = bytes.Compare(nil, nil) != 0 // want `!bytes.Equal` _ = bytes.Compare(nil, nil) > 0 _ = bytes.Compare(nil, nil) < 0 } diff --git a/simple/testdata/src/contains/contains.go b/simple/testdata/src/contains/contains.go index eaf34d312..a509d91e0 100644 --- a/simple/testdata/src/contains/contains.go +++ b/simple/testdata/src/contains/contains.go @@ -6,34 +6,34 @@ import ( ) func fn() { - _ = strings.IndexRune("", 'x') > -1 // MATCH / strings.ContainsRune/ - _ = strings.IndexRune("", 'x') >= 0 // MATCH / strings.ContainsRune/ + _ = strings.IndexRune("", 'x') > -1 // want ` strings\.ContainsRune` + _ = strings.IndexRune("", 'x') >= 0 // want ` strings\.ContainsRune` _ = strings.IndexRune("", 'x') > 0 _ = strings.IndexRune("", 'x') >= -1 - _ = strings.IndexRune("", 'x') != -1 // MATCH / strings.ContainsRune/ - _ = strings.IndexRune("", 'x') == -1 // MATCH /!strings.ContainsRune/ + _ = strings.IndexRune("", 'x') != -1 // want ` strings\.ContainsRune` + _ = strings.IndexRune("", 'x') == -1 // want `!strings\.ContainsRune` _ = strings.IndexRune("", 'x') != 0 - _ = strings.IndexRune("", 'x') < 0 // MATCH /!strings.ContainsRune/ + _ = strings.IndexRune("", 'x') < 0 // want `!strings\.ContainsRune` - _ = strings.IndexAny("", "") > -1 // MATCH / strings.ContainsAny/ - _ = strings.IndexAny("", "") >= 0 // MATCH / strings.ContainsAny/ + _ = strings.IndexAny("", "") > -1 // want ` strings\.ContainsAny` + _ = strings.IndexAny("", "") >= 0 // want ` strings\.ContainsAny` _ = strings.IndexAny("", "") > 0 _ = strings.IndexAny("", "") >= -1 - _ = strings.IndexAny("", "") != -1 // MATCH / strings.ContainsAny/ - _ = strings.IndexAny("", "") 
== -1 // MATCH /!strings.ContainsAny/ + _ = strings.IndexAny("", "") != -1 // want ` strings\.ContainsAny` + _ = strings.IndexAny("", "") == -1 // want `!strings\.ContainsAny` _ = strings.IndexAny("", "") != 0 - _ = strings.IndexAny("", "") < 0 // MATCH /!strings.ContainsAny/ + _ = strings.IndexAny("", "") < 0 // want `!strings\.ContainsAny` - _ = strings.Index("", "") > -1 // MATCH / strings.Contains/ - _ = strings.Index("", "") >= 0 // MATCH / strings.Contains/ + _ = strings.Index("", "") > -1 // want ` strings\.Contains` + _ = strings.Index("", "") >= 0 // want ` strings\.Contains` _ = strings.Index("", "") > 0 _ = strings.Index("", "") >= -1 - _ = strings.Index("", "") != -1 // MATCH / strings.Contains/ - _ = strings.Index("", "") == -1 // MATCH /!strings.Contains/ + _ = strings.Index("", "") != -1 // want ` strings\.Contains` + _ = strings.Index("", "") == -1 // want `!strings\.Contains` _ = strings.Index("", "") != 0 - _ = strings.Index("", "") < 0 // MATCH /!strings.Contains/ + _ = strings.Index("", "") < 0 // want `!strings\.Contains` - _ = bytes.IndexRune(nil, 'x') > -1 // MATCH / bytes.ContainsRune/ - _ = bytes.IndexAny(nil, "") > -1 // MATCH / bytes.ContainsAny/ - _ = bytes.Index(nil, nil) > -1 // MATCH / bytes.Contains/ + _ = bytes.IndexRune(nil, 'x') > -1 // want ` bytes\.ContainsRune` + _ = bytes.IndexAny(nil, "") > -1 // want ` bytes\.ContainsAny` + _ = bytes.Index(nil, nil) > -1 // want ` bytes\.Contains` } diff --git a/simple/testdata/src/convert/convert.go b/simple/testdata/src/convert/convert.go index 4b20138ac..614105bd7 100644 --- a/simple/testdata/src/convert/convert.go +++ b/simple/testdata/src/convert/convert.go @@ -15,10 +15,10 @@ type t3 t1 func fn() { v1 := t1{1, 2} v2 := t2{1, 2} - _ = t2{v1.a, v1.b} // MATCH /should convert v1/ - _ = t2{a: v1.a, b: v1.b} // MATCH /should convert v1/ - _ = t2{b: v1.b, a: v1.a} // MATCH /should convert v1/ - _ = t3{v1.a, v1.b} // MATCH /should convert v1/ + _ = t2{v1.a, v1.b} // want `should convert v1` + 
_ = t2{a: v1.a, b: v1.b} // want `should convert v1` + _ = t2{b: v1.b, a: v1.a} // want `should convert v1` + _ = t3{v1.a, v1.b} // want `should convert v1` _ = t3{v1.a, v2.b} diff --git a/simple/testdata/src/convert_go17/convert.go b/simple/testdata/src/convert_go17/convert.go index 0ff30ef36..2afcf6a1a 100644 --- a/simple/testdata/src/convert_go17/convert.go +++ b/simple/testdata/src/convert_go17/convert.go @@ -17,6 +17,6 @@ type t3 struct { func fn() { v1 := t1{1, 2} - _ = t2{v1.a, v1.b} // MATCH /should convert v1/ + _ = t2{v1.a, v1.b} // want `should convert v1` _ = t3{v1.a, v1.b} } diff --git a/simple/testdata/src/convert_go18/convert.go b/simple/testdata/src/convert_go18/convert.go index e9887bcbe..f90244970 100644 --- a/simple/testdata/src/convert_go18/convert.go +++ b/simple/testdata/src/convert_go18/convert.go @@ -17,6 +17,6 @@ type t3 struct { func fn() { v1 := t1{1, 2} - _ = t2{v1.a, v1.b} // MATCH /should convert v1/ - _ = t3{v1.a, v1.b} // MATCH /should convert v1/ + _ = t2{v1.a, v1.b} // want `should convert v1` + _ = t3{v1.a, v1.b} // want `should convert v1` } diff --git a/simple/testdata/src/copy/copy.go b/simple/testdata/src/copy/copy.go index d4c130b2c..f8396f08b 100644 --- a/simple/testdata/src/copy/copy.go +++ b/simple/testdata/src/copy/copy.go @@ -2,11 +2,11 @@ package pkg func fn() { var b1, b2 []byte - for i, v := range b1 { // MATCH /should use copy/ + for i, v := range b1 { // want `should use copy` b2[i] = v } - for i := range b1 { // MATCH /should use copy/ + for i := range b1 { // want `should use copy` b2[i] = b1[i] } @@ -18,7 +18,7 @@ func fn() { } var b3, b4 []*byte - for i := range b3 { // MATCH /should use copy/ + for i := range b3 { // want `should use copy` b4[i] = b3[i] } diff --git a/simple/testdata/src/for-true/for-true.go b/simple/testdata/src/for-true/for-true.go index b806eaeff..f3cd0b3f2 100644 --- a/simple/testdata/src/for-true/for-true.go +++ b/simple/testdata/src/for-true/for-true.go @@ -3,7 +3,7 @@ package pkg func 
fn() { for false { } - for true { // MATCH /should use for/ + for true { // want `should use for` } for { } diff --git a/simple/testdata/src/generated/input.go b/simple/testdata/src/generated/input.go index 25622287f..985274180 100644 --- a/simple/testdata/src/generated/input.go +++ b/simple/testdata/src/generated/input.go @@ -1,6 +1,6 @@ package pkg -// MATCH "should use for {}" +// want `should use for \{\}` // the error is produced by generated.go, which pretends that its // broken code came from this file. diff --git a/simple/testdata/src/if-return/if-return.go b/simple/testdata/src/if-return/if-return.go index 28c798a69..6b4c6a8ab 100644 --- a/simple/testdata/src/if-return/if-return.go +++ b/simple/testdata/src/if-return/if-return.go @@ -3,7 +3,7 @@ package pkg func fn() bool { return true } func fn1() bool { x := true - if x { // MATCH /should use 'return '/ + if x { // want `should use 'return '` return true } return false @@ -31,21 +31,21 @@ func fn3() int { func fn4() bool { return true } func fn5() bool { - if fn() { // MATCH /should use 'return '/ + if fn() { // want `should use 'return '` return false } return true } func fn6() bool { - if fn3() != fn3() { // MATCH /should use 'return '/ + if fn3() != fn3() { // want `should use 'return '` return true } return false } func fn7() bool { - if 1 > 2 { // MATCH /should use 'return '/ + if 1 > 2 { // want `should use 'return '` return true } return false diff --git a/simple/testdata/src/loop-append/loop-append.go b/simple/testdata/src/loop-append/loop-append.go index 2f4eefd73..61d23046f 100644 --- a/simple/testdata/src/loop-append/loop-append.go +++ b/simple/testdata/src/loop-append/loop-append.go @@ -13,7 +13,7 @@ func fn1() { } var a, b []int - for _, v := range a { // MATCH /should replace loop/ + for _, v := range a { // want `should replace loop` b = append(b, v) } diff --git a/simple/testdata/src/nil-len/nil-len.go b/simple/testdata/src/nil-len/nil-len.go index 2de90d4c3..08fcfbb54 100644 --- 
a/simple/testdata/src/nil-len/nil-len.go +++ b/simple/testdata/src/nil-len/nil-len.go @@ -6,26 +6,26 @@ func fn() { var m map[int]int var ch chan int - if s == nil || len(s) == 0 { // MATCH /should omit nil check/ + if s == nil || len(s) == 0 { // want `should omit nil check` } - if m == nil || len(m) == 0 { // MATCH /should omit nil check/ + if m == nil || len(m) == 0 { // want `should omit nil check` } - if ch == nil || len(ch) == 0 { // MATCH /should omit nil check/ + if ch == nil || len(ch) == 0 { // want `should omit nil check` } - if s != nil && len(s) != 0 { // MATCH /should omit nil check/ + if s != nil && len(s) != 0 { // want `should omit nil check` } - if m != nil && len(m) > 0 { // MATCH /should omit nil check/ + if m != nil && len(m) > 0 { // want `should omit nil check` } - if s != nil && len(s) > 5 { // MATCH /should omit nil check/ + if s != nil && len(s) > 5 { // want `should omit nil check` } - if s != nil && len(s) >= 5 { // MATCH /should omit nil check/ + if s != nil && len(s) >= 5 { // want `should omit nil check` } const five = 5 - if s != nil && len(s) == five { // MATCH /should omit nil check/ + if s != nil && len(s) == five { // want `should omit nil check` } - if ch != nil && len(ch) == 5 { // MATCH /should omit nil check/ + if ch != nil && len(ch) == 5 { // want `should omit nil check` } if pa == nil || len(pa) == 0 { // nil check cannot be removed with pointer to an array diff --git a/simple/testdata/src/range_go14/range_go14.go b/simple/testdata/src/range_go14/range_go14.go index f1b285268..8555bc7e1 100644 --- a/simple/testdata/src/range_go14/range_go14.go +++ b/simple/testdata/src/range_go14/range_go14.go @@ -4,19 +4,19 @@ func fn() { var m map[string]int // with := - for x, _ := range m { // MATCH /should omit value from range/ + for x, _ := range m { // want `should omit value from range` _ = x } // with = var y string _ = y - for y, _ = range m { // MATCH /should omit value from range/ + for y, _ = range m { // want `should omit 
value from range` } - for _ = range m { // MATCH /should omit values.*range.*equivalent.*for range/ + for _ = range m { // want `should omit values.*range.*equivalent.*for range` } - for _, _ = range m { // MATCH /should omit values.*range.*equivalent.*for range/ + for _, _ = range m { // want `should omit values.*range.*equivalent.*for range` } // all OK: diff --git a/simple/testdata/src/receive-blank/receive-blank.go b/simple/testdata/src/receive-blank/receive-blank.go index 23c8a618f..703212034 100644 --- a/simple/testdata/src/receive-blank/receive-blank.go +++ b/simple/testdata/src/receive-blank/receive-blank.go @@ -3,13 +3,13 @@ package pkg func fn() { var ch chan int <-ch - _ = <-ch // MATCH /_ = <-ch/ + _ = <-ch // want `_ = <-ch` select { case <-ch: - case _ = <-ch: // MATCH /_ = <-ch/ + case _ = <-ch: // want `_ = <-ch` } x := <-ch - y, _ := <-ch, <-ch // MATCH /_ = <-ch/ - _, z := <-ch, <-ch // MATCH /_ = <-ch/ + y, _ := <-ch, <-ch // want `_ = <-ch` + _, z := <-ch, <-ch // want `_ = <-ch` _, _, _ = x, y, z } diff --git a/simple/testdata/src/regexp-raw/regexp-raw.go b/simple/testdata/src/regexp-raw/regexp-raw.go index ec92a0d7b..87fcca60e 100644 --- a/simple/testdata/src/regexp-raw/regexp-raw.go +++ b/simple/testdata/src/regexp-raw/regexp-raw.go @@ -8,8 +8,8 @@ func fn() { x := "abc" const y = "abc" regexp.MustCompile(`\\.`) - regexp.MustCompile("\\.") // MATCH /should use raw string.+\.MustCompile/ - regexp.Compile("\\.") // MATCH /should use raw string.+\.Compile/ + regexp.MustCompile("\\.") // want `should use raw string.+\.MustCompile` + regexp.Compile("\\.") // want `should use raw string.+\.Compile` regexp.Compile("\\.`") regexp.MustCompile("(?m:^lease (.+?) 
{\n((?s).+?)\\n}\n)") regexp.MustCompile("\\*/[ \t\n\r\f\v]*;") diff --git a/simple/testdata/src/single-case-select/single-case-select.go b/simple/testdata/src/single-case-select/single-case-select.go index efaced137..6b53458af 100644 --- a/simple/testdata/src/single-case-select/single-case-select.go +++ b/simple/testdata/src/single-case-select/single-case-select.go @@ -2,18 +2,18 @@ package pkg func fn() { var ch chan int - select { // MATCH /should use a simple channel send/ + select { // want `should use a simple channel send` case <-ch: } outer: - for { // MATCH /should use for range/ + for { // want `should use for range` select { case <-ch: break outer } } - for { // MATCH /should use for range/ + for { // want `should use for range` select { case x := <-ch: _ = x @@ -21,7 +21,7 @@ outer: } for { - select { // MATCH /should use a simple channel send/ + select { // want `should use a simple channel send` case ch <- 0: } } diff --git a/simple/testdata/src/slicing/slicing.go b/simple/testdata/src/slicing/slicing.go index f49fb705e..a1de03d24 100644 --- a/simple/testdata/src/slicing/slicing.go +++ b/simple/testdata/src/slicing/slicing.go @@ -2,7 +2,7 @@ package pkg func fn() { var s []int - _ = s[:len(s)] // MATCH /omit second index/ + _ = s[:len(s)] // want `omit second index` len := func(s []int) int { return -1 } _ = s[:len(s)] diff --git a/simple/testdata/src/time-since/time-since.go b/simple/testdata/src/time-since/time-since.go index fb3a773ea..d8fb09abf 100644 --- a/simple/testdata/src/time-since/time-since.go +++ b/simple/testdata/src/time-since/time-since.go @@ -4,6 +4,6 @@ import "time" func fn() { t1 := time.Now() - _ = time.Now().Sub(t1) // MATCH "time.Since" + _ = time.Now().Sub(t1) // want `time\.Since` _ = time.Date(0, 0, 0, 0, 0, 0, 0, nil).Sub(t1) } diff --git a/simple/testdata/src/trim/trim.go b/simple/testdata/src/trim/trim.go index ccab6c496..bb7060c62 100644 --- a/simple/testdata/src/trim/trim.go +++ b/simple/testdata/src/trim/trim.go @@ 
-17,11 +17,11 @@ func fn() { var id1 = "a string value" var id2 string - if strings.HasPrefix(id1, s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, s1) { // want `should replace.*with.*strings\.TrimPrefix` id1 = id1[len(s1):] } - if strings.HasPrefix(id1, s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, s1) { // want `should replace.*with.*strings\.TrimPrefix` id1 = strings.TrimPrefix(id1, s1) } @@ -29,52 +29,52 @@ func fn() { id1 = strings.TrimPrefix(id1, s2) } - if strings.Contains(id1, s1) { // MATCH /should replace.*with.*strings.Replace/ + if strings.Contains(id1, s1) { // want `should replace.*with.*strings\.Replace` id1 = strings.Replace(id1, s1, "something", 123) } - if strings.HasSuffix(id1, s2) { // MATCH /should replace.*with.*strings.TrimSuffix/ + if strings.HasSuffix(id1, s2) { // want `should replace.*with.*strings\.TrimSuffix` id1 = id1[:len(id1)-len(s2)] } var x, y []string var i int - if strings.HasPrefix(x[i], s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(x[i], s1) { // want `should replace.*with.*strings\.TrimPrefix` x[i] = x[i][len(s1):] } - if strings.HasPrefix(x[i], y[i]) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(x[i], y[i]) { // want `should replace.*with.*strings\.TrimPrefix` x[i] = x[i][len(y[i]):] } var t struct{ x string } - if strings.HasPrefix(t.x, s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(t.x, s1) { // want `should replace.*with.*strings\.TrimPrefix` t.x = t.x[len(s1):] } - if strings.HasPrefix(id1, "test") { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, "test") { // want `should replace.*with.*strings\.TrimPrefix` id1 = id1[len("test"):] } - if strings.HasPrefix(id1, "test") { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, "test") { // want `should replace.*with.*strings\.TrimPrefix` id1 = 
id1[4:] } - if strings.HasPrefix(id1, s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, s1) { // want `should replace.*with.*strings\.TrimPrefix` id1 = id1[14:] } - if strings.HasPrefix(id1, s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, s1) { // want `should replace.*with.*strings\.TrimPrefix` id1 = id1[n:] } var b1, b2 []byte - if bytes.HasPrefix(b1, b2) { // MATCH /should replace.*with.*bytes.TrimPrefix/ + if bytes.HasPrefix(b1, b2) { // want `should replace.*with.*bytes\.TrimPrefix` b1 = b1[len(b2):] } id3 := s2 - if strings.HasPrefix(id1, id3) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, id3) { // want `should replace.*with.*strings\.TrimPrefix` id1 = id1[len(id3):] } diff --git a/ssa/func.go b/ssa/func.go index 53635ba01..222eea641 100644 --- a/ssa/func.go +++ b/ssa/func.go @@ -328,6 +328,70 @@ func (f *Function) finishBody() { } f.Locals = f.Locals[:j] + // comma-ok receiving from a time.Tick channel will never return + // ok == false, so any branching on the value of ok can be + // replaced with an unconditional jump. This will primarily match + // `for range time.Tick(x)` loops, but it can also match + // user-written code. + for _, block := range f.Blocks { + if len(block.Instrs) < 3 { + continue + } + if len(block.Succs) != 2 { + continue + } + var instrs []*Instruction + for i, ins := range block.Instrs { + if _, ok := ins.(*DebugRef); ok { + continue + } + instrs = append(instrs, &block.Instrs[i]) + } + + for i, ins := range instrs { + unop, ok := (*ins).(*UnOp) + if !ok || unop.Op != token.ARROW { + continue + } + call, ok := unop.X.(*Call) + if !ok { + continue + } + if call.Common().IsInvoke() { + continue + } + + // OPT(dh): surely there is a more efficient way of doing + // this, than using FullName. We should already have + // resolved time.Tick somewhere? 
+ v, ok := call.Common().Value.(*Function) + if !ok { + continue + } + t, ok := v.Object().(*types.Func) + if !ok { + continue + } + if t.FullName() != "time.Tick" { + continue + } + ex, ok := (*instrs[i+1]).(*Extract) + if !ok || ex.Tuple != unop || ex.Index != 1 { + continue + } + + ifstmt, ok := (*instrs[i+2]).(*If) + if !ok || ifstmt.Cond != ex { + continue + } + + *instrs[i+2] = NewJump(block) + succ := block.Succs[1] + block.Succs = block.Succs[0:1] + succ.RemovePred(block) + } + } + optimizeBlocks(f) buildReferrers(f) diff --git a/staticcheck/analysis.go b/staticcheck/analysis.go new file mode 100644 index 000000000..b62e5fec7 --- /dev/null +++ b/staticcheck/analysis.go @@ -0,0 +1,527 @@ +package staticcheck + +import ( + "flag" + + "honnef.co/go/tools/internal/passes/buildssa" + "honnef.co/go/tools/lint" + "honnef.co/go/tools/lint/lintutil" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" +) + +func newFlagSet() flag.FlagSet { + fs := flag.NewFlagSet("", flag.PanicOnError) + fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version") + return *fs +} + +var Analyzers = map[string]*analysis.Analyzer{ + "SA1000": { + Name: "SA1000", + Run: callChecker(checkRegexpRules), + Doc: docSA1000, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1001": { + Name: "SA1001", + Run: CheckTemplate, + Doc: docSA1001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1002": { + Name: "SA1002", + Run: callChecker(checkTimeParseRules), + Doc: docSA1002, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1003": { + Name: "SA1003", + Run: callChecker(checkEncodingBinaryRules), + Doc: docSA1003, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1004": { + Name: "SA1004", + Run: CheckTimeSleepConstant, + Doc: docSA1004, + Requires: 
[]*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1005": { + Name: "SA1005", + Run: CheckExec, + Doc: docSA1005, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1006": { + Name: "SA1006", + Run: CheckUnsafePrintf, + Doc: docSA1006, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1007": { + Name: "SA1007", + Run: callChecker(checkURLsRules), + Doc: docSA1007, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1008": { + Name: "SA1008", + Run: CheckCanonicalHeaderKey, + Doc: docSA1008, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1010": { + Name: "SA1010", + Run: callChecker(checkRegexpFindAllRules), + Doc: docSA1010, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1011": { + Name: "SA1011", + Run: callChecker(checkUTF8CutsetRules), + Doc: docSA1011, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1012": { + Name: "SA1012", + Run: CheckNilContext, + Doc: docSA1012, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1013": { + Name: "SA1013", + Run: CheckSeeker, + Doc: docSA1013, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1014": { + Name: "SA1014", + Run: callChecker(checkUnmarshalPointerRules), + Doc: docSA1014, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1015": { + Name: "SA1015", + Run: CheckLeakyTimeTick, + Doc: docSA1015, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA1016": { + Name: "SA1016", + Run: CheckUntrappableSignal, + Doc: docSA1016, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1017": { + Name: "SA1017", + Run: 
callChecker(checkUnbufferedSignalChanRules), + Doc: docSA1017, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1018": { + Name: "SA1018", + Run: callChecker(checkStringsReplaceZeroRules), + Doc: docSA1018, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1019": { + Name: "SA1019", + Run: CheckDeprecated, + Doc: docSA1019, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + FactTypes: []analysis.Fact{(*IsDeprecated)(nil)}, + Flags: newFlagSet(), + }, + "SA1020": { + Name: "SA1020", + Run: callChecker(checkListenAddressRules), + Doc: docSA1020, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1021": { + Name: "SA1021", + Run: callChecker(checkBytesEqualIPRules), + Doc: docSA1021, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1023": { + Name: "SA1023", + Run: CheckWriterBufferModified, + Doc: docSA1023, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA1024": { + Name: "SA1024", + Run: callChecker(checkUniqueCutsetRules), + Doc: docSA1024, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1025": { + Name: "SA1025", + Run: CheckTimerResetReturnValue, + Doc: docSA1025, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA1026": { + Name: "SA1026", + Run: callChecker(checkUnsupportedMarshal), + Doc: docSA1026, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1027": { + Name: "SA1027", + Run: callChecker(checkAtomicAlignment), + Doc: docSA1027, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + + "SA2000": { + Name: "SA2000", + Run: CheckWaitgroupAdd, + Doc: docSA2000, + Requires: 
[]*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA2001": { + Name: "SA2001", + Run: CheckEmptyCriticalSection, + Doc: docSA2001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA2002": { + Name: "SA2002", + Run: CheckConcurrentTesting, + Doc: docSA2002, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA2003": { + Name: "SA2003", + Run: CheckDeferLock, + Doc: docSA2003, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + + "SA3000": { + Name: "SA3000", + Run: CheckTestMainExit, + Doc: docSA3000, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA3001": { + Name: "SA3001", + Run: CheckBenchmarkN, + Doc: docSA3001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + + "SA4000": { + Name: "SA4000", + Run: CheckLhsRhsIdentical, + Doc: docSA4000, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.TokenFileAnalyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "SA4001": { + Name: "SA4001", + Run: CheckIneffectiveCopy, + Doc: docSA4001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4002": { + Name: "SA4002", + Run: CheckDiffSizeComparison, + Doc: docSA4002, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA4003": { + Name: "SA4003", + Run: CheckExtremeComparison, + Doc: docSA4003, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4004": { + Name: "SA4004", + Run: CheckIneffectiveLoop, + Doc: docSA4004, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4006": { + Name: "SA4006", + Run: CheckUnreadVariableValues, + Doc: docSA4006, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA4008": { + Name: "SA4008", + Run: CheckLoopCondition, + Doc: docSA4008, + Requires: 
[]*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA4009": { + Name: "SA4009", + Run: CheckArgOverwritten, + Doc: docSA4009, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA4010": { + Name: "SA4010", + Run: CheckIneffectiveAppend, + Doc: docSA4010, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA4011": { + Name: "SA4011", + Run: CheckScopedBreak, + Doc: docSA4011, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4012": { + Name: "SA4012", + Run: CheckNaNComparison, + Doc: docSA4012, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA4013": { + Name: "SA4013", + Run: CheckDoubleNegation, + Doc: docSA4013, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4014": { + Name: "SA4014", + Run: CheckRepeatedIfElse, + Doc: docSA4014, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4015": { + Name: "SA4015", + Run: callChecker(checkMathIntRules), + Doc: docSA4015, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA4016": { + Name: "SA4016", + Run: CheckSillyBitwiseOps, + Doc: docSA4016, + Requires: []*analysis.Analyzer{buildssa.Analyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, + "SA4017": { + Name: "SA4017", + Run: CheckPureFunctions, + Doc: docSA4017, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + FactTypes: []analysis.Fact{(*IsPure)(nil)}, + Flags: newFlagSet(), + }, + "SA4018": { + Name: "SA4018", + Run: CheckSelfAssignment, + Doc: docSA4018, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, + "SA4019": { + Name: "SA4019", + Run: CheckDuplicateBuildConstraints, + Doc: docSA4019, + Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "SA4020": { + 
Name: "SA4020", + Run: CheckUnreachableTypeCases, + Doc: docSA4020, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4021": { + Name: "SA4021", + Run: CheckSingleArgAppend, + Doc: docSA4021, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, + + "SA5000": { + Name: "SA5000", + Run: CheckNilMaps, + Doc: docSA5000, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA5001": { + Name: "SA5001", + Run: CheckEarlyDefer, + Doc: docSA5001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA5002": { + Name: "SA5002", + Run: CheckInfiniteEmptyLoop, + Doc: docSA5002, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA5003": { + Name: "SA5003", + Run: CheckDeferInInfiniteLoop, + Doc: docSA5003, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA5004": { + Name: "SA5004", + Run: CheckLoopEmptyDefault, + Doc: docSA5004, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA5005": { + Name: "SA5005", + Run: CheckCyclicFinalizer, + Doc: docSA5005, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA5007": { + Name: "SA5007", + Run: CheckInfiniteRecursion, + Doc: docSA5007, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA5008": { + Name: "SA5008", + Run: CheckStructTags, + Doc: docSA5008, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA5009": { + Name: "SA5009", + Run: callChecker(checkPrintfRules), + Doc: docSA5009, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + + "SA6000": { + Name: "SA6000", + Run: callChecker(checkRegexpMatchLoopRules), + Doc: docSA6000, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: 
newFlagSet(), + }, + "SA6001": { + Name: "SA6001", + Run: CheckMapBytesKey, + Doc: docSA6001, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA6002": { + Name: "SA6002", + Run: callChecker(checkSyncPoolValueRules), + Doc: docSA6002, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA6003": { + Name: "SA6003", + Run: CheckRangeStringRunes, + Doc: docSA6003, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA6005": { + Name: "SA6005", + Run: CheckToLowerToUpperComparison, + Doc: docSA6005, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + + "SA9001": { + Name: "SA9001", + Run: CheckDubiousDeferInChannelRangeLoop, + Doc: docSA9001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA9002": { + Name: "SA9002", + Run: CheckNonOctalFileMode, + Doc: docSA9002, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA9003": { + Name: "SA9003", + Run: CheckEmptyBranch, + Doc: docSA9003, + Requires: []*analysis.Analyzer{buildssa.Analyzer, lint.TokenFileAnalyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "SA9004": { + Name: "SA9004", + Run: CheckMissingEnumTypesInDeclaration, + Doc: docSA9004, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + // Filtering generated code because it may include empty structs generated from data models. 
+ "SA9005": { + Name: "SA9005", + Run: callChecker(checkNoopMarshal), + Doc: docSA9005, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, +} diff --git a/staticcheck/doc.go b/staticcheck/doc.go index 07a39ef45..e0153e210 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -328,6 +328,7 @@ Available since 2017.1 ` +//lint:ignore U1000 This check is currently disabled var docSA4005 = `Field assignment that will never be observed. Did you mean to use a pointer receiver? Available since @@ -543,6 +544,7 @@ Available since 2017.1 ` +//lint:ignore U1000 This check is currently disabled var docSA5006 = `Slice index out of bounds Available since @@ -565,6 +567,18 @@ Available since 2017.1 ` +var docSA5008 = `Invalid struct tag + +Available since + Unreleased +` + +var docSA5009 = `Invalid Printf call + +Available since + Unreleased +` + var docSA6000 = `Using regexp.Match or related in a loop, should use regexp.Compile Available since diff --git a/staticcheck/knowledge.go b/staticcheck/knowledge.go new file mode 100644 index 000000000..4c12b866a --- /dev/null +++ b/staticcheck/knowledge.go @@ -0,0 +1,25 @@ +package staticcheck + +import ( + "reflect" + + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/internal/passes/buildssa" + "honnef.co/go/tools/ssa" + "honnef.co/go/tools/staticcheck/vrp" +) + +var valueRangesAnalyzer = &analysis.Analyzer{ + Name: "vrp", + Doc: "calculate value ranges of functions", + Run: func(pass *analysis.Pass) (interface{}, error) { + m := map[*ssa.Function]vrp.Ranges{} + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + vr := vrp.BuildGraph(ssafn).Solve() + m[ssafn] = vr + } + return m, nil + }, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + ResultType: reflect.TypeOf(map[*ssa.Function]vrp.Ranges{}), +} diff --git a/staticcheck/lint.go b/staticcheck/lint.go index bad1efb1e..83ef9b38b 100644 
--- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -15,13 +15,13 @@ import ( "sort" "strconv" "strings" - "sync" texttemplate "text/template" "unicode" . "honnef.co/go/tools/arg" "honnef.co/go/tools/deprecated" "honnef.co/go/tools/functions" + "honnef.co/go/tools/internal/passes/buildssa" "honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/lint" . "honnef.co/go/tools/lint/lintdsl" @@ -30,8 +30,10 @@ import ( "honnef.co/go/tools/ssautil" "honnef.co/go/tools/staticcheck/vrp" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/ast/inspector" ) func validRegexp(call *Call) { @@ -106,7 +108,7 @@ var ( checkEncodingBinaryRules = map[string]CallCheck{ "encoding/binary.Write": func(call *Call) { arg := call.Args[Arg("encoding/binary.Write.data")] - if !CanBinaryMarshal(call.Job, arg.Value) { + if !CanBinaryMarshal(call.Pass, arg.Value) { arg.Invalid(fmt.Sprintf("value of type %s cannot be used with binary.Write", arg.Value.Value.Type())) } }, @@ -311,8 +313,7 @@ var verbs = [...]verbFlag{ } func checkPrintfCallImpl(call *Call, f ssa.Value, args []ssa.Value) { - var elem func(T types.Type, verb rune) ([]types.Type, bool) - elem = func(T types.Type, verb rune) ([]types.Type, bool) { + elem := func(T types.Type, verb rune) ([]types.Type, bool) { if verbs[verb]&noRecurse != 0 { return []types.Type{T}, false } @@ -623,7 +624,7 @@ func checkPrintfCallImpl(call *Call, f ssa.Value, args []ssa.Value) { } func checkAtomicAlignmentImpl(call *Call) { - sizes := call.Job.Pkg.TypesSizes + sizes := call.Pass.TypesSizes if sizes.Sizeof(types.Typ[types.Uintptr]) != 4 { // Not running on a 32-bit platform return @@ -650,6 +651,9 @@ func checkAtomicAlignmentImpl(call *Call) { func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { return func(call *Call) { + if IsGenerated(call.Pass, call.Instr.Pos()) { + return + } arg := call.Args[argN] T 
:= arg.Value.Value.Type() Ts, ok := Dereference(T).Underlying().(*types.Struct) @@ -731,108 +735,12 @@ func fieldPath(start types.Type, indices []int) string { return p } -type Checker struct { - CheckGenerated bool - funcDescs *functions.Descriptions - deprecatedPkgs map[*types.Package]string - deprecatedObjs map[types.Object]string -} - -func NewChecker() *Checker { - return &Checker{} -} - -func (*Checker) Name() string { return "staticcheck" } -func (*Checker) Prefix() string { return "SA" } - -func (c *Checker) Checks() []lint.Check { - return []lint.Check{ - {ID: "SA1000", FilterGenerated: false, Fn: c.callChecker(checkRegexpRules), Doc: docSA1000}, - {ID: "SA1001", FilterGenerated: false, Fn: c.CheckTemplate, Doc: docSA1001}, - {ID: "SA1002", FilterGenerated: false, Fn: c.callChecker(checkTimeParseRules), Doc: docSA1002}, - {ID: "SA1003", FilterGenerated: false, Fn: c.callChecker(checkEncodingBinaryRules), Doc: docSA1003}, - {ID: "SA1004", FilterGenerated: false, Fn: c.CheckTimeSleepConstant, Doc: docSA1004}, - {ID: "SA1005", FilterGenerated: false, Fn: c.CheckExec, Doc: docSA1005}, - {ID: "SA1006", FilterGenerated: false, Fn: c.CheckUnsafePrintf, Doc: docSA1006}, - {ID: "SA1007", FilterGenerated: false, Fn: c.callChecker(checkURLsRules), Doc: docSA1007}, - {ID: "SA1008", FilterGenerated: false, Fn: c.CheckCanonicalHeaderKey, Doc: docSA1008}, - {ID: "SA1010", FilterGenerated: false, Fn: c.callChecker(checkRegexpFindAllRules), Doc: docSA1010}, - {ID: "SA1011", FilterGenerated: false, Fn: c.callChecker(checkUTF8CutsetRules), Doc: docSA1011}, - {ID: "SA1012", FilterGenerated: false, Fn: c.CheckNilContext, Doc: docSA1012}, - {ID: "SA1013", FilterGenerated: false, Fn: c.CheckSeeker, Doc: docSA1013}, - {ID: "SA1014", FilterGenerated: false, Fn: c.callChecker(checkUnmarshalPointerRules), Doc: docSA1014}, - {ID: "SA1015", FilterGenerated: false, Fn: c.CheckLeakyTimeTick, Doc: docSA1015}, - {ID: "SA1016", FilterGenerated: false, Fn: c.CheckUntrappableSignal, Doc: 
docSA1016}, - {ID: "SA1017", FilterGenerated: false, Fn: c.callChecker(checkUnbufferedSignalChanRules), Doc: docSA1017}, - {ID: "SA1018", FilterGenerated: false, Fn: c.callChecker(checkStringsReplaceZeroRules), Doc: docSA1018}, - {ID: "SA1019", FilterGenerated: false, Fn: c.CheckDeprecated, Doc: docSA1019}, - {ID: "SA1020", FilterGenerated: false, Fn: c.callChecker(checkListenAddressRules), Doc: docSA1020}, - {ID: "SA1021", FilterGenerated: false, Fn: c.callChecker(checkBytesEqualIPRules), Doc: docSA1021}, - {ID: "SA1023", FilterGenerated: false, Fn: c.CheckWriterBufferModified, Doc: docSA1023}, - {ID: "SA1024", FilterGenerated: false, Fn: c.callChecker(checkUniqueCutsetRules), Doc: docSA1024}, - {ID: "SA1025", FilterGenerated: false, Fn: c.CheckTimerResetReturnValue, Doc: docSA1025}, - {ID: "SA1026", FilterGenerated: false, Fn: c.callChecker(checkUnsupportedMarshal), Doc: docSA1026}, - {ID: "SA1027", FilterGenerated: false, Fn: c.callChecker(checkAtomicAlignment), Doc: docSA1027}, - - {ID: "SA2000", FilterGenerated: false, Fn: c.CheckWaitgroupAdd, Doc: docSA2000}, - {ID: "SA2001", FilterGenerated: false, Fn: c.CheckEmptyCriticalSection, Doc: docSA2001}, - {ID: "SA2002", FilterGenerated: false, Fn: c.CheckConcurrentTesting, Doc: docSA2002}, - {ID: "SA2003", FilterGenerated: false, Fn: c.CheckDeferLock, Doc: docSA2003}, - - {ID: "SA3000", FilterGenerated: false, Fn: c.CheckTestMainExit, Doc: docSA3000}, - {ID: "SA3001", FilterGenerated: false, Fn: c.CheckBenchmarkN, Doc: docSA3001}, - - {ID: "SA4000", FilterGenerated: false, Fn: c.CheckLhsRhsIdentical, Doc: docSA4000}, - {ID: "SA4001", FilterGenerated: false, Fn: c.CheckIneffectiveCopy, Doc: docSA4001}, - {ID: "SA4002", FilterGenerated: false, Fn: c.CheckDiffSizeComparison, Doc: docSA4002}, - {ID: "SA4003", FilterGenerated: false, Fn: c.CheckExtremeComparison, Doc: docSA4003}, - {ID: "SA4004", FilterGenerated: false, Fn: c.CheckIneffectiveLoop, Doc: docSA4004}, - {ID: "SA4006", FilterGenerated: false, Fn: 
c.CheckUnreadVariableValues, Doc: docSA4006}, - {ID: "SA4008", FilterGenerated: false, Fn: c.CheckLoopCondition, Doc: docSA4008}, - {ID: "SA4009", FilterGenerated: false, Fn: c.CheckArgOverwritten, Doc: docSA4009}, - {ID: "SA4010", FilterGenerated: false, Fn: c.CheckIneffectiveAppend, Doc: docSA4010}, - {ID: "SA4011", FilterGenerated: false, Fn: c.CheckScopedBreak, Doc: docSA4011}, - {ID: "SA4012", FilterGenerated: false, Fn: c.CheckNaNComparison, Doc: docSA4012}, - {ID: "SA4013", FilterGenerated: false, Fn: c.CheckDoubleNegation, Doc: docSA4013}, - {ID: "SA4014", FilterGenerated: false, Fn: c.CheckRepeatedIfElse, Doc: docSA4014}, - {ID: "SA4015", FilterGenerated: false, Fn: c.callChecker(checkMathIntRules), Doc: docSA4015}, - {ID: "SA4016", FilterGenerated: false, Fn: c.CheckSillyBitwiseOps, Doc: docSA4016}, - {ID: "SA4017", FilterGenerated: false, Fn: c.CheckPureFunctions, Doc: docSA4017}, - {ID: "SA4018", FilterGenerated: true, Fn: c.CheckSelfAssignment, Doc: docSA4018}, - {ID: "SA4019", FilterGenerated: true, Fn: c.CheckDuplicateBuildConstraints, Doc: docSA4019}, - {ID: "SA4020", FilterGenerated: false, Fn: c.CheckUnreachableTypeCases, Doc: docSA4020}, - {ID: "SA4021", FilterGenerated: true, Fn: c.CheckSingleArgAppend, Doc: docSA4021}, - - {ID: "SA5000", FilterGenerated: false, Fn: c.CheckNilMaps, Doc: docSA5000}, - {ID: "SA5001", FilterGenerated: false, Fn: c.CheckEarlyDefer, Doc: docSA5001}, - {ID: "SA5002", FilterGenerated: false, Fn: c.CheckInfiniteEmptyLoop, Doc: docSA5002}, - {ID: "SA5003", FilterGenerated: false, Fn: c.CheckDeferInInfiniteLoop, Doc: docSA5003}, - {ID: "SA5004", FilterGenerated: false, Fn: c.CheckLoopEmptyDefault, Doc: docSA5004}, - {ID: "SA5005", FilterGenerated: false, Fn: c.CheckCyclicFinalizer, Doc: docSA5005}, - {ID: "SA5007", FilterGenerated: false, Fn: c.CheckInfiniteRecursion, Doc: docSA5007}, - {ID: "SA5008", FilterGenerated: false, Fn: c.CheckStructTags, Doc: ``}, - {ID: "SA5009", FilterGenerated: false, Fn: 
c.callChecker(checkPrintfRules), Doc: ``}, - - {ID: "SA6000", FilterGenerated: false, Fn: c.callChecker(checkRegexpMatchLoopRules), Doc: docSA6000}, - {ID: "SA6001", FilterGenerated: false, Fn: c.CheckMapBytesKey, Doc: docSA6001}, - {ID: "SA6002", FilterGenerated: false, Fn: c.callChecker(checkSyncPoolValueRules), Doc: docSA6002}, - {ID: "SA6003", FilterGenerated: false, Fn: c.CheckRangeStringRunes, Doc: docSA6003}, - // {ID: "SA6004", FilterGenerated: false, Fn: c.CheckSillyRegexp, Doc: docSA6004}, - {ID: "SA6005", FilterGenerated: false, Fn: c.CheckToLowerToUpperComparison, Doc: docSA6005}, - - {ID: "SA9001", FilterGenerated: false, Fn: c.CheckDubiousDeferInChannelRangeLoop, Doc: docSA9001}, - {ID: "SA9002", FilterGenerated: false, Fn: c.CheckNonOctalFileMode, Doc: docSA9002}, - {ID: "SA9003", FilterGenerated: false, Fn: c.CheckEmptyBranch, Doc: docSA9003}, - {ID: "SA9004", FilterGenerated: false, Fn: c.CheckMissingEnumTypesInDeclaration, Doc: docSA9004}, - // Filtering generated code because it may include empty structs generated from data models. 
- {ID: "SA9005", FilterGenerated: true, Fn: c.callChecker(checkNoopMarshal), Doc: docSA9005}, - } - - // "SA5006": c.CheckSliceOutOfBounds, - // "SA4007": c.CheckPredeterminedBooleanExprs, -} - -func (c *Checker) findDeprecated(prog *lint.Program) { +type IsDeprecated struct{ Msg string } + +func (*IsDeprecated) AFact() {} +func (d *IsDeprecated) String() string { return "Deprecated: " + d.Msg } + +func checkDeprecatedMark(pass *analysis.Pass) { var names []*ast.Ident extractDeprecatedMessage := func(docs []*ast.CommentGroup) string { @@ -851,116 +759,90 @@ func (c *Checker) findDeprecated(prog *lint.Program) { } return "" } - doDocs := func(pkg *packages.Package, names []*ast.Ident, docs []*ast.CommentGroup) { + doDocs := func(names []*ast.Ident, docs []*ast.CommentGroup) { alt := extractDeprecatedMessage(docs) if alt == "" { return } for _, name := range names { - obj := pkg.TypesInfo.ObjectOf(name) - c.deprecatedObjs[obj] = alt + obj := pass.TypesInfo.ObjectOf(name) + pass.ExportObjectFact(obj, &IsDeprecated{alt}) } } - for _, pkg := range prog.AllPackages { - var docs []*ast.CommentGroup - for _, f := range pkg.Syntax { - docs = append(docs, f.Doc) - } - if alt := extractDeprecatedMessage(docs); alt != "" { - // Don't mark package syscall as deprecated, even though - // it is. A lot of people still use it for simple - // constants like SIGKILL, and I am not comfortable - // telling them to use x/sys for that. - if pkg.PkgPath != "syscall" { - c.deprecatedPkgs[pkg.Types] = alt - } + var docs []*ast.CommentGroup + for _, f := range pass.Files { + docs = append(docs, f.Doc) + } + if alt := extractDeprecatedMessage(docs); alt != "" { + // Don't mark package syscall as deprecated, even though + // it is. A lot of people still use it for simple + // constants like SIGKILL, and I am not comfortable + // telling them to use x/sys for that. 
+ if pass.Pkg.Path() != "syscall" { + pass.ExportPackageFact(&IsDeprecated{alt}) } + } - docs = docs[:0] - for _, f := range pkg.Syntax { - fn := func(node ast.Node) bool { - if node == nil { - return true - } - var ret bool - switch node := node.(type) { - case *ast.GenDecl: - switch node.Tok { - case token.TYPE, token.CONST, token.VAR: - docs = append(docs, node.Doc) - return true - default: - return false - } - case *ast.FuncDecl: - docs = append(docs, node.Doc) - names = []*ast.Ident{node.Name} - ret = false - case *ast.TypeSpec: - docs = append(docs, node.Doc) - names = []*ast.Ident{node.Name} - ret = true - case *ast.ValueSpec: + docs = docs[:0] + for _, f := range pass.Files { + fn := func(node ast.Node) bool { + if node == nil { + return true + } + var ret bool + switch node := node.(type) { + case *ast.GenDecl: + switch node.Tok { + case token.TYPE, token.CONST, token.VAR: docs = append(docs, node.Doc) - names = node.Names - ret = false - case *ast.File: return true - case *ast.StructType: - for _, field := range node.Fields.List { - doDocs(pkg, field.Names, []*ast.CommentGroup{field.Doc}) - } - return false - case *ast.InterfaceType: - for _, field := range node.Methods.List { - doDocs(pkg, field.Names, []*ast.CommentGroup{field.Doc}) - } - return false default: return false } - if len(names) == 0 || len(docs) == 0 { - return ret + case *ast.FuncDecl: + docs = append(docs, node.Doc) + names = []*ast.Ident{node.Name} + ret = false + case *ast.TypeSpec: + docs = append(docs, node.Doc) + names = []*ast.Ident{node.Name} + ret = true + case *ast.ValueSpec: + docs = append(docs, node.Doc) + names = node.Names + ret = false + case *ast.File: + return true + case *ast.StructType: + for _, field := range node.Fields.List { + doDocs(field.Names, []*ast.CommentGroup{field.Doc}) + } + return false + case *ast.InterfaceType: + for _, field := range node.Methods.List { + doDocs(field.Names, []*ast.CommentGroup{field.Doc}) } - doDocs(pkg, names, docs) - - docs = 
docs[:0] - names = nil + return false + default: + return false + } + if len(names) == 0 || len(docs) == 0 { return ret } - ast.Inspect(f, fn) - } - } -} + doDocs(names, docs) -func (c *Checker) Init(prog *lint.Program) { - wg := &sync.WaitGroup{} - wg.Add(2) - go func() { - c.funcDescs = functions.NewDescriptions(prog.SSA) - for _, fn := range prog.AllFunctions { - if fn.Blocks != nil { - applyStdlibKnowledge(fn) - ssa.OptimizeBlocks(fn) - } + docs = docs[:0] + names = nil + return ret } - wg.Done() - }() - - go func() { - c.deprecatedPkgs = map[*types.Package]string{} - c.deprecatedObjs = map[types.Object]string{} - c.findDeprecated(prog) - wg.Done() - }() - - wg.Wait() + ast.Inspect(f, fn) + } } -func (c *Checker) isInLoop(b *ssa.BasicBlock) bool { - sets := c.funcDescs.Get(b.Parent()).Loops +func isInLoop(b *ssa.BasicBlock) bool { + sets := functions.FindLoops(b.Parent()) for _, set := range sets { if set[b] { return true @@ -969,105 +851,51 @@ func (c *Checker) isInLoop(b *ssa.BasicBlock) bool { return false } -func applyStdlibKnowledge(fn *ssa.Function) { - if len(fn.Blocks) == 0 { - return - } - - // comma-ok receiving from a time.Tick channel will never return - // ok == false, so any branching on the value of ok can be - // replaced with an unconditional jump. This will primarily match - // `for range time.Tick(x)` loops, but it can also match - // user-written code. 
- for _, block := range fn.Blocks { - if len(block.Instrs) < 3 { - continue - } - if len(block.Succs) != 2 { - continue - } - var instrs []*ssa.Instruction - for i, ins := range block.Instrs { - if _, ok := ins.(*ssa.DebugRef); ok { - continue - } - instrs = append(instrs, &block.Instrs[i]) - } - - for i, ins := range instrs { - unop, ok := (*ins).(*ssa.UnOp) - if !ok || unop.Op != token.ARROW { - continue - } - call, ok := unop.X.(*ssa.Call) - if !ok { - continue - } - if !IsCallTo(call.Common(), "time.Tick") { - continue - } - ex, ok := (*instrs[i+1]).(*ssa.Extract) - if !ok || ex.Tuple != unop || ex.Index != 1 { - continue - } - - ifstmt, ok := (*instrs[i+2]).(*ssa.If) - if !ok || ifstmt.Cond != ex { - continue - } - - *instrs[i+2] = ssa.NewJump(block) - succ := block.Succs[1] - block.Succs = block.Succs[0:1] - succ.RemovePred(block) - } - } -} - -func (c *Checker) CheckUntrappableSignal(j *lint.Job) { +func CheckUntrappableSignal(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAnyAST(j, call, + if !IsCallToAnyAST(pass, call, "os/signal.Ignore", "os/signal.Notify", "os/signal.Reset") { return } for _, arg := range call.Args { - if conv, ok := arg.(*ast.CallExpr); ok && isName(j, conv.Fun, "os.Signal") { + if conv, ok := arg.(*ast.CallExpr); ok && isName(pass, conv.Fun, "os.Signal") { arg = conv.Args[0] } - if isName(j, arg, "os.Kill") || isName(j, arg, "syscall.SIGKILL") { - j.Errorf(arg, "%s cannot be trapped (did you mean syscall.SIGTERM?)", Render(j, arg)) + if isName(pass, arg, "os.Kill") || isName(pass, arg, "syscall.SIGKILL") { + pass.Reportf(arg.Pos(), "%s cannot be trapped (did you mean syscall.SIGTERM?)", Render(pass, arg)) } - if isName(j, arg, "syscall.SIGSTOP") { - j.Errorf(arg, "%s signal cannot be trapped", Render(j, arg)) + if isName(pass, arg, "syscall.SIGSTOP") { + pass.Reportf(arg.Pos(), "%s signal cannot be trapped", Render(pass, arg)) } } } - 
j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckTemplate(j *lint.Job) { +func CheckTemplate(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) var kind string - if IsCallToAST(j, call, "(*text/template.Template).Parse") { + if IsCallToAST(pass, call, "(*text/template.Template).Parse") { kind = "text" - } else if IsCallToAST(j, call, "(*html/template.Template).Parse") { + } else if IsCallToAST(pass, call, "(*html/template.Template).Parse") { kind = "html" } else { return } sel := call.Fun.(*ast.SelectorExpr) - if !IsCallToAST(j, sel.X, "text/template.New") && - !IsCallToAST(j, sel.X, "html/template.New") { + if !IsCallToAST(pass, sel.X, "text/template.New") && + !IsCallToAST(pass, sel.X, "html/template.New") { // TODO(dh): this is a cheap workaround for templates with // different delims. 
A better solution with less false // negatives would use data flow analysis to see where the // template comes from and where it has been return } - s, ok := ExprToString(j, call.Args[Arg("(*text/template.Template).Parse.text")]) + s, ok := ExprToString(pass, call.Args[Arg("(*text/template.Template).Parse.text")]) if !ok { return } @@ -1081,17 +909,18 @@ func (c *Checker) CheckTemplate(j *lint.Job) { if err != nil { // TODO(dominikh): whitelist other parse errors, if any if strings.Contains(err.Error(), "unexpected") { - j.Errorf(call.Args[Arg("(*text/template.Template).Parse.text")], "%s", err) + pass.Reportf(call.Args[Arg("(*text/template.Template).Parse.text")].Pos(), "%s", err) } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { +func CheckTimeSleepConstant(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAST(j, call, "time.Sleep") { + if !IsCallToAST(pass, call, "time.Sleep") { return } lit, ok := call.Args[Arg("time.Sleep.d")].(*ast.BasicLit) @@ -1112,13 +941,14 @@ func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { if n != 1 { recommendation = fmt.Sprintf("time.Sleep(%d * time.Nanosecond)", n) } - j.Errorf(call.Args[Arg("time.Sleep.d")], + pass.Reportf(call.Args[Arg("time.Sleep.d")].Pos(), "sleeping for %d nanoseconds is probably a bug. 
Be explicit if it isn't: %s", n, recommendation) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { +func CheckWaitgroupAdd(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { g := node.(*ast.GoStmt) fun, ok := g.Call.Fun.(*ast.FuncLit) @@ -1140,19 +970,20 @@ func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { if !ok { return } - fn, ok := j.Pkg.TypesInfo.ObjectOf(sel.Sel).(*types.Func) + fn, ok := pass.TypesInfo.ObjectOf(sel.Sel).(*types.Func) if !ok { return } if lint.FuncName(fn) == "(*sync.WaitGroup).Add" { - j.Errorf(sel, "should call %s before starting the goroutine to avoid a race", - Render(j, stmt)) + pass.Reportf(sel.Pos(), "should call %s before starting the goroutine to avoid a race", + Render(pass, stmt)) } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.GoStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.GoStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { +func CheckInfiniteEmptyLoop(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.ForStmt) if len(loop.Body.List) != 0 || loop.Post != nil { @@ -1181,21 +1012,22 @@ func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { return } if ident, ok := loop.Cond.(*ast.Ident); ok { - if k, ok := j.Pkg.TypesInfo.ObjectOf(ident).(*types.Const); ok { + if k, ok := pass.TypesInfo.ObjectOf(ident).(*types.Const); ok { if !constant.BoolVal(k.Val()) { // don't flag `for false {}` loops. They're a debug aid. 
return } } } - j.Errorf(loop, "loop condition never changes or has a race condition") + pass.Reportf(loop.Pos(), "loop condition never changes or has a race condition") } - j.Errorf(loop, "this loop will spin, using 100%% CPU") + pass.Reportf(loop.Pos(), "this loop will spin, using 100%% CPU") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { +func CheckDeferInInfiniteLoop(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { mightExit := false var defers []ast.Stmt @@ -1228,16 +1060,17 @@ func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { return } for _, stmt := range defers { - j.Errorf(stmt, "defers in this infinite loop will never run") + pass.Reportf(stmt.Pos(), "defers in this infinite loop will never run") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { +func CheckDubiousDeferInChannelRangeLoop(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.RangeStmt) - typ := j.Pkg.TypesInfo.TypeOf(loop.X) + typ := pass.TypesInfo.TypeOf(loop.X) _, ok := typ.Underlying().(*types.Chan) if !ok { return @@ -1245,7 +1078,7 @@ func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { fn2 := func(node ast.Node) bool { switch stmt := node.(type) { case *ast.DeferStmt: - j.Errorf(stmt, "defers in this range loop won't run unless the channel gets closed") + pass.Reportf(stmt.Pos(), "defers in this range loop won't run unless the channel gets closed") case *ast.FuncLit: // Don't look into function bodies return false @@ -1254,16 +1087,17 @@ func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j 
*lint.Job) { } ast.Inspect(loop.Body, fn2) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckTestMainExit(j *lint.Job) { +func CheckTestMainExit(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if !isTestMain(j, node) { + if !isTestMain(pass, node) { return } - arg := j.Pkg.TypesInfo.ObjectOf(node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) + arg := pass.TypesInfo.ObjectOf(node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) callsRun := false fn2 := func(node ast.Node) bool { call, ok := node.(*ast.CallExpr) @@ -1278,7 +1112,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { if !ok { return true } - if arg != j.Pkg.TypesInfo.ObjectOf(ident) { + if arg != pass.TypesInfo.ObjectOf(ident) { return true } if sel.Sel.Name == "Run" { @@ -1291,7 +1125,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { callsExit := false fn3 := func(node ast.Node) bool { - if IsCallToAST(j, node, "os.Exit") { + if IsCallToAST(pass, node, "os.Exit") { callsExit = true return false } @@ -1299,13 +1133,14 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { } ast.Inspect(node.(*ast.FuncDecl).Body, fn3) if !callsExit && callsRun { - j.Errorf(node, "TestMain should call os.Exit to set exit code") + pass.Reportf(node.Pos(), "TestMain should call os.Exit to set exit code") } } - j.Pkg.Inspector.Preorder(nil, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder(nil, fn) + return nil, nil } -func isTestMain(j *lint.Job, node ast.Node) bool { +func isTestMain(pass *analysis.Pass, node ast.Node) bool { decl, ok := node.(*ast.FuncDecl) if !ok { return false @@ -1320,29 +1155,30 @@ func isTestMain(j *lint.Job, node ast.Node) bool { if len(arg.Names) != 1 { return false } - return IsOfType(j, arg.Type, "*testing.M") + return IsOfType(pass, arg.Type, "*testing.M") } -func (c *Checker) 
CheckExec(j *lint.Job) { +func CheckExec(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAST(j, call, "os/exec.Command") { + if !IsCallToAST(pass, call, "os/exec.Command") { return } - val, ok := ExprToString(j, call.Args[Arg("os/exec.Command.name")]) + val, ok := ExprToString(pass, call.Args[Arg("os/exec.Command.name")]) if !ok { return } if !strings.Contains(val, " ") || strings.Contains(val, `\`) || strings.Contains(val, "/") { return } - j.Errorf(call.Args[Arg("os/exec.Command.name")], + pass.Reportf(call.Args[Arg("os/exec.Command.name")].Pos(), "first argument to exec.Command looks like a shell command, but a program name or path are expected") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { +func CheckLoopEmptyDefault(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.ForStmt) if len(loop.Body.List) != 1 || loop.Cond != nil || loop.Init != nil { @@ -1354,19 +1190,20 @@ func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { } for _, c := range sel.Body.List { if comm, ok := c.(*ast.CommClause); ok && comm.Comm == nil && len(comm.Body) == 0 { - j.Errorf(comm, "should not have an empty default case in a for+select loop. The loop will spin.") + pass.Reportf(comm.Pos(), "should not have an empty default case in a for+select loop. 
The loop will spin.") } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { +func CheckLhsRhsIdentical(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { op := node.(*ast.BinaryExpr) switch op.Op { case token.EQL, token.NEQ: - if basic, ok := j.Pkg.TypesInfo.TypeOf(op.X).Underlying().(*types.Basic); ok { + if basic, ok := pass.TypesInfo.TypeOf(op.X).Underlying().(*types.Basic); ok { if kind := basic.Kind(); kind == types.Float32 || kind == types.Float64 { // f == f and f != f might be used to check for NaN return @@ -1380,12 +1217,12 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { return } - if Render(j, op.X) != Render(j, op.Y) { + if Render(pass, op.X) != Render(pass, op.Y) { return } l1, ok1 := op.X.(*ast.BasicLit) l2, ok2 := op.Y.(*ast.BasicLit) - if ok1 && ok2 && l1.Kind == token.INT && l2.Kind == l1.Kind && l1.Value == "0" && l2.Value == l1.Value && IsGenerated(j.File(l1)) { + if ok1 && ok2 && l1.Kind == token.INT && l2.Kind == l1.Kind && l1.Value == "0" && l2.Value == l1.Value && IsGenerated(pass, l1.Pos()) { // cgo generates the following function call: // _cgoCheckPointer(_cgoBase0, 0 == 0) – it uses 0 == 0 // instead of true in case the user shadowed the @@ -1398,12 +1235,13 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { // 0 == 0 are slim. 
return } - j.Errorf(op, "identical expressions on the left and right side of the '%s' operator", op.Op) + pass.Reportf(op.Pos(), "identical expressions on the left and right side of the '%s' operator", op.Op) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckScopedBreak(j *lint.Job) { +func CheckScopedBreak(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { var body *ast.BlockStmt switch node := node.(type) { @@ -1453,21 +1291,22 @@ func (c *Checker) CheckScopedBreak(j *lint.Job) { if !ok || branch.Tok != token.BREAK || branch.Label != nil { continue } - j.Errorf(branch, "ineffective break statement. Did you mean to break out of the outer loop?") + pass.Reportf(branch.Pos(), "ineffective break statement. Did you mean to break out of the outer loop?") } } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckUnsafePrintf(j *lint.Job) { +func CheckUnsafePrintf(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) var arg int - if IsCallToAnyAST(j, call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { + if IsCallToAnyAST(pass, call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { arg = Arg("fmt.Printf.format") - } else if IsCallToAnyAST(j, call, "fmt.Fprintf") { + } else if IsCallToAnyAST(pass, call, "fmt.Fprintf") { arg = Arg("fmt.Fprintf.format") } else { return @@ -1480,13 +1319,14 @@ func (c *Checker) CheckUnsafePrintf(j *lint.Job) { default: return } - j.Errorf(call.Args[arg], + pass.Reportf(call.Args[arg].Pos(), "printf-style function with dynamic format string and no further arguments should use print-style function instead") } - 
j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckEarlyDefer(j *lint.Job) { +func CheckEarlyDefer(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { block := node.(*ast.BlockStmt) if len(block.List) < 2 { @@ -1513,7 +1353,7 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { if !ok { continue } - sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) + sig, ok := pass.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { continue } @@ -1548,10 +1388,11 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { if sel.Sel.Name != "Close" { continue } - j.Errorf(def, "should check returned error before deferring %s", Render(j, def.Call)) + pass.Reportf(def.Pos(), "should check returned error before deferring %s", Render(pass, def.Call)) } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + return nil, nil } func selectorX(sel *ast.SelectorExpr) ast.Node { @@ -1563,7 +1404,7 @@ func selectorX(sel *ast.SelectorExpr) ast.Node { } } -func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { +func CheckEmptyCriticalSection(pass *analysis.Pass) (interface{}, error) { // Initially it might seem like this check would be easier to // implement in SSA. After all, we're only checking for two // consecutive method calls. 
In reality, however, there may be any @@ -1589,7 +1430,7 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { return nil, "", false } - fn, ok := j.Pkg.TypesInfo.ObjectOf(sel.Sel).(*types.Func) + fn, ok := pass.TypesInfo.ObjectOf(sel.Sel).(*types.Func) if !ok { return nil, "", false } @@ -1610,44 +1451,47 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { sel1, method1, ok1 := mutexParams(block.List[i]) sel2, method2, ok2 := mutexParams(block.List[i+1]) - if !ok1 || !ok2 || Render(j, sel1) != Render(j, sel2) { + if !ok1 || !ok2 || Render(pass, sel1) != Render(pass, sel2) { continue } if (method1 == "Lock" && method2 == "Unlock") || (method1 == "RLock" && method2 == "RUnlock") { - j.Errorf(block.List[i+1], "empty critical section") + pass.Reportf(block.List[i+1].Pos(), "empty critical section") } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + return nil, nil } // cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't // want to flag. var cgoIdent = regexp.MustCompile(`^_C(func|var)_.+$`) -func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { +func CheckIneffectiveCopy(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { if unary, ok := node.(*ast.UnaryExpr); ok { if star, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND { ident, ok := star.X.(*ast.Ident) if !ok || !cgoIdent.MatchString(ident.Name) { - j.Errorf(unary, "&*x will be simplified to x. It will not copy x.") + pass.Reportf(unary.Pos(), "&*x will be simplified to x. It will not copy x.") } } } if star, ok := node.(*ast.StarExpr); ok { if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND { - j.Errorf(star, "*&x will be simplified to x. It will not copy x.") + pass.Reportf(star.Pos(), "*&x will be simplified to x. 
It will not copy x.") } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckDiffSizeComparison(pass *analysis.Pass) (interface{}, error) { + ranges := pass.ResultOf[valueRangesAnalyzer].(map[*ssa.Function]vrp.Ranges) + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, b := range ssafn.Blocks { for _, ins := range b.Instrs { binop, ok := ins.(*ssa.BinOp) @@ -1662,22 +1506,26 @@ func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { if !ok1 && !ok2 { continue } - r := c.funcDescs.Get(ssafn).Ranges + r := ranges[ssafn] r1, ok1 := r.Get(binop.X).(vrp.StringInterval) r2, ok2 := r.Get(binop.Y).(vrp.StringInterval) if !ok1 || !ok2 { continue } if r1.Length.Intersection(r2.Length).Empty() { - j.Errorf(binop, "comparing strings of different sizes for equality will always return false") + pass.Reportf(binop.Pos(), "comparing strings of different sizes for equality will always return false") } } } } + return nil, nil } -func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { - fn := func(node ast.Node, _ bool) bool { +func CheckCanonicalHeaderKey(pass *analysis.Pass) (interface{}, error) { + fn := func(node ast.Node, push bool) bool { + if !push { + return false + } assign, ok := node.(*ast.AssignStmt) if ok { // TODO(dh): This risks missing some Header reads, for @@ -1688,7 +1536,7 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { if !ok { continue } - if IsOfType(j, op.X, "net/http.Header") { + if IsOfType(pass, op.X, "net/http.Header") { return false } } @@ -1698,23 +1546,24 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { if !ok { return true } - if !IsOfType(j, op.X, "net/http.Header") { + if !IsOfType(pass, 
op.X, "net/http.Header") { return true } - s, ok := ExprToString(j, op.Index) + s, ok := ExprToString(pass, op.Index) if !ok { return true } if s == http.CanonicalHeaderKey(s) { return true } - j.Errorf(op, "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) + pass.Reportf(op.Pos(), "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) return true } - j.Pkg.Inspector.Nodes([]ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes([]ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckBenchmarkN(j *lint.Job) { +func CheckBenchmarkN(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { assign := node.(*ast.AssignStmt) if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { @@ -1727,16 +1576,17 @@ func (c *Checker) CheckBenchmarkN(j *lint.Job) { if sel.Sel.Name != "N" { return } - if !IsOfType(j, sel.X, "*testing.B") { + if !IsOfType(pass, sel.X, "*testing.B") { return } - j.Errorf(assign, "should not assign to %s", Render(j, sel)) + pass.Reportf(assign.Pos(), "should not assign to %s", Render(pass, sel)) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { if IsExample(ssafn) { continue } @@ -1776,7 +1626,7 @@ func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" { continue } - j.Errorf(lhs, "this value of %s is never used", lhs) + 
pass.Reportf(lhs.Pos(), "this value of %s is never used", lhs) } } return true @@ -1797,16 +1647,17 @@ func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { return true } if len(FilterDebug(*refs)) == 0 { - j.Errorf(lhs, "this value of %s is never used", lhs) + pass.Reportf(lhs.Pos(), "this value of %s is never used", lhs) } } return true }) } + return nil, nil } -func (c *Checker) CheckPredeterminedBooleanExprs(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckPredeterminedBooleanExprs(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ssabinop, ok := ins.(*ssa.BinOp) @@ -1841,16 +1692,17 @@ func (c *Checker) CheckPredeterminedBooleanExprs(j *lint.Job) { } b := trues != 0 if trues == 0 || trues == len(xs)*len(ys) { - j.Errorf(ssabinop, "binary expression is always %t for all possible values (%s %s %s)", + pass.Reportf(ssabinop.Pos(), "binary expression is always %t for all possible values (%s %s %s)", b, xs, ssabinop.Op, ys) } } } } + return nil, nil } -func (c *Checker) CheckNilMaps(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckNilMaps(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { mu, ok := ins.(*ssa.MapUpdate) @@ -1864,24 +1716,25 @@ func (c *Checker) CheckNilMaps(j *lint.Job) { if c.Value != nil { continue } - j.Errorf(mu, "assignment to nil map") + pass.Reportf(mu.Pos(), "assignment to nil map") } } } + return nil, nil } -func (c *Checker) CheckExtremeComparison(j *lint.Job) { +func CheckExtremeComparison(pass *analysis.Pass) (interface{}, error) { isobj := func(expr ast.Expr, name string) bool { sel, ok := expr.(*ast.SelectorExpr) if !ok { return false } - return 
IsObject(j.Pkg.TypesInfo.ObjectOf(sel.Sel), name) + return IsObject(pass.TypesInfo.ObjectOf(sel.Sel), name) } fn := func(node ast.Node) { expr := node.(*ast.BinaryExpr) - tx := j.Pkg.TypesInfo.TypeOf(expr.X) + tx := pass.TypesInfo.TypeOf(expr.X) basic, ok := tx.Underlying().(*types.Basic) if !ok { return @@ -1921,35 +1774,36 @@ func (c *Checker) CheckExtremeComparison(j *lint.Job) { if (expr.Op == token.GTR || expr.Op == token.GEQ) && isobj(expr.Y, max) || (expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.X, max) { - j.Errorf(expr, "no value of type %s is greater than %s", basic, max) + pass.Reportf(expr.Pos(), "no value of type %s is greater than %s", basic, max) } if expr.Op == token.LEQ && isobj(expr.Y, max) || expr.Op == token.GEQ && isobj(expr.X, max) { - j.Errorf(expr, "every value of type %s is <= %s", basic, max) + pass.Reportf(expr.Pos(), "every value of type %s is <= %s", basic, max) } if (basic.Info() & types.IsUnsigned) != 0 { if (expr.Op == token.LSS || expr.Op == token.LEQ) && IsIntLiteral(expr.Y, "0") || (expr.Op == token.GTR || expr.Op == token.GEQ) && IsIntLiteral(expr.X, "0") { - j.Errorf(expr, "no value of type %s is less than 0", basic) + pass.Reportf(expr.Pos(), "no value of type %s is less than 0", basic) } if expr.Op == token.GEQ && IsIntLiteral(expr.Y, "0") || expr.Op == token.LEQ && IsIntLiteral(expr.X, "0") { - j.Errorf(expr, "every value of type %s is >= 0", basic) + pass.Reportf(expr.Pos(), "every value of type %s is >= 0", basic) } } else { if (expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.Y, min) || (expr.Op == token.GTR || expr.Op == token.GEQ) && isobj(expr.X, min) { - j.Errorf(expr, "no value of type %s is less than %s", basic, min) + pass.Reportf(expr.Pos(), "no value of type %s is less than %s", basic, min) } if expr.Op == token.GEQ && isobj(expr.Y, min) || expr.Op == token.LEQ && isobj(expr.X, min) { - j.Errorf(expr, "every value of type %s is >= %s", basic, min) + pass.Reportf(expr.Pos(), "every value of 
type %s is >= %s", basic, min) } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ssa.Const, bool) { @@ -1993,8 +1847,8 @@ func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ss return uniq, true } -func (c *Checker) CheckLoopCondition(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckLoopCondition(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { fn := func(node ast.Node) bool { loop, ok := node.(*ast.ForStmt) if !ok { @@ -2050,16 +1904,17 @@ func (c *Checker) CheckLoopCondition(j *lint.Job) { case *ssa.UnOp: return true } - j.Errorf(cond, "variable in loop condition never changes") + pass.Reportf(cond.Pos(), "variable in loop condition never changes") return true } Inspect(ssafn.Syntax(), fn) } + return nil, nil } -func (c *Checker) CheckArgOverwritten(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckArgOverwritten(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { fn := func(node ast.Node) bool { var typ *ast.FuncType var body *ast.BlockStmt @@ -2079,7 +1934,7 @@ func (c *Checker) CheckArgOverwritten(j *lint.Job) { } for _, field := range typ.Params.List { for _, arg := range field.Names { - obj := j.Pkg.TypesInfo.ObjectOf(arg) + obj := pass.TypesInfo.ObjectOf(arg) var ssaobj *ssa.Parameter for _, param := range ssafn.Params { if param.Object() == obj { @@ -2109,7 +1964,7 @@ func (c *Checker) CheckArgOverwritten(j *lint.Job) { if !ok { continue } - if j.Pkg.TypesInfo.ObjectOf(ident) == obj { + if pass.TypesInfo.ObjectOf(ident) == obj { assigned = true return false } @@ -2117,7 +1972,7 @@ func (c *Checker) 
CheckArgOverwritten(j *lint.Job) { return true }) if assigned { - j.Errorf(arg, "argument %s is overwritten before first use", arg) + pass.Reportf(arg.Pos(), "argument %s is overwritten before first use", arg) } } } @@ -2125,9 +1980,10 @@ func (c *Checker) CheckArgOverwritten(j *lint.Job) { } Inspect(ssafn.Syntax(), fn) } + return nil, nil } -func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { +func CheckIneffectiveLoop(pass *analysis.Pass) (interface{}, error) { // This check detects some, but not all unconditional loop exits. // We give up in the following cases: // @@ -2167,7 +2023,7 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { body = node.Body loop = node case *ast.RangeStmt: - typ := j.Pkg.TypesInfo.TypeOf(node.X) + typ := pass.TypesInfo.TypeOf(node.X) if _, ok := typ.Underlying().(*types.Map); ok { // looping once over a map is a valid pattern for // getting an arbitrary element. @@ -2227,24 +2083,25 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { return true }) if unconditionalExit != nil { - j.Errorf(unconditionalExit, "the surrounding loop is unconditionally terminated") + pass.Reportf(unconditionalExit.Pos(), "the surrounding loop is unconditionally terminated") } return true }) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckNilContext(j *lint.Job) { +func CheckNilContext(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) if len(call.Args) == 0 { return } - if typ, ok := j.Pkg.TypesInfo.TypeOf(call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { + if typ, ok := pass.TypesInfo.TypeOf(call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { return } - sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) + sig, ok := 
pass.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { return } @@ -2254,13 +2111,14 @@ func (c *Checker) CheckNilContext(j *lint.Job) { if !IsType(sig.Params().At(0).Type(), "context.Context") { return } - j.Errorf(call.Args[0], + pass.Reportf(call.Args[0].Pos(), "do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckSeeker(j *lint.Job) { +func CheckSeeker(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) sel, ok := call.Fun.(*ast.SelectorExpr) @@ -2289,12 +2147,13 @@ func (c *Checker) CheckSeeker(j *lint.Job) { if pkg.Name != "io" { return } - j.Errorf(call, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") + pass.Reportf(call.Pos(), "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { +func CheckIneffectiveAppend(pass *analysis.Pass) (interface{}, error) { isAppend := func(ins ssa.Value) bool { call, ok := ins.(*ssa.Call) if !ok { @@ -2309,7 +2168,7 @@ func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { return true } - for _, ssafn := range j.Pkg.InitialFunctions { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { val, ok := ins.(ssa.Value) @@ -2353,15 +2212,16 @@ func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { } walkRefs(*refs) if !isUsed { - j.Errorf(ins, "this result of append 
is never used, except maybe in other appends") + pass.Reportf(ins.Pos(), "this result of append is never used, except maybe in other appends") } } } } + return nil, nil } -func (c *Checker) CheckConcurrentTesting(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckConcurrentTesting(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { gostmt, ok := ins.(*ssa.Go) @@ -2410,53 +2270,68 @@ func (c *Checker) CheckConcurrentTesting(j *lint.Job) { default: continue } - j.Errorf(gostmt, "the goroutine calls T.%s, which must be called in the same goroutine as the test", name) + pass.Reportf(gostmt.Pos(), "the goroutine calls T.%s, which must be called in the same goroutine as the test", name) } } } } } + return nil, nil } -func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { - node := c.funcDescs.CallGraph.CreateNode(ssafn) - for _, edge := range node.Out { - if edge.Callee.Func.RelString(nil) != "runtime.SetFinalizer" { - continue - } - arg0 := edge.Site.Common().Args[Arg("runtime.SetFinalizer.obj")] - if iface, ok := arg0.(*ssa.MakeInterface); ok { - arg0 = iface.X - } - unop, ok := arg0.(*ssa.UnOp) - if !ok { - continue - } - v, ok := unop.X.(*ssa.Alloc) - if !ok { - continue - } - arg1 := edge.Site.Common().Args[Arg("runtime.SetFinalizer.finalizer")] - if iface, ok := arg1.(*ssa.MakeInterface); ok { - arg1 = iface.X - } - mc, ok := arg1.(*ssa.MakeClosure) - if !ok { - continue - } - for _, b := range mc.Bindings { - if b == v { - pos := lint.DisplayPosition(j.Pkg.Fset, mc.Fn.Pos()) - j.Errorf(edge.Site, "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos) +func eachCall(ssafn *ssa.Function, fn func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function)) { + for _, b := range ssafn.Blocks { + for 
_, instr := range b.Instrs { + if site, ok := instr.(ssa.CallInstruction); ok { + if g := site.Common().StaticCallee(); g != nil { + fn(ssafn, site, g) } } } } } -func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckCyclicFinalizer(pass *analysis.Pass) (interface{}, error) { + fn := func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function) { + if callee.RelString(nil) != "runtime.SetFinalizer" { + return + } + arg0 := site.Common().Args[Arg("runtime.SetFinalizer.obj")] + if iface, ok := arg0.(*ssa.MakeInterface); ok { + arg0 = iface.X + } + unop, ok := arg0.(*ssa.UnOp) + if !ok { + return + } + v, ok := unop.X.(*ssa.Alloc) + if !ok { + return + } + arg1 := site.Common().Args[Arg("runtime.SetFinalizer.finalizer")] + if iface, ok := arg1.(*ssa.MakeInterface); ok { + arg1 = iface.X + } + mc, ok := arg1.(*ssa.MakeClosure) + if !ok { + return + } + for _, b := range mc.Bindings { + if b == v { + pos := lint.DisplayPosition(pass.Fset, mc.Fn.Pos()) + pass.Reportf(site.Pos(), "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos) + } + } + } + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + eachCall(ssafn, fn) + } + return nil, nil +} + +/* +func CheckSliceOutOfBounds(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ia, ok := ins.(*ssa.IndexAddr) @@ -2472,15 +2347,17 @@ func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) { continue } if idxr.Lower.Cmp(sr.Length.Upper) >= 0 { - j.Errorf(ia, "index out of bounds") + pass.Reportf(ia.Pos(), "index out of bounds") } } } } + return nil, nil } +*/ -func (c *Checker) CheckDeferLock(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckDeferLock(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := 
range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { instrs := FilterDebug(block.Instrs) if len(instrs) < 2 { @@ -2512,13 +2389,14 @@ func (c *Checker) CheckDeferLock(j *lint.Job) { case "RLock": alt = "RUnlock" } - j.Errorf(nins, "deferring %s right after having locked already; did you mean to defer %s?", name, alt) + pass.Reportf(nins.Pos(), "deferring %s right after having locked already; did you mean to defer %s?", name, alt) } } } + return nil, nil } -func (c *Checker) CheckNaNComparison(j *lint.Job) { +func CheckNaNComparison(pass *analysis.Pass) (interface{}, error) { isNaN := func(v ssa.Value) bool { call, ok := v.(*ssa.Call) if !ok { @@ -2526,7 +2404,7 @@ func (c *Checker) CheckNaNComparison(j *lint.Job) { } return IsCallTo(call.Common(), "math.NaN") } - for _, ssafn := range j.Pkg.InitialFunctions { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ins, ok := ins.(*ssa.BinOp) @@ -2534,27 +2412,27 @@ func (c *Checker) CheckNaNComparison(j *lint.Job) { continue } if isNaN(ins.X) || isNaN(ins.Y) { - j.Errorf(ins, "no value is equal to NaN, not even NaN itself") + pass.Reportf(ins.Pos(), "no value is equal to NaN, not even NaN itself") } } } } + return nil, nil } -func (c *Checker) CheckInfiniteRecursion(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { - node := c.funcDescs.CallGraph.CreateNode(ssafn) - for _, edge := range node.Out { - if edge.Callee != node { - continue +func CheckInfiniteRecursion(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + eachCall(ssafn, func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function) { + if callee != ssafn { + return } - if _, ok := edge.Site.(*ssa.Go); ok { + if _, ok := site.(*ssa.Go); ok { // Recursively spawning goroutines doesn't consume // stack space 
infinitely, so don't flag it. - continue + return } - block := edge.Site.Block() + block := site.Block() canReturn := false for _, b := range ssafn.Blocks { if block.Dominates(b) { @@ -2569,11 +2447,12 @@ func (c *Checker) CheckInfiniteRecursion(j *lint.Job) { } } if canReturn { - continue + return } - j.Errorf(edge.Site, "infinite recursive call") - } + pass.Reportf(site.Pos(), "infinite recursive call") + }) } + return nil, nil } func objectName(obj types.Object) string { @@ -2591,20 +2470,20 @@ func objectName(obj types.Object) string { return name } -func isName(j *lint.Job, expr ast.Expr, name string) bool { +func isName(pass *analysis.Pass, expr ast.Expr, name string) bool { var obj types.Object switch expr := expr.(type) { case *ast.Ident: - obj = j.Pkg.TypesInfo.ObjectOf(expr) + obj = pass.TypesInfo.ObjectOf(expr) case *ast.SelectorExpr: - obj = j.Pkg.TypesInfo.ObjectOf(expr.Sel) + obj = pass.TypesInfo.ObjectOf(expr.Sel) } return objectName(obj) == name } -func (c *Checker) CheckLeakyTimeTick(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { - if IsInMain(j, ssafn) || IsInTest(j, ssafn) { +func CheckLeakyTimeTick(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + if IsInMain(pass, ssafn) || IsInTest(pass, ssafn) { continue } for _, block := range ssafn.Blocks { @@ -2613,16 +2492,17 @@ func (c *Checker) CheckLeakyTimeTick(j *lint.Job) { if !ok || !IsCallTo(call.Common(), "time.Tick") { continue } - if c.funcDescs.Get(call.Parent()).Infinite { + if !functions.Terminates(call.Parent()) { continue } - j.Errorf(call, "using time.Tick leaks the underlying ticker, consider using it only in endless functions, tests and the main package, and use time.NewTicker here") + pass.Reportf(call.Pos(), "using time.Tick leaks the underlying ticker, consider using it only in endless functions, tests and the main package, and use time.NewTicker here") } } } + return nil, nil } -func (c 
*Checker) CheckDoubleNegation(j *lint.Job) { +func CheckDoubleNegation(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { unary1 := node.(*ast.UnaryExpr) unary2, ok := unary1.X.(*ast.UnaryExpr) @@ -2632,9 +2512,10 @@ func (c *Checker) CheckDoubleNegation(j *lint.Job) { if unary1.Op != token.NOT || unary2.Op != token.NOT { return } - j.Errorf(unary1, "negating a boolean twice has no effect; is this a typo?") + pass.Reportf(unary1.Pos(), "negating a boolean twice has no effect; is this a typo?") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.UnaryExpr)(nil)}, fn) + return nil, nil } func hasSideEffects(node ast.Node) bool { @@ -2655,7 +2536,7 @@ func hasSideEffects(node ast.Node) bool { return dynamic } -func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { +func CheckRepeatedIfElse(pass *analysis.Pass) (interface{}, error) { seen := map[ast.Node]bool{} var collectConds func(ifstmt *ast.IfStmt, inits []ast.Stmt, conds []ast.Expr) ([]ast.Stmt, []ast.Expr) @@ -2686,18 +2567,19 @@ func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { } counts := map[string]int{} for _, cond := range conds { - s := Render(j, cond) + s := Render(pass, cond) counts[s]++ if counts[s] == 2 { - j.Errorf(cond, "this condition occurs multiple times in this if/else if chain") + pass.Reportf(cond.Pos(), "this condition occurs multiple times in this if/else if chain") } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckSillyBitwiseOps(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range 
block.Instrs { ins, ok := ins.(*ssa.BinOp) @@ -2715,7 +2597,7 @@ func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { // of a pattern, x<<0, x<<8, x<<16, ... continue } - path, _ := astutil.PathEnclosingInterval(j.File(ins), ins.Pos(), ins.Pos()) + path, _ := astutil.PathEnclosingInterval(File(pass, ins), ins.Pos(), ins.Pos()) if len(path) == 0 { continue } @@ -2725,19 +2607,20 @@ func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { switch ins.Op { case token.AND: - j.Errorf(ins, "x & 0 always equals 0") + pass.Reportf(ins.Pos(), "x & 0 always equals 0") case token.OR, token.XOR: - j.Errorf(ins, "x %s 0 always equals x", ins.Op) + pass.Reportf(ins.Pos(), "x %s 0 always equals x", ins.Op) } } } } + return nil, nil } -func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { +func CheckNonOctalFileMode(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) + sig, ok := pass.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { return } @@ -2764,17 +2647,165 @@ func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { if err != nil { continue } - j.Errorf(call.Args[i], "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value) + pass.Reportf(call.Args[i].Pos(), "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value) + } + } + } + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil +} + +type IsPure struct{} + +func (*IsPure) AFact() {} +func (*IsPure) String() string { return "IsPure" } + +var pureStdlib = map[string]struct{}{ + "errors.New": {}, + "fmt.Errorf": {}, + "fmt.Sprintf": {}, + "fmt.Sprint": {}, + "sort.Reverse": {}, + "strings.Map": {}, + "strings.Repeat": {}, + "strings.Replace": {}, + "strings.Title": {}, + "strings.ToLower": {}, + "strings.ToLowerSpecial": {}, + "strings.ToTitle": {}, + "strings.ToTitleSpecial": {}, + 
"strings.ToUpper": {}, + "strings.ToUpperSpecial": {}, + "strings.Trim": {}, + "strings.TrimFunc": {}, + "strings.TrimLeft": {}, + "strings.TrimLeftFunc": {}, + "strings.TrimPrefix": {}, + "strings.TrimRight": {}, + "strings.TrimRightFunc": {}, + "strings.TrimSpace": {}, + "strings.TrimSuffix": {}, + "(*net/http.Request).WithContext": {}, +} + +func checkPureFunctionsMark(pass *analysis.Pass) { + seen := map[*ssa.Function]struct{}{} + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg + var check func(ssafn *ssa.Function) (ret bool) + check = func(ssafn *ssa.Function) (ret bool) { + if ssafn.Object() == nil { + // TODO(dh): support closures + return false + } + if pass.ImportObjectFact(ssafn.Object(), new(IsPure)) { + return true + } + if ssafn.Pkg != ssapkg { + // Function is in another package but wasn't marked as + // pure, ergo it isn't pure + return false + } + // Break recursion + if _, ok := seen[ssafn]; ok { + return false + } + + seen[ssafn] = struct{}{} + defer func() { + if ret { + pass.ExportObjectFact(ssafn.Object(), &IsPure{}) + } + }() + + if functions.IsStub(ssafn) { + return false + } + + if _, ok := pureStdlib[ssafn.Object().(*types.Func).FullName()]; ok { + return true + } + + if ssafn.Signature.Results().Len() == 0 { + // A function with no return values is empty or is doing some + // work we cannot see (for example because of build tags); + // don't consider it pure. 
+ return false + } + + for _, param := range ssafn.Params { + if _, ok := param.Type().Underlying().(*types.Basic); !ok { + return false + } + } + + if ssafn.Blocks == nil { + return false + } + checkCall := func(common *ssa.CallCommon) bool { + if common.IsInvoke() { + return false + } + builtin, ok := common.Value.(*ssa.Builtin) + if !ok { + if common.StaticCallee() != ssafn { + if common.StaticCallee() == nil { + return false + } + if !check(common.StaticCallee()) { + return false + } + } + } else { + switch builtin.Name() { + case "len", "cap", "make", "new": + default: + return false + } + } + return true + } + for _, b := range ssafn.Blocks { + for _, ins := range b.Instrs { + switch ins := ins.(type) { + case *ssa.Call: + if !checkCall(ins.Common()) { + return false + } + case *ssa.Defer: + if !checkCall(&ins.Call) { + return false + } + case *ssa.Select: + return false + case *ssa.Send: + return false + case *ssa.Go: + return false + case *ssa.Panic: + return false + case *ssa.Store: + return false + case *ssa.FieldAddr: + return false + case *ssa.UnOp: + if ins.Op == token.MUL || ins.Op == token.AND { + return false + } + } } } + return true + } + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + check(ssafn) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } -func (c *Checker) CheckPureFunctions(j *lint.Job) { +func CheckPureFunctions(pass *analysis.Pass) (interface{}, error) { + checkPureFunctionsMark(pass) fnLoop: - for _, ssafn := range j.Pkg.InitialFunctions { - if IsInTest(j, ssafn) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + if IsInTest(pass, ssafn) { params := ssafn.Signature.Params() for i := 0; i < params.Len(); i++ { param := params.At(i) @@ -2805,157 +2836,174 @@ fnLoop: if callee == nil { continue } - if c.funcDescs.Get(callee).Pure && !c.funcDescs.Get(callee).Stub { - j.Errorf(ins, "%s is a pure function but its return value is ignored", 
callee.Name()) + if callee.Object() == nil { + // TODO(dh): support anonymous functions + continue + } + if pass.ImportObjectFact(callee.Object(), new(IsPure)) { + pass.Reportf(ins.Pos(), "%s is a pure function but its return value is ignored", callee.Name()) continue } } } } + return nil, nil } -func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) { - obj := j.Pkg.TypesInfo.ObjectOf(ident) +func isDeprecated(pass *analysis.Pass, ident *ast.Ident) (bool, string) { + obj := pass.TypesInfo.ObjectOf(ident) if obj.Pkg() == nil { return false, "" } - alt := c.deprecatedObjs[obj] - return alt != "", alt + var depr IsDeprecated + if pass.ImportObjectFact(obj, &depr) { + return true, depr.Msg + } + return false, "" } -func (c *Checker) CheckDeprecated(j *lint.Job) { +func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { + checkDeprecatedMark(pass) + // Selectors can appear outside of function literals, e.g. when // declaring package level variables. - var ssafn *ssa.Function + var tfn types.Object stack := 0 fn := func(node ast.Node, push bool) bool { if !push { stack-- - } else { - stack++ + return false } + stack++ if stack == 1 { - ssafn = nil + tfn = nil } if fn, ok := node.(*ast.FuncDecl); ok { - ssafn = j.Pkg.SSA.Prog.FuncValue(j.Pkg.TypesInfo.ObjectOf(fn.Name).(*types.Func)) + tfn = pass.TypesInfo.ObjectOf(fn.Name) } sel, ok := node.(*ast.SelectorExpr) if !ok { return true } - obj := j.Pkg.TypesInfo.ObjectOf(sel.Sel) + obj := pass.TypesInfo.ObjectOf(sel.Sel) if obj.Pkg() == nil { return true } - nodePkg := j.Pkg.Types - if nodePkg == obj.Pkg() || obj.Pkg().Path()+"_test" == nodePkg.Path() { + if pass.Pkg == obj.Pkg() || obj.Pkg().Path()+"_test" == pass.Pkg.Path() { // Don't flag stuff in our own package return true } - if ok, alt := c.isDeprecated(j, sel.Sel); ok { + if ok, alt := isDeprecated(pass, sel.Sel); ok { // Look for the first available alternative, not the first // version something was deprecated in. 
If a function was // deprecated in Go 1.6, an alternative has been available // already in 1.0, and we're targeting 1.2, it still // makes sense to use the alternative from 1.0, to be // future-proof. - minVersion := deprecated.Stdlib[SelectorName(j, sel)].AlternativeAvailableSince - if !IsGoVersion(j, minVersion) { + minVersion := deprecated.Stdlib[SelectorName(pass, sel)].AlternativeAvailableSince + if !IsGoVersion(pass, minVersion) { return true } - if ssafn != nil { - if _, ok := c.deprecatedObjs[ssafn.Object()]; ok { + if tfn != nil { + var depr IsDeprecated + if pass.ImportObjectFact(tfn, &depr) { // functions that are deprecated may use deprecated // symbols return true } } - j.Errorf(sel, "%s is deprecated: %s", Render(j, sel), alt) + pass.Reportf(sel.Pos(), "%s is deprecated: %s", Render(pass, sel), alt) return true } return true } - for _, f := range j.Pkg.Syntax { + + imps := map[string]*types.Package{} + for _, imp := range pass.Pkg.Imports() { + imps[imp.Path()] = imp + } + for _, f := range pass.Files { ast.Inspect(f, func(node ast.Node) bool { if node, ok := node.(*ast.ImportSpec); ok { p := node.Path.Value path := p[1 : len(p)-1] - imp := j.Pkg.Imports[path] - if alt := c.deprecatedPkgs[imp.Types]; alt != "" { - j.Errorf(node, "Package %s is deprecated: %s", path, alt) + imp := imps[path] + var depr IsDeprecated + if pass.ImportPackageFact(imp, &depr) { + pass.Reportf(node.Pos(), "Package %s is deprecated: %s", path, depr.Msg) } } return true }) } - j.Pkg.Inspector.Nodes(nil, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes(nil, fn) + return nil, nil } -func (c *Checker) callChecker(rules map[string]CallCheck) func(j *lint.Job) { - return func(j *lint.Job) { - c.checkCalls(j, rules) +func callChecker(rules map[string]CallCheck) func(pass *analysis.Pass) (interface{}, error) { + return func(pass *analysis.Pass) (interface{}, error) { + return checkCalls(pass, rules) } } -func (c *Checker) checkCalls(j *lint.Job, rules 
map[string]CallCheck) { - for _, ssafn := range j.Pkg.InitialFunctions { - node := c.funcDescs.CallGraph.CreateNode(ssafn) - for _, edge := range node.Out { - callee := edge.Callee.Func - obj, ok := callee.Object().(*types.Func) - if !ok { - continue - } +func checkCalls(pass *analysis.Pass, rules map[string]CallCheck) (interface{}, error) { + ranges := pass.ResultOf[valueRangesAnalyzer].(map[*ssa.Function]vrp.Ranges) + fn := func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function) { + obj, ok := callee.Object().(*types.Func) + if !ok { + return + } - r, ok := rules[lint.FuncName(obj)] - if !ok { - continue - } - var args []*Argument - ssaargs := edge.Site.Common().Args - if callee.Signature.Recv() != nil { - ssaargs = ssaargs[1:] - } - for _, arg := range ssaargs { - if iarg, ok := arg.(*ssa.MakeInterface); ok { - arg = iarg.X - } - vr := c.funcDescs.Get(edge.Site.Parent()).Ranges[arg] - args = append(args, &Argument{Value: Value{arg, vr}}) - } - call := &Call{ - Job: j, - Instr: edge.Site, - Args: args, - Checker: c, - Parent: edge.Site.Parent(), + r, ok := rules[lint.FuncName(obj)] + if !ok { + return + } + var args []*Argument + ssaargs := site.Common().Args + if callee.Signature.Recv() != nil { + ssaargs = ssaargs[1:] + } + for _, arg := range ssaargs { + if iarg, ok := arg.(*ssa.MakeInterface); ok { + arg = iarg.X } - r(call) - for idx, arg := range call.Args { - _ = idx - for _, e := range arg.invalids { - // path, _ := astutil.PathEnclosingInterval(f.File, edge.Site.Pos(), edge.Site.Pos()) - // if len(path) < 2 { - // continue - // } - // astcall, ok := path[0].(*ast.CallExpr) - // if !ok { - // continue - // } - // j.Errorf(astcall.Args[idx], "%s", e) + vr := ranges[site.Parent()][arg] + args = append(args, &Argument{Value: Value{arg, vr}}) + } + call := &Call{ + Pass: pass, + Instr: site, + Args: args, + Parent: site.Parent(), + } + r(call) + for idx, arg := range call.Args { + _ = idx + for _, e := range arg.invalids { + // path, _ := 
astutil.PathEnclosingInterval(f.File, edge.Site.Pos(), edge.Site.Pos()) + // if len(path) < 2 { + // continue + // } + // astcall, ok := path[0].(*ast.CallExpr) + // if !ok { + // continue + // } + // pass.Reportf(astcall.Args[idx], "%s", e) - j.Errorf(edge.Site, "%s", e) - } - } - for _, e := range call.invalids { - j.Errorf(call.Instr.Common(), "%s", e) + pass.Reportf(site.Pos(), "%s", e) } } + for _, e := range call.invalids { + pass.Reportf(call.Instr.Common().Pos(), "%s", e) + } } + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + eachCall(ssafn, fn) + } + return nil, nil } func shortCallName(call *ssa.CallCommon) string { @@ -2975,12 +3023,12 @@ func shortCallName(call *ssa.CallCommon) string { return "" } -func (c *Checker) CheckWriterBufferModified(j *lint.Job) { +func CheckWriterBufferModified(pass *analysis.Pass) (interface{}, error) { // TODO(dh): this might be a good candidate for taint analysis. // Taint the argument as MUST_NOT_MODIFY, then propagate that // through functions like bytes.Split - for _, ssafn := range j.Pkg.InitialFunctions { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { sig := ssafn.Signature if ssafn.Name() != "Write" || sig.Recv() == nil || sig.Params().Len() != 1 || sig.Results().Len() != 2 { continue @@ -3010,7 +3058,7 @@ func (c *Checker) CheckWriterBufferModified(j *lint.Job) { if addr.X != ssafn.Params[1] { continue } - j.Errorf(ins, "io.Writer.Write must not modify the provided buffer, not even temporarily") + pass.Reportf(ins.Pos(), "io.Writer.Write must not modify the provided buffer, not even temporarily") case *ssa.Call: if !IsCallTo(ins.Common(), "append") { continue @@ -3018,11 +3066,12 @@ func (c *Checker) CheckWriterBufferModified(j *lint.Job) { if ins.Common().Args[0] != ssafn.Params[1] { continue } - j.Errorf(ins, "io.Writer.Write must not modify the provided buffer, not even temporarily") + pass.Reportf(ins.Pos(), "io.Writer.Write must not 
modify the provided buffer, not even temporarily") } } } } + return nil, nil } func loopedRegexp(name string) CallCheck { @@ -3030,21 +3079,18 @@ func loopedRegexp(name string) CallCheck { if len(extractConsts(call.Args[0].Value.Value)) == 0 { return } - if !call.Checker.isInLoop(call.Instr.Block()) { + if !isInLoop(call.Instr.Block()) { return } call.Invalid(fmt.Sprintf("calling %s in a loop has poor performance, consider using regexp.Compile", name)) } } -func (c *Checker) CheckEmptyBranch(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckEmptyBranch(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { if ssafn.Syntax() == nil { continue } - if IsGenerated(j.File(ssafn.Syntax())) { - continue - } if IsExample(ssafn) { continue } @@ -3058,20 +3104,21 @@ func (c *Checker) CheckEmptyBranch(j *lint.Job) { if !ok || len(b.List) != 0 { return true } - j.Errorf(ifstmt.Else, "empty branch") + ReportfFG(pass, ifstmt.Else.Pos(), "empty branch") } if len(ifstmt.Body.List) != 0 { return true } - j.Errorf(ifstmt, "empty branch") + ReportfFG(pass, ifstmt.Pos(), "empty branch") return true } Inspect(ssafn.Syntax(), fn) } + return nil, nil } -func (c *Checker) CheckMapBytesKey(j *lint.Job) { - for _, fn := range j.Pkg.InitialFunctions { +func CheckMapBytesKey(pass *analysis.Pass) (interface{}, error) { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, b := range fn.Blocks { insLoop: for _, ins := range b.Instrs { @@ -3115,31 +3162,33 @@ func (c *Checker) CheckMapBytesKey(j *lint.Job) { if !ident { continue } - j.Errorf(conv, "m[string(key)] would be more efficient than k := string(key); m[k]") + pass.Reportf(conv.Pos(), "m[string(key)] would be more efficient than k := string(key); m[k]") } } } + return nil, nil } -func (c *Checker) CheckRangeStringRunes(j *lint.Job) { - sharedcheck.CheckRangeStringRunes(j) +func CheckRangeStringRunes(pass 
*analysis.Pass) (interface{}, error) { + return sharedcheck.CheckRangeStringRunes(pass) } -func (c *Checker) CheckSelfAssignment(j *lint.Job) { +func CheckSelfAssignment(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { assign := node.(*ast.AssignStmt) if assign.Tok != token.ASSIGN || len(assign.Lhs) != len(assign.Rhs) { return } for i, stmt := range assign.Lhs { - rlh := Render(j, stmt) - rrh := Render(j, assign.Rhs[i]) + rlh := Render(pass, stmt) + rrh := Render(pass, assign.Rhs[i]) if rlh == rrh { - j.Errorf(assign, "self-assignment of %s to %s", rrh, rlh) + ReportfFG(pass, assign.Pos(), "self-assignment of %s to %s", rrh, rlh) } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + return nil, nil } func buildTagsIdentical(s1, s2 []string) bool { @@ -3160,8 +3209,8 @@ func buildTagsIdentical(s1, s2 []string) bool { return true } -func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { - for _, f := range job.Pkg.Syntax { +func CheckDuplicateBuildConstraints(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { constraints := buildTags(f) for i, constraint1 := range constraints { for j, constraint2 := range constraints { @@ -3169,19 +3218,20 @@ func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { continue } if buildTagsIdentical(constraint1, constraint2) { - job.Errorf(f, "identical build constraints %q and %q", + ReportfFG(pass, f.Pos(), "identical build constraints %q and %q", strings.Join(constraint1, " "), strings.Join(constraint2, " ")) } } } } + return nil, nil } -func (c *Checker) CheckSillyRegexp(j *lint.Job) { +func CheckSillyRegexp(pass *analysis.Pass) (interface{}, error) { // We could use the rule checking engine for this, but the // arguments aren't really invalid. 
- for _, fn := range j.Pkg.InitialFunctions { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, b := range fn.Blocks { for _, ins := range b.Instrs { call, ok := ins.(*ssa.Call) @@ -3205,13 +3255,14 @@ func (c *Checker) CheckSillyRegexp(j *lint.Job) { if re.Op != syntax.OpLiteral && re.Op != syntax.OpEmptyMatch { continue } - j.Errorf(call, "regular expression does not contain any meta characters") + pass.Reportf(call.Pos(), "regular expression does not contain any meta characters") } } } + return nil, nil } -func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { +func CheckMissingEnumTypesInDeclaration(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { decl := node.(*ast.GenDecl) if !decl.Lparen.IsValid() { @@ -3221,7 +3272,7 @@ func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { return } - groups := GroupSpecs(j.Pkg.Fset, decl.Specs) + groups := GroupSpecs(pass.Fset, decl.Specs) groupLoop: for _, group := range groups { if len(group) < 2 { @@ -3254,14 +3305,15 @@ func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { continue groupLoop } } - j.Errorf(group[0], "only the first constant in this group has an explicit type") + pass.Reportf(group[0].Pos(), "only the first constant in this group has an explicit type") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.GenDecl)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.GenDecl)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { - for _, fn := range j.Pkg.InitialFunctions { +func CheckTimerResetReturnValue(pass *analysis.Pass) (interface{}, error) { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range fn.Blocks { for _, ins := range block.Instrs { call, ok := ins.(*ssa.Call) @@ -3314,15 +3366,16 @@ func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { } if found { - 
j.Errorf(call, "it is not possible to use Reset's return value correctly, as there is a race condition between draining the channel and the new timer expiring") + pass.Reportf(call.Pos(), "it is not possible to use Reset's return value correctly, as there is a race condition between draining the channel and the new timer expiring") } } } } } + return nil, nil } -func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { +func CheckToLowerToUpperComparison(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { binExpr := node.(*ast.BinaryExpr) @@ -3342,9 +3395,9 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { ) var call string - if IsCallToAST(j, binExpr.X, lo) && IsCallToAST(j, binExpr.Y, lo) { + if IsCallToAST(pass, binExpr.X, lo) && IsCallToAST(pass, binExpr.Y, lo) { call = lo - } else if IsCallToAST(j, binExpr.X, up) && IsCallToAST(j, binExpr.Y, up) { + } else if IsCallToAST(pass, binExpr.X, up) && IsCallToAST(pass, binExpr.Y, up) { call = up } else { return @@ -3355,13 +3408,14 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { bang = "!" } - j.Errorf(binExpr, "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) + pass.Reportf(binExpr.Pos(), "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { +func CheckUnreachableTypeCases(pass *analysis.Pass) (interface{}, error) { // Check if T subsumes V in a type switch. T subsumes V if T is an interface and T's method set is a subset of V's method set. 
subsumes := func(T, V types.Type) bool { tIface, ok := T.Underlying().(*types.Interface) @@ -3404,7 +3458,7 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { Ts := make([]types.Type, len(cc.List)) for i, expr := range cc.List { - Ts[i] = j.Pkg.TypesInfo.TypeOf(expr) + Ts[i] = pass.TypesInfo.TypeOf(expr) } ccs = append(ccs, ccAndTypes{cc: cc, types: Ts}) @@ -3419,30 +3473,32 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { for i, cc := range ccs[:len(ccs)-1] { for _, next := range ccs[i+1:] { if T, V, yes := subsumesAny(cc.types, next.types); yes { - j.Errorf(next.cc, "unreachable case clause: %s will always match before %s", T.String(), V.String()) + pass.Reportf(next.cc.Pos(), "unreachable case clause: %s will always match before %s", T.String(), V.String()) } } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckSingleArgAppend(j *lint.Job) { +func CheckSingleArgAppend(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if !IsCallToAST(j, node, "append") { + if !IsCallToAST(pass, node, "append") { return } call := node.(*ast.CallExpr) if len(call.Args) != 1 { return } - j.Errorf(call, "x = append(y) is equivalent to x = y") + ReportfFG(pass, call.Pos(), "x = append(y) is equivalent to x = y") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckStructTags(j *lint.Job) { +func CheckStructTags(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { for _, field := range node.(*ast.StructType).Fields.List { if field.Tag == nil { @@ -3450,35 +3506,36 @@ func (c *Checker) CheckStructTags(j *lint.Job) { } tags, err := parseStructTag(field.Tag.Value[1 : 
len(field.Tag.Value)-1]) if err != nil { - j.Errorf(field.Tag, "unparseable struct tag: %s", err) + pass.Reportf(field.Tag.Pos(), "unparseable struct tag: %s", err) continue } for k, v := range tags { if len(v) > 1 { - j.Errorf(field.Tag, "duplicate struct tag %q", k) + pass.Reportf(field.Tag.Pos(), "duplicate struct tag %q", k) continue } switch k { case "json": - checkJSONTag(j, field, v[0]) + checkJSONTag(pass, field, v[0]) case "xml": - checkXMLTag(j, field, v[0]) + checkXMLTag(pass, field, v[0]) } } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.StructType)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.StructType)(nil)}, fn) + return nil, nil } -func checkJSONTag(j *lint.Job, field *ast.Field, tag string) { +func checkJSONTag(pass *analysis.Pass, field *ast.Field, tag string) { + //lint:ignore SA9003 TODO(dh): should we flag empty tags? if len(tag) == 0 { - // TODO(dh): should we flag empty tags? } fields := strings.Split(tag, ",") for _, r := range fields[0] { if !unicode.IsLetter(r) && !unicode.IsDigit(r) && !strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", r) { - j.Errorf(field.Tag, "invalid JSON field name %q", fields[0]) + pass.Reportf(field.Tag.Pos(), "invalid JSON field name %q", fields[0]) } } var co, cs int @@ -3491,26 +3548,26 @@ func checkJSONTag(j *lint.Job, field *ast.Field, tag string) { case "string": cs++ // only for string, floating point, integer and bool - T := Dereference(j.Pkg.TypesInfo.TypeOf(field.Type).Underlying()).Underlying() + T := Dereference(pass.TypesInfo.TypeOf(field.Type).Underlying()).Underlying() basic, ok := T.(*types.Basic) if !ok || (basic.Info()&(types.IsBoolean|types.IsInteger|types.IsFloat|types.IsString)) == 0 { - j.Errorf(field.Tag, "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those") + pass.Reportf(field.Tag.Pos(), "the JSON string option only applies to fields of type string, floating point, integer 
or bool, or pointers to those") } default: - j.Errorf(field.Tag, "unknown JSON option %q", s) + pass.Reportf(field.Tag.Pos(), "unknown JSON option %q", s) } } if co > 1 { - j.Errorf(field.Tag, `duplicate JSON option "omitempty"`) + pass.Reportf(field.Tag.Pos(), `duplicate JSON option "omitempty"`) } if cs > 1 { - j.Errorf(field.Tag, `duplicate JSON option "string"`) + pass.Reportf(field.Tag.Pos(), `duplicate JSON option "string"`) } } -func checkXMLTag(j *lint.Job, field *ast.Field, tag string) { +func checkXMLTag(pass *analysis.Pass, field *ast.Field, tag string) { + //lint:ignore SA9003 TODO(dh): should we flag empty tags? if len(tag) == 0 { - // TODO(dh): should we flag empty tags? } fields := strings.Split(tag, ",") counts := map[string]int{} @@ -3526,15 +3583,15 @@ func checkXMLTag(j *lint.Job, field *ast.Field, tag string) { counts[s]++ case "": default: - j.Errorf(field.Tag, "unknown XML option %q", s) + pass.Reportf(field.Tag.Pos(), "unknown XML option %q", s) } } for k, v := range counts { if v > 1 { - j.Errorf(field.Tag, "duplicate XML option %q", k) + pass.Reportf(field.Tag.Pos(), "duplicate XML option %q", k) } } if len(exclusives) > 1 { - j.Errorf(field.Tag, "XML options %s are mutually exclusive", strings.Join(exclusives, " and ")) + pass.Reportf(field.Tag.Pos(), "XML options %s are mutually exclusive", strings.Join(exclusives, " and ")) } } diff --git a/staticcheck/lint_test.go b/staticcheck/lint_test.go index 88658dfb8..2b129b53e 100644 --- a/staticcheck/lint_test.go +++ b/staticcheck/lint_test.go @@ -3,32 +3,111 @@ package staticcheck import ( "testing" - "honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/lintutil" - "honnef.co/go/tools/lint/testutil" + "golang.org/x/tools/go/analysis/analysistest" ) func TestAll(t *testing.T) { - c := NewChecker() - testutil.TestAll(t, c, "") -} - -func BenchmarkStdlib(b *testing.B) { - for i := 0; i < b.N; i++ { - c := NewChecker() - _, err := lintutil.Lint([]lint.Checker{c}, []string{"std"}, nil) - if err != 
nil { - b.Fatal(err) - } + checks := map[string][]struct { + dir string + version string + }{ + "SA1000": {{dir: "CheckRegexps"}}, + "SA1001": {{dir: "CheckTemplate"}}, + "SA1002": {{dir: "CheckTimeParse"}}, + "SA1003": { + {dir: "CheckEncodingBinary"}, + {dir: "CheckEncodingBinary_go17", version: "1.7"}, + {dir: "CheckEncodingBinary_go18", version: "1.8"}, + }, + "SA1004": {{dir: "CheckTimeSleepConstant"}}, + "SA1005": {{dir: "CheckExec"}}, + "SA1006": {{dir: "CheckUnsafePrintf"}}, + "SA1007": {{dir: "CheckURLs"}}, + "SA1008": {{dir: "CheckCanonicalHeaderKey"}}, + "SA1010": {{dir: "checkStdlibUsageRegexpFindAll"}}, + "SA1011": {{dir: "checkStdlibUsageUTF8Cutset"}}, + "SA1012": {{dir: "checkStdlibUsageNilContext"}}, + "SA1013": {{dir: "checkStdlibUsageSeeker"}}, + "SA1014": {{dir: "CheckUnmarshalPointer"}}, + "SA1015": { + {dir: "CheckLeakyTimeTick"}, + {dir: "CheckLeakyTimeTick-main"}, + }, + "SA1016": {{dir: "CheckUntrappableSignal"}}, + "SA1017": {{dir: "CheckUnbufferedSignalChan"}}, + "SA1018": {{dir: "CheckStringsReplaceZero"}}, + "SA1019": { + {dir: "CheckDeprecated"}, + {dir: "CheckDeprecated_go14", version: "1.4"}, + {dir: "CheckDeprecated_go18", version: "1.8"}, + }, + "SA1020": {{dir: "CheckListenAddress"}}, + "SA1021": {{dir: "CheckBytesEqualIP"}}, + "SA1023": {{dir: "CheckWriterBufferModified"}}, + "SA1024": {{dir: "CheckNonUniqueCutset"}}, + "SA1025": {{dir: "CheckTimerResetReturnValue"}}, + "SA1026": {{dir: "CheckUnsupportedMarshal"}}, + "SA2000": {{dir: "CheckWaitgroupAdd"}}, + "SA2001": {{dir: "CheckEmptyCriticalSection"}}, + "SA2002": {{dir: "CheckConcurrentTesting"}}, + "SA2003": {{dir: "CheckDeferLock"}}, + "SA3000": { + {dir: "CheckTestMainExit-1"}, + {dir: "CheckTestMainExit-2"}, + {dir: "CheckTestMainExit-3"}, + {dir: "CheckTestMainExit-4"}, + {dir: "CheckTestMainExit-5"}, + }, + "SA3001": {{dir: "CheckBenchmarkN"}}, + "SA4000": {{dir: "CheckLhsRhsIdentical"}}, + "SA4001": {{dir: "CheckIneffectiveCopy"}}, + "SA4002": {{dir: 
"CheckDiffSizeComparison"}}, + "SA4003": {{dir: "CheckExtremeComparison"}}, + "SA4004": {{dir: "CheckIneffectiveLoop"}}, + "SA4006": {{dir: "CheckUnreadVariableValues"}}, + "SA4008": {{dir: "CheckLoopCondition"}}, + "SA4009": {{dir: "CheckArgOverwritten"}}, + "SA4010": {{dir: "CheckIneffectiveAppend"}}, + "SA4011": {{dir: "CheckScopedBreak"}}, + "SA4012": {{dir: "CheckNaNComparison"}}, + "SA4013": {{dir: "CheckDoubleNegation"}}, + "SA4014": {{dir: "CheckRepeatedIfElse"}}, + "SA4015": {{dir: "CheckMathInt"}}, + "SA4016": {{dir: "CheckSillyBitwiseOps"}}, + "SA4017": {{dir: "CheckPureFunctions"}}, + "SA4018": {{dir: "CheckSelfAssignment"}}, + "SA4019": {{dir: "CheckDuplicateBuildConstraints"}}, + "SA4020": {{dir: "CheckUnreachableTypeCases"}}, + "SA4021": {{dir: "CheckSingleArgAppend"}}, + "SA5000": {{dir: "CheckNilMaps"}}, + "SA5001": {{dir: "CheckEarlyDefer"}}, + "SA5002": {{dir: "CheckInfiniteEmptyLoop"}}, + "SA5003": {{dir: "CheckDeferInInfiniteLoop"}}, + "SA5004": {{dir: "CheckLoopEmptyDefault"}}, + "SA5005": {{dir: "CheckCyclicFinalizer"}}, + "SA5007": {{dir: "CheckInfiniteRecursion"}}, + "SA5008": {{dir: "CheckStructTags"}}, + "SA5009": {{dir: "CheckPrintf"}}, + "SA6000": {{dir: "CheckRegexpMatchLoop"}}, + "SA6002": {{dir: "CheckSyncPoolValue"}}, + "SA6003": {{dir: "CheckRangeStringRunes"}}, + "SA6005": {{dir: "CheckToLowerToUpperComparison"}}, + "SA9001": {{dir: "CheckDubiousDeferInChannelRangeLoop"}}, + "SA9002": {{dir: "CheckNonOctalFileMode"}}, + "SA9003": {{dir: "CheckEmptyBranch"}}, + "SA9004": {{dir: "CheckMissingEnumTypesInDeclaration"}}, + "SA9005": {{dir: "CheckNoopMarshal"}}, } -} -func BenchmarkNetHttp(b *testing.B) { - for i := 0; i < b.N; i++ { - c := NewChecker() - _, err := lintutil.Lint([]lint.Checker{c}, []string{"net/http"}, nil) - if err != nil { - b.Fatal(err) + for check, dirs := range checks { + a := Analyzers[check] + for _, dir := range dirs { + if dir.version != "" { + if err := a.Flags.Lookup("go").Value.Set(dir.version); err != nil { 
+ t.Fatal(err) + } + } + analysistest.Run(t, analysistest.TestData(), a, dir.dir) } } } diff --git a/staticcheck/rules.go b/staticcheck/rules.go index d6af573c2..0152cac1a 100644 --- a/staticcheck/rules.go +++ b/staticcheck/rules.go @@ -13,7 +13,7 @@ import ( "time" "unicode/utf8" - "honnef.co/go/tools/lint" + "golang.org/x/tools/go/analysis" . "honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" "honnef.co/go/tools/staticcheck/vrp" @@ -26,12 +26,11 @@ const ( ) type Call struct { - Job *lint.Job + Pass *analysis.Pass Instr ssa.CallInstruction Args []*Argument - Checker *Checker - Parent *ssa.Function + Parent *ssa.Function invalids []string } @@ -184,7 +183,7 @@ func ConvertedFromInt(v Value) bool { return true } -func validEncodingBinaryType(j *lint.Job, typ types.Type) bool { +func validEncodingBinaryType(pass *analysis.Pass, typ types.Type) bool { typ = typ.Underlying() switch typ := typ.(type) { case *types.Basic: @@ -194,19 +193,19 @@ func validEncodingBinaryType(j *lint.Job, typ types.Type) bool { types.Float32, types.Float64, types.Complex64, types.Complex128, types.Invalid: return true case types.Bool: - return IsGoVersion(j, 8) + return IsGoVersion(pass, 8) } return false case *types.Struct: n := typ.NumFields() for i := 0; i < n; i++ { - if !validEncodingBinaryType(j, typ.Field(i).Type()) { + if !validEncodingBinaryType(pass, typ.Field(i).Type()) { return false } } return true case *types.Array: - return validEncodingBinaryType(j, typ.Elem()) + return validEncodingBinaryType(pass, typ.Elem()) case *types.Interface: // we can't determine if it's a valid type or not return true @@ -214,7 +213,7 @@ func validEncodingBinaryType(j *lint.Job, typ types.Type) bool { return false } -func CanBinaryMarshal(j *lint.Job, v Value) bool { +func CanBinaryMarshal(pass *analysis.Pass, v Value) bool { typ := v.Value.Type().Underlying() if ttyp, ok := typ.(*types.Pointer); ok { typ = ttyp.Elem().Underlying() @@ -227,7 +226,7 @@ func CanBinaryMarshal(j *lint.Job, v 
Value) bool { } } - return validEncodingBinaryType(j, typ) + return validEncodingBinaryType(pass, typ) } func RepeatZeroTimes(name string, arg int) CallCheck { diff --git a/staticcheck/testdata/src/CheckArgOverwritten/CheckArgOverwritten.go b/staticcheck/testdata/src/CheckArgOverwritten/CheckArgOverwritten.go new file mode 100644 index 000000000..421b77462 --- /dev/null +++ b/staticcheck/testdata/src/CheckArgOverwritten/CheckArgOverwritten.go @@ -0,0 +1,6 @@ +package pkg + +var x = func(arg int) { // want `overwritten` + arg = 1 + println(arg) +} diff --git a/staticcheck/testdata/src/CheckBenchmarkN/CheckBenchmarkN.go b/staticcheck/testdata/src/CheckBenchmarkN/CheckBenchmarkN.go index 8f5fa1b29..a900cf4da 100644 --- a/staticcheck/testdata/src/CheckBenchmarkN/CheckBenchmarkN.go +++ b/staticcheck/testdata/src/CheckBenchmarkN/CheckBenchmarkN.go @@ -4,6 +4,6 @@ import "testing" func foo() { var b *testing.B - b.N = 1 // MATCH /should not assign to b.N/ + b.N = 1 // want `should not assign to b\.N` _ = b } diff --git a/staticcheck/testdata/src/CheckBytesEqualIP/CheckBytesEqualIP.go b/staticcheck/testdata/src/CheckBytesEqualIP/CheckBytesEqualIP.go index db6ae4716..ea0833092 100644 --- a/staticcheck/testdata/src/CheckBytesEqualIP/CheckBytesEqualIP.go +++ b/staticcheck/testdata/src/CheckBytesEqualIP/CheckBytesEqualIP.go @@ -11,7 +11,7 @@ func fn() { var b1, b2 []byte var t1, t2 T - bytes.Equal(i1, i2) // MATCH /use net.IP.Equal to compare net.IPs, not bytes.Equal/ + bytes.Equal(i1, i2) // want `use net\.IP\.Equal to compare net\.IPs, not bytes\.Equal` bytes.Equal(b1, b2) bytes.Equal(t1, t2) diff --git a/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go b/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go index 7482aefa1..8f4bffe6f 100644 --- a/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go +++ b/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go @@ -6,9 +6,9 @@ func 
fn() { var r http.Request h := http.Header{} var m map[string][]string - _ = h["foo"] // MATCH /keys in http.Header are canonicalized/ + _ = h["foo"] // want `keys in http\.Header are canonicalized` h["foo"] = nil - _ = r.Header["foo"] // MATCH /keys in http.Header are canonicalized/ + _ = r.Header["foo"] // want `keys in http\.Header are canonicalized` r.Header["foo"] = nil _ = m["foo"] } diff --git a/staticcheck/testdata/src/CheckConcurrentTesting/CheckConcurrentTesting.go b/staticcheck/testdata/src/CheckConcurrentTesting/CheckConcurrentTesting.go index a3b2df2e6..8d0c56479 100644 --- a/staticcheck/testdata/src/CheckConcurrentTesting/CheckConcurrentTesting.go +++ b/staticcheck/testdata/src/CheckConcurrentTesting/CheckConcurrentTesting.go @@ -5,10 +5,10 @@ import "testing" func fn1() { var t *testing.T t.Fatal() - go func() { // MATCH /the goroutine calls T.Fatal, which must be called in the same goroutine as the test/ + go func() { // want `the goroutine calls T\.Fatal, which must be called in the same goroutine as the test` t.Fatal() }() - go fn2(t) // MATCH /the goroutine calls T.Fatal, which must be called in the same goroutine as the test/ + go fn2(t) // want `the goroutine calls T\.Fatal, which must be called in the same goroutine as the test` func() { t.Fatal() }() @@ -17,7 +17,7 @@ func fn1() { t.Fatal() } fn() - go fn() // MATCH /the goroutine calls T.Fatal, which must be called in the same goroutine as the test/ + go fn() // want `the goroutine calls T\.Fatal, which must be called in the same goroutine as the test` } func fn2(t *testing.T) { diff --git a/staticcheck/testdata/src/CheckCyclicFinalizer/CheckCyclicFinalizer.go b/staticcheck/testdata/src/CheckCyclicFinalizer/CheckCyclicFinalizer.go index 477202acb..2aad2e3c4 100644 --- a/staticcheck/testdata/src/CheckCyclicFinalizer/CheckCyclicFinalizer.go +++ b/staticcheck/testdata/src/CheckCyclicFinalizer/CheckCyclicFinalizer.go @@ -8,9 +8,9 @@ import ( func fn() { var x *int foo := func(y *int) { 
fmt.Println(x) } - runtime.SetFinalizer(x, foo) + runtime.SetFinalizer(x, foo) // want `the finalizer closes over the object, preventing the finalizer from ever running \(at .+:10:9` runtime.SetFinalizer(x, nil) - runtime.SetFinalizer(x, func(_ *int) { + runtime.SetFinalizer(x, func(_ *int) { // want `the finalizer closes over the object, preventing the finalizer from ever running \(at .+:13:26` fmt.Println(x) }) @@ -20,6 +20,3 @@ func fn() { fmt.Println(y) }) } - -// MATCH:11 /the finalizer closes over the object, preventing the finalizer from ever running \(at .+:10:9/ -// MATCH:13 /the finalizer closes over the object, preventing the finalizer from ever running \(at .+:13:26/ diff --git a/staticcheck/testdata/src/CheckDeferInInfiniteLoop/CheckDeferInInfiniteLoop.go b/staticcheck/testdata/src/CheckDeferInInfiniteLoop/CheckDeferInInfiniteLoop.go index 18e4795e1..8bc43e984 100644 --- a/staticcheck/testdata/src/CheckDeferInInfiniteLoop/CheckDeferInInfiniteLoop.go +++ b/staticcheck/testdata/src/CheckDeferInInfiniteLoop/CheckDeferInInfiniteLoop.go @@ -2,10 +2,10 @@ package pkg func fn() { for { - defer println() // MATCH /will never run/ + defer println() // want `will never run` } for { - defer println() // MATCH /will never run/ + defer println() // want `will never run` go func() { return }() diff --git a/staticcheck/testdata/src/CheckDeferLock/CheckDeferLock.go b/staticcheck/testdata/src/CheckDeferLock/CheckDeferLock.go index 27541852c..2ec212e67 100644 --- a/staticcheck/testdata/src/CheckDeferLock/CheckDeferLock.go +++ b/staticcheck/testdata/src/CheckDeferLock/CheckDeferLock.go @@ -7,7 +7,7 @@ var rw sync.RWMutex func fn1() { r.Lock() - defer r.Lock() // MATCH /deferring Lock right after having locked already; did you mean to defer Unlock/ + defer r.Lock() // want `deferring Lock right after having locked already; did you mean to defer Unlock` } func fn2() { @@ -22,7 +22,7 @@ func fn3() { func fn4() { rw.RLock() - defer rw.RLock() // MATCH /deferring RLock right 
after having locked already; did you mean to defer RUnlock/ + defer rw.RLock() // want `deferring RLock right after having locked already; did you mean to defer RUnlock` } func fn5() { diff --git a/staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go index 2a1189328..c0ce1cef9 100644 --- a/staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go @@ -1,3 +1,3 @@ package pkg -import _ "CheckDeprecatedassist" // MATCH "Alas, it is deprecated." +import _ "CheckDeprecatedassist" // want `Alas, it is deprecated\.` diff --git a/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go index 45035215c..bd2420707 100644 --- a/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go @@ -8,14 +8,14 @@ import ( "syscall" ) -var _ = syscall.StringByteSlice("") // MATCH /Use ByteSliceFromString instead/ +var _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` func fn1(err error) { var r *http.Request - _ = r.Cancel // MATCH /Use the Context and WithContext methods/ - _ = syscall.StringByteSlice("") // MATCH /Use ByteSliceFromString instead/ + _ = r.Cancel // want `Use the Context and WithContext methods` + _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` _ = os.SEEK_SET - if err == http.ErrWriteAfterFlush { // MATCH /ErrWriteAfterFlush is no longer/ + if err == http.ErrWriteAfterFlush { // want `ErrWriteAfterFlush is no longer` println() } var _ flate.ReadError @@ -28,7 +28,7 @@ func fn1(err error) { } // Deprecated: Don't use this. 
-func fn2() { +func fn2() { // want fn2:`Deprecated: Don't use this\.` _ = syscall.StringByteSlice("") anon := func(x int) { diff --git a/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go index 8d91f9b4d..06754df19 100644 --- a/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go @@ -8,27 +8,27 @@ import ( "syscall" ) -var _ = syscall.StringByteSlice("") // MATCH /Use ByteSliceFromString instead/ +var _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` func fn1(err error) { var r *http.Request - _ = r.Cancel // MATCH /Use the Context and WithContext methods/ - _ = syscall.StringByteSlice("") // MATCH /Use ByteSliceFromString instead/ - _ = os.SEEK_SET // MATCH /Use io.SeekStart, io.SeekCurrent, and io.SeekEnd/ - if err == http.ErrWriteAfterFlush { // MATCH /ErrWriteAfterFlush is no longer/ + _ = r.Cancel // want `Use the Context and WithContext methods` + _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` + _ = os.SEEK_SET // want `Use io\.SeekStart, io\.SeekCurrent, and io\.SeekEnd` + if err == http.ErrWriteAfterFlush { // want `ErrWriteAfterFlush is no longer` println() } - var _ flate.ReadError // MATCH /No longer returned/ + var _ flate.ReadError // want `No longer returned` var tr *http.Transport - tr.CancelRequest(nil) // MATCH "CancelRequest is deprecated" + tr.CancelRequest(nil) // want `CancelRequest is deprecated` var conn driver.Conn - conn.Begin() // MATCH "Begin is deprecated" + conn.Begin() // want `Begin is deprecated` } // Deprecated: Don't use this. 
-func fn2() { +func fn2() { // want fn2:`Deprecated: Don't use this\.` _ = syscall.StringByteSlice("") anon := func(x int) { diff --git a/staticcheck/testdata/src/CheckDiffSizeComparison/CheckDiffSizeComparison.go b/staticcheck/testdata/src/CheckDiffSizeComparison/CheckDiffSizeComparison.go index 8029c828f..306ddb93e 100644 --- a/staticcheck/testdata/src/CheckDiffSizeComparison/CheckDiffSizeComparison.go +++ b/staticcheck/testdata/src/CheckDiffSizeComparison/CheckDiffSizeComparison.go @@ -2,16 +2,16 @@ package pkg func fn1() { var s1 = "foobar" - _ = "a"[:] == s1 // MATCH /comparing strings of different sizes/ - _ = s1 == "a"[:] // MATCH /comparing strings of different sizes/ - _ = "a"[:] == s1[:2] // MATCH /comparing strings of different sizes/ - _ = "ab"[:] == s1[1:2] // MATCH /comparing strings of different sizes/ - _ = "ab"[:] == s1[0+1:2] // MATCH /comparing strings of different sizes/ - _ = "a"[:] == "abc" // MATCH /comparing strings of different sizes/ - _ = "a"[:] == "a"+"bc" // MATCH /comparing strings of different sizes/ - _ = "foobar"[:] == s1+"bc" // MATCH /comparing strings of different sizes/ - _ = "a"[:] == "abc"[:] // MATCH /comparing strings of different sizes/ - _ = "a"[:] == "abc"[:2] // MATCH /comparing strings of different sizes/ + _ = "a"[:] == s1 // want `comparing strings of different sizes` + _ = s1 == "a"[:] // want `comparing strings of different sizes` + _ = "a"[:] == s1[:2] // want `comparing strings of different sizes` + _ = "ab"[:] == s1[1:2] // want `comparing strings of different sizes` + _ = "ab"[:] == s1[0+1:2] // want `comparing strings of different sizes` + _ = "a"[:] == "abc" // want `comparing strings of different sizes` + _ = "a"[:] == "a"+"bc" // want `comparing strings of different sizes` + _ = "foobar"[:] == s1+"bc" // want `comparing strings of different sizes` + _ = "a"[:] == "abc"[:] // want `comparing strings of different sizes` + _ = "a"[:] == "abc"[:2] // want `comparing strings of different sizes` _ = "a" == s1 // 
ignores _ = s1 == "a" // ignored @@ -22,7 +22,7 @@ func fn1() { _ = "abc"[:] == "abc" _ = "abc"[:] == "a"+"bc" _ = s1[:] == "foo"+"bar" - _ = "abc"[:] == "abc"[:] // MATCH /identical expressions on the left and right side/ + _ = "abc"[:] == "abc"[:] _ = "ab"[:] == "abc"[:2] } @@ -32,17 +32,17 @@ func fn2() { s1 = "1234" } - _ = s1 == "12345"[:] // MATCH /comparing strings of different sizes/ + _ = s1 == "12345"[:] // want `comparing strings of different sizes` _ = s1 == "1234"[:] _ = s1 == "123"[:] - _ = s1 == "12"[:] // MATCH /comparing strings of different sizes/ + _ = s1 == "12"[:] // want `comparing strings of different sizes` } func fn3(x string) { switch x[:1] { case "a": - case "ab": // MATCH /comparing strings of different sizes/ + case "ab": // want `comparing strings of different sizes` case "b": - case "bc": // MATCH /comparing strings of different sizes/ + case "bc": // want `comparing strings of different sizes` } } diff --git a/staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go b/staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go index 6a1802723..e2729cbf4 100644 --- a/staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go +++ b/staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go @@ -1,11 +1,11 @@ package pkg func fn(b1, b2 bool) { - if !!b1 { // MATCH /negating a boolean twice/ + if !!b1 { // want `negating a boolean twice` println() } - if b1 && !!b2 { // MATCH /negating a boolean twice/ + if b1 && !!b2 { // want `negating a boolean twice` println() } diff --git a/staticcheck/testdata/src/CheckDubiousDeferInChannelRangeLoop/CheckDubiousDeferInChannelRangeLoop.go b/staticcheck/testdata/src/CheckDubiousDeferInChannelRangeLoop/CheckDubiousDeferInChannelRangeLoop.go index e12c22790..a0ed45e71 100644 --- a/staticcheck/testdata/src/CheckDubiousDeferInChannelRangeLoop/CheckDubiousDeferInChannelRangeLoop.go +++ 
b/staticcheck/testdata/src/CheckDubiousDeferInChannelRangeLoop/CheckDubiousDeferInChannelRangeLoop.go @@ -3,6 +3,6 @@ package pkg func fn() { var ch chan int for range ch { - defer println() // MATCH /defers in this range loop/ + defer println() // want `defers in this range loop` } } diff --git a/staticcheck/testdata/src/CheckDuplicateBuildConstraints/CheckDuplicateBuildConstraints.go b/staticcheck/testdata/src/CheckDuplicateBuildConstraints/CheckDuplicateBuildConstraints.go index b5b317b63..33866ac74 100644 --- a/staticcheck/testdata/src/CheckDuplicateBuildConstraints/CheckDuplicateBuildConstraints.go +++ b/staticcheck/testdata/src/CheckDuplicateBuildConstraints/CheckDuplicateBuildConstraints.go @@ -1,4 +1,4 @@ // +build one two three go1.1 // +build three one two go1.1 -package pkg // MATCH "identical build constraints" +package pkg // want `identical build constraints` diff --git a/staticcheck/testdata/src/CheckEarlyDefer/CheckEarlyDefer.go b/staticcheck/testdata/src/CheckEarlyDefer/CheckEarlyDefer.go index 75bec1851..cc8c3a761 100644 --- a/staticcheck/testdata/src/CheckEarlyDefer/CheckEarlyDefer.go +++ b/staticcheck/testdata/src/CheckEarlyDefer/CheckEarlyDefer.go @@ -16,7 +16,7 @@ func fn3() (T, error) { func fn2() { rc, err := fn1() - defer rc.Close() // MATCH /should check returned error before deferring rc.Close/ + defer rc.Close() // want `should check returned error before deferring rc\.Close` if err != nil { println() } @@ -31,7 +31,7 @@ func fn2() { defer rc.Close() t, err := fn3() - defer t.rc.Close() // MATCH /should check returned error before deferring t.rc.Close/ + defer t.rc.Close() // want `should check returned error before deferring t\.rc\.Close` if err != nil { println() } diff --git a/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch.go b/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch.go index f99973073..60595ec73 100644 --- a/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch.go +++ 
b/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch.go @@ -1,10 +1,10 @@ package pkg func fn1() { - if true { // MATCH "empty branch" + if true { // want `empty branch` } - if true { // MATCH "empty branch" - } else { // MATCH "empty branch" + if true { // want `empty branch` + } else { // want `empty branch` } if true { println() @@ -12,10 +12,10 @@ func fn1() { if true { println() - } else { // MATCH "empty branch" + } else { // want `empty branch` } - if true { // MATCH "empty branch" + if true { // want `empty branch` // TODO handle error } @@ -25,6 +25,6 @@ func fn1() { } if true { - } else if false { // MATCH "empty branch" + } else if false { // want `empty branch` } } diff --git a/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch_test.go b/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch_test.go index d97496084..472ab5cb2 100644 --- a/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch_test.go +++ b/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch_test.go @@ -3,7 +3,7 @@ package pkg import "testing" func TestFoo(t *testing.T) { - if true { // MATCH "empty branch" + if true { // want `empty branch` // TODO } } diff --git a/staticcheck/testdata/src/CheckEmptyCriticalSection/CheckEmptyCriticalSection.go b/staticcheck/testdata/src/CheckEmptyCriticalSection/CheckEmptyCriticalSection.go index 7eb8afc0d..777badf1c 100644 --- a/staticcheck/testdata/src/CheckEmptyCriticalSection/CheckEmptyCriticalSection.go +++ b/staticcheck/testdata/src/CheckEmptyCriticalSection/CheckEmptyCriticalSection.go @@ -5,7 +5,7 @@ import "sync" func fn1() { var x sync.Mutex x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() // want `empty critical section` } func fn2() { @@ -16,16 +16,16 @@ func fn2() { }{} x.m1.m2.Lock() - x.m1.m2.Unlock() // MATCH /empty critical section/ + x.m1.m2.Unlock() // want `empty critical section` } func fn3() { var x sync.RWMutex x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() 
// want `empty critical section` x.RLock() - x.RUnlock() // MATCH /empty critical section/ + x.RUnlock() // want `empty critical section` x.Lock() defer x.Unlock() @@ -41,7 +41,7 @@ func fn4() { } x.m().Lock() - x.m().Unlock() // MATCH /empty critical section/ + x.m().Unlock() // want `empty critical section` } func fn5() { @@ -55,7 +55,7 @@ func fn5() { func fn6() { x := &sync.Mutex{} x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() // want `empty critical section` } func fn7() { @@ -64,7 +64,7 @@ func fn7() { }{} x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() // want `empty critical section` } func fn8() { @@ -72,7 +72,7 @@ func fn8() { x = new(sync.Mutex) x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() // want `empty critical section` } func fn9() { @@ -80,7 +80,7 @@ func fn9() { sync.Locker }{&sync.Mutex{}} x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() // want `empty critical section` } type T struct{} diff --git a/staticcheck/testdata/src/CheckEncodingBinary/CheckEncodingBinary.go b/staticcheck/testdata/src/CheckEncodingBinary/CheckEncodingBinary.go index 9dd3e8b44..3f2b655b2 100644 --- a/staticcheck/testdata/src/CheckEncodingBinary/CheckEncodingBinary.go +++ b/staticcheck/testdata/src/CheckEncodingBinary/CheckEncodingBinary.go @@ -39,19 +39,19 @@ func fn() { var x13 []byte var x14 *[]byte var x15 T6 - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x1)) // MATCH /cannot be used with binary.Write/ + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x1)) // want `cannot be used with binary\.Write` log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x2)) - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x3)) // MATCH /cannot be used with binary.Write/ + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x3)) // want `cannot be used with binary\.Write` log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x4)) 
- log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x5)) // MATCH /cannot be used with binary.Write/ + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x5)) // want `cannot be used with binary\.Write` log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x6)) log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x7)) - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x8)) // MATCH /cannot be used with binary.Write/ - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x9)) // MATCH /cannot be used with binary.Write/ - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x10)) // MATCH /cannot be used with binary.Write/ + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x8)) // want `cannot be used with binary\.Write` + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x9)) // want `cannot be used with binary\.Write` + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x10)) // want `cannot be used with binary\.Write` log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x11)) log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, &x13)) - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, &x14)) // MATCH /cannot be used with binary.Write/ + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, &x14)) // want `cannot be used with binary\.Write` log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x15)) log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, &x15)) } diff --git a/staticcheck/testdata/src/CheckEncodingBinary_go17/CheckEncodingBinary.go b/staticcheck/testdata/src/CheckEncodingBinary_go17/CheckEncodingBinary.go index b08e8e85a..a63ec9ef8 100644 --- a/staticcheck/testdata/src/CheckEncodingBinary_go17/CheckEncodingBinary.go +++ b/staticcheck/testdata/src/CheckEncodingBinary_go17/CheckEncodingBinary.go @@ -8,5 +8,5 @@ import ( func fn() { var x bool - log.Println(binary.Write(ioutil.Discard, 
binary.LittleEndian, x)) // MATCH "cannot be used with binary.Write" + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x)) // want `cannot be used with binary\.Write` } diff --git a/staticcheck/testdata/src/CheckExec/CheckExec.go b/staticcheck/testdata/src/CheckExec/CheckExec.go index e70710ae1..ae5100239 100644 --- a/staticcheck/testdata/src/CheckExec/CheckExec.go +++ b/staticcheck/testdata/src/CheckExec/CheckExec.go @@ -4,7 +4,7 @@ import "os/exec" func fn() { exec.Command("ls") - exec.Command("ls arg1") // MATCH /first argument to exec/ + exec.Command("ls arg1") // want `first argument to exec` exec.Command(`C:\Program Files\this\is\insane.exe`) exec.Command("/Library/Application Support/VMware Tools/vmware-tools-daemon") } diff --git a/staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go b/staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go index a37521f2f..49642d1d1 100644 --- a/staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go +++ b/staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go @@ -13,29 +13,29 @@ func fn() { i int ) - _ = u8 > math.MaxUint8 // MATCH "no value of type uint8 is greater than math.MaxUint8" - _ = u8 >= math.MaxUint8 // MATCH "no value of type uint8 is greater than math.MaxUint8" - _ = u8 >= 0 // MATCH "every value of type uint8 is >= 0" - _ = u8 <= math.MaxUint8 // MATCH "every value of type uint8 is <= math.MaxUint8" + _ = u8 > math.MaxUint8 // want `no value of type uint8 is greater than math\.MaxUint8` + _ = u8 >= math.MaxUint8 // want `no value of type uint8 is greater than math\.MaxUint8` + _ = u8 >= 0 // want `every value of type uint8 is >= 0` + _ = u8 <= math.MaxUint8 // want `every value of type uint8 is <= math\.MaxUint8` _ = u8 > 0 _ = u8 >= 1 _ = u8 < math.MaxUint8 _ = u16 > math.MaxUint8 - _ = u16 > math.MaxUint16 // MATCH "no value of type uint16 is greater than math.MaxUint16" + _ = u16 > math.MaxUint16 // want `no value of 
type uint16 is greater than math\.MaxUint16` _ = u16 <= math.MaxUint8 - _ = u16 <= math.MaxUint16 // MATCH "every value of type uint16 is <= math.MaxUint16" + _ = u16 <= math.MaxUint16 // want `every value of type uint16 is <= math\.MaxUint16` _ = u > math.MaxUint32 - _ = u > math.MaxUint64 // MATCH "no value of type uint is greater than math.MaxUint64" + _ = u > math.MaxUint64 // want `no value of type uint is greater than math\.MaxUint64` - _ = i8 > math.MaxInt8 // MATCH "no value of type int8 is greater than math.MaxInt8" + _ = i8 > math.MaxInt8 // want `no value of type int8 is greater than math\.MaxInt8` _ = i16 > math.MaxInt8 - _ = i16 > math.MaxInt16 // MATCH "no value of type int16 is greater than math.MaxInt16" + _ = i16 > math.MaxInt16 // want `no value of type int16 is greater than math\.MaxInt16` _ = i > math.MaxInt32 - _ = i > math.MaxInt64 // MATCH "no value of type int is greater than math.MaxInt64" + _ = i > math.MaxInt64 // want `no value of type int is greater than math\.MaxInt64` _ = i8 < 0 - _ = i8 <= math.MinInt8 // MATCH "no value of type int8 is less than math.MinInt8" - _ = i8 < math.MinInt8 // MATCH "no value of type int8 is less than math.MinInt8" - _ = i8 >= math.MinInt8 // MATCH "every value of type int8 is >= math.MinInt8" + _ = i8 <= math.MinInt8 // want `no value of type int8 is less than math\.MinInt8` + _ = i8 < math.MinInt8 // want `no value of type int8 is less than math\.MinInt8` + _ = i8 >= math.MinInt8 // want `every value of type int8 is >= math.MinInt8` } diff --git a/staticcheck/testdata/src/CheckIneffectiveAppend/CheckIneffectiveAppend.go b/staticcheck/testdata/src/CheckIneffectiveAppend/CheckIneffectiveAppend.go index 9f06fceeb..12e84cd80 100644 --- a/staticcheck/testdata/src/CheckIneffectiveAppend/CheckIneffectiveAppend.go +++ b/staticcheck/testdata/src/CheckIneffectiveAppend/CheckIneffectiveAppend.go @@ -4,9 +4,8 @@ import "fmt" func fn1() { var s []int - s = append(s, 1) // MATCH /this result of append is never used/ - 
// MATCH:9 /this value of s is never used/ - s = append(s, 1) // MATCH /this result of append is never used/ + s = append(s, 1) // want `this result of append is never used` + s = append(s, 1) // want `this result of append is never used` } func fn2() (named []int) { @@ -16,8 +15,7 @@ func fn2() (named []int) { func fn3() { s := make([]int, 0) - // MATCH:20 /this value of s is never used/ - s = append(s, 1) // MATCH /this result of append is never used/ + s = append(s, 1) // want `this result of append is never used` } func fn4() []int { @@ -46,8 +44,7 @@ func fn9() { var s []int s = append(s, 1) fmt.Println(s) - // MATCH:50 /this value of s is never used/ - s = append(s, 1) // MATCH /this result of append is never used/ + s = append(s, 1) // want `this result of append is never used` } func fn10() { @@ -59,6 +56,6 @@ func fn10() { func fn11() { var s []int for x := 0; x < 10; x++ { - s = append(s, 1) // MATCH /this result of append is never used/ + s = append(s, 1) // want `this result of append is never used` } } diff --git a/staticcheck/testdata/src/CheckIneffectiveCopy/CheckIneffectiveCopy.go b/staticcheck/testdata/src/CheckIneffectiveCopy/CheckIneffectiveCopy.go index b9aefa315..8057d4ea8 100644 --- a/staticcheck/testdata/src/CheckIneffectiveCopy/CheckIneffectiveCopy.go +++ b/staticcheck/testdata/src/CheckIneffectiveCopy/CheckIneffectiveCopy.go @@ -6,8 +6,8 @@ func fn1(_ *T) {} func fn2() { t1 := &T{} - fn1(&*t1) // MATCH /will not copy/ - fn1(*&t1) // MATCH /will not copy/ + fn1(&*t1) // want `will not copy` + fn1(*&t1) // want `will not copy` _Cvar_something := &T{} fn1(&*_Cvar_something) diff --git a/staticcheck/testdata/src/CheckIneffectiveLoop/CheckIneffectiveLoop.go b/staticcheck/testdata/src/CheckIneffectiveLoop/CheckIneffectiveLoop.go index a06e05a49..3aca3a4f9 100644 --- a/staticcheck/testdata/src/CheckIneffectiveLoop/CheckIneffectiveLoop.go +++ b/staticcheck/testdata/src/CheckIneffectiveLoop/CheckIneffectiveLoop.go @@ -5,7 +5,7 @@ func fn() { if true 
{ println() } - break // MATCH /the surrounding loop is unconditionally terminated/ + break // want `the surrounding loop is unconditionally terminated` } for { if true { @@ -18,7 +18,7 @@ func fn() { if true { println() } - break // MATCH /the surrounding loop is unconditionally terminated/ + break // want `the surrounding loop is unconditionally terminated` } for range (map[int]int)(nil) { if true { @@ -46,3 +46,12 @@ func fn() { break } } + +var z = func() { + for { + if true { + println() + } + break // want `the surrounding loop is unconditionally terminated` + } +} diff --git a/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go b/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go index 72cc1c26e..2cd45a7f4 100644 --- a/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go +++ b/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go @@ -3,7 +3,7 @@ package pkg func fn2() bool { return true } func fn() { - for { // MATCH /this loop will spin/ + for { // want `this loop will spin` } for fn2() { @@ -13,26 +13,21 @@ func fn() { break } - for true { // MATCH "loop condition never changes" + for true { // want `loop condition never changes` `this loop will spin` } x := true - for x { // MATCH "loop condition never changes" + for x { // want `loop condition never changes` `this loop will spin` } x = false - for x { // MATCH "loop condition never changes" + for x { // want `loop condition never changes` `this loop will spin` } for false { } false := true - for false { // MATCH "loop condition never changes" + for false { // want `loop condition never changes` `this loop will spin` } } - -// MATCH:16 "this loop will spin" -// MATCH:20 "this loop will spin" -// MATCH:24 "this loop will spin" -// MATCH:31 "this loop will spin" diff --git a/staticcheck/testdata/src/CheckInfiniteRecursion/CheckInfiniteRecursion.go b/staticcheck/testdata/src/CheckInfiniteRecursion/CheckInfiniteRecursion.go 
index fa2247d7a..b38c59c8a 100644 --- a/staticcheck/testdata/src/CheckInfiniteRecursion/CheckInfiniteRecursion.go +++ b/staticcheck/testdata/src/CheckInfiniteRecursion/CheckInfiniteRecursion.go @@ -2,7 +2,7 @@ package pkg func fn1(x int) bool { println(x) - return fn1(x + 1) // MATCH /infinite recursive call/ + return fn1(x + 1) // want `infinite recursive call` return true } @@ -38,7 +38,7 @@ func fn4(p *int, n int) { func fn5(p *int, n int) { x := 0 - fn5(&x, n-1) // MATCH /infinite recursive call/ + fn5(&x, n-1) // want `infinite recursive call` if x != n { panic("stack is corrupted") } @@ -53,12 +53,12 @@ type T struct { } func (t T) Fn1() { - t.Fn1() // MATCH /infinite recursive call/ + t.Fn1() // want `infinite recursive call` } func (t T) Fn2() { x := T{} - x.Fn2() // MATCH /infinite recursive call/ + x.Fn2() // want `infinite recursive call` } func (t T) Fn3() { diff --git a/staticcheck/testdata/src/CheckLeakyTimeTick/CheckLeakyTimeTick.go b/staticcheck/testdata/src/CheckLeakyTimeTick/CheckLeakyTimeTick.go index 6d11d39d6..d3d493b5b 100644 --- a/staticcheck/testdata/src/CheckLeakyTimeTick/CheckLeakyTimeTick.go +++ b/staticcheck/testdata/src/CheckLeakyTimeTick/CheckLeakyTimeTick.go @@ -9,7 +9,7 @@ func fn1() { } func fn2() { - for range time.Tick(0) { // MATCH /leaks the underlying ticker/ + for range time.Tick(0) { // want `leaks the underlying ticker` println("") if true { break @@ -18,7 +18,7 @@ func fn2() { } func fn3() { - for range time.Tick(0) { // MATCH /leaks the underlying ticker/ + for range time.Tick(0) { // want `leaks the underlying ticker` println("") if true { return diff --git a/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go b/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go index be682350f..04cdeeea5 100644 --- a/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go +++ b/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go @@ -3,25 +3,25 @@ package pkg type Float 
float64 func fn(a int, s []int, f1 float64, f2 Float) { - if 0 == 0 { // MATCH /identical expressions/ + if 0 == 0 { // want `identical expressions` println() } - if 1 == 1 { // MATCH /identical expressions/ + if 1 == 1 { // want `identical expressions` println() } - if a == a { // MATCH /identical expressions/ + if a == a { // want `identical expressions` println() } - if a != a { // MATCH /identical expressions/ + if a != a { // want `identical expressions` println() } - if s[0] == s[0] { // MATCH /identical expressions/ + if s[0] == s[0] { // want `identical expressions` println() } - if 1&1 == 1 { // MATCH /identical expressions/ + if 1&1 == 1 { // want `identical expressions` println() } - if (1 + 2 + 3) == (1 + 2 + 3) { // MATCH /identical expressions/ + if (1 + 2 + 3) == (1 + 2 + 3) { // want `identical expressions` println() } if f1 == f1 { @@ -30,7 +30,7 @@ func fn(a int, s []int, f1 float64, f2 Float) { if f1 != f1 { println() } - if f1 > f1 { // MATCH /identical expressions/ + if f1 > f1 { // want `identical expressions` println() } if f2 == f2 { diff --git a/staticcheck/testdata/src/CheckListenAddress/CheckListenAddress.go b/staticcheck/testdata/src/CheckListenAddress/CheckListenAddress.go index 8df7d8733..4a1b04fe7 100644 --- a/staticcheck/testdata/src/CheckListenAddress/CheckListenAddress.go +++ b/staticcheck/testdata/src/CheckListenAddress/CheckListenAddress.go @@ -4,8 +4,8 @@ import "net/http" func fn() { // Seen in actual code - http.ListenAndServe("localhost:8080/", nil) // MATCH /invalid port or service name in host:port pair/ - http.ListenAndServe("localhost", nil) // MATCH /invalid port or service name in host:port pair/ + http.ListenAndServe("localhost:8080/", nil) // want `invalid port or service name in host:port pair` + http.ListenAndServe("localhost", nil) // want `invalid port or service name in host:port pair` http.ListenAndServe("localhost:8080", nil) http.ListenAndServe(":8080", nil) http.ListenAndServe(":http", nil) diff --git 
a/staticcheck/testdata/src/CheckLoopCondition/CheckLoopCondition.go b/staticcheck/testdata/src/CheckLoopCondition/CheckLoopCondition.go index 7264a3dbc..511e02a9d 100644 --- a/staticcheck/testdata/src/CheckLoopCondition/CheckLoopCondition.go +++ b/staticcheck/testdata/src/CheckLoopCondition/CheckLoopCondition.go @@ -2,9 +2,7 @@ package pkg func fn() { for i := 0; i < 10; i++ { - for j := 0; j < 10; i++ { // MATCH /variable in loop condition never changes/ + for j := 0; j < 10; i++ { // want `variable in loop condition never changes` } } } - -// M_ATCH:5 /j < 10 is always true for all possible values/ diff --git a/staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go b/staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go index 90dc05f40..9b21562bb 100644 --- a/staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go +++ b/staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go @@ -10,7 +10,7 @@ func fn() { for { select { case <-ch: - default: // MATCH /should not have an empty default case/ + default: // want `should not have an empty default case` } } diff --git a/staticcheck/testdata/src/CheckMathInt/CheckMathInt.go b/staticcheck/testdata/src/CheckMathInt/CheckMathInt.go index c735f6f93..d413e8549 100644 --- a/staticcheck/testdata/src/CheckMathInt/CheckMathInt.go +++ b/staticcheck/testdata/src/CheckMathInt/CheckMathInt.go @@ -3,6 +3,6 @@ package pkg import "math" func fn(x int) { - math.Ceil(float64(x)) // MATCH /on a converted integer is pointless/ - math.Floor(float64(x * 2)) // MATCH /on a converted integer is pointless/ + math.Ceil(float64(x)) // want `on a converted integer is pointless` + math.Floor(float64(x * 2)) // want `on a converted integer is pointless` } diff --git a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go index 
41cfa2d01..923e0c43b 100644 --- a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go +++ b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go @@ -10,7 +10,7 @@ const ( ) const ( - c6 int = 1 // MATCH "only the first constant in this group has an explicit type" + c6 int = 1 // want `only the first constant in this group has an explicit type` c7 = 2 c8 = 3 ) @@ -46,7 +46,7 @@ const ( c23 int = 1 c24 int = 2 - c25 string = "" // MATCH "only the first constant in this group has an explicit type" + c25 string = "" // want `only the first constant in this group has an explicit type` c26 = "" c27 = 1 @@ -56,12 +56,12 @@ const ( c30 = 2 c31 int = 2 - c32 string = "" // MATCH "only the first constant in this group has an explicit type" + c32 string = "" // want `only the first constant in this group has an explicit type` c33 = "" ) const ( - c34 int = 1 // MATCH "only the first constant in this group has an explicit type" + c34 int = 1 // want `only the first constant in this group has an explicit type` c35 = 2 c36 int = 2 diff --git a/staticcheck/testdata/src/CheckNaNComparison/CheckNaNComparison.go b/staticcheck/testdata/src/CheckNaNComparison/CheckNaNComparison.go index c6b7a2762..78d775371 100644 --- a/staticcheck/testdata/src/CheckNaNComparison/CheckNaNComparison.go +++ b/staticcheck/testdata/src/CheckNaNComparison/CheckNaNComparison.go @@ -3,7 +3,7 @@ package pkg import "math" func fn(f float64) { - _ = f == math.NaN() // MATCH /no value is equal to NaN/ - _ = f > math.NaN() // MATCH /no value is equal to NaN/ - _ = f != math.NaN() // MATCH /no value is equal to NaN/ + _ = f == math.NaN() // want `no value is equal to NaN` + _ = f > math.NaN() // want `no value is equal to NaN` + _ = f != math.NaN() // want `no value is equal to NaN` } diff --git a/staticcheck/testdata/src/CheckNilMaps/CheckNilMaps.go b/staticcheck/testdata/src/CheckNilMaps/CheckNilMaps.go index 
89399da7d..0fbfc088b 100644 --- a/staticcheck/testdata/src/CheckNilMaps/CheckNilMaps.go +++ b/staticcheck/testdata/src/CheckNilMaps/CheckNilMaps.go @@ -2,7 +2,7 @@ package pkg func fn1() { var m map[int]int - m[1] = 1 // MATCH /assignment to nil map/ + m[1] = 1 // want `assignment to nil map` } func fn2(m map[int]int) { diff --git a/staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go b/staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go index 49edecaea..49cbab7d2 100644 --- a/staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go +++ b/staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go @@ -3,5 +3,5 @@ package pkg import "os" func fn() { - os.OpenFile("", 0, 644) // MATCH /file mode.+/ + os.OpenFile("", 0, 644) // want `file mode.+` } diff --git a/staticcheck/testdata/src/CheckNonUniqueCutset/CheckNonUniqueCutset.go b/staticcheck/testdata/src/CheckNonUniqueCutset/CheckNonUniqueCutset.go index 72a38ba0c..a8ae03cd6 100644 --- a/staticcheck/testdata/src/CheckNonUniqueCutset/CheckNonUniqueCutset.go +++ b/staticcheck/testdata/src/CheckNonUniqueCutset/CheckNonUniqueCutset.go @@ -7,5 +7,5 @@ func fn(s string) { _ = strings.TrimLeft(s, "a") _ = strings.TrimLeft(s, "µ") _ = strings.TrimLeft(s, "abc") - _ = strings.TrimLeft(s, "http://") // MATCH "duplicate characters" + _ = strings.TrimLeft(s, "http://") // want `duplicate characters` } diff --git a/staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go b/staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go index 2a7200005..9d3d0dbb6 100644 --- a/staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go +++ b/staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go @@ -46,9 +46,9 @@ func fn() { // don't flag structs with no fields json.Marshal(T1{}) // no exported fields - json.Marshal(T2{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Marshal(T2{}) // want `struct doesn't have any 
exported fields, nor custom marshaling` // pointer vs non-pointer makes no difference - json.Marshal(&T2{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Marshal(&T2{}) // want `struct doesn't have any exported fields, nor custom marshaling` // exported field json.Marshal(T3{}) // exported field, pointer makes no difference @@ -62,11 +62,11 @@ func fn() { // MarshalJSON json.Marshal(T7{}) // MarshalXML does not apply to JSON - json.Marshal(T8{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Marshal(T8{}) // want `struct doesn't have any exported fields, nor custom marshaling` // MarshalText json.Marshal(T9{}) // embeds exported struct, but it has no fields - json.Marshal(T11{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Marshal(T11{}) // want `struct doesn't have any exported fields, nor custom marshaling` // embeds type with MarshalJSON json.Marshal(T12{}) // embeds type with MarshalJSON and type isn't exported @@ -76,11 +76,11 @@ func fn() { // embedded pointer to struct with exported fields json.Marshal(T16{}) // don't recurse forever on recursive data structure - json.Marshal(T17{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Marshal(T17{}) // want `struct doesn't have any exported fields, nor custom marshaling` json.Marshal(T18{}) // MarshalJSON does not apply to JSON - xml.Marshal(T7{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + xml.Marshal(T7{}) // want `struct doesn't have any exported fields, nor custom marshaling` // MarshalXML xml.Marshal(T8{}) @@ -90,32 +90,34 @@ func fn() { var t8 T8 var t9 T9 // check that all other variations of methods also work - json.Unmarshal(nil, &t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Unmarshal(nil, &t2) // want `struct doesn't have any exported fields, nor custom marshaling` json.Unmarshal(nil, &t3) 
json.Unmarshal(nil, &t9) - xml.Unmarshal(nil, &t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + xml.Unmarshal(nil, &t2) // want `struct doesn't have any exported fields, nor custom marshaling` xml.Unmarshal(nil, &t3) xml.Unmarshal(nil, &t9) - (*json.Decoder)(nil).Decode(&t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*json.Decoder)(nil).Decode(&t2) // want `struct doesn't have any exported fields, nor custom marshaling` (*json.Decoder)(nil).Decode(&t3) (*json.Decoder)(nil).Decode(&t9) - (*json.Encoder)(nil).Encode(t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*json.Encoder)(nil).Encode(t2) // want `struct doesn't have any exported fields, nor custom marshaling` (*json.Encoder)(nil).Encode(t3) (*json.Encoder)(nil).Encode(t9) - (*xml.Decoder)(nil).Decode(&t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*xml.Decoder)(nil).Decode(&t2) // want `struct doesn't have any exported fields, nor custom marshaling` (*xml.Decoder)(nil).Decode(&t3) (*xml.Decoder)(nil).Decode(&t9) - (*xml.Encoder)(nil).Encode(t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*xml.Encoder)(nil).Encode(t2) // want `struct doesn't have any exported fields, nor custom marshaling` (*xml.Encoder)(nil).Encode(t3) (*xml.Encoder)(nil).Encode(t9) (*json.Decoder)(nil).Decode(&t7) - (*json.Decoder)(nil).Decode(&t8) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*json.Decoder)(nil).Decode(&t8) // want `struct doesn't have any exported fields, nor custom marshaling` (*json.Encoder)(nil).Encode(t7) - (*json.Encoder)(nil).Encode(t8) // MATCH "struct doesn't have any exported fields, nor custom marshaling" - (*xml.Decoder)(nil).Decode(&t7) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*json.Encoder)(nil).Encode(t8) // want `struct doesn't have any exported fields, nor custom marshaling` + 
(*xml.Decoder)(nil).Decode(&t7) // want `struct doesn't have any exported fields, nor custom marshaling` (*xml.Decoder)(nil).Decode(&t8) - (*xml.Encoder)(nil).Encode(t7) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*xml.Encoder)(nil).Encode(t7) // want `struct doesn't have any exported fields, nor custom marshaling` (*xml.Encoder)(nil).Encode(t8) } + +var _, _ = json.Marshal(T9{}) diff --git a/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go b/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go index 1433dc1c2..07ff26a3f 100644 --- a/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go +++ b/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go @@ -65,68 +65,68 @@ func fn() { fmt.Printf("%g", 1+2i) fmt.Printf("%#e %#E %#f %#F %#g %#G", 1.2, 1.2, 1.2, 1.2, 1.2, 1.2) // OK since Go 1.9 // Some bad format/argTypes - fmt.Printf("%b", "hi") // MATCH "Printf format %b has arg #1 of wrong type string" - _ = fmt.Sprintf("%b", "hi") // MATCH "Printf format %b has arg #1 of wrong type string" - fmt.Fprintf(os.Stdout, "%b", "hi") // MATCH "Printf format %b has arg #1 of wrong type string" - fmt.Printf("%t", c) // MATCH "Printf format %t has arg #1 of wrong type complex64" - fmt.Printf("%t", 1+2i) // MATCH "Printf format %t has arg #1 of wrong type complex128" - fmt.Printf("%c", 2.3) // MATCH "Printf format %c has arg #1 of wrong type float64" - fmt.Printf("%d", 2.3) // MATCH "Printf format %d has arg #1 of wrong type float64" - fmt.Printf("%e", "hi") // MATCH "Printf format %e has arg #1 of wrong type string" - fmt.Printf("%E", true) // MATCH "Printf format %E has arg #1 of wrong type bool" - fmt.Printf("%f", "hi") // MATCH "Printf format %f has arg #1 of wrong type string" - fmt.Printf("%F", 'x') // MATCH "Printf format %F has arg #1 of wrong type rune" - fmt.Printf("%g", "hi") // MATCH "Printf format %g has arg #1 of wrong type string" - fmt.Printf("%g", imap) // MATCH "Printf format %g has arg #1 of wrong type map[int]int" - fmt.Printf("%G", 
i) // MATCH "Printf format %G has arg #1 of wrong type int" - fmt.Printf("%o", x) // MATCH "Printf format %o has arg #1 of wrong type float64" - fmt.Printf("%p", 23) // MATCH "Printf format %p has arg #1 of wrong type int" - fmt.Printf("%q", x) // MATCH "Printf format %q has arg #1 of wrong type float64" - fmt.Printf("%s", b) // MATCH "Printf format %s has arg #1 of wrong type bool" - fmt.Printf("%s", byte(65)) // MATCH "Printf format %s has arg #1 of wrong type byte" - fmt.Printf("%t", 23) // MATCH "Printf format %t has arg #1 of wrong type int" - fmt.Printf("%U", x) // MATCH "Printf format %U has arg #1 of wrong type float64" - fmt.Printf("%X", 2.3) // MATCH "Printf format %X has arg #1 of wrong type float64" - fmt.Printf("%s", stringerv) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.ptrStringer" - fmt.Printf("%t", stringerv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.ptrStringer" - fmt.Printf("%s", embeddedStringerv) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.embeddedStringer" - fmt.Printf("%t", embeddedStringerv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.embeddedStringer" - fmt.Printf("%q", notstringerv) // MATCH "Printf format %q has arg #1 of wrong type CheckPrintf.notstringer" - fmt.Printf("%t", notstringerv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.notstringer" - fmt.Printf("%t", stringerarrayv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.stringerarray" - fmt.Printf("%t", notstringerarrayv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.notstringerarray" - fmt.Printf("%q", notstringerarrayv) // MATCH "Printf format %q has arg #1 of wrong type CheckPrintf.notstringerarray" - fmt.Printf("%d", BoolFormatter(true)) // MATCH "Printf format %d has arg #1 of wrong type CheckPrintf.BoolFormatter" + fmt.Printf("%b", "hi") // want `Printf format %b has arg #1 of wrong type string` + _ = fmt.Sprintf("%b", "hi") // want `Printf 
format %b has arg #1 of wrong type string` + fmt.Fprintf(os.Stdout, "%b", "hi") // want `Printf format %b has arg #1 of wrong type string` + fmt.Printf("%t", c) // want `Printf format %t has arg #1 of wrong type complex64` + fmt.Printf("%t", 1+2i) // want `Printf format %t has arg #1 of wrong type complex128` + fmt.Printf("%c", 2.3) // want `Printf format %c has arg #1 of wrong type float64` + fmt.Printf("%d", 2.3) // want `Printf format %d has arg #1 of wrong type float64` + fmt.Printf("%e", "hi") // want `Printf format %e has arg #1 of wrong type string` + fmt.Printf("%E", true) // want `Printf format %E has arg #1 of wrong type bool` + fmt.Printf("%f", "hi") // want `Printf format %f has arg #1 of wrong type string` + fmt.Printf("%F", 'x') // want `Printf format %F has arg #1 of wrong type rune` + fmt.Printf("%g", "hi") // want `Printf format %g has arg #1 of wrong type string` + fmt.Printf("%g", imap) // want `Printf format %g has arg #1 of wrong type map\[int\]int` + fmt.Printf("%G", i) // want `Printf format %G has arg #1 of wrong type int` + fmt.Printf("%o", x) // want `Printf format %o has arg #1 of wrong type float64` + fmt.Printf("%p", 23) // want `Printf format %p has arg #1 of wrong type int` + fmt.Printf("%q", x) // want `Printf format %q has arg #1 of wrong type float64` + fmt.Printf("%s", b) // want `Printf format %s has arg #1 of wrong type bool` + fmt.Printf("%s", byte(65)) // want `Printf format %s has arg #1 of wrong type byte` + fmt.Printf("%t", 23) // want `Printf format %t has arg #1 of wrong type int` + fmt.Printf("%U", x) // want `Printf format %U has arg #1 of wrong type float64` + fmt.Printf("%X", 2.3) // want `Printf format %X has arg #1 of wrong type float64` + fmt.Printf("%s", stringerv) // want `Printf format %s has arg #1 of wrong type CheckPrintf\.ptrStringer` + fmt.Printf("%t", stringerv) // want `Printf format %t has arg #1 of wrong type CheckPrintf\.ptrStringer` + fmt.Printf("%s", embeddedStringerv) // want `Printf format %s has 
arg #1 of wrong type CheckPrintf\.embeddedStringer` + fmt.Printf("%t", embeddedStringerv) // want `Printf format %t has arg #1 of wrong type CheckPrintf\.embeddedStringer` + fmt.Printf("%q", notstringerv) // want `Printf format %q has arg #1 of wrong type CheckPrintf\.notstringer` + fmt.Printf("%t", notstringerv) // want `Printf format %t has arg #1 of wrong type CheckPrintf\.notstringer` + fmt.Printf("%t", stringerarrayv) // want `Printf format %t has arg #1 of wrong type CheckPrintf\.stringerarray` + fmt.Printf("%t", notstringerarrayv) // want `Printf format %t has arg #1 of wrong type CheckPrintf\.notstringerarray` + fmt.Printf("%q", notstringerarrayv) // want `Printf format %q has arg #1 of wrong type CheckPrintf\.notstringerarray` + fmt.Printf("%d", BoolFormatter(true)) // want `Printf format %d has arg #1 of wrong type CheckPrintf\.BoolFormatter` fmt.Printf("%z", FormatterVal(true)) // correct (the type is responsible for formatting) fmt.Printf("%d", FormatterVal(true)) // correct (the type is responsible for formatting) fmt.Printf("%s", nonemptyinterface) // correct (the type is responsible for formatting) - fmt.Printf("%.*s %d %6g", 3, "hi", 23, 'x') // MATCH "Printf format %6g has arg #4 of wrong type rune" - fmt.Printf("%s", "hi", 3) // MATCH "Printf call needs 1 args but has 2 args" - fmt.Printf("%"+("s"), "hi", 3) // MATCH "Printf call needs 1 args but has 2 args" + fmt.Printf("%.*s %d %6g", 3, "hi", 23, 'x') // want `Printf format %6g has arg #4 of wrong type rune` + fmt.Printf("%s", "hi", 3) // want `Printf call needs 1 args but has 2 args` + fmt.Printf("%"+("s"), "hi", 3) // want `Printf call needs 1 args but has 2 args` fmt.Printf("%s%%%d", "hi", 3) // correct fmt.Printf("%08s", "woo") // correct fmt.Printf("% 8s", "woo") // correct fmt.Printf("%.*d", 3, 3) // correct - fmt.Printf("%.*d x", 3, 3, 3, 3) // MATCH "Printf call needs 2 args but has 4 args" - fmt.Printf("%.*d x", "hi", 3) // MATCH "Printf format %.*d reads non-int arg #1 as argument of 
*" + fmt.Printf("%.*d x", 3, 3, 3, 3) // want `Printf call needs 2 args but has 4 args` + fmt.Printf("%.*d x", "hi", 3) // want `Printf format %\.\*d reads non-int arg #1 as argument of \*` fmt.Printf("%.*d x", i, 3) // correct - fmt.Printf("%.*d x", s, 3) // MATCH "Printf format %.*d reads non-int arg #1 as argument of *" - fmt.Printf("%*% x", 0.22) // MATCH "Printf format %*% reads non-int arg #1 as argument of *" + fmt.Printf("%.*d x", s, 3) // want `Printf format %\.\*d reads non-int arg #1 as argument of \*` + fmt.Printf("%*% x", 0.22) // want `Printf format %\*% reads non-int arg #1 as argument of \*` fmt.Printf("%q %q", multi()...) // ok fmt.Printf("%#q", `blah`) // ok const format = "%s %s\n" fmt.Printf(format, "hi", "there") - fmt.Printf(format, "hi") // MATCH "Printf format %s reads arg #2, but call has only 1 args" - fmt.Printf("%s %d %.3v %q", "str", 4) // MATCH "Printf format %.3v reads arg #3, but call has only 2 args" + fmt.Printf(format, "hi") // want `Printf format %s reads arg #2, but call has only 1 args` + fmt.Printf("%s %d %.3v %q", "str", 4) // want `Printf format %\.3v reads arg #3, but call has only 2 args` fmt.Printf("%#s", FormatterVal(true)) // correct (the type is responsible for formatting) - fmt.Printf("d%", 2) // MATCH "couldn't parse format string" + fmt.Printf("d%", 2) // want `couldn't parse format string` fmt.Printf("%d", percentDV) fmt.Printf("%d", &percentDV) - fmt.Printf("%d", notPercentDV) // MATCH "Printf format %d has arg #1 of wrong type CheckPrintf.notPercentDStruct" - fmt.Printf("%d", ¬PercentDV) // MATCH "Printf format %d has arg #1 of wrong type *CheckPrintf.notPercentDStruct" + fmt.Printf("%d", notPercentDV) // want `Printf format %d has arg #1 of wrong type CheckPrintf\.notPercentDStruct` + fmt.Printf("%d", ¬PercentDV) // want `Printf format %d has arg #1 of wrong type \*CheckPrintf\.notPercentDStruct` fmt.Printf("%p", ¬PercentDV) // Works regardless: we print it as a pointer. 
- fmt.Printf("%q", &percentDV) // MATCH "Printf format %q has arg #1 of wrong type *CheckPrintf.percentDStruct" + fmt.Printf("%q", &percentDV) // want `Printf format %q has arg #1 of wrong type \*CheckPrintf\.percentDStruct` fmt.Printf("%s", percentSV) fmt.Printf("%s", &percentSV) // Good argument reorderings. @@ -136,13 +136,13 @@ func fn() { fmt.Printf("%[2]*.[1]*[3]d", 2, 3, 4) fmt.Fprintf(os.Stderr, "%[2]*.[1]*[3]d", 2, 3, 4) // Use Fprintf to make sure we count arguments correctly. // Bad argument reorderings. - fmt.Printf("%[xd", 3) // MATCH "couldn't parse format string" - fmt.Printf("%[x]d x", 3) // MATCH "couldn't parse format string" - fmt.Printf("%[3]*s x", "hi", 2) // MATCH "Printf format %[3]*s reads arg #3, but call has only 2 args" - fmt.Printf("%[3]d x", 2) // MATCH "Printf format %[3]d reads arg #3, but call has only 1 args" - fmt.Printf("%[2]*.[1]*[3]d x", 2, "hi", 4) // MATCH "Printf format %[2]*.[1]*[3]d reads non-int arg #2 as argument of *" - fmt.Printf("%[0]s x", "arg1") // MATCH "Printf format %[0]s reads invalid arg 0; indices are 1-based" - fmt.Printf("%[0]d x", 1) // MATCH "Printf format %[0]d reads invalid arg 0; indices are 1-based" + fmt.Printf("%[xd", 3) // want `couldn't parse format string` + fmt.Printf("%[x]d x", 3) // want `couldn't parse format string` + fmt.Printf("%[3]*s x", "hi", 2) // want `Printf format %\[3\]\*s reads arg #3, but call has only 2 args` + fmt.Printf("%[3]d x", 2) // want `Printf format %\[3\]d reads arg #3, but call has only 1 args` + fmt.Printf("%[2]*.[1]*[3]d x", 2, "hi", 4) // want `Printf format %\[2\]\*\.\[1\]\*\[3\]d reads non-int arg #2 as argument of \*` + fmt.Printf("%[0]s x", "arg1") // want `Printf format %\[0\]s reads invalid arg 0; indices are 1-based` + fmt.Printf("%[0]d x", 1) // want `Printf format %\[0\]d reads invalid arg 0; indices are 1-based` // Interfaces can be used with any verb. 
var iface interface { @@ -150,7 +150,7 @@ func fn() { } fmt.Printf("%f", iface) // ok: fmt treats interfaces as transparent and iface may well have a float concrete type // Can print functions in many ways - fmt.Printf("%s", someFunction) // MATCH "Printf format %s has arg #1 of wrong type func()" + fmt.Printf("%s", someFunction) // want `Printf format %s has arg #1 of wrong type func\(\)` fmt.Printf("%d", someFunction) // ok: maybe someone wants to see the pointer fmt.Printf("%v", someFunction) // ok: maybe someone wants to see the pointer in decimal fmt.Printf("%p", someFunction) // ok: maybe someone wants to see the pointer @@ -163,11 +163,11 @@ func fn() { // indexed arguments fmt.Printf("%d %[3]d %d %[2]d x", 1, 2, 3, 4) // OK - fmt.Printf("%d %[0]d %d %[2]d x", 1, 2, 3, 4) // MATCH "Printf format %[0]d reads invalid arg 0; indices are 1-based" - fmt.Printf("%d %[3]d %d %[-2]d x", 1, 2, 3, 4) // MATCH "couldn't parse format string" - fmt.Printf("%d %[3]d %d %[2234234234234]d x", 1, 2, 3, 4) // MATCH "Printf format %[2234234234234]d reads arg #2234234234234, but call has only 4 args" - fmt.Printf("%d %[3]d %-10d %[2]d x", 1, 2, 3) // MATCH "Printf format %-10d reads arg #4, but call has only 3 args" - fmt.Printf("%[1][3]d x", 1, 2) // MATCH "couldn't parse format string" + fmt.Printf("%d %[0]d %d %[2]d x", 1, 2, 3, 4) // want `Printf format %\[0\]d reads invalid arg 0; indices are 1-based` + fmt.Printf("%d %[3]d %d %[-2]d x", 1, 2, 3, 4) // want `couldn't parse format string` + fmt.Printf("%d %[3]d %d %[2234234234234]d x", 1, 2, 3, 4) // want `Printf format %\[2234234234234\]d reads arg #2234234234234, but call has only 4 args` + fmt.Printf("%d %[3]d %-10d %[2]d x", 1, 2, 3) // want `Printf format %-10d reads arg #4, but call has only 3 args` + fmt.Printf("%[1][3]d x", 1, 2) // want `couldn't parse format string` fmt.Printf("%[1]d x", 1, 2) // OK fmt.Printf("%d %[3]d %d %[2]d x", 1, 2, 3, 4, 5) // OK @@ -191,11 +191,11 @@ func fn() { t1 := T1{&T2{"hi"}} 
fmt.Printf("%s\n", &x1) - fmt.Printf("%s\n", t1) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.T1" + fmt.Printf("%s\n", t1) // want `Printf format %s has arg #1 of wrong type CheckPrintf\.T1` var x2 struct{ A *int } - fmt.Printf("%p\n", x2) // MATCH "Printf format %p has arg #1 of wrong type struct{A *int}" + fmt.Printf("%p\n", x2) // want `Printf format %p has arg #1 of wrong type struct\{A \*int\}` var x3 [2]int - fmt.Printf("%p", x3) // MATCH "Printf format %p has arg #1 of wrong type [2]int" + fmt.Printf("%p", x3) // want `Printf format %p has arg #1 of wrong type \[2\]int` ue := unexportedError{nil} fmt.Printf("%s", ue) @@ -364,20 +364,20 @@ func UnexportedStringerOrError() { fmt.Printf("%s", unexportedInterface{3}) // ok; we can't see the problem us := unexportedStringer{} - fmt.Printf("%s", us) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.unexportedStringer" - fmt.Printf("%s", &us) // MATCH "Printf format %s has arg #1 of wrong type *CheckPrintf.unexportedStringer" + fmt.Printf("%s", us) // want `Printf format %s has arg #1 of wrong type CheckPrintf\.unexportedStringer` + fmt.Printf("%s", &us) // want `Printf format %s has arg #1 of wrong type \*CheckPrintf\.unexportedStringer` usf := unexportedStringerOtherFields{ s: "foo", S: "bar", } - fmt.Printf("%s", usf) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.unexportedStringerOtherFields" - fmt.Printf("%s", &usf) // MATCH "Printf format %s has arg #1 of wrong type *CheckPrintf.unexportedStringerOtherFields" + fmt.Printf("%s", usf) // want `Printf format %s has arg #1 of wrong type CheckPrintf\.unexportedStringerOtherFields` + fmt.Printf("%s", &usf) // want `Printf format %s has arg #1 of wrong type \*CheckPrintf\.unexportedStringerOtherFields` intSlice := []int{3, 4} - fmt.Printf("%s", intSlice) // MATCH "Printf format %s has arg #1 of wrong type []int" + fmt.Printf("%s", intSlice) // want `Printf format %s has arg #1 of wrong type \[\]int` nonStringerArray 
:= [1]unexportedStringer{{}} - fmt.Printf("%s", nonStringerArray) // MATCH "Printf format %s has arg #1 of wrong type [1]CheckPrintf.unexportedStringer" + fmt.Printf("%s", nonStringerArray) // want `Printf format %s has arg #1 of wrong type \[1\]CheckPrintf\.unexportedStringer` fmt.Printf("%s", []stringer{3, 4}) // not an error fmt.Printf("%s", [2]stringer{3, 4}) // not an error } diff --git a/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go b/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go index dc6d9ccf3..5a269e829 100644 --- a/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go +++ b/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go @@ -7,17 +7,17 @@ import ( ) func fn1() { - strings.Replace("", "", "", 1) // MATCH /is a pure function but its return value is ignored/ - foo(1, 2) // MATCH /is a pure function but its return value is ignored/ + strings.Replace("", "", "", 1) // want `is a pure function but its return value is ignored` + foo(1, 2) // want `is a pure function but its return value is ignored` bar(1, 2) } func fn2() { r, _ := http.NewRequest("GET", "/", nil) - r.WithContext(context.Background()) // MATCH /is a pure function but its return value is ignored/ + r.WithContext(context.Background()) // want `is a pure function but its return value is ignored` } -func foo(a, b int) int { return a + b } +func foo(a, b int) int { return a + b } // want foo:"IsPure" func bar(a, b int) int { println(a + b) return a + b diff --git a/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions_test.go b/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions_test.go index a3a1efd9f..71fe2fa48 100644 --- a/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions_test.go +++ b/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions_test.go @@ -6,7 +6,7 @@ import ( ) func TestFoo(t *testing.T) { - strings.Replace("", "", "", 1) // MATCH /is a pure function but its return value is 
ignored/ + strings.Replace("", "", "", 1) // want `is a pure function but its return value is ignored` } func BenchmarkFoo(b *testing.B) { diff --git a/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go b/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go index 59d349ac0..b6761a187 100644 --- a/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go +++ b/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go @@ -5,7 +5,7 @@ func fn(s string) { println(r) } - for _, r := range []rune(s) { // MATCH "should range over string" + for _, r := range []rune(s) { // want `should range over string` println(r) } @@ -15,7 +15,7 @@ func fn(s string) { } x := []rune(s) - for _, r := range x { // MATCH "should range over string" + for _, r := range x { // want `should range over string` println(r) } diff --git a/staticcheck/testdata/src/CheckRegexpMatchLoop/CheckRegexpMatchLoop.go b/staticcheck/testdata/src/CheckRegexpMatchLoop/CheckRegexpMatchLoop.go index 00ce19ec8..83b95450a 100644 --- a/staticcheck/testdata/src/CheckRegexpMatchLoop/CheckRegexpMatchLoop.go +++ b/staticcheck/testdata/src/CheckRegexpMatchLoop/CheckRegexpMatchLoop.go @@ -8,8 +8,8 @@ func fn() { regexp.MatchReader(".", nil) for { - regexp.Match(".", nil) // MATCH /calling regexp.Match in a loop has poor performance/ - regexp.MatchString(".", "") // MATCH /calling regexp.MatchString in a loop has poor performance/ - regexp.MatchReader(".", nil) // MATCH /calling regexp.MatchReader in a loop has poor performance/ + regexp.Match(".", nil) // want `calling regexp\.Match in a loop has poor performance` + regexp.MatchString(".", "") // want `calling regexp\.MatchString in a loop has poor performance` + regexp.MatchReader(".", nil) // want `calling regexp\.MatchReader in a loop has poor performance` } } diff --git a/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go b/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go index 
42cbd13e3..48f00e102 100644 --- a/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go +++ b/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go @@ -8,12 +8,12 @@ import ( const c1 = `[` const c2 = `(abc)` -var re1 = regexp.MustCompile(`ab\yef`) // MATCH /error parsing regexp/ -var re2 = regexp.MustCompile(c1) // MATCH /error parsing regexp/ +var re1 = regexp.MustCompile(`ab\yef`) // want `error parsing regexp` +var re2 = regexp.MustCompile(c1) // want `error parsing regexp` var re3 = regexp.MustCompile(c2) func fn() { - _, err := regexp.Compile(`foo(`) // MATCH /error parsing regexp/ + _, err := regexp.Compile(`foo(`) // want `error parsing regexp` if err != nil { panic(err) } @@ -21,7 +21,19 @@ func fn() { log.Println("of course 'foo(' matches 'foo('") } - regexp.Match("foo(", nil) // MATCH /error parsing regexp/ - regexp.MatchReader("foo(", nil) // MATCH /error parsing regexp/ - regexp.MatchString("foo(", "") // MATCH /error parsing regexp/ + regexp.Match("foo(", nil) // want `error parsing regexp` + regexp.MatchReader("foo(", nil) // want `error parsing regexp` + regexp.MatchString("foo(", "") // want `error parsing regexp` } + +// must be a basic type to trigger SA4017 (in case of a test failure) +type T string + +func (T) Fn() {} + +// Don't get confused by methods named init +func (T) init() {} + +// this will become a synthetic init function, that we don't want to +// ignore +var _ = regexp.MustCompile("(") // want `error parsing regexp` diff --git a/staticcheck/testdata/src/CheckRepeatedIfElse/CheckRepeatedIfElse.go b/staticcheck/testdata/src/CheckRepeatedIfElse/CheckRepeatedIfElse.go index 7d0f79135..95255900b 100644 --- a/staticcheck/testdata/src/CheckRepeatedIfElse/CheckRepeatedIfElse.go +++ b/staticcheck/testdata/src/CheckRepeatedIfElse/CheckRepeatedIfElse.go @@ -3,8 +3,8 @@ package pkg func fn1(b1, b2 bool) { if b1 && !b2 { } else if b1 { - } else if b1 && !b2 { // MATCH /condition occurs multiple times/ - } else if b1 { // MATCH /condition occurs 
multiple times/ + } else if b1 && !b2 { // want `condition occurs multiple times` + } else if b1 { // want `condition occurs multiple times` } else { println() } diff --git a/staticcheck/testdata/src/CheckScopedBreak/CheckScopedBreak.go b/staticcheck/testdata/src/CheckScopedBreak/CheckScopedBreak.go index d3a2eab2a..1c1c8b9b8 100644 --- a/staticcheck/testdata/src/CheckScopedBreak/CheckScopedBreak.go +++ b/staticcheck/testdata/src/CheckScopedBreak/CheckScopedBreak.go @@ -5,16 +5,16 @@ func fn() { for { switch { case true: - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` default: - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } } for { select { case <-ch: - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } } @@ -25,7 +25,7 @@ func fn() { switch { case true: - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } switch { @@ -37,9 +37,9 @@ func fn() { switch { case true: if true { - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } else { - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } } } @@ -66,7 +66,7 @@ label: for range ([]int)(nil) { switch { default: - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } } } diff --git a/staticcheck/testdata/src/CheckSelfAssignment/CheckSelfAssignment.go b/staticcheck/testdata/src/CheckSelfAssignment/CheckSelfAssignment.go index 8865e95fe..f47d973a8 100644 --- a/staticcheck/testdata/src/CheckSelfAssignment/CheckSelfAssignment.go +++ b/staticcheck/testdata/src/CheckSelfAssignment/CheckSelfAssignment.go @@ -3,9 +3,9 @@ package pkg func fn(x int) { var z int var y int - x = x // MATCH "self-assignment" - y = y // MATCH "self-assignment" - y, x, z = y, x, 1 + x = x // want `self-assignment` + y = y // want `self-assignment` + y, x, z = 
y, x, 1 // want `self-assignment of y to y` `self-assignment of x to x` y = x _ = y _ = x @@ -15,6 +15,3 @@ func fn(x int) { println(x) }() } - -// MATCH:8 "self-assignment of y to y" -// MATCH:8 "self-assignment of x to x" diff --git a/staticcheck/testdata/src/CheckSillyBitwiseOps/CheckSillyBitwiseOps.go b/staticcheck/testdata/src/CheckSillyBitwiseOps/CheckSillyBitwiseOps.go index b8bea100c..2b0eacedf 100644 --- a/staticcheck/testdata/src/CheckSillyBitwiseOps/CheckSillyBitwiseOps.go +++ b/staticcheck/testdata/src/CheckSillyBitwiseOps/CheckSillyBitwiseOps.go @@ -1,10 +1,10 @@ package pkg func fn(x int) { - println(x | 0) // MATCH "x | 0 always equals x" - println(x & 0) // MATCH "x & 0 always equals 0" - println(x ^ 0) // MATCH "x ^ 0 always equals x" - println((x << 5) | 0) // MATCH "x | 0 always equals x" + println(x | 0) // want `x \| 0 always equals x` + println(x & 0) // want `x & 0 always equals 0` + println(x ^ 0) // want `x \^ 0 always equals x` + println((x << 5) | 0) // want `x \| 0 always equals x` println(x | 1) println(x << 0) } diff --git a/staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go b/staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go index 27f9bd600..2b6f0adda 100644 --- a/staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go +++ b/staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go @@ -3,11 +3,11 @@ package pkg //lint:file-ignore SA4010,SA4006 Not relevant to this test case func fn(arg []int) { - x := append(arg) // MATCH "x = append(y) is equivalent to x = y" + x := append(arg) // want `x = append\(y\) is equivalent to x = y` _ = x y := append(arg, 1) _ = y - arg = append(arg) // MATCH "x = append(y) is equivalent to x = y" + arg = append(arg) // want `x = append\(y\) is equivalent to x = y` arg = append(arg, 1, 2, 3) var nilly []int arg = append(arg, nilly...) 
diff --git a/staticcheck/testdata/src/CheckStringsReplaceZero/CheckStringsReplaceZero.go b/staticcheck/testdata/src/CheckStringsReplaceZero/CheckStringsReplaceZero.go index 426ccbc6a..318c4cb8d 100644 --- a/staticcheck/testdata/src/CheckStringsReplaceZero/CheckStringsReplaceZero.go +++ b/staticcheck/testdata/src/CheckStringsReplaceZero/CheckStringsReplaceZero.go @@ -3,7 +3,7 @@ package pkg import "strings" func fn() { - _ = strings.Replace("", "", "", 0) // MATCH /calling strings.Replace with n == 0/ + _ = strings.Replace("", "", "", 0) // want `calling strings\.Replace with n == 0` _ = strings.Replace("", "", "", -1) _ = strings.Replace("", "", "", 1) } diff --git a/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go b/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go index 4d75f9bc7..8a533a8b4 100644 --- a/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go +++ b/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go @@ -1,18 +1,18 @@ package pkg type T1 struct { - B int `foo:"" foo:""` // MATCH "duplicate struct tag" + B int `foo:"" foo:""` // want `duplicate struct tag` C int `foo:"" bar:""` D int `json:"-"` - E int `json:"\\"` // MATCH "invalid JSON field name" - F int `json:",omitempty,omitempty"` // MATCH "duplicate JSON option "omitempty"" + E int `json:"\\"` // want `invalid JSON field name` + F int `json:",omitempty,omitempty"` // want `duplicate JSON option "omitempty"` G int `json:",omitempty,string"` - H int `json:",string,omitempty,string"` // MATCH "duplicate JSON option "string"" - I int `json:",unknown"` // MATCH "unknown JSON option "unknown"" + H int `json:",string,omitempty,string"` // want `duplicate JSON option "string"` + I int `json:",unknown"` // want `unknown JSON option "unknown"` J int `json:",string"` K *int `json:",string"` - L **int `json:",string"` // MATCH "the JSON string option" - M complex128 `json:",string"` // MATCH "the JSON string option" + L **int `json:",string"` // want `the JSON string 
option` + M complex128 `json:",string"` // want `the JSON string option` N int `json:"some-name"` } @@ -24,13 +24,13 @@ type T2 struct { E int `xml:",comment"` F int `xml:",omitempty"` G int `xml:",any"` - H int `xml:",unknown"` // MATCH "unknown XML option" - I int `xml:",any,any"` // MATCH "duplicate XML option" + H int `xml:",unknown"` // want `unknown XML option` + I int `xml:",any,any"` // want `duplicate XML option` J int `xml:"a>b>c,"` - K int `xml:",attr,cdata"` // MATCH "mutually exclusive" + K int `xml:",attr,cdata"` // want `mutually exclusive` } type T3 struct { A int `json:",omitempty" xml:",attr"` - B int `json:",unknown" xml:",attr"` // MATCH "unknown JSON option" + B int `json:",unknown" xml:",attr"` // want `unknown JSON option` } diff --git a/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go b/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go index 548ab5ab2..44d8d20b7 100644 --- a/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go +++ b/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go @@ -18,13 +18,13 @@ func fn() { s := []int{} v := sync.Pool{} - v.Put(s) // MATCH /argument should be pointer-like/ + v.Put(s) // want `argument should be pointer-like` v.Put(&s) - v.Put(T1{}) // MATCH /argument should be pointer-like/ - v.Put(T2{}) // MATCH /argument should be pointer-like/ + v.Put(T1{}) // want `argument should be pointer-like` + v.Put(T2{}) // want `argument should be pointer-like` p := &sync.Pool{} - p.Put(s) // MATCH /argument should be pointer-like/ + p.Put(s) // want `argument should be pointer-like` p.Put(&s) var i interface{} @@ -34,5 +34,5 @@ func fn() { p.Put(up) var basic int - p.Put(basic) // MATCH /argument should be pointer-like/ + p.Put(basic) // want `argument should be pointer-like` } diff --git a/staticcheck/testdata/src/CheckTemplate/CheckTemplate.go b/staticcheck/testdata/src/CheckTemplate/CheckTemplate.go index f38050599..f4e1cc6ff 100644 --- 
a/staticcheck/testdata/src/CheckTemplate/CheckTemplate.go +++ b/staticcheck/testdata/src/CheckTemplate/CheckTemplate.go @@ -9,11 +9,11 @@ const tmpl1 = `{{.Name}} {{.LastName}` const tmpl2 = `{{fn}}` func fn() { - tt.New("").Parse(tmpl1) // MATCH /template/ + tt.New("").Parse(tmpl1) // want `template` tt.New("").Parse(tmpl2) t1 := tt.New("") t1.Parse(tmpl1) - th.New("").Parse(tmpl1) // MATCH /template/ + th.New("").Parse(tmpl1) // want `template` th.New("").Parse(tmpl2) t2 := th.New("") t2.Parse(tmpl1) diff --git a/staticcheck/testdata/src/CheckTestMainExit-1/CheckTestMainExit-1.go b/staticcheck/testdata/src/CheckTestMainExit-1/CheckTestMainExit-1.go index 2834f026f..3e372be5a 100644 --- a/staticcheck/testdata/src/CheckTestMainExit-1/CheckTestMainExit-1.go +++ b/staticcheck/testdata/src/CheckTestMainExit-1/CheckTestMainExit-1.go @@ -2,6 +2,6 @@ package pkg import "testing" -func TestMain(m *testing.M) { // MATCH /should call os.Exit/ +func TestMain(m *testing.M) { // want `should call os\.Exit` m.Run() } diff --git a/staticcheck/testdata/src/CheckTestMainExit-4/CheckTestMainExit-4.go b/staticcheck/testdata/src/CheckTestMainExit-4/CheckTestMainExit-4.go index 7f4b0be36..b09399ecf 100644 --- a/staticcheck/testdata/src/CheckTestMainExit-4/CheckTestMainExit-4.go +++ b/staticcheck/testdata/src/CheckTestMainExit-4/CheckTestMainExit-4.go @@ -7,7 +7,7 @@ import ( func helper() { os.Exit(1) } -func TestMain(m *testing.M) { // MATCH /should call os.Exit/ +func TestMain(m *testing.M) { // want `should call os\.Exit` // FIXME(dominikh): this is a false positive m.Run() helper() diff --git a/staticcheck/testdata/src/CheckTimeParse/CheckTimeParse.go b/staticcheck/testdata/src/CheckTimeParse/CheckTimeParse.go index 06bdf53ee..246aac043 100644 --- a/staticcheck/testdata/src/CheckTimeParse/CheckTimeParse.go +++ b/staticcheck/testdata/src/CheckTimeParse/CheckTimeParse.go @@ -6,8 +6,8 @@ const c1 = "12345" const c2 = "2006" func fn() { - time.Parse("12345", "") // MATCH /parsing 
time/ - time.Parse(c1, "") // MATCH /parsing time/ + time.Parse("12345", "") // want `parsing time` + time.Parse(c1, "") // want `parsing time` time.Parse(c2, "") time.Parse(time.RFC3339Nano, "") time.Parse(time.Kitchen, "") diff --git a/staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go b/staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go index 53451f45f..4f6ed0c41 100644 --- a/staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go +++ b/staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go @@ -6,8 +6,8 @@ const c1 = 1 const c2 = 200 func fn() { - time.Sleep(1) // MATCH /sleeping for 1/ - time.Sleep(42) // MATCH /sleeping for 42/ + time.Sleep(1) // want `sleeping for 1` + time.Sleep(42) // want `sleeping for 42` time.Sleep(201) time.Sleep(c1) time.Sleep(c2) diff --git a/staticcheck/testdata/src/CheckTimerResetReturnValue/CheckTimerResetReturnValue.go b/staticcheck/testdata/src/CheckTimerResetReturnValue/CheckTimerResetReturnValue.go index a67c5c1cf..bb4cfb44d 100644 --- a/staticcheck/testdata/src/CheckTimerResetReturnValue/CheckTimerResetReturnValue.go +++ b/staticcheck/testdata/src/CheckTimerResetReturnValue/CheckTimerResetReturnValue.go @@ -26,7 +26,7 @@ func fn4() { func fn5() { t := time.NewTimer(time.Second) - if t.Reset(time.Second) { // MATCH "it is not possible to use Reset's return value correctly" + if t.Reset(time.Second) { // want `it is not possible to use Reset's return value correctly` <-t.C } } @@ -53,7 +53,7 @@ func fn7(x bool) { func fn8() { t := time.NewTimer(time.Second) - abc := t.Reset(time.Second) // MATCH "it is not possible to use Reset's return value correctly" + abc := t.Reset(time.Second) // want `it is not possible to use Reset's return value correctly` if abc { <-t.C } @@ -69,7 +69,7 @@ func fn9() { func fn10() { t := time.NewTimer(time.Second) - if !t.Reset(time.Second) { // MATCH "it is not possible to use Reset's return value correctly" + if 
!t.Reset(time.Second) { // want `it is not possible to use Reset's return value correctly` <-t.C } } diff --git a/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go b/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go index 4a42ee616..263d11021 100644 --- a/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go +++ b/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go @@ -8,24 +8,24 @@ func fn() { s2 = "bar" ) - if strings.ToLower(s1) == strings.ToLower(s2) { // MATCH "should use strings.EqualFold(a, b) instead of strings.ToLower(a) == strings.ToLower(b)" + if strings.ToLower(s1) == strings.ToLower(s2) { // want `should use strings\.EqualFold\(a, b\) instead of strings\.ToLower\(a\) == strings\.ToLower\(b\)` panic("") } - if strings.ToUpper(s1) == strings.ToUpper(s2) { // MATCH "should use strings.EqualFold(a, b) instead of strings.ToUpper(a) == strings.ToUpper(b)" + if strings.ToUpper(s1) == strings.ToUpper(s2) { // want `should use strings\.EqualFold\(a, b\) instead of strings\.ToUpper\(a\) == strings\.ToUpper\(b\)` panic("") } - if strings.ToLower(s1) != strings.ToLower(s2) { // MATCH "should use !strings.EqualFold(a, b) instead of strings.ToLower(a) != strings.ToLower(b)" + if strings.ToLower(s1) != strings.ToLower(s2) { // want `should use !strings\.EqualFold\(a, b\) instead of strings\.ToLower\(a\) != strings\.ToLower\(b\)` panic("") } - switch strings.ToLower(s1) == strings.ToLower(s2) { // MATCH "should use strings.EqualFold(a, b) instead of strings.ToLower(a) == strings.ToLower(b)" + switch strings.ToLower(s1) == strings.ToLower(s2) { // want `should use strings\.EqualFold\(a, b\) instead of strings\.ToLower\(a\) == strings\.ToLower\(b\)` case true, false: panic("") } - if strings.ToLower(s1) == strings.ToLower(s2) || s1+s2 == s2+s1 { // MATCH "should use strings.EqualFold(a, b) instead of strings.ToLower(a) == 
strings.ToLower(b)" { + if strings.ToLower(s1) == strings.ToLower(s2) || s1+s2 == s2+s1 { // want `should use strings\.EqualFold\(a, b\) instead of strings\.ToLower\(a\) == strings\.ToLower\(b\)` panic("") } diff --git a/staticcheck/testdata/src/CheckURLs/CheckURLs.go b/staticcheck/testdata/src/CheckURLs/CheckURLs.go index a350359ee..0c9477296 100644 --- a/staticcheck/testdata/src/CheckURLs/CheckURLs.go +++ b/staticcheck/testdata/src/CheckURLs/CheckURLs.go @@ -4,6 +4,6 @@ import "net/url" func fn() { url.Parse("foobar") - url.Parse(":") // MATCH /is not a valid URL/ + url.Parse(":") // want `is not a valid URL` url.Parse("https://golang.org") } diff --git a/staticcheck/testdata/src/CheckUnbufferedSignalChan/CheckUnbufferedSignalChan.go b/staticcheck/testdata/src/CheckUnbufferedSignalChan/CheckUnbufferedSignalChan.go index 70d986a54..cb4d853ae 100644 --- a/staticcheck/testdata/src/CheckUnbufferedSignalChan/CheckUnbufferedSignalChan.go +++ b/staticcheck/testdata/src/CheckUnbufferedSignalChan/CheckUnbufferedSignalChan.go @@ -8,7 +8,7 @@ import ( func fn(b bool) { c0 := make(chan os.Signal) - signal.Notify(c0, os.Interrupt) // MATCH /the channel used with signal.Notify should be buffered/ + signal.Notify(c0, os.Interrupt) // want `the channel used with signal\.Notify should be buffered` c1 := make(chan os.Signal, 1) signal.Notify(c1, os.Interrupt, syscall.SIGHUP) diff --git a/staticcheck/testdata/src/CheckUnmarshalPointer/CheckUnmarshalPointer.go b/staticcheck/testdata/src/CheckUnmarshalPointer/CheckUnmarshalPointer.go index b22c6cee1..862837a2d 100644 --- a/staticcheck/testdata/src/CheckUnmarshalPointer/CheckUnmarshalPointer.go +++ b/staticcheck/testdata/src/CheckUnmarshalPointer/CheckUnmarshalPointer.go @@ -7,12 +7,12 @@ func fn1(i3 interface{}) { var i1 interface{} = v var i2 interface{} = &v p := &v - json.Unmarshal([]byte(`{}`), v) // MATCH /Unmarshal expects to unmarshal into a pointer/ + json.Unmarshal([]byte(`{}`), v) // want `Unmarshal expects to unmarshal 
into a pointer` json.Unmarshal([]byte(`{}`), &v) - json.Unmarshal([]byte(`{}`), i1) // MATCH /Unmarshal expects to unmarshal into a pointer/ + json.Unmarshal([]byte(`{}`), i1) // want `Unmarshal expects to unmarshal into a pointer` json.Unmarshal([]byte(`{}`), i2) json.Unmarshal([]byte(`{}`), i3) json.Unmarshal([]byte(`{}`), p) - json.NewDecoder(nil).Decode(v) // MATCH /Decode expects to unmarshal into a pointer/ + json.NewDecoder(nil).Decode(v) // want `Decode expects to unmarshal into a pointer` } diff --git a/staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go b/staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go index d08a18392..2fdddd4a7 100644 --- a/staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go +++ b/staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go @@ -19,37 +19,37 @@ func fn1() { switch v.(type) { case io.Reader: println("io.Reader") - case io.ReadCloser: // MATCH "unreachable case clause: io.Reader will always match before io.ReadCloser" + case io.ReadCloser: // want `unreachable case clause: io\.Reader will always match before io\.ReadCloser` println("io.ReadCloser") } switch v.(type) { case io.Reader: println("io.Reader") - case T: // MATCH "unreachable case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + case T: // want `unreachable case clause: io\.Reader will always match before CheckUnreachableTypeCases\.T` println("T") } switch v.(type) { case io.Reader: println("io.Reader") - case io.ReadCloser: // MATCH "unreachable case clause: io.Reader will always match before io.ReadCloser" + case io.ReadCloser: // want `unreachable case clause: io\.Reader will always match before io\.ReadCloser` println("io.ReadCloser") - case T: // MATCH "unreachable case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + case T: // want `unreachable case clause: io\.Reader will always match before 
CheckUnreachableTypeCases\.T` println("T") } switch v.(type) { case io.Reader: println("io.Reader") - case io.ReadCloser, T: // MATCH "unreachable case clause: io.Reader will always match before io.ReadCloser" + case io.ReadCloser, T: // want `unreachable case clause: io\.Reader will always match before io\.ReadCloser` println("io.ReadCloser or T") } switch v.(type) { case io.ReadCloser, io.Reader: println("io.ReadCloser or io.Reader") - case T: // MATCH "unreachable case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + case T: // want `unreachable case clause: io\.Reader will always match before CheckUnreachableTypeCases\.T` println("T") } @@ -58,21 +58,21 @@ func fn1() { println("something else") case io.Reader: println("io.Reader") - case T: // MATCH "unreachable case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + case T: // want `unreachable case clause: io\.Reader will always match before CheckUnreachableTypeCases\.T` println("T") } switch err.(type) { case V: println("V") - case U: // MATCH "unreachable case clause: CheckUnreachableTypeCases.V will always match before CheckUnreachableTypeCases.U" + case U: // want `unreachable case clause: CheckUnreachableTypeCases\.V will always match before CheckUnreachableTypeCases\.U` println("U") } switch err.(type) { case U: println("U") - case V: // MATCH "unreachable case clause: CheckUnreachableTypeCases.U will always match before CheckUnreachableTypeCases.V" + case V: // want `unreachable case clause: CheckUnreachableTypeCases\.U will always match before CheckUnreachableTypeCases\.V` println("V") } } diff --git a/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go b/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go index 4f588232e..c50c4689a 100644 --- a/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go +++ 
b/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go @@ -2,13 +2,13 @@ package pkg func fn1() { var x int - x = gen() // MATCH /this value of x is never used/ + x = gen() // want `this value of x is never used` x = gen() println(x) var y int if true { - y = gen() // MATCH /this value of y is never used/ + y = gen() // want `this value of y is never used` } y = gen() println(y) @@ -20,14 +20,11 @@ func gen() int { } func fn2() { - x, y := gen(), gen() + x, y := gen(), gen() // want `this value of x is never used` `this value of y is never used` x, y = gen(), gen() println(x, y) } -// MATCH:23 /this value of x is never used/ -// MATCH:23 /this value of y is never used/ - func fn3() { x := uint32(0) if true { @@ -44,25 +41,19 @@ func gen2() (int, int) { } func fn4() { - x, y := gen2() // MATCH /this value of x is never used/ + x, y := gen2() // want `this value of x is never used` println(y) - x, y = gen2() + x, y = gen2() // want `this value of x is never used` `this value of y is never used` x, y = gen2() println(x, y) } -// MATCH:49 /this value of x is never used/ -// MATCH:49 /this value of y is never used/ - func fn5(m map[string]string) { - v, ok := m[""] + v, ok := m[""] // want `this value of v is never used` `this value of ok is never used` v, ok = m[""] println(v, ok) } -// MATCH:58 /this value of v is never used/ -// MATCH:58 /this value of ok is never used/ - func fn6() { x := gen() // Do not report variables if they've been assigned to the blank identifier @@ -72,8 +63,16 @@ func fn6() { func fn7() { func() { var x int - x = gen() // MATCH /this value of x is never used/ + x = gen() // want `this value of x is never used` x = gen() println(x) }() } + +func fn() int { println(); return 0 } + +var y = func() { + v := fn() // want `never used` + v = fn() + println(v) +} diff --git a/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues_test.go 
b/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues_test.go index c85a1a01a..9884718fb 100644 --- a/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues_test.go +++ b/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues_test.go @@ -2,10 +2,8 @@ package pkg import "testing" -func fn() int { println(); return 0 } - func TestFoo(t *testing.T) { - x := fn() // MATCH "never used" + x := fn() // want `never used` x = fn() println(x) } diff --git a/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go b/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go index 48cad7933..33a336de6 100644 --- a/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go +++ b/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go @@ -8,12 +8,12 @@ import ( func fn(s string) { fn2 := func() string { return "" } - fmt.Printf(fn2()) // MATCH /should use print-style function/ - _ = fmt.Sprintf(fn2()) // MATCH /should use print-style function/ - log.Printf(fn2()) // MATCH /should use print-style function/ - fmt.Printf(s) // MATCH /should use print-style function/ + fmt.Printf(fn2()) // want `should use print-style function` + _ = fmt.Sprintf(fn2()) // want `should use print-style function` + log.Printf(fn2()) // want `should use print-style function` + fmt.Printf(s) // want `should use print-style function` fmt.Printf(s, "") - fmt.Fprintf(os.Stdout, s) // MATCH /should use print-style function/ + fmt.Fprintf(os.Stdout, s) // want `should use print-style function` fmt.Fprintf(os.Stdout, s, "") fmt.Printf(fn2(), "") diff --git a/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go b/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go index e02b90fc8..63b1aec8d 100644 --- a/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go +++ b/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go @@ -56,29 
+56,29 @@ func fn() { var t8 T8 json.Marshal(t1) json.Marshal(t2) - json.Marshal(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" + json.Marshal(t3) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T3\.C` json.Marshal(t4) - json.Marshal(t5) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T5.B" + json.Marshal(t5) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T5\.B` json.Marshal(t6) (*json.Encoder)(nil).Encode(t1) (*json.Encoder)(nil).Encode(t2) - (*json.Encoder)(nil).Encode(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" + (*json.Encoder)(nil).Encode(t3) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T3\.C` (*json.Encoder)(nil).Encode(t4) - (*json.Encoder)(nil).Encode(t5) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T5.B" + (*json.Encoder)(nil).Encode(t5) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T5\.B` (*json.Encoder)(nil).Encode(t6) xml.Marshal(t1) xml.Marshal(t2) - xml.Marshal(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" + xml.Marshal(t3) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T3\.C` xml.Marshal(t4) xml.Marshal(t5) - xml.Marshal(t6) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T6.B" + xml.Marshal(t6) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T6\.B` (*xml.Encoder)(nil).Encode(t1) (*xml.Encoder)(nil).Encode(t2) - (*xml.Encoder)(nil).Encode(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" + (*xml.Encoder)(nil).Encode(t3) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T3\.C` (*xml.Encoder)(nil).Encode(t4) (*xml.Encoder)(nil).Encode(t5) - (*xml.Encoder)(nil).Encode(t6) // MATCH "trying 
to marshal chan or func value, field CheckUnsupportedMarshal.T6.B" + (*xml.Encoder)(nil).Encode(t6) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T6\.B` - json.Marshal(t8) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T8.T7.T3.C" + json.Marshal(t8) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T8\.T7\.T3\.C` } diff --git a/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go index 0e64d898d..3e0a64dfe 100644 --- a/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go +++ b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go @@ -9,14 +9,14 @@ import ( func fn() { c := make(chan os.Signal, 1) signal.Notify(c, os.Interrupt) - signal.Ignore(os.Signal(syscall.SIGKILL)) // MATCH /cannot be trapped/ - signal.Ignore(os.Kill) // MATCH /cannot be trapped/ - signal.Notify(c, os.Kill) // MATCH /cannot be trapped/ - signal.Reset(os.Kill) // MATCH /cannot be trapped/ - signal.Ignore(syscall.SIGKILL) // MATCH /cannot be trapped/ - signal.Notify(c, syscall.SIGKILL) // MATCH /cannot be trapped/ - signal.Reset(syscall.SIGKILL) // MATCH /cannot be trapped/ - signal.Ignore(syscall.SIGSTOP) // MATCH /cannot be trapped/ - signal.Notify(c, syscall.SIGSTOP) // MATCH /cannot be trapped/ - signal.Reset(syscall.SIGSTOP) // MATCH /cannot be trapped/ + signal.Ignore(os.Signal(syscall.SIGKILL)) // want `cannot be trapped` + signal.Ignore(os.Kill) // want `cannot be trapped` + signal.Notify(c, os.Kill) // want `cannot be trapped` + signal.Reset(os.Kill) // want `cannot be trapped` + signal.Ignore(syscall.SIGKILL) // want `cannot be trapped` + signal.Notify(c, syscall.SIGKILL) // want `cannot be trapped` + signal.Reset(syscall.SIGKILL) // want `cannot be trapped` + signal.Ignore(syscall.SIGSTOP) // want `cannot be trapped` + signal.Notify(c, syscall.SIGSTOP) // want 
`cannot be trapped` + signal.Reset(syscall.SIGSTOP) // want `cannot be trapped` } diff --git a/staticcheck/testdata/src/CheckWaitgroupAdd/CheckWaitgroupAdd.go b/staticcheck/testdata/src/CheckWaitgroupAdd/CheckWaitgroupAdd.go index c5769fbd8..3688c2c32 100644 --- a/staticcheck/testdata/src/CheckWaitgroupAdd/CheckWaitgroupAdd.go +++ b/staticcheck/testdata/src/CheckWaitgroupAdd/CheckWaitgroupAdd.go @@ -12,7 +12,7 @@ func fn() { }() go func() { - wg.Add(1) // MATCH "should call wg.Add(1) before starting" + wg.Add(1) // want `should call wg\.Add\(1\) before starting` wg.Done() }() diff --git a/staticcheck/testdata/src/CheckWriterBufferModified/CheckWriterBufferModified.go b/staticcheck/testdata/src/CheckWriterBufferModified/CheckWriterBufferModified.go index 6305286a6..dc316708a 100644 --- a/staticcheck/testdata/src/CheckWriterBufferModified/CheckWriterBufferModified.go +++ b/staticcheck/testdata/src/CheckWriterBufferModified/CheckWriterBufferModified.go @@ -6,13 +6,13 @@ type T3 struct{} type T4 struct{} func (T1) Write(b []byte) (int, error) { - b = append(b, '\n') // MATCH /io.Writer.Write must not modify the provided buffer/ + b = append(b, '\n') // want `io\.Writer\.Write must not modify the provided buffer` _ = b return 0, nil } func (T2) Write(b []byte) (int, error) { - b[0] = 0 // MATCH /io.Writer.Write must not modify the provided buffer/ + b[0] = 0 // want `io\.Writer\.Write must not modify the provided buffer` return 0, nil } diff --git a/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go b/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go index 594049525..7e2af8f92 100644 --- a/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go +++ b/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go @@ -6,7 +6,7 @@ func fn1(ctx context.Context) {} func fn2(x string, ctx context.Context) {} func fn3() { - fn1(nil) // MATCH /do not pass a nil Context/ + 
fn1(nil) // want `do not pass a nil Context` fn1(context.TODO()) fn2("", nil) } diff --git a/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go b/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go index 71990e5eb..86056e577 100644 --- a/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go +++ b/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go @@ -4,5 +4,5 @@ import "regexp" func fn() { var r *regexp.Regexp - _ = r.FindAll(nil, 0) //MATCH /calling a FindAll method with n == 0 will return no results/ + _ = r.FindAll(nil, 0) //want `calling a FindAll method with n == 0 will return no results` } diff --git a/staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go b/staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go index 53c4f28af..8ebd909e5 100644 --- a/staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go +++ b/staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go @@ -7,6 +7,6 @@ func fn() { var s io.Seeker s.Seek(0, 0) s.Seek(0, io.SeekStart) - s.Seek(io.SeekStart, 0) // MATCH /the first argument of io.Seeker is the offset/ + s.Seek(io.SeekStart, 0) // want `the first argument of io\.Seeker is the offset` s.Seek(SeekStart, 0) } diff --git a/staticcheck/testdata/src/checkStdlibUsageUTF8Cutset/checkStdlibUsageUTF8Cutset.go b/staticcheck/testdata/src/checkStdlibUsageUTF8Cutset/checkStdlibUsageUTF8Cutset.go index d1ee3edd9..7caf5a7c7 100644 --- a/staticcheck/testdata/src/checkStdlibUsageUTF8Cutset/checkStdlibUsageUTF8Cutset.go +++ b/staticcheck/testdata/src/checkStdlibUsageUTF8Cutset/checkStdlibUsageUTF8Cutset.go @@ -3,7 +3,7 @@ package pkg import "strings" func fn() { - println(strings.Trim("\x80test\xff", "\xff")) // MATCH /is not a valid UTF-8 encoded string/ + println(strings.Trim("\x80test\xff", "\xff")) // want `is not a valid UTF-8 
encoded string` println(strings.Trim("foo", "bar")) s := "\xff" diff --git a/staticcheck/testdata/src/function-literals/function-literals.go b/staticcheck/testdata/src/function-literals/function-literals.go deleted file mode 100644 index bfd0e1917..000000000 --- a/staticcheck/testdata/src/function-literals/function-literals.go +++ /dev/null @@ -1,23 +0,0 @@ -package pkg - -func fn() int { println(); return 0 } - -var x = func(arg int) { // MATCH "overwritten" - arg = 1 - println(arg) -} - -var y = func() { - v := fn() // MATCH "never used" - v = fn() - println(v) -} - -var z = func() { - for { - if true { - println() - } - break // MATCH "the surrounding loop is unconditionally terminated" - } -} diff --git a/staticcheck/testdata/src/synthetic/synthetic.go b/staticcheck/testdata/src/synthetic/synthetic.go deleted file mode 100644 index 02c9e8053..000000000 --- a/staticcheck/testdata/src/synthetic/synthetic.go +++ /dev/null @@ -1,15 +0,0 @@ -package pkg - -import "regexp" - -// must be a basic type to trigger SA4017 (in case of a test failure) -type T string - -func (T) Fn() {} - -// Don't get confused by methods named init -func (T) init() {} - -// this will become a synthetic init function, that we don't want to -// ignore -var _ = regexp.MustCompile("(") // MATCH /error parsing regexp/ diff --git a/stylecheck/analysis.go b/stylecheck/analysis.go new file mode 100644 index 000000000..5a36ec77c --- /dev/null +++ b/stylecheck/analysis.go @@ -0,0 +1,111 @@ +package stylecheck + +import ( + "flag" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "honnef.co/go/tools/config" + "honnef.co/go/tools/internal/passes/buildssa" + "honnef.co/go/tools/lint" + "honnef.co/go/tools/lint/lintutil" +) + +func newFlagSet() flag.FlagSet { + fs := flag.NewFlagSet("", flag.PanicOnError) + fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version") + return *fs +} + +var Analyzers = map[string]*analysis.Analyzer{ + "ST1000": { + Name: "ST1000", + 
Run: CheckPackageComment, + Doc: docST1000, + Requires: []*analysis.Analyzer{}, + Flags: newFlagSet(), + }, + "ST1001": { + Name: "ST1001", + Run: CheckDotImports, + Doc: docST1001, + Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer, config.Analyzer}, + Flags: newFlagSet(), + }, + "ST1003": { + Name: "ST1003", + Run: CheckNames, + Doc: docST1003, + Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer, config.Analyzer}, + Flags: newFlagSet(), + }, + "ST1005": { + Name: "ST1005", + Run: CheckErrorStrings, + Doc: docST1005, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "ST1006": { + Name: "ST1006", + Run: CheckReceiverNames, + Doc: docST1006, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "ST1008": { + Name: "ST1008", + Run: CheckErrorReturn, + Doc: docST1008, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "ST1011": { + Name: "ST1011", + Run: CheckTimeNames, + Doc: docST1011, + Flags: newFlagSet(), + }, + "ST1012": { + Name: "ST1012", + Run: CheckErrorVarNames, + Doc: docST1012, + Requires: []*analysis.Analyzer{config.Analyzer}, + Flags: newFlagSet(), + }, + "ST1013": { + Name: "ST1013", + Run: CheckHTTPStatusCodes, + Doc: docST1013, + Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer, config.Analyzer}, + Flags: newFlagSet(), + }, + "ST1015": { + Name: "ST1015", + Run: CheckDefaultCaseOrder, + Doc: docST1015, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, + "ST1016": { + Name: "ST1016", + Run: CheckReceiverNamesIdentical, + Doc: docST1016, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "ST1017": { + Name: "ST1017", + Run: CheckYodaConditions, + Doc: docST1017, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, + "ST1018": { + 
Name: "ST1018", + Run: CheckInvisibleCharacters, + Doc: docST1018, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, +} diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 120d97f03..35ddc15b9 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -11,49 +11,18 @@ import ( "unicode" "unicode/utf8" - "honnef.co/go/tools/lint" + "honnef.co/go/tools/config" + "honnef.co/go/tools/internal/passes/buildssa" . "honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" ) -type Checker struct { - CheckGenerated bool -} - -func NewChecker() *Checker { - return &Checker{} -} - -func (*Checker) Name() string { return "stylecheck" } -func (*Checker) Prefix() string { return "ST" } -func (c *Checker) Init(prog *lint.Program) {} - -func (c *Checker) Checks() []lint.Check { - return []lint.Check{ - {ID: "ST1000", FilterGenerated: false, Fn: c.CheckPackageComment, Doc: docST1000}, - {ID: "ST1001", FilterGenerated: true, Fn: c.CheckDotImports, Doc: docST1001}, - // {ID: "ST1002", FilterGenerated: true, Fn: c.CheckBlankImports, Doc: docST1002}, - {ID: "ST1003", FilterGenerated: true, Fn: c.CheckNames, Doc: docST1003}, - // {ID: "ST1004", FilterGenerated: false, Fn: nil, , Doc: docST1004}, - {ID: "ST1005", FilterGenerated: false, Fn: c.CheckErrorStrings, Doc: docST1005}, - {ID: "ST1006", FilterGenerated: false, Fn: c.CheckReceiverNames, Doc: docST1006}, - // {ID: "ST1007", FilterGenerated: true, Fn: c.CheckIncDec, Doc: docST1007}, - {ID: "ST1008", FilterGenerated: false, Fn: c.CheckErrorReturn, Doc: docST1008}, - // {ID: "ST1009", FilterGenerated: false, Fn: c.CheckUnexportedReturn, Doc: docST1009}, - // {ID: "ST1010", FilterGenerated: false, Fn: c.CheckContextFirstArg, Doc: docST1010}, - {ID: "ST1011", FilterGenerated: false, Fn: c.CheckTimeNames, Doc: docST1011}, - {ID: "ST1012", 
FilterGenerated: false, Fn: c.CheckErrorVarNames, Doc: docST1012}, - {ID: "ST1013", FilterGenerated: true, Fn: c.CheckHTTPStatusCodes, Doc: docST1013}, - {ID: "ST1015", FilterGenerated: true, Fn: c.CheckDefaultCaseOrder, Doc: docST1015}, - {ID: "ST1016", FilterGenerated: false, Fn: c.CheckReceiverNamesIdentical, Doc: docST1016}, - {ID: "ST1017", FilterGenerated: true, Fn: c.CheckYodaConditions, Doc: docST1017}, - {ID: "ST1018", FilterGenerated: false, Fn: c.CheckInvisibleCharacters, Doc: docST1018}, - } -} - -func (c *Checker) CheckPackageComment(j *lint.Job) { +func CheckPackageComment(pass *analysis.Pass) (interface{}, error) { // - At least one file in a non-main package should have a package comment // // - The comment should be of the form @@ -62,57 +31,59 @@ func (c *Checker) CheckPackageComment(j *lint.Job) { // which case they get appended. But that doesn't happen a lot in // the real world. - if j.Pkg.Name == "main" { - return + if pass.Pkg.Name() == "main" { + return nil, nil } hasDocs := false - for _, f := range j.Pkg.Syntax { - if IsInTest(j, f) { + for _, f := range pass.Files { + if IsInTest(pass, f) { continue } if f.Doc != nil && len(f.Doc.List) > 0 { hasDocs = true prefix := "Package " + f.Name.Name + " " if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) { - j.Errorf(f.Doc, `package comment should be of the form "%s..."`, prefix) + pass.Reportf(f.Doc.Pos(), `package comment should be of the form "%s..."`, prefix) } f.Doc.Text() } } if !hasDocs { - for _, f := range j.Pkg.Syntax { - if IsInTest(j, f) { + for _, f := range pass.Files { + if IsInTest(pass, f) { continue } - j.Errorf(f, "at least one file in a package should have a package comment") + pass.Reportf(f.Pos(), "at least one file in a package should have a package comment") } } + return nil, nil } -func (c *Checker) CheckDotImports(j *lint.Job) { - for _, f := range j.Pkg.Syntax { +func CheckDotImports(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files 
{ imports: for _, imp := range f.Imports { path := imp.Path.Value path = path[1 : len(path)-1] - for _, w := range j.Pkg.Config.DotImportWhitelist { + for _, w := range config.For(pass).DotImportWhitelist { if w == path { continue imports } } - if imp.Name != nil && imp.Name.Name == "." && !IsInTest(j, f) { - j.Errorf(imp, "should not use dot imports") + if imp.Name != nil && imp.Name.Name == "." && !IsInTest(pass, f) { + ReportfFG(pass, imp.Pos(), "should not use dot imports") } } } + return nil, nil } -func (c *Checker) CheckBlankImports(j *lint.Job) { - fset := j.Pkg.Fset - for _, f := range j.Pkg.Syntax { - if IsInMain(j, f) || IsInTest(j, f) { +func CheckBlankImports(pass *analysis.Pass) (interface{}, error) { + fset := pass.Fset + for _, f := range pass.Files { + if IsInMain(pass, f) || IsInTest(pass, f) { continue } @@ -161,13 +132,14 @@ func (c *Checker) CheckBlankImports(j *lint.Job) { } if imp.Doc == nil && imp.Comment == nil && !skip[imp] { - j.Errorf(imp, "a blank import should be only in a main or test package, or have a comment justifying it") + pass.Reportf(imp.Pos(), "a blank import should be only in a main or test package, or have a comment justifying it") } } } + return nil, nil } -func (c *Checker) CheckIncDec(j *lint.Job) { +func CheckIncDec(pass *analysis.Pass) (interface{}, error) { // TODO(dh): this can be noisy for function bodies that look like this: // x += 3 // ... 
@@ -192,14 +164,15 @@ func (c *Checker) CheckIncDec(j *lint.Job) { suffix = "--" } - j.Errorf(assign, "should replace %s with %s%s", Render(j, assign), Render(j, assign.Lhs[0]), suffix) + pass.Reportf(assign.Pos(), "should replace %s with %s%s", Render(pass, assign), Render(pass, assign.Lhs[0]), suffix) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckErrorReturn(j *lint.Job) { +func CheckErrorReturn(pass *analysis.Pass) (interface{}, error) { fnLoop: - for _, fn := range j.Pkg.InitialFunctions { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { sig := fn.Type().(*types.Signature) rets := sig.Results() if rets == nil || rets.Len() < 2 { @@ -213,21 +186,22 @@ fnLoop: } for i := rets.Len() - 2; i >= 0; i-- { if rets.At(i).Type() == types.Universe.Lookup("error").Type() { - j.Errorf(rets.At(i), "error should be returned as the last argument") + pass.Reportf(rets.At(i).Pos(), "error should be returned as the last argument") continue fnLoop } } } + return nil, nil } // CheckUnexportedReturn checks that exported functions on exported // types do not return unexported types. 
-func (c *Checker) CheckUnexportedReturn(j *lint.Job) { - for _, fn := range j.Pkg.InitialFunctions { +func CheckUnexportedReturn(pass *analysis.Pass) (interface{}, error) { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { if fn.Synthetic != "" || fn.Parent() != nil { continue } - if !ast.IsExported(fn.Name()) || IsInMain(j, fn) || IsInTest(j, fn) { + if !ast.IsExported(fn.Name()) || IsInMain(pass, fn) || IsInTest(pass, fn) { continue } sig := fn.Type().(*types.Signature) @@ -239,14 +213,16 @@ func (c *Checker) CheckUnexportedReturn(j *lint.Job) { if named, ok := DereferenceR(res.At(i).Type()).(*types.Named); ok && !ast.IsExported(named.Obj().Name()) && named != types.Universe.Lookup("error").Type() { - j.Errorf(fn, "should not return unexported type") + pass.Reportf(fn.Pos(), "should not return unexported type") } } } + return nil, nil } -func (c *Checker) CheckReceiverNames(j *lint.Job) { - for _, m := range j.Pkg.SSA.Members { +func CheckReceiverNames(pass *analysis.Pass) (interface{}, error) { + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg + for _, m := range ssapkg.Members { if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { ms := typeutil.IntuitiveMethodSet(T.Type(), nil) for _, sel := range ms { @@ -257,18 +233,20 @@ func (c *Checker) CheckReceiverNames(j *lint.Job) { continue } if recv.Name() == "self" || recv.Name() == "this" { - j.Errorf(recv, `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) + pass.Reportf(recv.Pos(), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) } if recv.Name() == "_" { - j.Errorf(recv, "receiver name should not be an underscore, omit the name if it is unused") + pass.Reportf(recv.Pos(), "receiver name should not be an underscore, omit the name if it is unused") } } } } + return nil, nil } -func (c *Checker) CheckReceiverNamesIdentical(j *lint.Job) { - for _, m 
:= range j.Pkg.SSA.Members { +func CheckReceiverNamesIdentical(pass *analysis.Pass) (interface{}, error) { + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg + for _, m := range ssapkg.Members { names := map[string]int{} var firstFn *types.Func @@ -296,16 +274,17 @@ func (c *Checker) CheckReceiverNamesIdentical(j *lint.Job) { seen = append(seen, fmt.Sprintf("%dx %q", count, name)) } - j.Errorf(firstFn, "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", ")) + pass.Reportf(firstFn.Pos(), "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", ")) } } + return nil, nil } -func (c *Checker) CheckContextFirstArg(j *lint.Job) { +func CheckContextFirstArg(pass *analysis.Pass) (interface{}, error) { // TODO(dh): this check doesn't apply to test helpers. Example from the stdlib: // func helperCommandContext(t *testing.T, ctx context.Context, s ...string) (cmd *exec.Cmd) { fnLoop: - for _, fn := range j.Pkg.InitialFunctions { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { if fn.Synthetic != "" || fn.Parent() != nil { continue } @@ -319,28 +298,29 @@ fnLoop: for i := 1; i < params.Len(); i++ { param := params.At(i) if types.TypeString(param.Type(), nil) == "context.Context" { - j.Errorf(param, "context.Context should be the first argument of a function") + pass.Reportf(param.Pos(), "context.Context should be the first argument of a function") continue fnLoop } } } + return nil, nil } -func (c *Checker) CheckErrorStrings(j *lint.Job) { +func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) { objNames := map[*ssa.Package]map[string]bool{} - ssapkg := j.Pkg.SSA + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg objNames[ssapkg] = map[string]bool{} for _, m := range ssapkg.Members { if typ, ok := m.(*ssa.Type); ok { objNames[ssapkg][typ.Name()] = true } } - for _, fn := range j.Pkg.InitialFunctions { + for _, fn := range 
pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { objNames[fn.Package()][fn.Name()] = true } - for _, fn := range j.Pkg.InitialFunctions { - if IsInTest(j, fn) { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + if IsInTest(pass, fn) { // We don't care about malformed error messages in tests; // they're usually for direct human consumption, not part // of an API @@ -368,7 +348,7 @@ func (c *Checker) CheckErrorStrings(j *lint.Job) { } switch s[len(s)-1] { case '.', ':', '!', '\n': - j.Errorf(call, "error strings should not end with punctuation or a newline") + pass.Reportf(call.Pos(), "error strings should not end with punctuation or a newline") } idx := strings.IndexByte(s, ' ') if idx == -1 { @@ -402,13 +382,14 @@ func (c *Checker) CheckErrorStrings(j *lint.Job) { // // It could still be a proper noun, though. - j.Errorf(call, "error strings should not be capitalized") + pass.Reportf(call.Pos(), "error strings should not be capitalized") } } } + return nil, nil } -func (c *Checker) CheckTimeNames(j *lint.Job) { +func CheckTimeNames(pass *analysis.Pass) (interface{}, error) { suffixes := []string{ "Sec", "Secs", "Seconds", "Msec", "Msecs", @@ -423,31 +404,32 @@ func (c *Checker) CheckTimeNames(j *lint.Job) { for _, name := range names { for _, suffix := range suffixes { if strings.HasSuffix(name.Name, suffix) { - j.Errorf(name, "var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix) + pass.Reportf(name.Pos(), "var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix) break } } } } - for _, f := range j.Pkg.Syntax { + for _, f := range pass.Files { ast.Inspect(f, func(node ast.Node) bool { switch node := node.(type) { case *ast.ValueSpec: - T := j.Pkg.TypesInfo.TypeOf(node.Type) + T := pass.TypesInfo.TypeOf(node.Type) fn(T, node.Names) case *ast.FieldList: for _, field := range node.List { - T := j.Pkg.TypesInfo.TypeOf(field.Type) + T := pass.TypesInfo.TypeOf(field.Type) fn(T, 
field.Names) } } return true }) } + return nil, nil } -func (c *Checker) CheckErrorVarNames(j *lint.Job) { - for _, f := range j.Pkg.Syntax { +func CheckErrorVarNames(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { for _, decl := range f.Decls { gen, ok := decl.(*ast.GenDecl) if !ok || gen.Tok != token.VAR { @@ -461,7 +443,7 @@ func (c *Checker) CheckErrorVarNames(j *lint.Job) { for i, name := range spec.Names { val := spec.Values[i] - if !IsCallToAST(j, val, "errors.New") && !IsCallToAST(j, val, "fmt.Errorf") { + if !IsCallToAST(pass, val, "errors.New") && !IsCallToAST(pass, val, "fmt.Errorf") { continue } @@ -470,12 +452,13 @@ func (c *Checker) CheckErrorVarNames(j *lint.Job) { prefix = "Err" } if !strings.HasPrefix(name.Name, prefix) { - j.Errorf(name, "error var %s should have name of the form %sFoo", name.Name, prefix) + pass.Reportf(name.Pos(), "error var %s should have name of the form %sFoo", name.Name, prefix) } } } } } + return nil, nil } var httpStatusCodes = map[int]string{ @@ -540,19 +523,22 @@ var httpStatusCodes = map[int]string{ 511: "StatusNetworkAuthenticationRequired", } -func (c *Checker) CheckHTTPStatusCodes(j *lint.Job) { +func CheckHTTPStatusCodes(pass *analysis.Pass) (interface{}, error) { whitelist := map[string]bool{} - for _, code := range j.Pkg.Config.HTTPStatusCodeWhitelist { + for _, code := range config.For(pass).HTTPStatusCodeWhitelist { whitelist[code] = true } fn := func(node ast.Node) bool { + if node == nil { + return true + } call, ok := node.(*ast.CallExpr) if !ok { return true } var arg int - switch CallNameAST(j, call) { + switch CallNameAST(pass, call) { case "net/http.Error": arg = 2 case "net/http.Redirect": @@ -580,29 +566,32 @@ func (c *Checker) CheckHTTPStatusCodes(j *lint.Job) { if !ok { return true } - j.Errorf(lit, "should use constant http.%s instead of numeric literal %d", s, n) + ReportfFG(pass, lit.Pos(), "should use constant http.%s instead of numeric literal %d", s, n) return true } 
- for _, f := range j.Pkg.Syntax { + // OPT(dh): replace with inspector + for _, f := range pass.Files { ast.Inspect(f, fn) } + return nil, nil } -func (c *Checker) CheckDefaultCaseOrder(j *lint.Job) { +func CheckDefaultCaseOrder(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { stmt := node.(*ast.SwitchStmt) list := stmt.Body.List for i, c := range list { if c.(*ast.CaseClause).List == nil && i != 0 && i != len(list)-1 { - j.Errorf(c, "default case should be first or last in switch statement") + ReportfFG(pass, c.Pos(), "default case should be first or last in switch statement") break } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckYodaConditions(j *lint.Job) { +func CheckYodaConditions(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { cond := node.(*ast.BinaryExpr) if cond.Op != token.EQL && cond.Op != token.NEQ { @@ -615,12 +604,13 @@ func (c *Checker) CheckYodaConditions(j *lint.Job) { // Don't flag lit == lit conditions, just in case return } - j.Errorf(cond, "don't use Yoda conditions") + ReportfFG(pass, cond.Pos(), "don't use Yoda conditions") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { +func CheckInvisibleCharacters(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { lit := node.(*ast.BasicLit) if lit.Kind != token.STRING { @@ -628,11 +618,12 @@ func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { } for _, r := range lit.Value { if unicode.Is(unicode.Cf, r) { - j.Errorf(lit, "string literal contains the Unicode format character %U, consider using the %q escape sequence", r, r) + pass.Reportf(lit.Pos(), "string 
literal contains the Unicode format character %U, consider using the %q escape sequence", r, r) } else if unicode.Is(unicode.Cc, r) && r != '\n' && r != '\t' && r != '\r' { - j.Errorf(lit, "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r) + pass.Reportf(lit.Pos(), "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r) } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn) + return nil, nil } diff --git a/stylecheck/lint_test.go b/stylecheck/lint_test.go index 548fadd14..7b3dd3cc6 100644 --- a/stylecheck/lint_test.go +++ b/stylecheck/lint_test.go @@ -3,10 +3,44 @@ package stylecheck import ( "testing" - "honnef.co/go/tools/lint/testutil" + "golang.org/x/tools/go/analysis/analysistest" ) func TestAll(t *testing.T) { - c := NewChecker() - testutil.TestAll(t, c, "") + checks := map[string][]struct { + dir string + version string + }{ + "ST1000": { + {dir: "CheckPackageComment-1"}, + {dir: "CheckPackageComment-2"}, + }, + "ST1001": {{dir: "CheckDotImports"}}, + "ST1003": { + {dir: "CheckNames"}, + {dir: "CheckNames_generated"}, + }, + "ST1005": {{dir: "CheckErrorStrings"}}, + "ST1006": {{dir: "CheckReceiverNames"}}, + "ST1008": {{dir: "CheckErrorReturn"}}, + "ST1011": {{dir: "CheckTimeNames"}}, + "ST1012": {{dir: "CheckErrorVarNames"}}, + "ST1013": {{dir: "CheckHTTPStatusCodes"}}, + "ST1015": {{dir: "CheckDefaultCaseOrder"}}, + "ST1016": {{dir: "CheckReceiverNamesIdentical"}}, + "ST1017": {{dir: "CheckYodaConditions"}}, + "ST1018": {{dir: "CheckInvisibleCharacters"}}, + } + + for check, dirs := range checks { + a := Analyzers[check] + for _, dir := range dirs { + if dir.version != "" { + if err := a.Flags.Lookup("go").Value.Set(dir.version); err != nil { + t.Fatal(err) + } + } + analysistest.Run(t, analysistest.TestData(), a, dir.dir) + } + } } diff 
--git a/stylecheck/names.go b/stylecheck/names.go index 1c0718fdd..160f9d7ff 100644 --- a/stylecheck/names.go +++ b/stylecheck/names.go @@ -9,7 +9,8 @@ import ( "strings" "unicode" - "honnef.co/go/tools/lint" + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/config" . "honnef.co/go/tools/lint/lintdsl" ) @@ -21,7 +22,7 @@ var knownNameExceptions = map[string]bool{ "kWh": true, } -func (c *Checker) CheckNames(j *lint.Job) { +func CheckNames(pass *analysis.Pass) (interface{}, error) { // A large part of this function is copied from // github.com/golang/lint, Copyright (c) 2013 The Go Authors, // licensed under the BSD 3-clause license. @@ -45,7 +46,7 @@ func (c *Checker) CheckNames(j *lint.Job) { // Handle two common styles from other languages that don't belong in Go. if len(id.Name) >= 5 && allCaps(id.Name) && strings.Contains(id.Name, "_") { - j.Errorf(id, "should not use ALL_CAPS in Go names; use CamelCase instead") + ReportfFG(pass, id.Pos(), "should not use ALL_CAPS in Go names; use CamelCase instead") return } @@ -55,10 +56,10 @@ func (c *Checker) CheckNames(j *lint.Job) { } if len(id.Name) > 2 && strings.Contains(id.Name[1:len(id.Name)-1], "_") { - j.Errorf(id, "should not use underscores in Go names; %s %s should be %s", thing, id.Name, should) + ReportfFG(pass, id.Pos(), "should not use underscores in Go names; %s %s should be %s", thing, id.Name, should) return } - j.Errorf(id, "%s %s should be %s", thing, id.Name, should) + ReportfFG(pass, id.Pos(), "%s %s should be %s", thing, id.Name, should) } checkList := func(fl *ast.FieldList, thing string, initialisms map[string]bool) { if fl == nil { @@ -71,17 +72,18 @@ func (c *Checker) CheckNames(j *lint.Job) { } } - initialisms := make(map[string]bool, len(j.Pkg.Config.Initialisms)) - for _, word := range j.Pkg.Config.Initialisms { + il := config.For(pass).Initialisms + initialisms := make(map[string]bool, len(il)) + for _, word := range il { initialisms[word] = true } - for _, f := range j.Pkg.Syntax { + 
for _, f := range pass.Files { // Package names need slightly different handling than other names. if !strings.HasSuffix(f.Name.Name, "_test") && strings.Contains(f.Name.Name, "_") { - j.Errorf(f, "should not use underscores in package names") + ReportfFG(pass, f.Pos(), "should not use underscores in package names") } if strings.IndexFunc(f.Name.Name, unicode.IsUpper) != -1 { - j.Errorf(f, "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name)) + ReportfFG(pass, f.Pos(), "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name)) } ast.Inspect(f, func(node ast.Node) bool { @@ -104,7 +106,7 @@ func (c *Checker) CheckNames(j *lint.Job) { return true } - if IsInTest(j, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { + if IsInTest(pass, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { return true } @@ -173,6 +175,7 @@ func (c *Checker) CheckNames(j *lint.Job) { return true }) } + return nil, nil } // lintName returns a different name if it should be different. 
diff --git a/stylecheck/testdata/src/CheckContextFirstArg.disabled/CheckContextFirstArg.go b/stylecheck/testdata/src/CheckContextFirstArg.disabled/CheckContextFirstArg.go index 975b0684b..a96ab60e2 100644 --- a/stylecheck/testdata/src/CheckContextFirstArg.disabled/CheckContextFirstArg.go +++ b/stylecheck/testdata/src/CheckContextFirstArg.disabled/CheckContextFirstArg.go @@ -8,5 +8,5 @@ type T int func fn1(int) {} func fn2(context.Context, int) {} func fn3(context.Context, int, context.Context) {} -func fn4(int, context.Context) {} // MATCH "context.Context should be the first argument of a function" -func (T) FN(int, context.Context) {} // MATCH "context.Context should be the first argument of a function" +func fn4(int, context.Context) {} // want `context\.Context should be the first argument of a function` +func (T) FN(int, context.Context) {} // want `context\.Context should be the first argument of a function` diff --git a/stylecheck/testdata/src/CheckDefaultCaseOrder/CheckDefaultCaseOrder.go b/stylecheck/testdata/src/CheckDefaultCaseOrder/CheckDefaultCaseOrder.go index 88294e3e5..28c32eb18 100644 --- a/stylecheck/testdata/src/CheckDefaultCaseOrder/CheckDefaultCaseOrder.go +++ b/stylecheck/testdata/src/CheckDefaultCaseOrder/CheckDefaultCaseOrder.go @@ -30,7 +30,7 @@ func fn(x int) { switch x { case 1: - default: // MATCH "default case should be first or last in switch statement" + default: // want `default case should be first or last in switch statement` case 2: } } diff --git a/stylecheck/testdata/src/CheckDotImports/CheckDotImports.go b/stylecheck/testdata/src/CheckDotImports/CheckDotImports.go index d353b6608..246afe6d5 100644 --- a/stylecheck/testdata/src/CheckDotImports/CheckDotImports.go +++ b/stylecheck/testdata/src/CheckDotImports/CheckDotImports.go @@ -1,6 +1,6 @@ // Package pkg ... package pkg -import . "fmt" // MATCH "should not use dot imports" +import . 
"fmt" // want `should not use dot imports` var _ = Println diff --git a/stylecheck/testdata/src/CheckErrorReturn/CheckErrorReturn.go b/stylecheck/testdata/src/CheckErrorReturn/CheckErrorReturn.go index b353bf63b..feed85e58 100644 --- a/stylecheck/testdata/src/CheckErrorReturn/CheckErrorReturn.go +++ b/stylecheck/testdata/src/CheckErrorReturn/CheckErrorReturn.go @@ -1,8 +1,8 @@ // Package pkg ... package pkg -func fn1() (error, int) { return nil, 0 } // MATCH "error should be returned as the last argument" -func fn2() (a, b error, c int) { return nil, nil, 0 } // MATCH "error should be returned as the last argument" +func fn1() (error, int) { return nil, 0 } // want `error should be returned as the last argument` +func fn2() (a, b error, c int) { return nil, nil, 0 } // want `error should be returned as the last argument` func fn3() (a int, b, c error) { return 0, nil, nil } func fn4() (error, error) { return nil, nil } func fn5() int { return 0 } diff --git a/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go b/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go index 0de45215b..2659fa126 100644 --- a/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go +++ b/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go @@ -5,11 +5,11 @@ import "errors" func fn() { errors.New("a perfectly fine error") - errors.New("Not a great error") // MATCH "error strings should not be capitalized" - errors.New("also not a great error.") // MATCH "error strings should not end with punctuation or a newline" + errors.New("Not a great error") // want `error strings should not be capitalized` + errors.New("also not a great error.") // want `error strings should not end with punctuation or a newline` errors.New("URL is okay") errors.New("SomeFunc is okay") - errors.New("URL is okay, but the period is not.") // MATCH "error strings should not end with punctuation or a newline" + errors.New("URL is okay, but the period is not.") // want `error strings 
should not end with punctuation or a newline` errors.New("T must not be nil") } diff --git a/stylecheck/testdata/src/CheckErrorVarNames/CheckErrorVarNames.go b/stylecheck/testdata/src/CheckErrorVarNames/CheckErrorVarNames.go index fcae261ae..5652ef9a5 100644 --- a/stylecheck/testdata/src/CheckErrorVarNames/CheckErrorVarNames.go +++ b/stylecheck/testdata/src/CheckErrorVarNames/CheckErrorVarNames.go @@ -7,19 +7,16 @@ import ( ) var ( - foo = errors.New("") // MATCH "error var foo should have name of the form errFoo" + foo = errors.New("") // want `error var foo should have name of the form errFoo` errBar = errors.New("") - qux, fisk, errAnother = errors.New(""), errors.New(""), errors.New("") - abc = fmt.Errorf("") // MATCH "error var abc should have name of the form errFoo" + qux, fisk, errAnother = errors.New(""), errors.New(""), errors.New("") // want `error var qux should have name of the form errFoo` `error var fisk should have name of the form errFoo` + abc = fmt.Errorf("") // want `error var abc should have name of the form errFoo` errAbc = fmt.Errorf("") ) -var wrong = errors.New("") // MATCH "error var wrong should have name of the form errFoo" +var wrong = errors.New("") // want `error var wrong should have name of the form errFoo` var result = fn() func fn() error { return nil } - -// MATCH:12 "error var qux should have name of the form errFoo" -// MATCH:12 "error var fisk should have name of the form errFoo" diff --git a/stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go b/stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go index 17eac85ca..602be22bb 100644 --- a/stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go +++ b/stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go @@ -5,10 +5,10 @@ import "net/http" func fn() { // Check all the supported functions - http.Error(nil, "", 506) // MATCH "http.StatusVariantAlsoNegotiates" - http.Redirect(nil, nil, "", 506) // MATCH 
"http.StatusVariantAlsoNegotiates" - http.StatusText(506) // MATCH "http.StatusVariantAlsoNegotiates" - http.RedirectHandler("", 506) // MATCH "http.StatusVariantAlsoNegotiates" + http.Error(nil, "", 506) // want `http\.StatusVariantAlsoNegotiates` + http.Redirect(nil, nil, "", 506) // want `http\.StatusVariantAlsoNegotiates` + http.StatusText(506) // want `http\.StatusVariantAlsoNegotiates` + http.RedirectHandler("", 506) // want `http\.StatusVariantAlsoNegotiates` // Don't flag literals with no known constant http.StatusText(600) diff --git a/stylecheck/testdata/src/CheckIncDec.disabled/CheckIncDec.go b/stylecheck/testdata/src/CheckIncDec.disabled/CheckIncDec.go index 7755a9944..c8aa51d10 100644 --- a/stylecheck/testdata/src/CheckIncDec.disabled/CheckIncDec.go +++ b/stylecheck/testdata/src/CheckIncDec.disabled/CheckIncDec.go @@ -5,8 +5,8 @@ func fn() { var x int x-- x++ - x += 1 // MATCH "should replace x += 1 with x++" - x -= 1 // MATCH "should replace x -= 1 with x--" + x += 1 // want `should replace x \+= 1 with x\+\+` + x -= 1 // want `should replace x -= 1 with x--` x /= 1 x += 2 x -= 2 diff --git a/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go b/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go index 152505f69..bc6a7446c 100644 --- a/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go +++ b/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go @@ -2,14 +2,11 @@ package pkg var ( - a = "" // MATCH "Unicode control character U+0007" - b = "" + a = "" // want `Unicode control character U\+0007` + b = "" // want `Unicode control character U\+0007` `Unicode control character U\+001A` c = "Test test" d = `T est` - e = `Zero​Width` // MATCH "Unicode format character U+200B" + e = `Zero​Width` // want `Unicode format character U\+200B` f = "\u200b" ) - -// MATCH:6 "Unicode control character U+0007" -// MATCH:6 "Unicode control character U+001A" diff --git 
a/stylecheck/testdata/src/CheckNames/CheckNames.go b/stylecheck/testdata/src/CheckNames/CheckNames.go index 9f06d9361..a61b5d4a0 100644 --- a/stylecheck/testdata/src/CheckNames/CheckNames.go +++ b/stylecheck/testdata/src/CheckNames/CheckNames.go @@ -1,29 +1,29 @@ // Package pkg_foo ... -package pkg_foo // MATCH "should not use underscores in package names" +package pkg_foo // want `should not use underscores in package names` var range_ int var _abcdef int var abcdef_ int -var abc_def int // MATCH "should not use underscores in Go names; var abc_def should be abcDef" -var abc_def_ int // MATCH "should not use underscores in Go names; var abc_def_ should be abcDef_" +var abc_def int // want `should not use underscores in Go names; var abc_def should be abcDef` +var abc_def_ int // want `should not use underscores in Go names; var abc_def_ should be abcDef_` -func fn_1() {} // MATCH "func fn_1 should be fn1" +func fn_1() {} // want `func fn_1 should be fn1` func fn2() {} -func fn_Id() {} // MATCH "func fn_Id should be fnID" -func fnId() {} // MATCH "func fnId should be fnID" +func fn_Id() {} // want `func fn_Id should be fnID` +func fnId() {} // want `func fnId should be fnID` -var FOO_BAR int // MATCH "should not use ALL_CAPS in Go names; use CamelCase instead" -var Foo_BAR int // MATCH "var Foo_BAR should be FooBAR" -var foo_bar int // MATCH "foo_bar should be fooBar" +var FOO_BAR int // want `should not use ALL_CAPS in Go names; use CamelCase instead` +var Foo_BAR int // want `var Foo_BAR should be FooBAR` +var foo_bar int // want `foo_bar should be fooBar` var kFoobar int // not a check we inherited from golint. more false positives than true ones. 
func fn(x []int) { var ( - a_b = 1 // MATCH "var a_b should be aB" - c_d int // MATCH "var c_d should be cD" + a_b = 1 // want `var a_b should be aB` + c_d int // want `var c_d should be cD` ) a_b += 2 - for e_f := range x { // MATCH "range var e_f should be eF" + for e_f := range x { // want `range var e_f should be eF` _ = e_f } @@ -35,16 +35,16 @@ func fn(x []int) { func fn_3() {} //export not actually the export keyword -func fn_4() {} // MATCH "func fn_4 should be fn4" +func fn_4() {} // want `func fn_4 should be fn4` //export -func fn_5() {} // MATCH "func fn_5 should be fn5" +func fn_5() {} // want `func fn_5 should be fn5` // export fn_6 -func fn_6() {} // MATCH "func fn_6 should be fn6" +func fn_6() {} // want `func fn_6 should be fn6` //export fn_8 -func fn_7() {} // MATCH "func fn_7 should be fn7" +func fn_7() {} // want `func fn_7 should be fn7` //go:linkname fn_8 time.Now func fn_8() {} diff --git a/stylecheck/testdata/src/CheckPackageComment-1/CheckPackageComment-1.go b/stylecheck/testdata/src/CheckPackageComment-1/CheckPackageComment-1.go index 52f77f4ec..2862e374d 100644 --- a/stylecheck/testdata/src/CheckPackageComment-1/CheckPackageComment-1.go +++ b/stylecheck/testdata/src/CheckPackageComment-1/CheckPackageComment-1.go @@ -1 +1 @@ -package pkg // MATCH "at least one file in a package should have a package comment" +package pkg // want `at least one file in a package should have a package comment` diff --git a/stylecheck/testdata/src/CheckPackageComment-2/CheckPackageComment-2.go b/stylecheck/testdata/src/CheckPackageComment-2/CheckPackageComment-2.go index 1ee009306..21499368e 100644 --- a/stylecheck/testdata/src/CheckPackageComment-2/CheckPackageComment-2.go +++ b/stylecheck/testdata/src/CheckPackageComment-2/CheckPackageComment-2.go @@ -1,4 +1,2 @@ -// This package is great +// This package is great // want `package comment should be of the form` package pkg - -// MATCH:1 "package comment should be of the form" diff --git 
a/stylecheck/testdata/src/CheckReceiverNames/CheckReceiverNames.go b/stylecheck/testdata/src/CheckReceiverNames/CheckReceiverNames.go index e6bb2c48d..ead410cec 100644 --- a/stylecheck/testdata/src/CheckReceiverNames/CheckReceiverNames.go +++ b/stylecheck/testdata/src/CheckReceiverNames/CheckReceiverNames.go @@ -3,9 +3,9 @@ package pkg type T1 int -func (x T1) Fn1() {} // MATCH "methods on the same type should have the same receiver name" +func (x T1) Fn1() {} func (y T1) Fn2() {} func (x T1) Fn3() {} func (T1) Fn4() {} -func (_ T1) Fn5() {} // MATCH "receiver name should not be an underscore, omit the name if it is unused" -func (self T1) Fn6() {} // MATCH "receiver name should be a reflection of its identity" +func (_ T1) Fn5() {} // want `receiver name should not be an underscore, omit the name if it is unused` +func (self T1) Fn6() {} // want `receiver name should be a reflection of its identity` diff --git a/stylecheck/testdata/src/CheckReceiverNamesIdentical/CheckReceiverNames.go b/stylecheck/testdata/src/CheckReceiverNamesIdentical/CheckReceiverNames.go new file mode 100644 index 000000000..7447a8107 --- /dev/null +++ b/stylecheck/testdata/src/CheckReceiverNamesIdentical/CheckReceiverNames.go @@ -0,0 +1,11 @@ +// Package pkg ... 
+package pkg + +type T1 int + +func (x T1) Fn1() {} // want `methods on the same type should have the same receiver name` +func (y T1) Fn2() {} +func (x T1) Fn3() {} +func (T1) Fn4() {} +func (_ T1) Fn5() {} +func (self T1) Fn6() {} diff --git a/stylecheck/testdata/src/CheckTimeNames/CheckTimeNames.go b/stylecheck/testdata/src/CheckTimeNames/CheckTimeNames.go index 9049b4b3c..be68e4574 100644 --- a/stylecheck/testdata/src/CheckTimeNames/CheckTimeNames.go +++ b/stylecheck/testdata/src/CheckTimeNames/CheckTimeNames.go @@ -6,12 +6,12 @@ import "time" type T1 struct { aMS int B time.Duration - BMillis time.Duration // MATCH "don't use unit-specific suffix" + BMillis time.Duration // want `don't use unit-specific suffix` } -func fn1(a, b, cMS time.Duration) { // MATCH "don't use unit-specific suffix" +func fn1(a, b, cMS time.Duration) { // want `don't use unit-specific suffix` var x time.Duration - var xMS time.Duration // MATCH "don't use unit-specific suffix" - var y, yMS time.Duration // MATCH "don't use unit-specific suffix" + var xMS time.Duration // want `don't use unit-specific suffix` + var y, yMS time.Duration // want `don't use unit-specific suffix` _, _, _, _ = x, xMS, y, yMS } diff --git a/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go b/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go index 273794656..94e3c6c8b 100644 --- a/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go +++ b/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go @@ -2,15 +2,15 @@ package pkg func fn(x string, y int) { - if "" == x { // MATCH "Yoda" + if "" == x { // want `Yoda` } - if 0 == y { // MATCH "Yoda" + if 0 == y { // want `Yoda` } if 0 > y { } if "" == "" { } - if "" == "" || 0 == y { // MATCH "Yoda" + if "" == "" || 0 == y { // want `Yoda` } } diff --git a/stylecheck/testdata/src/_CheckBlankImports.disabled/CheckBlankImports-2.go b/stylecheck/testdata/src/_CheckBlankImports.disabled/CheckBlankImports-2.go index 
87224cd33..87f93861c 100644 --- a/stylecheck/testdata/src/_CheckBlankImports.disabled/CheckBlankImports-2.go +++ b/stylecheck/testdata/src/_CheckBlankImports.disabled/CheckBlankImports-2.go @@ -1,15 +1,15 @@ // Package pkg ... package pkg -import _ "fmt" +import _ "fmt" // want `blank import` -import _ "fmt" +import _ "fmt" // want `blank import` import _ "fmt" import _ "fmt" -import _ "fmt" +import _ "fmt" // want `blank import` import "strings" -import _ "fmt" +import _ "fmt" // want `blank import` // This is fine import _ "fmt" @@ -22,17 +22,17 @@ import _ "fmt" // This is fine import _ "fmt" import "bytes" -import _ "fmt" +import _ "fmt" // want `blank import` import _ "fmt" // This is fine // This is not fine import ( - _ "fmt" + _ "fmt" // want `blank import` ) import ( - _ "fmt" + _ "fmt" // want `blank import` "strconv" // This is fine _ "fmt" @@ -41,11 +41,3 @@ import ( var _ = strings.NewReader var _ = bytes.NewBuffer var _ = strconv.IntSize - -// MATCH:4 "blank import" -// MATCH:6 "blank import" -// MATCH:10 "blank import" -// MATCH:12 "blank import" -// MATCH:25 "blank import" -// MATCH:31 "blank import" -// MATCH:35 "blank import" diff --git a/stylecheck/testdata/src/_CheckUnexportedReturn.disabled/CheckUnexportedReturn.go b/stylecheck/testdata/src/_CheckUnexportedReturn.disabled/CheckUnexportedReturn.go index 802015fde..0a49b888f 100644 --- a/stylecheck/testdata/src/_CheckUnexportedReturn.disabled/CheckUnexportedReturn.go +++ b/stylecheck/testdata/src/_CheckUnexportedReturn.disabled/CheckUnexportedReturn.go @@ -10,12 +10,12 @@ func fn1() string { return "" } func Fn2() error { return nil } func fn3() error { return nil } func fn5() t1 { return 0 } -func Fn6() t1 { return 0 } // MATCH "should not return unexported type" -func Fn7() *t1 { return nil } // MATCH "should not return unexported type" +func Fn6() t1 { return 0 } // want `should not return unexported type` +func Fn7() *t1 { return nil } // want `should not return unexported type` func Fn8() T2 
{ return 0 } func (Recv) fn9() t1 { return 0 } -func (Recv) Fn10() t1 { return 0 } // MATCH "should not return unexported type" +func (Recv) Fn10() t1 { return 0 } // want `should not return unexported type` func (Recv) Fn11() T2 { return 0 } func (recv) fn9() t1 { return 0 } diff --git a/unused/testdata/src/alias/alias.go b/unused/testdata/src/alias/alias.go index f181c173a..911501e59 100644 --- a/unused/testdata/src/alias/alias.go +++ b/unused/testdata/src/alias/alias.go @@ -1,11 +1,11 @@ package main type t1 struct{} -type t2 struct{} // MATCH "t2 is unused" +type t2 struct{} // want `t2` type t3 struct{} type alias1 = t1 -type alias2 = t2 // MATCH "alias2 is unused" +type alias2 = t2 // want `alias2` type alias3 = t3 type alias4 = int diff --git a/unused/testdata/src/blank/blank.go b/unused/testdata/src/blank/blank.go index b43ab1078..ee707b626 100644 --- a/unused/testdata/src/blank/blank.go +++ b/unused/testdata/src/blank/blank.go @@ -2,7 +2,7 @@ package pkg import _ "fmt" -type t1 struct{} // MATCH /t1 is unused/ +type t1 struct{} // want `t1` type t2 struct { _ int } @@ -12,7 +12,7 @@ type t5 struct{} var _ = t2{} -func fn1() { // MATCH /fn1 is unused/ +func fn1() { // want `fn1` _ = t1{} var _ = t1{} } diff --git a/unused/testdata/src/cgo/cgo.go b/unused/testdata/src/cgo/cgo.go index 6b484f820..4b852d173 100644 --- a/unused/testdata/src/cgo/cgo.go +++ b/unused/testdata/src/cgo/cgo.go @@ -3,4 +3,4 @@ package pkg //go:cgo_export_dynamic func foo() {} -func bar() {} // MATCH /bar is unused/ +func bar() {} // want `bar` diff --git a/unused/testdata/src/consts/consts.go b/unused/testdata/src/consts/consts.go index 8c1076cbd..1cab7ddde 100644 --- a/unused/testdata/src/consts/consts.go +++ b/unused/testdata/src/consts/consts.go @@ -12,9 +12,9 @@ const ( c7 c8 - c9 // MATCH "c9 is unused" - c10 // MATCH "c10 is unused" - c11 // MATCH "c11 is unused" + c9 // want `c9` + c10 // want `c10` + c11 // want `c11` ) var _ = []int{c3: 1} @@ -31,5 +31,5 @@ func init() { } 
func Fn() { - const X = 1 // MATCH "X is unused" + const X = 1 // want `X` } diff --git a/unused/testdata/src/conversion/conversion.go b/unused/testdata/src/conversion/conversion.go index afeb1f7a1..0821c67da 100644 --- a/unused/testdata/src/conversion/conversion.go +++ b/unused/testdata/src/conversion/conversion.go @@ -17,12 +17,12 @@ type t2 struct { type t3 struct { a int - b int // MATCH /b is unused/ + b int // want `b` } type t4 struct { a int - b int // MATCH /b is unused/ + b int // want `b` } type t5 struct { diff --git a/unused/testdata/src/cyclic/cyclic.go b/unused/testdata/src/cyclic/cyclic.go index 8601c24f2..b9dfc952d 100644 --- a/unused/testdata/src/cyclic/cyclic.go +++ b/unused/testdata/src/cyclic/cyclic.go @@ -1,9 +1,9 @@ package pkg -func a() { // MATCH /a is unused/ +func a() { // want `a` b() } -func b() { // MATCH /b is unused/ +func b() { // want `b` a() } diff --git a/unused/testdata/src/embedding/embedding.go b/unused/testdata/src/embedding/embedding.go index b45b3fc72..03fb8dd2f 100644 --- a/unused/testdata/src/embedding/embedding.go +++ b/unused/testdata/src/embedding/embedding.go @@ -27,8 +27,8 @@ type I2 interface { type t3 struct{} type t4 struct { - x int // MATCH /x is unused/ - y int // MATCH /y is unused/ + x int // want `x` + y int // want `y` t3 } diff --git a/unused/testdata/src/fields/fields.go b/unused/testdata/src/fields/fields.go index feb5ea022..401acf4f1 100644 --- a/unused/testdata/src/fields/fields.go +++ b/unused/testdata/src/fields/fields.go @@ -23,9 +23,8 @@ type a1 [1]t14 type t15 struct{ f151 int } type a2 [1]t15 type t16 struct{ f161 int } -type t17 struct{ f171, f172 int } // MATCH /t17 is unused/ -// MATCH:28 /f183 is unused/ -type t18 struct{ f181, f182, f183 int } // MATCH /f182 is unused/ +type t17 struct{ f171, f172 int } // want `t17` +type t18 struct{ f181, f182, f183 int } // want `f182` `f183` type t19 struct{ f191 int } type m2 map[string]t19 @@ -33,7 +32,7 @@ type m2 map[string]t19 type t20 struct{ f201 
int } type m3 map[string]t20 -type t21 struct{ f211, f212 int } // MATCH /f211 is unused/ +type t21 struct{ f211, f212 int } // want `f211` func foo() { _ = t10{1} @@ -58,7 +57,7 @@ func foo() { _ = a1{{1}} _ = a2{0: {1}} _ = map[[1]t16]int{{{1}}: 1} - y := struct{ x int }{} // MATCH /x is unused/ + y := struct{ x int }{} // want `x` _ = y _ = t18{f181: 1} _ = []m2{{"a": {1}}} diff --git a/unused/testdata/src/functions/functions.go b/unused/testdata/src/functions/functions.go index 8434e8587..cb74a895f 100644 --- a/unused/testdata/src/functions/functions.go +++ b/unused/testdata/src/functions/functions.go @@ -11,18 +11,18 @@ func main() { _ = st() } -type t1 struct{} // MATCH /t1 is unused/ +type t1 struct{} // want `t1` type t2 struct{} type t3 struct{} -func fn1() t1 { return t1{} } // MATCH /fn1 is unused/ +func fn1() t1 { return t1{} } // want `fn1` func fn2() (x t2) { return } func fn3() *t3 { return nil } func fn4() { const x = 1 - const y = 2 // MATCH /y is unused/ - type foo int // MATCH /foo is unused/ + const y = 2 // want `y` + type foo int // want `foo` type bar int _ = x diff --git a/unused/testdata/src/generated1/generated1.go b/unused/testdata/src/generated1/generated1.go deleted file mode 100644 index 1a8ca55f6..000000000 --- a/unused/testdata/src/generated1/generated1.go +++ /dev/null @@ -1,5 +0,0 @@ -// Code generated by a clever monkey; DO NOT EDIT. - -package pkg - -type t struct{} diff --git a/unused/testdata/src/generated2/generated2.go b/unused/testdata/src/generated2/generated2.go deleted file mode 100644 index 17d736ee1..000000000 --- a/unused/testdata/src/generated2/generated2.go +++ /dev/null @@ -1,5 +0,0 @@ -// Code generated by a bunch of monkeys with typewriters and RSI, DO NOT EDIT. 
- -package pkg - -type t struct{} diff --git a/unused/testdata/src/interfaces/interfaces.go b/unused/testdata/src/interfaces/interfaces.go index cb507a07e..59b1be73e 100644 --- a/unused/testdata/src/interfaces/interfaces.go +++ b/unused/testdata/src/interfaces/interfaces.go @@ -7,7 +7,7 @@ type I interface { type t struct{} func (t) fn1() {} -func (t) fn2() {} // MATCH /fn2 is unused/ +func (t) fn2() {} // want `fn2` func init() { _ = t{} diff --git a/unused/testdata/src/linkname/linkname.go b/unused/testdata/src/linkname/linkname.go index 1c43c35da..1423a2148 100644 --- a/unused/testdata/src/linkname/linkname.go +++ b/unused/testdata/src/linkname/linkname.go @@ -12,7 +12,7 @@ func foo() {} var bar int var ( - baz int // MATCH "baz is unused" + baz int // want `baz` //go:linkname qux other3 qux int ) diff --git a/unused/testdata/src/main/main.go b/unused/testdata/src/main/main.go index ab000fc79..ae5c913ae 100644 --- a/unused/testdata/src/main/main.go +++ b/unused/testdata/src/main/main.go @@ -1,13 +1,13 @@ package main func Fn1() {} -func Fn2() {} // MATCH /Fn2 is unused/ +func Fn2() {} // want `Fn2` -const X = 1 // MATCH /X is unused/ +const X = 1 // want `X` -var Y = 2 // MATCH /Y is unused/ +var Y = 2 // want `Y` -type Z struct{} // MATCH /Z is unused/ +type Z struct{} // want `Z` func main() { Fn1() diff --git a/unused/testdata/src/methods/methods.go b/unused/testdata/src/methods/methods.go index 17673addd..0eaf6ee7f 100644 --- a/unused/testdata/src/methods/methods.go +++ b/unused/testdata/src/methods/methods.go @@ -6,7 +6,7 @@ type t3 struct{} func (t1) Foo() {} func (t3) Foo() {} -func (t3) foo() {} // MATCH /foo is unused/ +func (t3) foo() {} // want `foo` func init() { _ = t1{} diff --git a/unused/testdata/src/nested/nested.go b/unused/testdata/src/nested/nested.go index 518172830..7e108a28c 100644 --- a/unused/testdata/src/nested/nested.go +++ b/unused/testdata/src/nested/nested.go @@ -1,10 +1,10 @@ package pkg -type t struct{} // MATCH /t is unused/ 
+type t struct{} // want `t` func (t) fragment() {} -func fn() bool { // MATCH /fn is unused/ +func fn() bool { // want `fn` var v interface{} = t{} switch obj := v.(type) { case interface { diff --git a/unused/testdata/src/nocopy-main/nocopy-main.go b/unused/testdata/src/nocopy-main/nocopy-main.go index 4fefb5071..369a5d503 100644 --- a/unused/testdata/src/nocopy-main/nocopy-main.go +++ b/unused/testdata/src/nocopy-main/nocopy-main.go @@ -2,8 +2,8 @@ package main type myNoCopy1 struct{} type myNoCopy2 struct{} -type locker struct{} // MATCH "locker is unused" -type someStruct struct{ x int } // MATCH "someStruct is unused" +type locker struct{} // want `locker` +type someStruct struct{ x int } // want `someStruct` func (myNoCopy1) Lock() {} func (recv myNoCopy2) Lock() {} @@ -14,9 +14,9 @@ func (someStruct) Lock() {} type T struct { noCopy1 myNoCopy1 noCopy2 myNoCopy2 - field1 someStruct // MATCH "field1 is unused" - field2 locker // MATCH "field2 is unused" - field3 int // MATCH "field3 is unused" + field1 someStruct // want `field1` + field2 locker // want `field2` + field3 int // want `field3` } func main() { diff --git a/unused/testdata/src/nocopy/nocopy.go b/unused/testdata/src/nocopy/nocopy.go index 156edf50c..98e46d4eb 100644 --- a/unused/testdata/src/nocopy/nocopy.go +++ b/unused/testdata/src/nocopy/nocopy.go @@ -2,8 +2,8 @@ package bar type myNoCopy1 struct{} type myNoCopy2 struct{} -type locker struct{} // MATCH "locker is unused" -type someStruct struct{ x int } // MATCH "someStruct is unused" +type locker struct{} // want `locker` +type someStruct struct{ x int } // want `someStruct` func (myNoCopy1) Lock() {} func (recv myNoCopy2) Lock() {} @@ -14,7 +14,7 @@ func (someStruct) Lock() {} type T struct { noCopy1 myNoCopy1 noCopy2 myNoCopy2 - field1 someStruct // MATCH "field1 is unused" - field2 locker // MATCH "field2 is unused" - field3 int // MATCH "field3 is unused" + field1 someStruct // want `field1` + field2 locker // want `field2` + field3 int // 
want `field3` } diff --git a/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go b/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go index 648b7d6f0..fb577f97c 100644 --- a/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go +++ b/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go @@ -6,7 +6,7 @@ func init() { } type T0 struct { - m int // MATCH /m is unused/ + m int // want `m` n int } diff --git a/unused/testdata/src/quiet/quiet.go b/unused/testdata/src/quiet/quiet.go index dbdf47d1e..42cca0dfd 100644 --- a/unused/testdata/src/quiet/quiet.go +++ b/unused/testdata/src/quiet/quiet.go @@ -1,21 +1,21 @@ package pkg -type iface interface { // MATCH "type iface is unused" +type iface interface { // want `iface` foo() } -type t1 struct{} // MATCH "type t1 is unused" +type t1 struct{} // want `t1` func (t1) foo() {} type t2 struct{} -func (t t2) bar(arg int) (ret int) { return 0 } // MATCH "func t2.bar is unused" +func (t t2) bar(arg int) (ret int) { return 0 } // want `bar` func init() { _ = t2{} } -type t3 struct { // MATCH "type t3 is unused" +type t3 struct { // want `t3` a int b int } diff --git a/unused/testdata/src/unused_type/unused_type.go b/unused/testdata/src/unused_type/unused_type.go index eabfce4b5..0881ffe61 100644 --- a/unused/testdata/src/unused_type/unused_type.go +++ b/unused/testdata/src/unused_type/unused_type.go @@ -1,6 +1,6 @@ package pkg -type t1 struct{} // MATCH /t1 is unused/ +type t1 struct{} // want `t1` func (t1) Fn() {} @@ -12,6 +12,6 @@ func init() { (*t2).Fn(nil) } -type t3 struct{} // MATCH /t3 is unused/ +type t3 struct{} // want `t3` func (t3) fn() diff --git a/unused/unused.go b/unused/unused.go index f69bddae8..fdecc743a 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -9,7 +9,9 @@ import ( "strings" "sync" + "golang.org/x/tools/go/analysis" "honnef.co/go/tools/go/types/typeutil" + "honnef.co/go/tools/internal/passes/buildssa" "honnef.co/go/tools/lint" 
"honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" @@ -131,28 +133,6 @@ func assert(b bool) { } } -type Checker struct { - WholeProgram bool - Debug io.Writer - - interfaces []*types.Interface - initialPackages []*lint.Pkg - scopes map[*types.Scope]*ssa.Function - - seenMu sync.Mutex - seen map[token.Position]struct{} - out []types.Object -} - -func (*Checker) Name() string { return "unused" } -func (*Checker) Prefix() string { return "U" } - -func (l *Checker) Checks() []lint.Check { - return []lint.Check{ - {ID: "U1000", FilterGenerated: true, Fn: l.Lint}, - } -} - func typString(obj types.Object) string { switch obj := obj.(type) { case *types.Func: @@ -413,77 +393,135 @@ var runtimeFuncs = map[string]bool{ "write": true, } -func (c *Checker) Init(prog *lint.Program) { - for _, pkg := range prog.AllPackages { - c.interfaces = append(c.interfaces, interfacesFromExportData(pkg.Types)...) +type pkg struct { + Fset *token.FileSet + Files []*ast.File + Pkg *types.Package + TypesInfo *types.Info + TypesSizes types.Sizes + SSA *ssa.Package + SrcFuncs []*ssa.Function +} + +type Checker struct { + mu sync.Mutex + + WholeProgram bool + Debug io.Writer + + initialPackages map[*types.Package]struct{} + allPackages map[*types.Package]struct{} + + seenMu sync.Mutex + seen map[token.Position]struct{} + + graph *Graph + out []types.Object + fset *token.FileSet +} + +func NewChecker() *Checker { + c := &Checker{ + seen: map[token.Position]struct{}{}, + initialPackages: map[*types.Package]struct{}{}, } - c.initialPackages = prog.InitialPackages - c.seen = map[token.Position]struct{}{} - c.scopes = map[*types.Scope]*ssa.Function{} - for _, pkg := range prog.InitialPackages { - for _, fn := range pkg.InitialFunctions { - if fn.Object() != nil { - scope := fn.Object().(*types.Func).Scope() - c.scopes[scope] = fn - } + return c +} + +func (c *Checker) Analyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "U1000", + Doc: "Unused code", + Run: c.Run, + Requires: 
[]*analysis.Analyzer{buildssa.Analyzer}, + } +} + +func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { + c.mu.Lock() + defer c.mu.Unlock() + + var visit func(pkg *types.Package) + visit = func(pkg *types.Package) { + if _, ok := c.allPackages[pkg]; ok { + return + } + c.allPackages[pkg] = struct{}{} + for _, imp := range pkg.Imports() { + visit(imp) } } - // This is a hack to work in the confines of "one package per - // job". We do all the actual work in the Init function, and only - // report results in the actual checker function. - var out []types.Object + c.fset = pass.Fset + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + pkg := &pkg{ + Fset: pass.Fset, + Files: pass.Files, + Pkg: pass.Pkg, + TypesInfo: pass.TypesInfo, + TypesSizes: pass.TypesSizes, + SSA: ssapkg.Pkg, + SrcFuncs: ssapkg.SrcFuncs, + } + + c.initialPackages[pkg.Pkg] = struct{}{} + if c.WholeProgram { // (e1) all packages share a single graph - out = c.processPkgs(prog.InitialPackages...) - } else { - var wg sync.WaitGroup - var mu sync.Mutex - for _, pkg := range prog.InitialPackages { - pkg := pkg - wg.Add(1) - go func() { - res := c.processPkgs(pkg) - mu.Lock() - out = append(out, res...) - mu.Unlock() - wg.Done() - }() + if c.graph == nil { + c.graph = NewGraph() + c.graph.wholeProgram = true } - wg.Wait() + c.processPkg(pkg) + } else { + c.graph = NewGraph() + c.graph.wholeProgram = false + + c.processPkg(pkg) + c.out = append(c.out, c.results()...) 
} - out2 := make([]types.Object, 0, len(out)) - for _, v := range out { - if _, ok := c.seen[prog.Fset().Position(v.Pos())]; !ok { - out2 = append(out2, v) + + return nil, nil +} + +func (c *Checker) ProblemObject(fset *token.FileSet, obj types.Object) lint.Problem { + name := obj.Name() + if sig, ok := obj.Type().(*types.Signature); ok && sig.Recv() != nil { + switch sig.Recv().Type().(type) { + case *types.Named, *types.Pointer: + typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) + if len(typ) > 0 && typ[0] == '*' { + name = fmt.Sprintf("(%s).%s", typ, obj.Name()) + } else if len(typ) > 0 { + name = fmt.Sprintf("%s.%s", typ, obj.Name()) + } } } - c.out = out2 + + return lint.Problem{ + Pos: lint.DisplayPosition(fset, obj.Pos()), + Message: fmt.Sprintf("%s %s is unused", typString(obj), name), + Check: "U1000", + } } -func (c *Checker) Lint(j *lint.Job) { - // The actual work is being done in Init. We only report existing - // results here. - unused := c.out - for _, u := range unused { - if u.Pkg() != j.Pkg.Types { +func (c *Checker) Result() []types.Object { + if c.WholeProgram { + c.out = c.results() + } + + out2 := make([]types.Object, 0, len(c.out)) + for _, v := range c.out { + if _, ok := c.initialPackages[v.Pkg()]; !ok { continue } - name := u.Name() - if sig, ok := u.Type().(*types.Signature); ok && sig.Recv() != nil { - switch sig.Recv().Type().(type) { - case *types.Named, *types.Pointer: - typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) - if len(typ) > 0 && typ[0] == '*' { - name = fmt.Sprintf("(%s).%s", typ, u.Name()) - } else if len(typ) > 0 { - name = fmt.Sprintf("%s.%s", typ, u.Name()) - } - } + position := c.fset.PositionFor(v.Pos(), false) + if _, ok := c.seen[position]; !ok { + out2 = append(out2, v) } - j.Errorf(u, "%s %s is unused", typString(u), name) } + return out2 } func (c *Checker) debugf(f string, v ...interface{}) { @@ -537,27 +575,15 @@ func (graph *Graph) 
quieten(node *Node) { } } -func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { - graph := NewGraph() - graph.wholeProgram = c.WholeProgram - graph.scopes = c.scopes - graph.initialPackages = c.initialPackages - +func (c *Checker) results() []types.Object { var out []types.Object - for _, pkg := range pkgs { - if pkg.PkgPath == "unsafe" { - continue - } - graph.entry(pkg) - } - if c.WholeProgram { var ifaces []*types.Interface var notIfaces []types.Type // implement as many interfaces as possible - graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { + c.graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { switch t := t.(type) { case *types.Interface: ifaces = append(ifaces, t) @@ -568,17 +594,18 @@ func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { } }) - // OPT(dh): this is not terribly efficient - ifaces = append(ifaces, c.interfaces...) + for pkg := range c.allPackages { + ifaces = append(ifaces, interfacesFromExportData(pkg)...) + } // (8.0) handle interfaces // (e2) types aim to implement all exported interfaces from all packages for _, t := range notIfaces { - ms := graph.msCache.MethodSet(t) + ms := c.graph.msCache.MethodSet(t) for _, iface := range ifaces { - if sels, ok := graph.implements(t, iface, ms); ok { + if sels, ok := c.graph.implements(t, iface, ms); ok { for _, sel := range sels { - graph.useMethod(t, sel, t, "implements") + c.graph.useMethod(t, sel, t, "implements") } } } @@ -600,27 +627,27 @@ func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { } c.debugf("digraph{\n") - debugNode(graph.Root) - for _, node := range graph.Nodes { + debugNode(c.graph.Root) + for _, node := range c.graph.Nodes { debugNode(node) } - graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { + c.graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { debugNode(value.(*Node)) }) c.debugf("}\n") } - graph.color(graph.Root) + c.graph.color(c.graph.Root) // if a node is unused, don't report any of 
the node's // children as unused. for example, if a function is unused, // don't flag its receiver. if a named type is unused, don't // flag its methods. - for _, node := range graph.Nodes { - graph.quieten(node) + for _, node := range c.graph.Nodes { + c.graph.quieten(node) } - graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { - graph.quieten(value.(*Node)) + c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + c.graph.quieten(value.(*Node)) }) report := func(node *Node) { @@ -634,8 +661,9 @@ func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { } if pos != 0 { + position := c.fset.PositionFor(pos, false) c.seenMu.Lock() - c.seen[pkgs[0].Fset.Position(pos)] = struct{}{} + c.seen[position] = struct{}{} c.seenMu.Unlock() } return @@ -645,39 +673,6 @@ func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { return } - type packager1 interface { - Pkg() *types.Package - } - type packager2 interface { - Package() *ssa.Package - } - - // do not report objects from packages we aren't checking. - checkPkg: - switch obj := node.obj.(type) { - case packager1: - for _, pkg := range pkgs { - if pkg.Types == obj.Pkg() { - break checkPkg - } - } - c.debugf("n%d [color=yellow];\n", node.id) - return - case packager2: - // This happens to filter $bound and $thunk, which - // should be fine, since we wouldn't want to report - // them, anyway. Remember that this filtering is only - // for the output, it doesn't affect the reachability - // of nodes in the graph. 
- for _, pkg := range pkgs { - if pkg.SSA == obj.Package() { - break checkPkg - } - } - c.debugf("n%d [color=yellow];\n", node.id) - return - } - c.debugf("n%d [color=red];\n", node.id) switch obj := node.obj.(type) { case *types.Var: @@ -703,20 +698,26 @@ func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { c.debugf("n%d [color=gray];\n", node.id) } } - for _, node := range graph.Nodes { + for _, node := range c.graph.Nodes { report(node) } - graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { report(value.(*Node)) }) return out } +func (c *Checker) processPkg(pkg *pkg) { + if pkg.Pkg.Path() == "unsafe" { + return + } + c.graph.entry(pkg) +} + type Graph struct { pkg *ssa.Package msCache typeutil.MethodSetCache - scopes map[*types.Scope]*ssa.Function wholeProgram bool @@ -728,8 +729,6 @@ type Graph struct { seenTypes typeutil.Map seenFns map[*ssa.Function]struct{} - - initialPackages []*lint.Pkg } func NewGraph() *Graph { @@ -877,12 +876,7 @@ func isIrrelevant(obj interface{}) bool { func (g *Graph) isInterestingPackage(pkg *types.Package) bool { if g.wholeProgram { - for _, opkg := range g.initialPackages { - if opkg.Types == pkg { - return true - } - } - return false + return true } return pkg == g.pkg.Pkg } @@ -968,11 +962,19 @@ func (g *Graph) trackExportedIdentifier(obj types.Object) bool { return true } -func (g *Graph) entry(pkg *lint.Pkg) { +func (g *Graph) entry(pkg *pkg) { // TODO rename Entry g.pkg = pkg.SSA - for _, f := range pkg.Syntax { + scopes := map[*types.Scope]*ssa.Function{} + for _, fn := range pkg.SrcFuncs { + if fn.Object() != nil { + scope := fn.Object().(*types.Func).Scope() + scopes[scope] = fn + } + } + + for _, f := range pkg.Files { for _, cg := range f.Comments { for _, c := range cg.List { if strings.HasPrefix(c.Text, "//go:linkname ") { @@ -1006,7 +1008,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { surroundingFunc := func(obj types.Object) 
*ssa.Function { scope := obj.Parent() for scope != nil { - if fn := g.scopes[scope]; fn != nil { + if fn := scopes[scope]; fn != nil { return fn } scope = scope.Parent() @@ -1037,7 +1039,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { // Find constants being used inside functions, find sinks in tests handledConsts := map[*ast.Ident]struct{}{} - for _, fn := range pkg.InitialFunctions { + for _, fn := range pkg.SrcFuncs { g.see(fn) node := fn.Syntax() if node == nil { @@ -1094,82 +1096,83 @@ func (g *Graph) entry(pkg *lint.Pkg) { } var fn *ssa.Function - pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)}, func(n ast.Node) { - switch n := n.(type) { - case *ast.FuncDecl: - fn = pkg.SSA.Prog.FuncValue(pkg.TypesInfo.ObjectOf(n.Name).(*types.Func)) - if fn != nil { - g.see(fn) - } - case *ast.GenDecl: - switch n.Tok { - case token.CONST: - groups := lintdsl.GroupSpecs(pkg.Fset, n.Specs) - for _, specs := range groups { - if len(specs) > 1 { - cg := &ConstGroup{} - g.see(cg) - for _, spec := range specs { - for _, name := range spec.(*ast.ValueSpec).Names { - obj := pkg.TypesInfo.ObjectOf(name) - // (10.1) const groups - g.seeAndUse(obj, cg, "const group") - g.use(cg, obj, "const group") + for _, f := range pkg.Files { + ast.Inspect(f, func(n ast.Node) bool { + switch n := n.(type) { + case *ast.FuncDecl: + fn = pkg.SSA.Prog.FuncValue(pkg.TypesInfo.ObjectOf(n.Name).(*types.Func)) + if fn != nil { + g.see(fn) + } + case *ast.GenDecl: + switch n.Tok { + case token.CONST: + groups := lintdsl.GroupSpecs(pkg.Fset, n.Specs) + for _, specs := range groups { + if len(specs) > 1 { + cg := &ConstGroup{} + g.see(cg) + for _, spec := range specs { + for _, name := range spec.(*ast.ValueSpec).Names { + obj := pkg.TypesInfo.ObjectOf(name) + // (10.1) const groups + g.seeAndUse(obj, cg, "const group") + g.use(cg, obj, "const group") + } } } } - } - case token.VAR: - for _, spec := range n.Specs { - v := spec.(*ast.ValueSpec) - for _, name := range v.Names { - T := 
pkg.TypesInfo.TypeOf(name) - if fn != nil { - g.seeAndUse(T, fn, "var decl") - } else { - g.seeAndUse(T, nil, "var decl") + case token.VAR: + for _, spec := range n.Specs { + v := spec.(*ast.ValueSpec) + for _, name := range v.Names { + T := pkg.TypesInfo.TypeOf(name) + if fn != nil { + g.seeAndUse(T, fn, "var decl") + } else { + g.seeAndUse(T, nil, "var decl") + } + g.typ(T) } - g.typ(T) } - } - case token.TYPE: - for _, spec := range n.Specs { - // go/types doesn't provide a way to go from a - // types.Named to the named type it was based on - // (the t1 in type t2 t1). Therefore we walk the - // AST and process GenDecls. - // - // (2.2) named types use the type they're based on - v := spec.(*ast.TypeSpec) - T := pkg.TypesInfo.TypeOf(v.Type) - obj := pkg.TypesInfo.ObjectOf(v.Name) - g.see(obj) - g.see(T) - g.use(T, obj, "type") - g.typ(obj.Type()) - g.typ(T) - - if v.Assign != 0 { - aliasFor := obj.(*types.TypeName).Type() - // (2.3) named types use all their aliases. we can't easily track uses of aliases - if isIrrelevant(aliasFor) { - // We do not track the type this is an - // alias for (for example builtins), so - // just mark the alias used. - // - // FIXME(dh): what about aliases declared inside functions? - g.use(obj, nil, "alias") - } else { - g.see(aliasFor) - g.seeAndUse(obj, aliasFor, "alias") + case token.TYPE: + for _, spec := range n.Specs { + // go/types doesn't provide a way to go from a + // types.Named to the named type it was based on + // (the t1 in type t2 t1). Therefore we walk the + // AST and process GenDecls. + // + // (2.2) named types use the type they're based on + v := spec.(*ast.TypeSpec) + T := pkg.TypesInfo.TypeOf(v.Type) + obj := pkg.TypesInfo.ObjectOf(v.Name) + g.see(obj) + g.see(T) + g.use(T, obj, "type") + g.typ(obj.Type()) + g.typ(T) + + if v.Assign != 0 { + aliasFor := obj.(*types.TypeName).Type() + // (2.3) named types use all their aliases. 
we can't easily track uses of aliases + if isIrrelevant(aliasFor) { + // We do not track the type this is an + // alias for (for example builtins), so + // just mark the alias used. + // + // FIXME(dh): what about aliases declared inside functions? + g.use(obj, nil, "alias") + } else { + g.see(aliasFor) + g.seeAndUse(obj, aliasFor, "alias") + } } } } } - default: - panic(fmt.Sprintf("unreachable: %T", n)) - } - }) + return true + }) + } for _, m := range g.pkg.Members { switch m := m.(type) { diff --git a/unused/unused_test.go b/unused/unused_test.go index 73c9c0cf1..5a023ddfe 100644 --- a/unused/unused_test.go +++ b/unused/unused_test.go @@ -1,18 +1,198 @@ package unused -// Copyright (c) 2013 The Go Authors. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found at -// https://developers.google.com/open-source/licenses/bsd. - import ( + "fmt" + "go/parser" + "go/token" + "go/types" + "os" + "sort" + "strconv" + "strings" "testing" + "text/scanner" - "honnef.co/go/tools/lint/testutil" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/go/packages" + "honnef.co/go/tools/lint" ) +// parseExpectations parses the content of a "// want ..." comment +// and returns the expections, a mixture of diagnostics ("rx") and +// facts (name:"rx"). +func parseExpectations(text string) ([]string, error) { + var scanErr string + sc := new(scanner.Scanner).Init(strings.NewReader(text)) + sc.Error = func(s *scanner.Scanner, msg string) { + scanErr = msg // e.g. 
bad string escape + } + sc.Mode = scanner.ScanIdents | scanner.ScanStrings | scanner.ScanRawStrings + + scanRegexp := func(tok rune) (string, error) { + if tok != scanner.String && tok != scanner.RawString { + return "", fmt.Errorf("got %s, want regular expression", + scanner.TokenString(tok)) + } + pattern, _ := strconv.Unquote(sc.TokenText()) // can't fail + return pattern, nil + } + + var expects []string + for { + tok := sc.Scan() + switch tok { + case scanner.String, scanner.RawString: + rx, err := scanRegexp(tok) + if err != nil { + return nil, err + } + expects = append(expects, rx) + + case scanner.EOF: + if scanErr != "" { + return nil, fmt.Errorf("%s", scanErr) + } + return expects, nil + + default: + return nil, fmt.Errorf("unexpected %s", scanner.TokenString(tok)) + } + } + + return expects, nil +} + +func check(t *testing.T, fset *token.FileSet, diagnostics []types.Object) { + type key struct { + file string + line int + } + + files := map[string]struct{}{} + for _, d := range diagnostics { + files[fset.Position(d.Pos()).Filename] = struct{}{} + } + + want := make(map[key][]string) + + // processComment parses expectations out of comments. + processComment := func(filename string, linenum int, text string) { + text = strings.TrimSpace(text) + + // Any comment starting with "want" is treated + // as an expectation, even without following whitespace. + if rest := strings.TrimPrefix(text, "want"); rest != text { + expects, err := parseExpectations(rest) + if err != nil { + t.Errorf("%s:%d: in 'want' comment: %s", filename, linenum, err) + return + } + if expects != nil { + want[key{filename, linenum}] = expects + } + } + } + + // Extract 'want' comments from Go files. 
+ fset2 := token.NewFileSet() + for f := range files { + af, err := parser.ParseFile(fset2, f, nil, parser.ParseComments) + if err != nil { + t.Fatal(err) + } + for _, cgroup := range af.Comments { + for _, c := range cgroup.List { + + text := strings.TrimPrefix(c.Text, "//") + if text == c.Text { + continue // not a //-comment + } + + // Hack: treat a comment of the form "//...// want..." + // as if it starts at 'want'. + // This allows us to add comments on comments, + // as required when testing the buildtag analyzer. + if i := strings.Index(text, "// want"); i >= 0 { + text = text[i+len("// "):] + } + + // It's tempting to compute the filename + // once outside the loop, but it's + // incorrect because it can change due + // to //line directives. + posn := fset2.Position(c.Pos()) + processComment(posn.Filename, posn.Line, text) + } + } + } + + checkMessage := func(posn token.Position, name, message string) { + k := key{posn.Filename, posn.Line} + expects := want[k] + var unmatched []string + for i, exp := range expects { + if exp == message { + // matched: remove the expectation. + expects[i] = expects[len(expects)-1] + expects = expects[:len(expects)-1] + want[k] = expects + return + } + unmatched = append(unmatched, fmt.Sprintf("%q", exp)) + } + if unmatched == nil { + t.Errorf("%v: unexpected: %v", posn, message) + } else { + t.Errorf("%v: %q does not match pattern %s", + posn, message, strings.Join(unmatched, " or ")) + } + } + + // Check the diagnostics match expectations. + for _, f := range diagnostics { + posn := fset.Position(f.Pos()) + checkMessage(posn, "", f.Name()) + } + + // Reject surplus expectations. + // + // Sometimes an Analyzer reports two similar diagnostics on a + // line with only one expectation. The reader may be confused by + // the error message. + // TODO(adonovan): print a better error: + // "got 2 diagnostics here; each one needs its own expectation". 
+ var surplus []string + for key, expects := range want { + for _, exp := range expects { + err := fmt.Sprintf("%s:%d: no diagnostic was reported matching %q", key.file, key.line, exp) + surplus = append(surplus, err) + } + } + sort.Strings(surplus) + for _, err := range surplus { + t.Errorf("%s", err) + } +} + func TestAll(t *testing.T) { - c := &Checker{} - testutil.TestAll(t, c, "") + c := NewChecker() + r, err := lint.NewRunner() + if err != nil { + t.Fatal(err) + } + + dir := analysistest.TestData() + cfg := &packages.Config{ + Dir: dir, + Tests: true, + Env: append(os.Environ(), "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off"), + } + pkgs, err := r.Run(cfg, []string{"./..."}, []*analysis.Analyzer{c.Analyzer()}) + if err != nil { + t.Fatal(err) + } + + res := c.Result() + check(t, pkgs[0].Fset, res) }