diff --git a/.golangci.yml b/.golangci.yml
new file mode 100644
index 0000000..1c219fd
--- /dev/null
+++ b/.golangci.yml
@@ -0,0 +1,19 @@
+run:
+  deadline: 1h
+  tests: true
+
+linters:
+  enable:
+    - gocyclo
+    - gofmt
+    - golint
+
+linters-settings:
+  gocyclo:
+    min-complexity: 11
+
+  gofmt:
+    simplify: true
+
+  golint:
+    min-confidence: 0.25 # ignore missing package comment false positive
diff --git a/.travis.yml b/.travis.yml
index 5b2f0f8..5140927 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,24 +9,10 @@ install: true
 notifications:
   email: false
 
-# install code checkers
+# install linter
 before_script:
-  - cd vendor/golang.org/x/lint/golint
-  - go install
-  - cd -
-  - cd vendor/honnef.co/go/tools/cmd/megacheck
-  - go install
-  - cd -
-  - cd vendor/github.com/fzipp/gocyclo
-  - go install
-  - cd -
+  - go get -u github.com/golangci/golangci-lint/cmd/golangci-lint
 
 script:
-  - GO_FILES=$(find . -iname '*.go' -type f | grep -v /vendor/)
-  - test -z $(gofmt -s -l ${GO_FILES})
+  - golangci-lint --concurrency $(nproc) run
   - go test -v -race ./...
-  - go vet ./...
-  - megacheck ./...
-  # golint ./... doesn't ignore vendor/ for some dumb reason
-  - golint -set_exit_status $(go list ./... | grep -v /vendor/)
-  - gocyclo -over 10 ${GO_FILES}
diff --git a/Gopkg.lock b/Gopkg.lock
index 9f4817e..50a80cf 100644
--- a/Gopkg.lock
+++ b/Gopkg.lock
@@ -2,79 +2,20 @@
 
 
 [[projects]]
-  branch = "master"
-  name = "github.com/fzipp/gocyclo"
-  packages = ["."]
-  revision = "6acd4345c835499920e8426c7e4e8d7a34f1bb83"
-
-[[projects]]
-  name = "github.com/kisielk/gotool"
-  packages = [
-    ".",
-    "internal/load"
-  ]
-  revision = "80517062f582ea3340cd4baf70e86d539ae7d84d"
-  version = "v1.0.0"
-
-[[projects]]
-  branch = "master"
   name = "github.com/kr/pretty"
   packages = ["."]
-  revision = "cfb55aafdaf3ec08f0db22699ab822c50091b1c4"
+  revision = "73f6ac0b30a98e433b289500d779f50c1a6f0712"
+  version = "v0.1.0"
 
 [[projects]]
-  branch = "master"
   name = "github.com/kr/text"
   packages = ["."]
-  revision = "7cafcd837844e784b526369c9bce262804aebc60"
-
-[[projects]]
-  branch = "master"
-  name = "golang.org/x/lint"
-  packages = [
-    ".",
-    "golint"
-  ]
-  revision = "85993ffd0a6cd043291f3f63d45d656d97b165bd"
-
-[[projects]]
-  branch = "master"
-  name = "golang.org/x/tools"
-  packages = [
-    "go/ast/astutil",
-    "go/buildutil",
-    "go/gcexportdata",
-    "go/gcimporter15",
-    "go/loader",
-    "go/types/typeutil"
-  ]
-  revision = "4c0f0e48a635ba08a80fbebdb19258a0f675a910"
-
-[[projects]]
-  name = "honnef.co/go/tools"
-  packages = [
-    "callgraph",
-    "callgraph/static",
-    "cmd/megacheck",
-    "deprecated",
-    "functions",
-    "internal/sharedcheck",
-    "lint",
-    "lint/lintutil",
-    "simple",
-    "ssa",
-    "ssa/ssautil",
-    "staticcheck",
-    "staticcheck/vrp",
-    "unused",
-    "version"
-  ]
-  revision = "d73ab98e7c39fdcf9ba65062e43d34310f198353"
-  version = "2017.2.2"
+  revision = "e2ffdb16a802fe2bb95e2e35ff34f0e53aeef34f"
+  version = "v0.1.0"
 
 [solve-meta]
   analyzer-name = "dep"
   analyzer-version = 1
-  inputs-digest = "f9817690650739b7e8cc7c487924fbe7043fc818348569fdb9050e440c080287"
+  inputs-digest = "442f9b2f90f1fa3f24bd516e566ff7931c21438139f0077ad4f6d79fe1fdace6"
   solver-name = "gps-cdcl"
   solver-version = 1
diff --git a/Gopkg.toml b/Gopkg.toml
index bb8fc20..54c171e 100644
--- a/Gopkg.toml
+++ b/Gopkg.toml
@@ -1,19 +1,8 @@
-required = [
-  "github.com/fzipp/gocyclo",
-  "golang.org/x/lint",
-  "golang.org/x/lint/golint",
-  "honnef.co/go/tools/cmd/megacheck",
-]
-
-[[constraint]]
-name = 
"github.com/fzipp/gocyclo" -branch = "master" - [[constraint]] -name = "golang.org/x/lint" -branch = "master" + name = "github.com/kr/pretty" + version = "^0.1.0" [prune] -go-tests = true -non-go = true -unused-packages = true + go-tests = true + non-go = true + unused-packages = true diff --git a/vendor/github.com/fzipp/gocyclo/CONTRIBUTORS b/vendor/github.com/fzipp/gocyclo/CONTRIBUTORS deleted file mode 100644 index 1c09f1a..0000000 --- a/vendor/github.com/fzipp/gocyclo/CONTRIBUTORS +++ /dev/null @@ -1,7 +0,0 @@ -# Names should be added to this file like so: -# Name - -# Please keep the list sorted. - -Frederik Zipp -Harshavardhana diff --git a/vendor/github.com/fzipp/gocyclo/LICENSE b/vendor/github.com/fzipp/gocyclo/LICENSE deleted file mode 100644 index 45f88d6..0000000 --- a/vendor/github.com/fzipp/gocyclo/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2013 Frederik Zipp. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of the copyright owner nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/fzipp/gocyclo/gocyclo.go b/vendor/github.com/fzipp/gocyclo/gocyclo.go deleted file mode 100644 index 00ab50d..0000000 --- a/vendor/github.com/fzipp/gocyclo/gocyclo.go +++ /dev/null @@ -1,225 +0,0 @@ -// Copyright 2013 Frederik Zipp. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Gocyclo calculates the cyclomatic complexities of functions and -// methods in Go source code. -// -// Usage: -// gocyclo [ ...] ... -// -// Flags: -// -over N show functions with complexity > N only and -// return exit code 1 if the output is non-empty -// -top N show the top N most complex functions only -// -avg show the average complexity -// -// The output fields for each line are: -// -package main - -import ( - "flag" - "fmt" - "go/ast" - "go/parser" - "go/token" - "io" - "log" - "os" - "path/filepath" - "sort" - "strings" -) - -const usageDoc = `Calculate cyclomatic complexities of Go functions. -Usage: - gocyclo [flags] ... 
- -Flags: - -over N show functions with complexity > N only and - return exit code 1 if the set is non-empty - -top N show the top N most complex functions only - -avg show the average complexity over all functions, - not depending on whether -over or -top are set - -The output fields for each line are: - -` - -func usage() { - fmt.Fprintf(os.Stderr, usageDoc) - os.Exit(2) -} - -var ( - over = flag.Int("over", 0, "show functions with complexity > N only") - top = flag.Int("top", -1, "show the top N most complex functions only") - avg = flag.Bool("avg", false, "show the average complexity") -) - -func main() { - log.SetFlags(0) - log.SetPrefix("gocyclo: ") - flag.Usage = usage - flag.Parse() - args := flag.Args() - if len(args) == 0 { - usage() - } - - stats := analyze(args) - sort.Sort(byComplexity(stats)) - written := writeStats(os.Stdout, stats) - - if *avg { - showAverage(stats) - } - - if *over > 0 && written > 0 { - os.Exit(1) - } -} - -func analyze(paths []string) []stat { - var stats []stat - for _, path := range paths { - if isDir(path) { - stats = analyzeDir(path, stats) - } else { - stats = analyzeFile(path, stats) - } - } - return stats -} - -func isDir(filename string) bool { - fi, err := os.Stat(filename) - return err == nil && fi.IsDir() -} - -func analyzeFile(fname string, stats []stat) []stat { - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, fname, nil, 0) - if err != nil { - log.Fatal(err) - } - return buildStats(f, fset, stats) -} - -func analyzeDir(dirname string, stats []stat) []stat { - filepath.Walk(dirname, func(path string, info os.FileInfo, err error) error { - if err == nil && !info.IsDir() && strings.HasSuffix(path, ".go") { - stats = analyzeFile(path, stats) - } - return err - }) - return stats -} - -func writeStats(w io.Writer, sortedStats []stat) int { - for i, stat := range sortedStats { - if i == *top { - return i - } - if stat.Complexity <= *over { - return i - } - fmt.Fprintln(w, stat) - } - return len(sortedStats) -} - -func showAverage(stats []stat) { - fmt.Printf("Average: %.3g\n", average(stats)) -} - -func average(stats []stat) float64 { - total := 0 - for _, s := range stats { - total += s.Complexity - } - return float64(total) / float64(len(stats)) -} - -type stat struct { - PkgName string - FuncName string - Complexity int - Pos token.Position -} - -func (s stat) String() string { - return fmt.Sprintf("%d %s %s %s", s.Complexity, s.PkgName, s.FuncName, s.Pos) -} - -type byComplexity []stat - -func (s byComplexity) Len() int { return len(s) } -func (s byComplexity) Swap(i, j int) { s[i], s[j] = s[j], s[i] } -func (s byComplexity) Less(i, j int) bool { - return s[i].Complexity >= s[j].Complexity -} - -func buildStats(f *ast.File, fset *token.FileSet, stats []stat) []stat { - for _, decl := range f.Decls { - if fn, ok := decl.(*ast.FuncDecl); ok { - stats = append(stats, stat{ - PkgName: f.Name.Name, - FuncName: funcName(fn), - Complexity: complexity(fn), - Pos: fset.Position(fn.Pos()), - }) - } - } - return stats -} - -// funcName returns the name representation of a function or method: -// "(Type).Name" for methods or simply "Name" for functions. -func funcName(fn *ast.FuncDecl) string { - if fn.Recv != nil { - if fn.Recv.NumFields() > 0 { - typ := fn.Recv.List[0].Type - return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name) - } - } - return fn.Name.Name -} - -// recvString returns a string representation of recv of the -// form "T", "*T", or "BADRECV" (if not a proper receiver type). 
-func recvString(recv ast.Expr) string { - switch t := recv.(type) { - case *ast.Ident: - return t.Name - case *ast.StarExpr: - return "*" + recvString(t.X) - } - return "BADRECV" -} - -// complexity calculates the cyclomatic complexity of a function. -func complexity(fn *ast.FuncDecl) int { - v := complexityVisitor{} - ast.Walk(&v, fn) - return v.Complexity -} - -type complexityVisitor struct { - // Complexity is the cyclomatic complexity - Complexity int -} - -// Visit implements the ast.Visitor interface. -func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor { - switch n := n.(type) { - case *ast.FuncDecl, *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.CaseClause, *ast.CommClause: - v.Complexity++ - case *ast.BinaryExpr: - if n.Op == token.LAND || n.Op == token.LOR { - v.Complexity++ - } - } - return v -} diff --git a/vendor/github.com/kisielk/gotool/LEGAL b/vendor/github.com/kisielk/gotool/LEGAL deleted file mode 100644 index 72b859c..0000000 --- a/vendor/github.com/kisielk/gotool/LEGAL +++ /dev/null @@ -1,32 +0,0 @@ -All the files in this distribution are covered under either the MIT -license (see the file LICENSE) except some files mentioned below. - -match.go, match_test.go: - - Copyright (c) 2009 The Go Authors. All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following disclaimer - in the documentation and/or other materials provided with the - distribution. - * Neither the name of Google Inc. nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/kisielk/gotool/LICENSE b/vendor/github.com/kisielk/gotool/LICENSE deleted file mode 100644 index 1cbf651..0000000 --- a/vendor/github.com/kisielk/gotool/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2013 Kamil Kisiel - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/kisielk/gotool/go13.go b/vendor/github.com/kisielk/gotool/go13.go deleted file mode 100644 index 2dd9b3f..0000000 --- a/vendor/github.com/kisielk/gotool/go13.go +++ /dev/null @@ -1,15 +0,0 @@ -// +build !go1.4 - -package gotool - -import ( - "go/build" - "path/filepath" - "runtime" -) - -var gorootSrc = filepath.Join(runtime.GOROOT(), "src", "pkg") - -func shouldIgnoreImport(p *build.Package) bool { - return true -} diff --git a/vendor/github.com/kisielk/gotool/go14-15.go b/vendor/github.com/kisielk/gotool/go14-15.go deleted file mode 100644 index aa99a32..0000000 --- a/vendor/github.com/kisielk/gotool/go14-15.go +++ /dev/null @@ -1,15 +0,0 @@ -// +build go1.4,!go1.6 - -package gotool - -import ( - "go/build" - "path/filepath" - "runtime" -) - -var gorootSrc = filepath.Join(runtime.GOROOT(), "src") - -func shouldIgnoreImport(p *build.Package) bool { - return true -} diff --git a/vendor/github.com/kisielk/gotool/go16-18.go b/vendor/github.com/kisielk/gotool/go16-18.go deleted file mode 100644 index f25cec1..0000000 --- a/vendor/github.com/kisielk/gotool/go16-18.go +++ /dev/null @@ -1,15 +0,0 @@ -// +build go1.6,!go1.9 - -package gotool - -import ( - "go/build" - "path/filepath" - "runtime" -) - -var gorootSrc = filepath.Join(runtime.GOROOT(), "src") - -func shouldIgnoreImport(p *build.Package) bool { - return p == nil || len(p.InvalidGoFiles) == 0 -} diff --git a/vendor/github.com/kisielk/gotool/internal/load/path.go b/vendor/github.com/kisielk/gotool/internal/load/path.go deleted file mode 100644 index 74e15b9..0000000 --- a/vendor/github.com/kisielk/gotool/internal/load/path.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.9 - -package load - -import ( - "strings" -) - -// hasPathPrefix reports whether the path s begins with the -// elements in prefix. 
-func hasPathPrefix(s, prefix string) bool { - switch { - default: - return false - case len(s) == len(prefix): - return s == prefix - case len(s) > len(prefix): - if prefix != "" && prefix[len(prefix)-1] == '/' { - return strings.HasPrefix(s, prefix) - } - return s[len(prefix)] == '/' && s[:len(prefix)] == prefix - } -} diff --git a/vendor/github.com/kisielk/gotool/internal/load/pkg.go b/vendor/github.com/kisielk/gotool/internal/load/pkg.go deleted file mode 100644 index b937ede..0000000 --- a/vendor/github.com/kisielk/gotool/internal/load/pkg.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.9 - -// Package load loads packages. -package load - -import ( - "strings" -) - -// isStandardImportPath reports whether $GOROOT/src/path should be considered -// part of the standard distribution. For historical reasons we allow people to add -// their own code to $GOROOT instead of using $GOPATH, but we assume that -// code will start with a domain name (dot in the first element). -func isStandardImportPath(path string) bool { - i := strings.Index(path, "/") - if i < 0 { - i = len(path) - } - elem := path[:i] - return !strings.Contains(elem, ".") -} diff --git a/vendor/github.com/kisielk/gotool/internal/load/search.go b/vendor/github.com/kisielk/gotool/internal/load/search.go deleted file mode 100644 index 17ed62d..0000000 --- a/vendor/github.com/kisielk/gotool/internal/load/search.go +++ /dev/null @@ -1,354 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.9 - -package load - -import ( - "fmt" - "go/build" - "log" - "os" - "path" - "path/filepath" - "regexp" - "strings" -) - -// Context specifies values for operation of ImportPaths that would -// otherwise come from cmd/go/internal/cfg package. -// -// This is a construct added for gotool purposes and doesn't have -// an equivalent upstream in cmd/go. -type Context struct { - // BuildContext is the build context to use. - BuildContext build.Context - - // GOROOTsrc is the location of the src directory in GOROOT. - // At this time, it's used only in MatchPackages to skip - // GOOROOT/src entry from BuildContext.SrcDirs output. - GOROOTsrc string -} - -// allPackages returns all the packages that can be found -// under the $GOPATH directories and $GOROOT matching pattern. -// The pattern is either "all" (all packages), "std" (standard packages), -// "cmd" (standard commands), or a path including "...". -func (c *Context) allPackages(pattern string) []string { - pkgs := c.MatchPackages(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -// allPackagesInFS is like allPackages but is passed a pattern -// beginning ./ or ../, meaning it should scan the tree rooted -// at the given directory. There are ... in the pattern too. -func (c *Context) allPackagesInFS(pattern string) []string { - pkgs := c.MatchPackagesInFS(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -// MatchPackages returns a list of package paths matching pattern -// (see go help packages for pattern syntax). 
-func (c *Context) MatchPackages(pattern string) []string { - match := func(string) bool { return true } - treeCanMatch := func(string) bool { return true } - if !IsMetaPackage(pattern) { - match = matchPattern(pattern) - treeCanMatch = treeCanMatchPattern(pattern) - } - - have := map[string]bool{ - "builtin": true, // ignore pseudo-package that exists only for documentation - } - if !c.BuildContext.CgoEnabled { - have["runtime/cgo"] = true // ignore during walk - } - var pkgs []string - - for _, src := range c.BuildContext.SrcDirs() { - if (pattern == "std" || pattern == "cmd") && src != c.GOROOTsrc { - continue - } - src = filepath.Clean(src) + string(filepath.Separator) - root := src - if pattern == "cmd" { - root += "cmd" + string(filepath.Separator) - } - filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { - if err != nil || path == src { - return nil - } - - want := true - // Avoid .foo, _foo, and testdata directory trees. - _, elem := filepath.Split(path) - if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { - want = false - } - - name := filepath.ToSlash(path[len(src):]) - if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") { - // The name "std" is only the standard library. - // If the name is cmd, it's the root of the command tree. - want = false - } - if !treeCanMatch(name) { - want = false - } - - if !fi.IsDir() { - if fi.Mode()&os.ModeSymlink != 0 && want { - if target, err := os.Stat(path); err == nil && target.IsDir() { - fmt.Fprintf(os.Stderr, "warning: ignoring symlink %s\n", path) - } - } - return nil - } - if !want { - return filepath.SkipDir - } - - if have[name] { - return nil - } - have[name] = true - if !match(name) { - return nil - } - pkg, err := c.BuildContext.ImportDir(path, 0) - if err != nil { - if _, noGo := err.(*build.NoGoError); noGo { - return nil - } - } - - // If we are expanding "cmd", skip main - // packages under cmd/vendor. At least as of - // March, 2017, there is one there for the - // vendored pprof tool. - if pattern == "cmd" && strings.HasPrefix(pkg.ImportPath, "cmd/vendor") && pkg.Name == "main" { - return nil - } - - pkgs = append(pkgs, name) - return nil - }) - } - return pkgs -} - -// MatchPackagesInFS returns a list of package paths matching pattern, -// which must begin with ./ or ../ -// (see go help packages for pattern syntax). -func (c *Context) MatchPackagesInFS(pattern string) []string { - // Find directory to begin the scan. - // Could be smarter but this one optimization - // is enough for now, since ... is usually at the - // end of a path. - i := strings.Index(pattern, "...") - dir, _ := path.Split(pattern[:i]) - - // pattern begins with ./ or ../. - // path.Clean will discard the ./ but not the ../. - // We need to preserve the ./ for pattern matching - // and in the returned import paths. - prefix := "" - if strings.HasPrefix(pattern, "./") { - prefix = "./" - } - match := matchPattern(pattern) - - var pkgs []string - filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() { - return nil - } - if path == dir { - // filepath.Walk starts at dir and recurses. For the recursive case, - // the path is the result of filepath.Join, which calls filepath.Clean. - // The initial case is not Cleaned, though, so we do this explicitly. - // - // This converts a path like "./io/" to "io". Without this step, running - // "cd $GOROOT/src; go list ./io/..." 
would incorrectly skip the io - // package, because prepending the prefix "./" to the unclean path would - // result in "././io", and match("././io") returns false. - path = filepath.Clean(path) - } - - // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". - _, elem := filepath.Split(path) - dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." - if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { - return filepath.SkipDir - } - - name := prefix + filepath.ToSlash(path) - if !match(name) { - return nil - } - - // We keep the directory if we can import it, or if we can't import it - // due to invalid Go source files. This means that directories containing - // parse errors will be built (and fail) instead of being silently skipped - // as not matching the pattern. Go 1.5 and earlier skipped, but that - // behavior means people miss serious mistakes. - // See golang.org/issue/11407. - if p, err := c.BuildContext.ImportDir(path, 0); err != nil && (p == nil || len(p.InvalidGoFiles) == 0) { - if _, noGo := err.(*build.NoGoError); !noGo { - log.Print(err) - } - return nil - } - pkgs = append(pkgs, name) - return nil - }) - return pkgs -} - -// treeCanMatchPattern(pattern)(name) reports whether -// name or children of name can possibly match pattern. -// Pattern is the same limited glob accepted by matchPattern. -func treeCanMatchPattern(pattern string) func(name string) bool { - wildCard := false - if i := strings.Index(pattern, "..."); i >= 0 { - wildCard = true - pattern = pattern[:i] - } - return func(name string) bool { - return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || - wildCard && strings.HasPrefix(name, pattern) - } -} - -// matchPattern(pattern)(name) reports whether -// name matches pattern. Pattern is a limited glob -// pattern in which '...' means 'any string' and there -// is no other special syntax. -// Unfortunately, there are two special cases. Quoting "go help packages": -// -// First, /... at the end of the pattern can match an empty string, -// so that net/... matches both net and packages in its subdirectories, like net/http. -// Second, any slash-separted pattern element containing a wildcard never -// participates in a match of the "vendor" element in the path of a vendored -// package, so that ./... does not match packages in subdirectories of -// ./vendor or ./mycode/vendor, but ./vendor/... and ./mycode/vendor/... do. -// Note, however, that a directory named vendor that itself contains code -// is not a vendored package: cmd/vendor would be a command named vendor, -// and the pattern cmd/... matches it. -func matchPattern(pattern string) func(name string) bool { - // Convert pattern to regular expression. - // The strategy for the trailing /... is to nest it in an explicit ? expression. - // The strategy for the vendor exclusion is to change the unmatchable - // vendor strings to a disallowed code point (vendorChar) and to use - // "(anything but that codepoint)*" as the implementation of the ... wildcard. - // This is a bit complicated but the obvious alternative, - // namely a hand-written search like in most shell glob matchers, - // is too easy to make accidentally exponential. - // Using package regexp guarantees linear-time matching. 
- - const vendorChar = "\x00" - - if strings.Contains(pattern, vendorChar) { - return func(name string) bool { return false } - } - - re := regexp.QuoteMeta(pattern) - re = replaceVendor(re, vendorChar) - switch { - case strings.HasSuffix(re, `/`+vendorChar+`/\.\.\.`): - re = strings.TrimSuffix(re, `/`+vendorChar+`/\.\.\.`) + `(/vendor|/` + vendorChar + `/\.\.\.)` - case re == vendorChar+`/\.\.\.`: - re = `(/vendor|/` + vendorChar + `/\.\.\.)` - case strings.HasSuffix(re, `/\.\.\.`): - re = strings.TrimSuffix(re, `/\.\.\.`) + `(/\.\.\.)?` - } - re = strings.Replace(re, `\.\.\.`, `[^`+vendorChar+`]*`, -1) - - reg := regexp.MustCompile(`^` + re + `$`) - - return func(name string) bool { - if strings.Contains(name, vendorChar) { - return false - } - return reg.MatchString(replaceVendor(name, vendorChar)) - } -} - -// replaceVendor returns the result of replacing -// non-trailing vendor path elements in x with repl. -func replaceVendor(x, repl string) string { - if !strings.Contains(x, "vendor") { - return x - } - elem := strings.Split(x, "/") - for i := 0; i < len(elem)-1; i++ { - if elem[i] == "vendor" { - elem[i] = repl - } - } - return strings.Join(elem, "/") -} - -// ImportPaths returns the import paths to use for the given command line. -func (c *Context) ImportPaths(args []string) []string { - args = c.ImportPathsNoDotExpansion(args) - var out []string - for _, a := range args { - if strings.Contains(a, "...") { - if build.IsLocalImport(a) { - out = append(out, c.allPackagesInFS(a)...) - } else { - out = append(out, c.allPackages(a)...) - } - continue - } - out = append(out, a) - } - return out -} - -// ImportPathsNoDotExpansion returns the import paths to use for the given -// command line, but it does no ... expansion. -func (c *Context) ImportPathsNoDotExpansion(args []string) []string { - if len(args) == 0 { - return []string{"."} - } - var out []string - for _, a := range args { - // Arguments are supposed to be import paths, but - // as a courtesy to Windows developers, rewrite \ to / - // in command-line arguments. Handles .\... and so on. - if filepath.Separator == '\\' { - a = strings.Replace(a, `\`, `/`, -1) - } - - // Put argument in canonical form, but preserve leading ./. - if strings.HasPrefix(a, "./") { - a = "./" + path.Clean(a) - if a == "./." { - a = "." - } - } else { - a = path.Clean(a) - } - if IsMetaPackage(a) { - out = append(out, c.allPackages(a)...) - continue - } - out = append(out, a) - } - return out -} - -// IsMetaPackage checks if name is a reserved package name that expands to multiple packages. -func IsMetaPackage(name string) bool { - return name == "std" || name == "cmd" || name == "all" -} diff --git a/vendor/github.com/kisielk/gotool/match.go b/vendor/github.com/kisielk/gotool/match.go deleted file mode 100644 index 4dbdbff..0000000 --- a/vendor/github.com/kisielk/gotool/match.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright (c) 2009 The Go Authors. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// +build go1.9 - -package gotool - -import ( - "path/filepath" - - "github.com/kisielk/gotool/internal/load" -) - -// importPaths returns the import paths to use for the given command line. -func (c *Context) importPaths(args []string) []string { - lctx := load.Context{ - BuildContext: c.BuildContext, - GOROOTsrc: c.joinPath(c.BuildContext.GOROOT, "src"), - } - return lctx.ImportPaths(args) -} - -// joinPath calls c.BuildContext.JoinPath (if not nil) or else filepath.Join. -// -// It's a copy of the unexported build.Context.joinPath helper. -func (c *Context) joinPath(elem ...string) string { - if f := c.BuildContext.JoinPath; f != nil { - return f(elem...) - } - return filepath.Join(elem...) -} diff --git a/vendor/github.com/kisielk/gotool/match18.go b/vendor/github.com/kisielk/gotool/match18.go deleted file mode 100644 index 6d6b136..0000000 --- a/vendor/github.com/kisielk/gotool/match18.go +++ /dev/null @@ -1,317 +0,0 @@ -// Copyright (c) 2009 The Go Authors. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -// +build !go1.9 - -package gotool - -import ( - "fmt" - "go/build" - "log" - "os" - "path" - "path/filepath" - "regexp" - "strings" -) - -// This file contains code from the Go distribution. - -// matchPattern(pattern)(name) reports whether -// name matches pattern. Pattern is a limited glob -// pattern in which '...' means 'any string' and there -// is no other special syntax. -func matchPattern(pattern string) func(name string) bool { - re := regexp.QuoteMeta(pattern) - re = strings.Replace(re, `\.\.\.`, `.*`, -1) - // Special case: foo/... matches foo too. - if strings.HasSuffix(re, `/.*`) { - re = re[:len(re)-len(`/.*`)] + `(/.*)?` - } - reg := regexp.MustCompile(`^` + re + `$`) - return reg.MatchString -} - -// matchPackages returns a list of package paths matching pattern -// (see go help packages for pattern syntax). -func (c *Context) matchPackages(pattern string) []string { - match := func(string) bool { return true } - treeCanMatch := func(string) bool { return true } - if !isMetaPackage(pattern) { - match = matchPattern(pattern) - treeCanMatch = treeCanMatchPattern(pattern) - } - - have := map[string]bool{ - "builtin": true, // ignore pseudo-package that exists only for documentation - } - if !c.BuildContext.CgoEnabled { - have["runtime/cgo"] = true // ignore during walk - } - var pkgs []string - - for _, src := range c.BuildContext.SrcDirs() { - if (pattern == "std" || pattern == "cmd") && src != gorootSrc { - continue - } - src = filepath.Clean(src) + string(filepath.Separator) - root := src - if pattern == "cmd" { - root += "cmd" + string(filepath.Separator) - } - filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() || path == src { - return nil - } - - // Avoid .foo, _foo, and testdata directory trees. - _, elem := filepath.Split(path) - if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { - return filepath.SkipDir - } - - name := filepath.ToSlash(path[len(src):]) - if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") { - // The name "std" is only the standard library. - // If the name is cmd, it's the root of the command tree. - return filepath.SkipDir - } - if !treeCanMatch(name) { - return filepath.SkipDir - } - if have[name] { - return nil - } - have[name] = true - if !match(name) { - return nil - } - _, err = c.BuildContext.ImportDir(path, 0) - if err != nil { - if _, noGo := err.(*build.NoGoError); noGo { - return nil - } - } - pkgs = append(pkgs, name) - return nil - }) - } - return pkgs -} - -// importPathsNoDotExpansion returns the import paths to use for the given -// command line, but it does no ... expansion. -func (c *Context) importPathsNoDotExpansion(args []string) []string { - if len(args) == 0 { - return []string{"."} - } - var out []string - for _, a := range args { - // Arguments are supposed to be import paths, but - // as a courtesy to Windows developers, rewrite \ to / - // in command-line arguments. Handles .\... and so on. - if filepath.Separator == '\\' { - a = strings.Replace(a, `\`, `/`, -1) - } - - // Put argument in canonical form, but preserve leading ./. - if strings.HasPrefix(a, "./") { - a = "./" + path.Clean(a) - if a == "./." { - a = "." - } - } else { - a = path.Clean(a) - } - if isMetaPackage(a) { - out = append(out, c.allPackages(a)...) - continue - } - out = append(out, a) - } - return out -} - -// importPaths returns the import paths to use for the given command line. 
-func (c *Context) importPaths(args []string) []string { - args = c.importPathsNoDotExpansion(args) - var out []string - for _, a := range args { - if strings.Contains(a, "...") { - if build.IsLocalImport(a) { - out = append(out, c.allPackagesInFS(a)...) - } else { - out = append(out, c.allPackages(a)...) - } - continue - } - out = append(out, a) - } - return out -} - -// allPackages returns all the packages that can be found -// under the $GOPATH directories and $GOROOT matching pattern. -// The pattern is either "all" (all packages), "std" (standard packages), -// "cmd" (standard commands), or a path including "...". -func (c *Context) allPackages(pattern string) []string { - pkgs := c.matchPackages(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -// allPackagesInFS is like allPackages but is passed a pattern -// beginning ./ or ../, meaning it should scan the tree rooted -// at the given directory. There are ... in the pattern too. -func (c *Context) allPackagesInFS(pattern string) []string { - pkgs := c.matchPackagesInFS(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -// matchPackagesInFS returns a list of package paths matching pattern, -// which must begin with ./ or ../ -// (see go help packages for pattern syntax). -func (c *Context) matchPackagesInFS(pattern string) []string { - // Find directory to begin the scan. - // Could be smarter but this one optimization - // is enough for now, since ... is usually at the - // end of a path. - i := strings.Index(pattern, "...") - dir, _ := path.Split(pattern[:i]) - - // pattern begins with ./ or ../. - // path.Clean will discard the ./ but not the ../. - // We need to preserve the ./ for pattern matching - // and in the returned import paths. - prefix := "" - if strings.HasPrefix(pattern, "./") { - prefix = "./" - } - match := matchPattern(pattern) - - var pkgs []string - filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() { - return nil - } - if path == dir { - // filepath.Walk starts at dir and recurses. For the recursive case, - // the path is the result of filepath.Join, which calls filepath.Clean. - // The initial case is not Cleaned, though, so we do this explicitly. - // - // This converts a path like "./io/" to "io". Without this step, running - // "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io - // package, because prepending the prefix "./" to the unclean path would - // result in "././io", and match("././io") returns false. - path = filepath.Clean(path) - } - - // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". - _, elem := filepath.Split(path) - dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." - if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { - return filepath.SkipDir - } - - name := prefix + filepath.ToSlash(path) - if !match(name) { - return nil - } - - // We keep the directory if we can import it, or if we can't import it - // due to invalid Go source files. This means that directories containing - // parse errors will be built (and fail) instead of being silently skipped - // as not matching the pattern. Go 1.5 and earlier skipped, but that - // behavior means people miss serious mistakes. - // See golang.org/issue/11407. 
- if p, err := c.BuildContext.ImportDir(path, 0); err != nil && shouldIgnoreImport(p) { - if _, noGo := err.(*build.NoGoError); !noGo { - log.Print(err) - } - return nil - } - pkgs = append(pkgs, name) - return nil - }) - return pkgs -} - -// isMetaPackage checks if name is a reserved package name that expands to multiple packages. -func isMetaPackage(name string) bool { - return name == "std" || name == "cmd" || name == "all" -} - -// isStandardImportPath reports whether $GOROOT/src/path should be considered -// part of the standard distribution. For historical reasons we allow people to add -// their own code to $GOROOT instead of using $GOPATH, but we assume that -// code will start with a domain name (dot in the first element). -func isStandardImportPath(path string) bool { - i := strings.Index(path, "/") - if i < 0 { - i = len(path) - } - elem := path[:i] - return !strings.Contains(elem, ".") -} - -// hasPathPrefix reports whether the path s begins with the -// elements in prefix. -func hasPathPrefix(s, prefix string) bool { - switch { - default: - return false - case len(s) == len(prefix): - return s == prefix - case len(s) > len(prefix): - if prefix != "" && prefix[len(prefix)-1] == '/' { - return strings.HasPrefix(s, prefix) - } - return s[len(prefix)] == '/' && s[:len(prefix)] == prefix - } -} - -// treeCanMatchPattern(pattern)(name) reports whether -// name or children of name can possibly match pattern. -// Pattern is the same limited glob accepted by matchPattern. -func treeCanMatchPattern(pattern string) func(name string) bool { - wildCard := false - if i := strings.Index(pattern, "..."); i >= 0 { - wildCard = true - pattern = pattern[:i] - } - return func(name string) bool { - return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || - wildCard && strings.HasPrefix(name, pattern) - } -} diff --git a/vendor/github.com/kisielk/gotool/tool.go b/vendor/github.com/kisielk/gotool/tool.go deleted file mode 100644 index c7409e1..0000000 --- a/vendor/github.com/kisielk/gotool/tool.go +++ /dev/null @@ -1,48 +0,0 @@ -// Package gotool contains utility functions used to implement the standard -// "cmd/go" tool, provided as a convenience to developers who want to write -// tools with similar semantics. -package gotool - -import "go/build" - -// Export functions here to make it easier to keep the implementations up to date with upstream. - -// DefaultContext is the default context that uses build.Default. -var DefaultContext = Context{ - BuildContext: build.Default, -} - -// A Context specifies the supporting context. -type Context struct { - // BuildContext is the build.Context that is used when computing import paths. - BuildContext build.Context -} - -// ImportPaths returns the import paths to use for the given command line. -// -// The path "all" is expanded to all packages in $GOPATH and $GOROOT. -// The path "std" is expanded to all packages in the Go standard library. -// The path "cmd" is expanded to all Go standard commands. -// The string "..." is treated as a wildcard within a path. -// When matching recursively, directories are ignored if they are prefixed with -// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata". -// Relative import paths are not converted to full import paths. -// If args is empty, a single element "." is returned. 
-func (c *Context) ImportPaths(args []string) []string { - return c.importPaths(args) -} - -// ImportPaths returns the import paths to use for the given command line -// using default context. -// -// The path "all" is expanded to all packages in $GOPATH and $GOROOT. -// The path "std" is expanded to all packages in the Go standard library. -// The path "cmd" is expanded to all Go standard commands. -// The string "..." is treated as a wildcard within a path. -// When matching recursively, directories are ignored if they are prefixed with -// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata". -// Relative import paths are not converted to full import paths. -// If args is empty, a single element "." is returned. -func ImportPaths(args []string) []string { - return DefaultContext.importPaths(args) -} diff --git a/vendor/golang.org/x/lint/LICENSE b/vendor/golang.org/x/lint/LICENSE deleted file mode 100644 index 65d761b..0000000 --- a/vendor/golang.org/x/lint/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2013 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/lint/golint/golint.go b/vendor/golang.org/x/lint/golint/golint.go deleted file mode 100644 index ac024b6..0000000 --- a/vendor/golang.org/x/lint/golint/golint.go +++ /dev/null @@ -1,159 +0,0 @@ -// Copyright (c) 2013 The Go Authors. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file or at -// https://developers.google.com/open-source/licenses/bsd. - -// golint lints the Go source files named on its command line. 
-package main - -import ( - "flag" - "fmt" - "go/build" - "io/ioutil" - "os" - "path/filepath" - "strings" - - "golang.org/x/lint" -) - -var ( - minConfidence = flag.Float64("min_confidence", 0.8, "minimum confidence of a problem to print it") - setExitStatus = flag.Bool("set_exit_status", false, "set exit status to 1 if any issues are found") - suggestions int -) - -func usage() { - fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0]) - fmt.Fprintf(os.Stderr, "\tgolint [flags] # runs on package in current directory\n") - fmt.Fprintf(os.Stderr, "\tgolint [flags] [packages]\n") - fmt.Fprintf(os.Stderr, "\tgolint [flags] [directories] # where a '/...' suffix includes all sub-directories\n") - fmt.Fprintf(os.Stderr, "\tgolint [flags] [files] # all must belong to a single package\n") - fmt.Fprintf(os.Stderr, "Flags:\n") - flag.PrintDefaults() -} - -func main() { - flag.Usage = usage - flag.Parse() - - if flag.NArg() == 0 { - lintDir(".") - } else { - // dirsRun, filesRun, and pkgsRun indicate whether golint is applied to - // directory, file or package targets. The distinction affects which - // checks are run. It is no valid to mix target types. - var dirsRun, filesRun, pkgsRun int - var args []string - for _, arg := range flag.Args() { - if strings.HasSuffix(arg, "/...") && isDir(arg[:len(arg)-len("/...")]) { - dirsRun = 1 - for _, dirname := range allPackagesInFS(arg) { - args = append(args, dirname) - } - } else if isDir(arg) { - dirsRun = 1 - args = append(args, arg) - } else if exists(arg) { - filesRun = 1 - args = append(args, arg) - } else { - pkgsRun = 1 - args = append(args, arg) - } - } - - if dirsRun+filesRun+pkgsRun != 1 { - usage() - os.Exit(2) - } - switch { - case dirsRun == 1: - for _, dir := range args { - lintDir(dir) - } - case filesRun == 1: - lintFiles(args...) - case pkgsRun == 1: - for _, pkg := range importPaths(args) { - lintPackage(pkg) - } - } - } - - if *setExitStatus && suggestions > 0 { - fmt.Fprintf(os.Stderr, "Found %d lint suggestions; failing.\n", suggestions) - os.Exit(1) - } -} - -func isDir(filename string) bool { - fi, err := os.Stat(filename) - return err == nil && fi.IsDir() -} - -func exists(filename string) bool { - _, err := os.Stat(filename) - return err == nil -} - -func lintFiles(filenames ...string) { - files := make(map[string][]byte) - for _, filename := range filenames { - src, err := ioutil.ReadFile(filename) - if err != nil { - fmt.Fprintln(os.Stderr, err) - continue - } - files[filename] = src - } - - l := new(lint.Linter) - ps, err := l.LintFiles(files) - if err != nil { - fmt.Fprintf(os.Stderr, "%v\n", err) - return - } - for _, p := range ps { - if p.Confidence >= *minConfidence { - fmt.Printf("%v: %s\n", p.Position, p.Text) - suggestions++ - } - } -} - -func lintDir(dirname string) { - pkg, err := build.ImportDir(dirname, 0) - lintImportedPackage(pkg, err) -} - -func lintPackage(pkgname string) { - pkg, err := build.Import(pkgname, ".", 0) - lintImportedPackage(pkg, err) -} - -func lintImportedPackage(pkg *build.Package, err error) { - if err != nil { - if _, nogo := err.(*build.NoGoError); nogo { - // Don't complain if the failure is due to no Go source files. - return - } - fmt.Fprintln(os.Stderr, err) - return - } - - var files []string - files = append(files, pkg.GoFiles...) - files = append(files, pkg.CgoFiles...) - files = append(files, pkg.TestGoFiles...) - if pkg.Dir != "." { - for i, f := range files { - files[i] = filepath.Join(pkg.Dir, f) - } - } - // TODO(dsymonds): Do foo_test too (pkg.XTestGoFiles) - - lintFiles(files...) 
-} diff --git a/vendor/golang.org/x/lint/golint/import.go b/vendor/golang.org/x/lint/golint/import.go deleted file mode 100644 index 2ba9dea..0000000 --- a/vendor/golang.org/x/lint/golint/import.go +++ /dev/null @@ -1,309 +0,0 @@ -package main - -/* - -This file holds a direct copy of the import path matching code of -https://github.com/golang/go/blob/master/src/cmd/go/main.go. It can be -replaced when https://golang.org/issue/8768 is resolved. - -It has been updated to follow upstream changes in a few ways. - -*/ - -import ( - "fmt" - "go/build" - "log" - "os" - "path" - "path/filepath" - "regexp" - "runtime" - "strings" -) - -var ( - buildContext = build.Default - goroot = filepath.Clean(runtime.GOROOT()) - gorootSrc = filepath.Join(goroot, "src") -) - -// importPathsNoDotExpansion returns the import paths to use for the given -// command line, but it does no ... expansion. -func importPathsNoDotExpansion(args []string) []string { - if len(args) == 0 { - return []string{"."} - } - var out []string - for _, a := range args { - // Arguments are supposed to be import paths, but - // as a courtesy to Windows developers, rewrite \ to / - // in command-line arguments. Handles .\... and so on. - if filepath.Separator == '\\' { - a = strings.Replace(a, `\`, `/`, -1) - } - - // Put argument in canonical form, but preserve leading ./. - if strings.HasPrefix(a, "./") { - a = "./" + path.Clean(a) - if a == "./." { - a = "." - } - } else { - a = path.Clean(a) - } - if a == "all" || a == "std" { - out = append(out, allPackages(a)...) - continue - } - out = append(out, a) - } - return out -} - -// importPaths returns the import paths to use for the given command line. -func importPaths(args []string) []string { - args = importPathsNoDotExpansion(args) - var out []string - for _, a := range args { - if strings.Contains(a, "...") { - if build.IsLocalImport(a) { - out = append(out, allPackagesInFS(a)...) - } else { - out = append(out, allPackages(a)...) - } - continue - } - out = append(out, a) - } - return out -} - -// matchPattern(pattern)(name) reports whether -// name matches pattern. Pattern is a limited glob -// pattern in which '...' means 'any string' and there -// is no other special syntax. -func matchPattern(pattern string) func(name string) bool { - re := regexp.QuoteMeta(pattern) - re = strings.Replace(re, `\.\.\.`, `.*`, -1) - // Special case: foo/... matches foo too. - if strings.HasSuffix(re, `/.*`) { - re = re[:len(re)-len(`/.*`)] + `(/.*)?` - } - reg := regexp.MustCompile(`^` + re + `$`) - return func(name string) bool { - return reg.MatchString(name) - } -} - -// hasPathPrefix reports whether the path s begins with the -// elements in prefix. -func hasPathPrefix(s, prefix string) bool { - switch { - default: - return false - case len(s) == len(prefix): - return s == prefix - case len(s) > len(prefix): - if prefix != "" && prefix[len(prefix)-1] == '/' { - return strings.HasPrefix(s, prefix) - } - return s[len(prefix)] == '/' && s[:len(prefix)] == prefix - } -} - -// treeCanMatchPattern(pattern)(name) reports whether -// name or children of name can possibly match pattern. -// Pattern is the same limited glob accepted by matchPattern. 
-func treeCanMatchPattern(pattern string) func(name string) bool { - wildCard := false - if i := strings.Index(pattern, "..."); i >= 0 { - wildCard = true - pattern = pattern[:i] - } - return func(name string) bool { - return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || - wildCard && strings.HasPrefix(name, pattern) - } -} - -// allPackages returns all the packages that can be found -// under the $GOPATH directories and $GOROOT matching pattern. -// The pattern is either "all" (all packages), "std" (standard packages) -// or a path including "...". -func allPackages(pattern string) []string { - pkgs := matchPackages(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -func matchPackages(pattern string) []string { - match := func(string) bool { return true } - treeCanMatch := func(string) bool { return true } - if pattern != "all" && pattern != "std" { - match = matchPattern(pattern) - treeCanMatch = treeCanMatchPattern(pattern) - } - - have := map[string]bool{ - "builtin": true, // ignore pseudo-package that exists only for documentation - } - if !buildContext.CgoEnabled { - have["runtime/cgo"] = true // ignore during walk - } - var pkgs []string - - // Commands - cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator) - filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() || path == cmd { - return nil - } - name := path[len(cmd):] - if !treeCanMatch(name) { - return filepath.SkipDir - } - // Commands are all in cmd/, not in subdirectories. - if strings.Contains(name, string(filepath.Separator)) { - return filepath.SkipDir - } - - // We use, e.g., cmd/gofmt as the pseudo import path for gofmt. - name = "cmd/" + name - if have[name] { - return nil - } - have[name] = true - if !match(name) { - return nil - } - _, err = buildContext.ImportDir(path, 0) - if err != nil { - if _, noGo := err.(*build.NoGoError); !noGo { - log.Print(err) - } - return nil - } - pkgs = append(pkgs, name) - return nil - }) - - for _, src := range buildContext.SrcDirs() { - if (pattern == "std" || pattern == "cmd") && src != gorootSrc { - continue - } - src = filepath.Clean(src) + string(filepath.Separator) - root := src - if pattern == "cmd" { - root += "cmd" + string(filepath.Separator) - } - filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() || path == src { - return nil - } - - // Avoid .foo, _foo, and testdata directory trees. - _, elem := filepath.Split(path) - if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { - return filepath.SkipDir - } - - name := filepath.ToSlash(path[len(src):]) - if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") { - // The name "std" is only the standard library. - // If the name is cmd, it's the root of the command tree. - return filepath.SkipDir - } - if !treeCanMatch(name) { - return filepath.SkipDir - } - if have[name] { - return nil - } - have[name] = true - if !match(name) { - return nil - } - _, err = buildContext.ImportDir(path, 0) - if err != nil { - if _, noGo := err.(*build.NoGoError); noGo { - return nil - } - } - pkgs = append(pkgs, name) - return nil - }) - } - return pkgs -} - -// allPackagesInFS is like allPackages but is passed a pattern -// beginning ./ or ../, meaning it should scan the tree rooted -// at the given directory. There are ... in the pattern too. 
-func allPackagesInFS(pattern string) []string { - pkgs := matchPackagesInFS(pattern) - if len(pkgs) == 0 { - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) - } - return pkgs -} - -func matchPackagesInFS(pattern string) []string { - // Find directory to begin the scan. - // Could be smarter but this one optimization - // is enough for now, since ... is usually at the - // end of a path. - i := strings.Index(pattern, "...") - dir, _ := path.Split(pattern[:i]) - - // pattern begins with ./ or ../. - // path.Clean will discard the ./ but not the ../. - // We need to preserve the ./ for pattern matching - // and in the returned import paths. - prefix := "" - if strings.HasPrefix(pattern, "./") { - prefix = "./" - } - match := matchPattern(pattern) - - var pkgs []string - filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { - if err != nil || !fi.IsDir() { - return nil - } - if path == dir { - // filepath.Walk starts at dir and recurses. For the recursive case, - // the path is the result of filepath.Join, which calls filepath.Clean. - // The initial case is not Cleaned, though, so we do this explicitly. - // - // This converts a path like "./io/" to "io". Without this step, running - // "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io - // package, because prepending the prefix "./" to the unclean path would - // result in "././io", and match("././io") returns false. - path = filepath.Clean(path) - } - - // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". - _, elem := filepath.Split(path) - dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." - if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { - return filepath.SkipDir - } - - name := prefix + filepath.ToSlash(path) - if !match(name) { - return nil - } - if _, err = build.ImportDir(path, 0); err != nil { - if _, noGo := err.(*build.NoGoError); !noGo { - log.Print(err) - } - return nil - } - pkgs = append(pkgs, name) - return nil - }) - return pkgs -} diff --git a/vendor/golang.org/x/lint/lint.go b/vendor/golang.org/x/lint/lint.go deleted file mode 100644 index 46bd45f..0000000 --- a/vendor/golang.org/x/lint/lint.go +++ /dev/null @@ -1,1671 +0,0 @@ -// Copyright (c) 2013 The Go Authors. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file or at -// https://developers.google.com/open-source/licenses/bsd. - -// Package lint contains a linter for Go source code. -package lint - -import ( - "bufio" - "bytes" - "fmt" - "go/ast" - "go/parser" - "go/printer" - "go/token" - "go/types" - "regexp" - "sort" - "strconv" - "strings" - "unicode" - "unicode/utf8" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/gcexportdata" -) - -const styleGuideBase = "https://golang.org/wiki/CodeReviewComments" - -// A Linter lints Go source code. -type Linter struct { -} - -// Problem represents a problem in some source code. 
-type Problem struct { - Position token.Position // position in source file - Text string // the prose that describes the problem - Link string // (optional) the link to the style guide for the problem - Confidence float64 // a value in (0,1] estimating the confidence in this problem's correctness - LineText string // the source line - Category string // a short name for the general category of the problem - - // If the problem has a suggested fix (the minority case), - // ReplacementLine is a full replacement for the relevant line of the source file. - ReplacementLine string -} - -func (p *Problem) String() string { - if p.Link != "" { - return p.Text + "\n\n" + p.Link - } - return p.Text -} - -type byPosition []Problem - -func (p byPosition) Len() int { return len(p) } -func (p byPosition) Swap(i, j int) { p[i], p[j] = p[j], p[i] } - -func (p byPosition) Less(i, j int) bool { - pi, pj := p[i].Position, p[j].Position - - if pi.Filename != pj.Filename { - return pi.Filename < pj.Filename - } - if pi.Line != pj.Line { - return pi.Line < pj.Line - } - if pi.Column != pj.Column { - return pi.Column < pj.Column - } - - return p[i].Text < p[j].Text -} - -// Lint lints src. -func (l *Linter) Lint(filename string, src []byte) ([]Problem, error) { - return l.LintFiles(map[string][]byte{filename: src}) -} - -// LintFiles lints a set of files of a single package. -// The argument is a map of filename to source. -func (l *Linter) LintFiles(files map[string][]byte) ([]Problem, error) { - pkg := &pkg{ - fset: token.NewFileSet(), - files: make(map[string]*file), - } - var pkgName string - for filename, src := range files { - if isGenerated(src) { - continue // See issue #239 - } - f, err := parser.ParseFile(pkg.fset, filename, src, parser.ParseComments) - if err != nil { - return nil, err - } - if pkgName == "" { - pkgName = f.Name.Name - } else if f.Name.Name != pkgName { - return nil, fmt.Errorf("%s is in package %s, not %s", filename, f.Name.Name, pkgName) - } - pkg.files[filename] = &file{ - pkg: pkg, - f: f, - fset: pkg.fset, - src: src, - filename: filename, - } - } - if len(pkg.files) == 0 { - return nil, nil - } - return pkg.lint(), nil -} - -var ( - genHdr = []byte("// Code generated ") - genFtr = []byte(" DO NOT EDIT.") -) - -// isGenerated reports whether the source file is generated code -// according the rules from https://golang.org/s/generatedcode. -func isGenerated(src []byte) bool { - sc := bufio.NewScanner(bytes.NewReader(src)) - for sc.Scan() { - b := sc.Bytes() - if bytes.HasPrefix(b, genHdr) && bytes.HasSuffix(b, genFtr) && len(b) >= len(genHdr)+len(genFtr) { - return true - } - } - return false -} - -// pkg represents a package being linted. -type pkg struct { - fset *token.FileSet - files map[string]*file - - typesPkg *types.Package - typesInfo *types.Info - - // sortable is the set of types in the package that implement sort.Interface. - sortable map[string]bool - // main is whether this is a "main" package. - main bool - - problems []Problem -} - -func (p *pkg) lint() []Problem { - if err := p.typeCheck(); err != nil { - /* TODO(dsymonds): Consider reporting these errors when golint operates on entire packages. - if e, ok := err.(types.Error); ok { - pos := p.fset.Position(e.Pos) - conf := 1.0 - if strings.Contains(e.Msg, "can't find import: ") { - // Golint is probably being run in a context that doesn't support - // typechecking (e.g. package files aren't found), so don't warn about it. 
- conf = 0 - } - if conf > 0 { - p.errorfAt(pos, conf, category("typechecking"), e.Msg) - } - - // TODO(dsymonds): Abort if !e.Soft? - } - */ - } - - p.scanSortable() - p.main = p.isMain() - - for _, f := range p.files { - f.lint() - } - - sort.Sort(byPosition(p.problems)) - - return p.problems -} - -// file represents a file being linted. -type file struct { - pkg *pkg - f *ast.File - fset *token.FileSet - src []byte - filename string -} - -func (f *file) isTest() bool { return strings.HasSuffix(f.filename, "_test.go") } - -func (f *file) lint() { - f.lintPackageComment() - f.lintImports() - f.lintBlankImports() - f.lintExported() - f.lintNames() - f.lintVarDecls() - f.lintElses() - f.lintRanges() - f.lintErrorf() - f.lintErrors() - f.lintErrorStrings() - f.lintReceiverNames() - f.lintIncDec() - f.lintErrorReturn() - f.lintUnexportedReturn() - f.lintTimeNames() - f.lintContextKeyTypes() - f.lintContextArgs() -} - -type link string -type category string - -// The variadic arguments may start with link and category types, -// and must end with a format string and any arguments. -// It returns the new Problem. -func (f *file) errorf(n ast.Node, confidence float64, args ...interface{}) *Problem { - pos := f.fset.Position(n.Pos()) - if pos.Filename == "" { - pos.Filename = f.filename - } - return f.pkg.errorfAt(pos, confidence, args...) -} - -func (p *pkg) errorfAt(pos token.Position, confidence float64, args ...interface{}) *Problem { - problem := Problem{ - Position: pos, - Confidence: confidence, - } - if pos.Filename != "" { - // The file might not exist in our mapping if a //line directive was encountered. - if f, ok := p.files[pos.Filename]; ok { - problem.LineText = srcLine(f.src, pos) - } - } - -argLoop: - for len(args) > 1 { // always leave at least the format string in args - switch v := args[0].(type) { - case link: - problem.Link = string(v) - case category: - problem.Category = string(v) - default: - break argLoop - } - args = args[1:] - } - - problem.Text = fmt.Sprintf(args[0].(string), args[1:]...) - - p.problems = append(p.problems, problem) - return &p.problems[len(p.problems)-1] -} - -var newImporter = func(fset *token.FileSet) types.ImporterFrom { - return gcexportdata.NewImporter(fset, make(map[string]*types.Package)) -} - -func (p *pkg) typeCheck() error { - config := &types.Config{ - // By setting a no-op error reporter, the type checker does as much work as possible. - Error: func(error) {}, - Importer: newImporter(p.fset), - } - info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Scopes: make(map[ast.Node]*types.Scope), - } - var anyFile *file - var astFiles []*ast.File - for _, f := range p.files { - anyFile = f - astFiles = append(astFiles, f.f) - } - pkg, err := config.Check(anyFile.f.Name.Name, p.fset, astFiles, info) - // Remember the typechecking info, even if config.Check failed, - // since we will get partial information. - p.typesPkg = pkg - p.typesInfo = info - return err -} - -func (p *pkg) typeOf(expr ast.Expr) types.Type { - if p.typesInfo == nil { - return nil - } - return p.typesInfo.TypeOf(expr) -} - -func (p *pkg) isNamedType(typ types.Type, importPath, name string) bool { - n, ok := typ.(*types.Named) - if !ok { - return false - } - tn := n.Obj() - return tn != nil && tn.Pkg() != nil && tn.Pkg().Path() == importPath && tn.Name() == name -} - -// scopeOf returns the tightest scope encompassing id. 
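The Linter, Problem and Lint/LintFiles plumbing shown above is the public surface of the package being deleted here. For reference, this is roughly how a caller drives it; the snippet assumes the upstream module golang.org/x/lint is on the import path, and the file name demo.go and its contents are made up:

package main

import (
	"fmt"
	"log"

	"golang.org/x/lint"
)

func main() {
	src := []byte(`package demo

func GetUrl() string { return "" }
`)

	// Lint wraps LintFiles for the single-file case.
	var l lint.Linter
	problems, err := l.Lint("demo.go", src)
	if err != nil {
		log.Fatal(err)
	}
	for _, p := range problems {
		// Each Problem carries a position, a message and a confidence score.
		fmt.Printf("%v: %s (confidence %.2f)\n", p.Position, p.Text, p.Confidence)
	}
}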
-func (p *pkg) scopeOf(id *ast.Ident) *types.Scope { - var scope *types.Scope - if obj := p.typesInfo.ObjectOf(id); obj != nil { - scope = obj.Parent() - } - if scope == p.typesPkg.Scope() { - // We were given a top-level identifier. - // Use the file-level scope instead of the package-level scope. - pos := id.Pos() - for _, f := range p.files { - if f.f.Pos() <= pos && pos < f.f.End() { - scope = p.typesInfo.Scopes[f.f] - break - } - } - } - return scope -} - -func (p *pkg) scanSortable() { - p.sortable = make(map[string]bool) - - // bitfield for which methods exist on each type. - const ( - Len = 1 << iota - Less - Swap - ) - nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap} - has := make(map[string]int) - for _, f := range p.files { - f.walk(func(n ast.Node) bool { - fn, ok := n.(*ast.FuncDecl) - if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { - return true - } - // TODO(dsymonds): We could check the signature to be more precise. - recv := receiverType(fn) - if i, ok := nmap[fn.Name.Name]; ok { - has[recv] |= i - } - return false - }) - } - for typ, ms := range has { - if ms == Len|Less|Swap { - p.sortable[typ] = true - } - } -} - -func (p *pkg) isMain() bool { - for _, f := range p.files { - if f.isMain() { - return true - } - } - return false -} - -func (f *file) isMain() bool { - if f.f.Name.Name == "main" { - return true - } - return false -} - -// lintPackageComment checks package comments. It complains if -// there is no package comment, or if it is not of the right form. -// This has a notable false positive in that a package comment -// could rightfully appear in a different file of the same package, -// but that's not easy to fix since this linter is file-oriented. -func (f *file) lintPackageComment() { - if f.isTest() { - return - } - - const ref = styleGuideBase + "#package-comments" - prefix := "Package " + f.f.Name.Name + " " - - // Look for a detached package comment. - // First, scan for the last comment that occurs before the "package" keyword. - var lastCG *ast.CommentGroup - for _, cg := range f.f.Comments { - if cg.Pos() > f.f.Package { - // Gone past "package" keyword. - break - } - lastCG = cg - } - if lastCG != nil && strings.HasPrefix(lastCG.Text(), prefix) { - endPos := f.fset.Position(lastCG.End()) - pkgPos := f.fset.Position(f.f.Package) - if endPos.Line+1 < pkgPos.Line { - // There isn't a great place to anchor this error; - // the start of the blank lines between the doc and the package statement - // is at least pointing at the location of the problem. - pos := token.Position{ - Filename: endPos.Filename, - // Offset not set; it is non-trivial, and doesn't appear to be needed. - Line: endPos.Line + 1, - Column: 1, - } - f.pkg.errorfAt(pos, 0.9, link(ref), category("comments"), "package comment is detached; there should be no blank lines between it and the package statement") - return - } - } - - if f.f.Doc == nil { - f.errorf(f.f, 0.2, link(ref), category("comments"), "should have a package comment, unless it's in another file for this package") - return - } - s := f.f.Doc.Text() - if ts := strings.TrimLeft(s, " \t"); ts != s { - f.errorf(f.f.Doc, 1, link(ref), category("comments"), "package comment should not have leading space") - s = ts - } - // Only non-main packages need to keep to this form. 
- if !f.pkg.main && !strings.HasPrefix(s, prefix) { - f.errorf(f.f.Doc, 1, link(ref), category("comments"), `package comment should be of the form "%s..."`, prefix) - } -} - -// lintBlankImports complains if a non-main package has blank imports that are -// not documented. -func (f *file) lintBlankImports() { - // In package main and in tests, we don't complain about blank imports. - if f.pkg.main || f.isTest() { - return - } - - // The first element of each contiguous group of blank imports should have - // an explanatory comment of some kind. - for i, imp := range f.f.Imports { - pos := f.fset.Position(imp.Pos()) - - if !isBlank(imp.Name) { - continue // Ignore non-blank imports. - } - if i > 0 { - prev := f.f.Imports[i-1] - prevPos := f.fset.Position(prev.Pos()) - if isBlank(prev.Name) && prevPos.Line+1 == pos.Line { - continue // A subsequent blank in a group. - } - } - - // This is the first blank import of a group. - if imp.Doc == nil && imp.Comment == nil { - ref := "" - f.errorf(imp, 1, link(ref), category("imports"), "a blank import should be only in a main or test package, or have a comment justifying it") - } - } -} - -// lintImports examines import blocks. -func (f *file) lintImports() { - for i, is := range f.f.Imports { - _ = i - if is.Name != nil && is.Name.Name == "." && !f.isTest() { - f.errorf(is, 1, link(styleGuideBase+"#import-dot"), category("imports"), "should not use dot imports") - } - - } -} - -const docCommentsLink = styleGuideBase + "#doc-comments" - -// lintExported examines the exported names. -// It complains if any required doc comments are missing, -// or if they are not of the right form. The exact rules are in -// lintFuncDoc, lintTypeDoc and lintValueSpecDoc; this function -// also tracks the GenDecl structure being traversed to permit -// doc comments for constants to be on top of the const block. -// It also complains if the names stutter when combined with -// the package name. -func (f *file) lintExported() { - if f.isTest() { - return - } - - var lastGen *ast.GenDecl // last GenDecl entered. - - // Set of GenDecls that have already had missing comments flagged. - genDeclMissingComments := make(map[*ast.GenDecl]bool) - - f.walk(func(node ast.Node) bool { - switch v := node.(type) { - case *ast.GenDecl: - if v.Tok == token.IMPORT { - return false - } - // token.CONST, token.TYPE or token.VAR - lastGen = v - return true - case *ast.FuncDecl: - f.lintFuncDoc(v) - if v.Recv == nil { - // Only check for stutter on functions, not methods. - // Method names are not used package-qualified. - f.checkStutter(v.Name, "func") - } - // Don't proceed inside funcs. - return false - case *ast.TypeSpec: - // inside a GenDecl, which usually has the doc - doc := v.Doc - if doc == nil { - doc = lastGen.Doc - } - f.lintTypeDoc(v, doc) - f.checkStutter(v.Name, "type") - // Don't proceed inside types. - return false - case *ast.ValueSpec: - f.lintValueSpecDoc(v, lastGen, genDeclMissingComments) - return false - } - return true - }) -} - -var ( - allCapsRE = regexp.MustCompile(`^[A-Z0-9_]+$`) - anyCapsRE = regexp.MustCompile(`[A-Z]`) -) - -// knownNameExceptions is a set of names that are known to be exempt from naming checks. -// This is usually because they are constrained by having to match names in the -// standard library. -var knownNameExceptions = map[string]bool{ - "LastInsertId": true, // must match database/sql - "kWh": true, -} - -// lintNames examines all names in the file. -// It complains if any use underscores or incorrect known initialisms. 
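lintPackageComment and lintBlankImports, deleted above, expect a package comment of the form "Package <name> ..." and a justifying comment on the first blank import of a group. A compliant file, sketched with a made-up package name, looks like this:

// Package demo exists only to show the comment forms the removed checks
// look for: a "Package demo ..." doc comment and a documented blank import.
package demo

import (
	// Blank import kept for its side effect of registering pprof handlers.
	_ "net/http/pprof"
)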
-func (f *file) lintNames() { - // Package names need slightly different handling than other names. - if strings.Contains(f.f.Name.Name, "_") && !strings.HasSuffix(f.f.Name.Name, "_test") { - f.errorf(f.f, 1, link("http://golang.org/doc/effective_go.html#package-names"), category("naming"), "don't use an underscore in package name") - } - if anyCapsRE.MatchString(f.f.Name.Name) { - f.errorf(f.f, 1, link("http://golang.org/doc/effective_go.html#package-names"), category("mixed-caps"), "don't use MixedCaps in package name; %s should be %s", f.f.Name.Name, strings.ToLower(f.f.Name.Name)) - } - - check := func(id *ast.Ident, thing string) { - if id.Name == "_" { - return - } - if knownNameExceptions[id.Name] { - return - } - - // Handle two common styles from other languages that don't belong in Go. - if len(id.Name) >= 5 && allCapsRE.MatchString(id.Name) && strings.Contains(id.Name, "_") { - f.errorf(id, 0.8, link(styleGuideBase+"#mixed-caps"), category("naming"), "don't use ALL_CAPS in Go names; use CamelCase") - return - } - if len(id.Name) > 2 && id.Name[0] == 'k' && id.Name[1] >= 'A' && id.Name[1] <= 'Z' { - should := string(id.Name[1]+'a'-'A') + id.Name[2:] - f.errorf(id, 0.8, link(styleGuideBase+"#mixed-caps"), category("naming"), "don't use leading k in Go names; %s %s should be %s", thing, id.Name, should) - } - - should := lintName(id.Name) - if id.Name == should { - return - } - - if len(id.Name) > 2 && strings.Contains(id.Name[1:], "_") { - f.errorf(id, 0.9, link("http://golang.org/doc/effective_go.html#mixed-caps"), category("naming"), "don't use underscores in Go names; %s %s should be %s", thing, id.Name, should) - return - } - f.errorf(id, 0.8, link(styleGuideBase+"#initialisms"), category("naming"), "%s %s should be %s", thing, id.Name, should) - } - checkList := func(fl *ast.FieldList, thing string) { - if fl == nil { - return - } - for _, f := range fl.List { - for _, id := range f.Names { - check(id, thing) - } - } - } - f.walk(func(node ast.Node) bool { - switch v := node.(type) { - case *ast.AssignStmt: - if v.Tok == token.ASSIGN { - return true - } - for _, exp := range v.Lhs { - if id, ok := exp.(*ast.Ident); ok { - check(id, "var") - } - } - case *ast.FuncDecl: - if f.isTest() && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { - return true - } - - thing := "func" - if v.Recv != nil { - thing = "method" - } - - // Exclude naming warnings for functions that are exported to C but - // not exported in the Go API. - // See https://github.com/golang/lint/issues/144. - if ast.IsExported(v.Name.Name) || !isCgoExported(v) { - check(v.Name, thing) - } - - checkList(v.Type.Params, thing+" parameter") - checkList(v.Type.Results, thing+" result") - case *ast.GenDecl: - if v.Tok == token.IMPORT { - return true - } - var thing string - switch v.Tok { - case token.CONST: - thing = "const" - case token.TYPE: - thing = "type" - case token.VAR: - thing = "var" - } - for _, spec := range v.Specs { - switch s := spec.(type) { - case *ast.TypeSpec: - check(s.Name, thing) - case *ast.ValueSpec: - for _, id := range s.Names { - check(id, thing) - } - } - } - case *ast.InterfaceType: - // Do not check interface method names. - // They are often constrainted by the method names of concrete types. 
- for _, x := range v.Methods.List { - ft, ok := x.Type.(*ast.FuncType) - if !ok { // might be an embedded interface name - continue - } - checkList(ft.Params, "interface method parameter") - checkList(ft.Results, "interface method result") - } - case *ast.RangeStmt: - if v.Tok == token.ASSIGN { - return true - } - if id, ok := v.Key.(*ast.Ident); ok { - check(id, "range var") - } - if id, ok := v.Value.(*ast.Ident); ok { - check(id, "range var") - } - case *ast.StructType: - for _, f := range v.Fields.List { - for _, id := range f.Names { - check(id, "struct field") - } - } - } - return true - }) -} - -// lintName returns a different name if it should be different. -func lintName(name string) (should string) { - // Fast path for simple cases: "_" and all lowercase. - if name == "_" { - return name - } - allLower := true - for _, r := range name { - if !unicode.IsLower(r) { - allLower = false - break - } - } - if allLower { - return name - } - - // Split camelCase at any lower->upper transition, and split on underscores. - // Check each word for common initialisms. - runes := []rune(name) - w, i := 0, 0 // index of start of word, scan - for i+1 <= len(runes) { - eow := false // whether we hit the end of a word - if i+1 == len(runes) { - eow = true - } else if runes[i+1] == '_' { - // underscore; shift the remainder forward over any run of underscores - eow = true - n := 1 - for i+n+1 < len(runes) && runes[i+n+1] == '_' { - n++ - } - - // Leave at most one underscore if the underscore is between two digits - if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) { - n-- - } - - copy(runes[i+1:], runes[i+n+1:]) - runes = runes[:len(runes)-n] - } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) { - // lower->non-lower - eow = true - } - i++ - if !eow { - continue - } - - // [w,i) is a word. - word := string(runes[w:i]) - if u := strings.ToUpper(word); commonInitialisms[u] { - // Keep consistent case, which is lowercase only at the start. - if w == 0 && unicode.IsLower(runes[w]) { - u = strings.ToLower(u) - } - // All the common initialisms are ASCII, - // so we can replace the bytes exactly. - copy(runes[w:], []rune(u)) - } else if w > 0 && strings.ToLower(word) == word { - // already all lowercase, and not the first word, so uppercase the first character. - runes[w] = unicode.ToUpper(runes[w]) - } - w = i - } - return string(runes) -} - -// commonInitialisms is a set of common initialisms. -// Only add entries that are highly unlikely to be non-initialisms. -// For instance, "ID" is fine (Freudian code is rare), but "AND" is not. -var commonInitialisms = map[string]bool{ - "ACL": true, - "API": true, - "ASCII": true, - "CPU": true, - "CSS": true, - "DNS": true, - "EOF": true, - "GUID": true, - "HTML": true, - "HTTP": true, - "HTTPS": true, - "ID": true, - "IP": true, - "JSON": true, - "LHS": true, - "QPS": true, - "RAM": true, - "RHS": true, - "RPC": true, - "SLA": true, - "SMTP": true, - "SQL": true, - "SSH": true, - "TCP": true, - "TLS": true, - "TTL": true, - "UDP": true, - "UI": true, - "UID": true, - "UUID": true, - "URI": true, - "URL": true, - "UTF8": true, - "VM": true, - "XML": true, - "XMPP": true, - "XSRF": true, - "XSS": true, -} - -// lintTypeDoc examines the doc comment on a type. -// It complains if they are missing from an exported type, -// or if they are not of the standard form. 
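lintName and the commonInitialisms table above are what turn a name like GetUrl into the suggestion GetURL. The helper below reproduces only a small corner of that behaviour (upper-casing a known initialism at the end of a name); fixInitialisms and its shortened initialism list are invented for the sketch:

package main

import (
	"fmt"
	"strings"
)

// fixInitialisms upper-cases a known initialism appearing as the final
// camelCase word of name; a tiny subset of what lintName does.
func fixInitialisms(name string) string {
	for _, ini := range []string{"Url", "Id", "Http", "Api", "Json"} {
		if strings.HasSuffix(name, ini) {
			return strings.TrimSuffix(name, ini) + strings.ToUpper(ini)
		}
	}
	return name
}

func main() {
	fmt.Println(fixInitialisms("GetUrl"))    // GetURL
	fmt.Println(fixInitialisms("userId"))    // userID
	fmt.Println(fixInitialisms("ServeHttp")) // ServeHTTP
}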
-func (f *file) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) { - if !ast.IsExported(t.Name.Name) { - return - } - if doc == nil { - f.errorf(t, 1, link(docCommentsLink), category("comments"), "exported type %v should have comment or be unexported", t.Name) - return - } - - s := doc.Text() - articles := [...]string{"A", "An", "The"} - for _, a := range articles { - if strings.HasPrefix(s, a+" ") { - s = s[len(a)+1:] - break - } - } - if !strings.HasPrefix(s, t.Name.Name+" ") { - f.errorf(doc, 1, link(docCommentsLink), category("comments"), `comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name) - } -} - -var commonMethods = map[string]bool{ - "Error": true, - "Read": true, - "ServeHTTP": true, - "String": true, - "Write": true, -} - -// lintFuncDoc examines doc comments on functions and methods. -// It complains if they are missing, or not of the right form. -// It has specific exclusions for well-known methods (see commonMethods above). -func (f *file) lintFuncDoc(fn *ast.FuncDecl) { - if !ast.IsExported(fn.Name.Name) { - // func is unexported - return - } - kind := "function" - name := fn.Name.Name - if fn.Recv != nil && len(fn.Recv.List) > 0 { - // method - kind = "method" - recv := receiverType(fn) - if !ast.IsExported(recv) { - // receiver is unexported - return - } - if commonMethods[name] { - return - } - switch name { - case "Len", "Less", "Swap": - if f.pkg.sortable[recv] { - return - } - } - name = recv + "." + name - } - if fn.Doc == nil { - f.errorf(fn, 1, link(docCommentsLink), category("comments"), "exported %s %s should have comment or be unexported", kind, name) - return - } - s := fn.Doc.Text() - prefix := fn.Name.Name + " " - if !strings.HasPrefix(s, prefix) { - f.errorf(fn.Doc, 1, link(docCommentsLink), category("comments"), `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix) - } -} - -// lintValueSpecDoc examines package-global variables and constants. -// It complains if they are not individually declared, -// or if they are not suitably documented in the right form (unless they are in a block that is commented). -func (f *file) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) { - kind := "var" - if gd.Tok == token.CONST { - kind = "const" - } - - if len(vs.Names) > 1 { - // Check that none are exported except for the first. - for _, n := range vs.Names[1:] { - if ast.IsExported(n.Name) { - f.errorf(vs, 1, category("comments"), "exported %s %s should have its own declaration", kind, n.Name) - return - } - } - } - - // Only one name. - name := vs.Names[0].Name - if !ast.IsExported(name) { - return - } - - if vs.Doc == nil && gd.Doc == nil { - if genDeclMissingComments[gd] { - return - } - block := "" - if kind == "const" && gd.Lparen.IsValid() { - block = " (or a comment on this block)" - } - f.errorf(vs, 1, link(docCommentsLink), category("comments"), "exported %s %s should have comment%s or be unexported", kind, name, block) - genDeclMissingComments[gd] = true - return - } - // If this GenDecl has parens and a comment, we don't check its comment form. - if gd.Lparen.IsValid() && gd.Doc != nil { - return - } - // The relevant text to check will be on either vs.Doc or gd.Doc. - // Use vs.Doc preferentially. 
- doc := vs.Doc - if doc == nil { - doc = gd.Doc - } - prefix := name + " " - if !strings.HasPrefix(doc.Text(), prefix) { - f.errorf(doc, 1, link(docCommentsLink), category("comments"), `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix) - } -} - -func (f *file) checkStutter(id *ast.Ident, thing string) { - pkg, name := f.f.Name.Name, id.Name - if !ast.IsExported(name) { - // unexported name - return - } - // A name stutters if the package name is a strict prefix - // and the next character of the name starts a new word. - if len(name) <= len(pkg) { - // name is too short to stutter. - // This permits the name to be the same as the package name. - return - } - if !strings.EqualFold(pkg, name[:len(pkg)]) { - return - } - // We can assume the name is well-formed UTF-8. - // If the next rune after the package name is uppercase or an underscore - // the it's starting a new word and thus this name stutters. - rem := name[len(pkg):] - if next, _ := utf8.DecodeRuneInString(rem); next == '_' || unicode.IsUpper(next) { - f.errorf(id, 0.8, link(styleGuideBase+"#package-names"), category("naming"), "%s name will be used as %s.%s by other packages, and that stutters; consider calling this %s", thing, pkg, name, rem) - } -} - -// zeroLiteral is a set of ast.BasicLit values that are zero values. -// It is not exhaustive. -var zeroLiteral = map[string]bool{ - "false": true, // bool - // runes - `'\x00'`: true, - `'\000'`: true, - // strings - `""`: true, - "``": true, - // numerics - "0": true, - "0.": true, - "0.0": true, - "0i": true, -} - -// lintVarDecls examines variable declarations. It complains about declarations with -// redundant LHS types that can be inferred from the RHS. -func (f *file) lintVarDecls() { - var lastGen *ast.GenDecl // last GenDecl entered. - - f.walk(func(node ast.Node) bool { - switch v := node.(type) { - case *ast.GenDecl: - if v.Tok != token.CONST && v.Tok != token.VAR { - return false - } - lastGen = v - return true - case *ast.ValueSpec: - if lastGen.Tok == token.CONST { - return false - } - if len(v.Names) > 1 || v.Type == nil || len(v.Values) == 0 { - return false - } - rhs := v.Values[0] - // An underscore var appears in a common idiom for compile-time interface satisfaction, - // as in "var _ Interface = (*Concrete)(nil)". - if isIdent(v.Names[0], "_") { - return false - } - // If the RHS is a zero value, suggest dropping it. - zero := false - if lit, ok := rhs.(*ast.BasicLit); ok { - zero = zeroLiteral[lit.Value] - } else if isIdent(rhs, "nil") { - zero = true - } - if zero { - f.errorf(rhs, 0.9, category("zero-value"), "should drop = %s from declaration of var %s; it is the zero value", f.render(rhs), v.Names[0]) - return false - } - lhsTyp := f.pkg.typeOf(v.Type) - rhsTyp := f.pkg.typeOf(rhs) - - if !validType(lhsTyp) || !validType(rhsTyp) { - // Type checking failed (often due to missing imports). - return false - } - - if !types.Identical(lhsTyp, rhsTyp) { - // Assignment to a different type is not redundant. - return false - } - - // The next three conditions are for suppressing the warning in situations - // where we were unable to typecheck. - - // If the LHS type is an interface, don't warn, since it is probably a - // concrete type on the RHS. Note that our feeble lexical check here - // will only pick up interface{} and other literal interface types; - // that covers most of the cases we care to exclude right now. 
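lintVarDecls, which starts above and finishes just below, flags declarations whose left-hand type or zero-value initializer is redundant. The pairs here show the flagged spellings next to the preferred ones (variable names are arbitrary):

package main

import "fmt"

func main() {
	// Spellings the check flags:
	var s string = fmt.Sprint(42) // type string is inferable from the RHS
	var n int = 0                 // = 0 is the zero value and can be dropped

	// Preferred spellings:
	t := fmt.Sprint(42)
	var m int

	fmt.Println(s, n, t, m)
}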
- if _, ok := v.Type.(*ast.InterfaceType); ok { - return false - } - // If the RHS is an untyped const, only warn if the LHS type is its default type. - if defType, ok := f.isUntypedConst(rhs); ok && !isIdent(v.Type, defType) { - return false - } - - f.errorf(v.Type, 0.8, category("type-inference"), "should omit type %s from declaration of var %s; it will be inferred from the right-hand side", f.render(v.Type), v.Names[0]) - return false - } - return true - }) -} - -func validType(T types.Type) bool { - return T != nil && - T != types.Typ[types.Invalid] && - !strings.Contains(T.String(), "invalid type") // good but not foolproof -} - -// lintElses examines else blocks. It complains about any else block whose if block ends in a return. -func (f *file) lintElses() { - // We don't want to flag if { } else if { } else { } constructions. - // They will appear as an IfStmt whose Else field is also an IfStmt. - // Record such a node so we ignore it when we visit it. - ignore := make(map[*ast.IfStmt]bool) - - f.walk(func(node ast.Node) bool { - ifStmt, ok := node.(*ast.IfStmt) - if !ok || ifStmt.Else == nil { - return true - } - if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok { - ignore[elseif] = true - return true - } - if ignore[ifStmt] { - return true - } - if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok { - // only care about elses without conditions - return true - } - if len(ifStmt.Body.List) == 0 { - return true - } - shortDecl := false // does the if statement have a ":=" initialization statement? - if ifStmt.Init != nil { - if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE { - shortDecl = true - } - } - lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1] - if _, ok := lastStmt.(*ast.ReturnStmt); ok { - extra := "" - if shortDecl { - extra = " (move short variable declaration to its own line if necessary)" - } - f.errorf(ifStmt.Else, 1, link(styleGuideBase+"#indent-error-flow"), category("indent"), "if block ends with a return statement, so drop this else and outdent its block"+extra) - } - return true - }) -} - -// lintRanges examines range clauses. It complains about redundant constructions. -func (f *file) lintRanges() { - f.walk(func(node ast.Node) bool { - rs, ok := node.(*ast.RangeStmt) - if !ok { - return true - } - - if isIdent(rs.Key, "_") && (rs.Value == nil || isIdent(rs.Value, "_")) { - p := f.errorf(rs.Key, 1, category("range-loop"), "should omit values from range; this loop is equivalent to `for range ...`") - - newRS := *rs // shallow copy - newRS.Value = nil - newRS.Key = nil - p.ReplacementLine = f.firstLineOf(&newRS, rs) - - return true - } - - if isIdent(rs.Value, "_") { - p := f.errorf(rs.Value, 1, category("range-loop"), "should omit 2nd value from range; this loop is equivalent to `for %s %s range ...`", f.render(rs.Key), rs.Tok) - - newRS := *rs // shallow copy - newRS.Value = nil - p.ReplacementLine = f.firstLineOf(&newRS, rs) - } - - return true - }) -} - -// lintErrorf examines errors.New and testing.Error calls. It complains if its only argument is an fmt.Sprintf invocation. 
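lintErrorf, described just above, rewrites errors.New(fmt.Sprintf(...)) into fmt.Errorf(...). Both spellings side by side (the message and variable names are made up):

package main

import (
	"errors"
	"fmt"
)

func main() {
	name := "config.yaml"

	// Form the check flags: errors.New wrapping fmt.Sprintf.
	flagged := errors.New(fmt.Sprintf("cannot open %s", name))

	// Suggested replacement: fmt.Errorf formats directly.
	preferred := fmt.Errorf("cannot open %s", name)

	fmt.Println(flagged)
	fmt.Println(preferred)
}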
-func (f *file) lintErrorf() { - f.walk(func(node ast.Node) bool { - ce, ok := node.(*ast.CallExpr) - if !ok || len(ce.Args) != 1 { - return true - } - isErrorsNew := isPkgDot(ce.Fun, "errors", "New") - var isTestingError bool - se, ok := ce.Fun.(*ast.SelectorExpr) - if ok && se.Sel.Name == "Error" { - if typ := f.pkg.typeOf(se.X); typ != nil { - isTestingError = typ.String() == "*testing.T" - } - } - if !isErrorsNew && !isTestingError { - return true - } - if !f.imports("errors") { - return true - } - arg := ce.Args[0] - ce, ok = arg.(*ast.CallExpr) - if !ok || !isPkgDot(ce.Fun, "fmt", "Sprintf") { - return true - } - errorfPrefix := "fmt" - if isTestingError { - errorfPrefix = f.render(se.X) - } - p := f.errorf(node, 1, category("errors"), "should replace %s(fmt.Sprintf(...)) with %s.Errorf(...)", f.render(se), errorfPrefix) - - m := f.srcLineWithMatch(ce, `^(.*)`+f.render(se)+`\(fmt\.Sprintf\((.*)\)\)(.*)$`) - if m != nil { - p.ReplacementLine = m[1] + errorfPrefix + ".Errorf(" + m[2] + ")" + m[3] - } - - return true - }) -} - -// lintErrors examines global error vars. It complains if they aren't named in the standard way. -func (f *file) lintErrors() { - for _, decl := range f.f.Decls { - gd, ok := decl.(*ast.GenDecl) - if !ok || gd.Tok != token.VAR { - continue - } - for _, spec := range gd.Specs { - spec := spec.(*ast.ValueSpec) - if len(spec.Names) != 1 || len(spec.Values) != 1 { - continue - } - ce, ok := spec.Values[0].(*ast.CallExpr) - if !ok { - continue - } - if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { - continue - } - - id := spec.Names[0] - prefix := "err" - if id.IsExported() { - prefix = "Err" - } - if !strings.HasPrefix(id.Name, prefix) { - f.errorf(id, 0.9, category("naming"), "error var %s should have name of the form %sFoo", id.Name, prefix) - } - } - } -} - -func lintErrorString(s string) (isClean bool, conf float64) { - const basicConfidence = 0.8 - const capConfidence = basicConfidence - 0.2 - first, firstN := utf8.DecodeRuneInString(s) - last, _ := utf8.DecodeLastRuneInString(s) - if last == '.' || last == ':' || last == '!' || last == '\n' { - return false, basicConfidence - } - if unicode.IsUpper(first) { - // People use proper nouns and exported Go identifiers in error strings, - // so decrease the confidence of warnings for capitalization. - if len(s) <= firstN { - return false, capConfidence - } - // Flag strings starting with something that doesn't look like an initialism. - if second, _ := utf8.DecodeRuneInString(s[firstN:]); !unicode.IsUpper(second) { - return false, capConfidence - } - } - return true, 0 -} - -// lintErrorStrings examines error strings. -// It complains if they are capitalized or end in punctuation or a newline. -func (f *file) lintErrorStrings() { - f.walk(func(node ast.Node) bool { - ce, ok := node.(*ast.CallExpr) - if !ok { - return true - } - if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { - return true - } - if len(ce.Args) < 1 { - return true - } - str, ok := ce.Args[0].(*ast.BasicLit) - if !ok || str.Kind != token.STRING { - return true - } - s, _ := strconv.Unquote(str.Value) // can assume well-formed Go - if s == "" { - return true - } - clean, conf := lintErrorString(s) - if clean { - return true - } - - f.errorf(str, conf, link(styleGuideBase+"#error-strings"), category("errors"), - "error strings should not be capitalized or end with punctuation or a newline") - return true - }) -} - -// lintReceiverNames examines receiver names. 
It complains about inconsistent -// names used for the same type and names such as "this". -func (f *file) lintReceiverNames() { - typeReceiver := map[string]string{} - f.walk(func(n ast.Node) bool { - fn, ok := n.(*ast.FuncDecl) - if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { - return true - } - names := fn.Recv.List[0].Names - if len(names) < 1 { - return true - } - name := names[0].Name - const ref = styleGuideBase + "#receiver-names" - if name == "_" { - f.errorf(n, 1, link(ref), category("naming"), `receiver name should not be an underscore, omit the name if it is unused`) - return true - } - if name == "this" || name == "self" { - f.errorf(n, 1, link(ref), category("naming"), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) - return true - } - recv := receiverType(fn) - if prev, ok := typeReceiver[recv]; ok && prev != name { - f.errorf(n, 1, link(ref), category("naming"), "receiver name %s should be consistent with previous receiver name %s for %s", name, prev, recv) - return true - } - typeReceiver[recv] = name - return true - }) -} - -// lintIncDec examines statements that increment or decrement a variable. -// It complains if they don't use x++ or x--. -func (f *file) lintIncDec() { - f.walk(func(n ast.Node) bool { - as, ok := n.(*ast.AssignStmt) - if !ok { - return true - } - if len(as.Lhs) != 1 { - return true - } - if !isOne(as.Rhs[0]) { - return true - } - var suffix string - switch as.Tok { - case token.ADD_ASSIGN: - suffix = "++" - case token.SUB_ASSIGN: - suffix = "--" - default: - return true - } - f.errorf(as, 0.8, category("unary-op"), "should replace %s with %s%s", f.render(as), f.render(as.Lhs[0]), suffix) - return true - }) -} - -// lintErrorReturn examines function declarations that return an error. -// It complains if the error isn't the last parameter. -func (f *file) lintErrorReturn() { - f.walk(func(n ast.Node) bool { - fn, ok := n.(*ast.FuncDecl) - if !ok || fn.Type.Results == nil { - return true - } - ret := fn.Type.Results.List - if len(ret) <= 1 { - return true - } - if isIdent(ret[len(ret)-1].Type, "error") { - return true - } - // An error return parameter should be the last parameter. - // Flag any error parameters found before the last. - for _, r := range ret[:len(ret)-1] { - if isIdent(r.Type, "error") { - f.errorf(fn, 0.9, category("arg-order"), "error should be the last type when returning multiple items") - break // only flag one - } - } - return true - }) -} - -// lintUnexportedReturn examines exported function declarations. -// It complains if any return an unexported type. -func (f *file) lintUnexportedReturn() { - f.walk(func(n ast.Node) bool { - fn, ok := n.(*ast.FuncDecl) - if !ok { - return true - } - if fn.Type.Results == nil { - return false - } - if !fn.Name.IsExported() { - return false - } - thing := "func" - if fn.Recv != nil && len(fn.Recv.List) > 0 { - thing = "method" - if !ast.IsExported(receiverType(fn)) { - // Don't report exported methods of unexported types, - // such as private implementations of sort.Interface. - return false - } - } - for _, ret := range fn.Type.Results.List { - typ := f.pkg.typeOf(ret.Type) - if exportedType(typ) { - continue - } - f.errorf(ret.Type, 0.8, category("unexported-type-in-api"), - "exported %s %s returns unexported type %s, which can be annoying to use", - thing, fn.Name.Name, typ) - break // only flag one - } - return false - }) -} - -// exportedType reports whether typ is an exported type. 
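Two of the checks above are about call-site ergonomics: lintIncDec prefers x++ over x += 1, and lintErrorReturn wants error to be the last result. A small sketch of both conventions (doThing is a made-up function):

package main

import "fmt"

// doThing returns its result first and the error last, the order
// lintErrorReturn expects.
func doThing(n int) (int, error) {
	return n * 2, nil
}

func main() {
	count := 0
	count++ // preferred over count += 1, which lintIncDec flags

	if v, err := doThing(count); err == nil {
		fmt.Println(v)
	}
}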
-// It is imprecise, and will err on the side of returning true, -// such as for composite types. -func exportedType(typ types.Type) bool { - switch T := typ.(type) { - case *types.Named: - // Builtin types have no package. - return T.Obj().Pkg() == nil || T.Obj().Exported() - case *types.Map: - return exportedType(T.Key()) && exportedType(T.Elem()) - case interface { - Elem() types.Type - }: // array, slice, pointer, chan - return exportedType(T.Elem()) - } - // Be conservative about other types, such as struct, interface, etc. - return true -} - -// timeSuffixes is a list of name suffixes that imply a time unit. -// This is not an exhaustive list. -var timeSuffixes = []string{ - "Sec", "Secs", "Seconds", - "Msec", "Msecs", - "Milli", "Millis", "Milliseconds", - "Usec", "Usecs", "Microseconds", - "MS", "Ms", -} - -func (f *file) lintTimeNames() { - f.walk(func(node ast.Node) bool { - v, ok := node.(*ast.ValueSpec) - if !ok { - return true - } - for _, name := range v.Names { - origTyp := f.pkg.typeOf(name) - // Look for time.Duration or *time.Duration; - // the latter is common when using flag.Duration. - typ := origTyp - if pt, ok := typ.(*types.Pointer); ok { - typ = pt.Elem() - } - if !f.pkg.isNamedType(typ, "time", "Duration") { - continue - } - suffix := "" - for _, suf := range timeSuffixes { - if strings.HasSuffix(name.Name, suf) { - suffix = suf - break - } - } - if suffix == "" { - continue - } - f.errorf(v, 0.9, category("time"), "var %s is of type %v; don't use unit-specific suffix %q", name.Name, origTyp, suffix) - } - return true - }) -} - -// lintContextKeyTypes checks for call expressions to context.WithValue with -// basic types used for the key argument. -// See: https://golang.org/issue/17293 -func (f *file) lintContextKeyTypes() { - f.walk(func(node ast.Node) bool { - switch node := node.(type) { - case *ast.CallExpr: - f.checkContextKeyType(node) - } - - return true - }) -} - -// checkContextKeyType reports an error if the call expression calls -// context.WithValue with a key argument of basic type. -func (f *file) checkContextKeyType(x *ast.CallExpr) { - sel, ok := x.Fun.(*ast.SelectorExpr) - if !ok { - return - } - pkg, ok := sel.X.(*ast.Ident) - if !ok || pkg.Name != "context" { - return - } - if sel.Sel.Name != "WithValue" { - return - } - - // key is second argument to context.WithValue - if len(x.Args) != 3 { - return - } - key := f.pkg.typesInfo.Types[x.Args[1]] - - if ktyp, ok := key.Type.(*types.Basic); ok && ktyp.Kind() != types.Invalid { - f.errorf(x, 1.0, category("context"), fmt.Sprintf("should not use basic type %s as key in context.WithValue", key.Type)) - } -} - -// lintContextArgs examines function declarations that contain an -// argument with a type of context.Context -// It complains if that argument isn't the first parameter. -func (f *file) lintContextArgs() { - f.walk(func(n ast.Node) bool { - fn, ok := n.(*ast.FuncDecl) - if !ok || len(fn.Type.Params.List) <= 1 { - return true - } - // A context.Context should be the first parameter of a function. - // Flag any that show up after the first. - for _, arg := range fn.Type.Params.List[1:] { - if isPkgDot(arg.Type, "context", "Context") { - f.errorf(fn, 0.9, link("https://golang.org/pkg/context/"), category("arg-order"), "context.Context should be the first parameter of a function") - break // only flag one - } - } - return true - }) -} - -// containsComments returns whether the interval [start, end) contains any -// comments without "// MATCH " prefix. 
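lintContextKeyTypes and lintContextArgs, both deleted above, push callers toward non-basic context keys and toward context.Context as the first parameter. A sketch that satisfies both (the ctxKey type and fetch function are invented):

package main

import (
	"context"
	"fmt"
)

// ctxKey is a dedicated unexported type; using a plain string or int
// key in context.WithValue is what lintContextKeyTypes flags.
type ctxKey string

const userKey ctxKey = "user"

// fetch takes context.Context first, as lintContextArgs expects.
func fetch(ctx context.Context, id int) string {
	user, _ := ctx.Value(userKey).(string)
	if user == "" {
		user = "anonymous"
	}
	return fmt.Sprintf("%s requested item %d", user, id)
}

func main() {
	ctx := context.WithValue(context.Background(), userKey, "alice")
	fmt.Println(fetch(ctx, 42))
}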
-func (f *file) containsComments(start, end token.Pos) bool { - for _, cgroup := range f.f.Comments { - comments := cgroup.List - if comments[0].Slash >= end { - // All comments starting with this group are after end pos. - return false - } - if comments[len(comments)-1].Slash < start { - // Comments group ends before start pos. - continue - } - for _, c := range comments { - if start <= c.Slash && c.Slash < end && !strings.HasPrefix(c.Text, "// MATCH ") { - return true - } - } - } - return false -} - -// receiverType returns the named type of the method receiver, sans "*", -// or "invalid-type" if fn.Recv is ill formed. -func receiverType(fn *ast.FuncDecl) string { - switch e := fn.Recv.List[0].Type.(type) { - case *ast.Ident: - return e.Name - case *ast.StarExpr: - if id, ok := e.X.(*ast.Ident); ok { - return id.Name - } - } - // The parser accepts much more than just the legal forms. - return "invalid-type" -} - -func (f *file) walk(fn func(ast.Node) bool) { - ast.Walk(walker(fn), f.f) -} - -func (f *file) render(x interface{}) string { - var buf bytes.Buffer - if err := printer.Fprint(&buf, f.fset, x); err != nil { - panic(err) - } - return buf.String() -} - -func (f *file) debugRender(x interface{}) string { - var buf bytes.Buffer - if err := ast.Fprint(&buf, f.fset, x, nil); err != nil { - panic(err) - } - return buf.String() -} - -// walker adapts a function to satisfy the ast.Visitor interface. -// The function return whether the walk should proceed into the node's children. -type walker func(ast.Node) bool - -func (w walker) Visit(node ast.Node) ast.Visitor { - if w(node) { - return w - } - return nil -} - -func isIdent(expr ast.Expr, ident string) bool { - id, ok := expr.(*ast.Ident) - return ok && id.Name == ident -} - -// isBlank returns whether id is the blank identifier "_". -// If id == nil, the answer is false. -func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" } - -func isPkgDot(expr ast.Expr, pkg, name string) bool { - sel, ok := expr.(*ast.SelectorExpr) - return ok && isIdent(sel.X, pkg) && isIdent(sel.Sel, name) -} - -func isOne(expr ast.Expr) bool { - lit, ok := expr.(*ast.BasicLit) - return ok && lit.Kind == token.INT && lit.Value == "1" -} - -func isCgoExported(f *ast.FuncDecl) bool { - if f.Recv != nil || f.Doc == nil { - return false - } - - cgoExport := regexp.MustCompile(fmt.Sprintf("(?m)^//export %s$", regexp.QuoteMeta(f.Name.Name))) - for _, c := range f.Doc.List { - if cgoExport.MatchString(c.Text) { - return true - } - } - return false -} - -var basicTypeKinds = map[types.BasicKind]string{ - types.UntypedBool: "bool", - types.UntypedInt: "int", - types.UntypedRune: "rune", - types.UntypedFloat: "float64", - types.UntypedComplex: "complex128", - types.UntypedString: "string", -} - -// isUntypedConst reports whether expr is an untyped constant, -// and indicates what its default type is. -// scope may be nil. -func (f *file) isUntypedConst(expr ast.Expr) (defType string, ok bool) { - // Re-evaluate expr outside of its context to see if it's untyped. - // (An expr evaluated within, for example, an assignment context will get the type of the LHS.) - exprStr := f.render(expr) - tv, err := types.Eval(f.fset, f.pkg.typesPkg, expr.Pos(), exprStr) - if err != nil { - return "", false - } - if b, ok := tv.Type.(*types.Basic); ok { - if dt, ok := basicTypeKinds[b.Kind()]; ok { - return dt, true - } - } - - return "", false -} - -// firstLineOf renders the given node and returns its first line. -// It will also match the indentation of another node. 
-func (f *file) firstLineOf(node, match ast.Node) string { - line := f.render(node) - if i := strings.Index(line, "\n"); i >= 0 { - line = line[:i] - } - return f.indentOf(match) + line -} - -func (f *file) indentOf(node ast.Node) string { - line := srcLine(f.src, f.fset.Position(node.Pos())) - for i, r := range line { - switch r { - case ' ', '\t': - default: - return line[:i] - } - } - return line // unusual or empty line -} - -func (f *file) srcLineWithMatch(node ast.Node, pattern string) (m []string) { - line := srcLine(f.src, f.fset.Position(node.Pos())) - line = strings.TrimSuffix(line, "\n") - rx := regexp.MustCompile(pattern) - return rx.FindStringSubmatch(line) -} - -// imports returns true if the current file imports the specified package path. -func (f *file) imports(importPath string) bool { - all := astutil.Imports(f.fset, f.f) - for _, p := range all { - for _, i := range p { - uq, err := strconv.Unquote(i.Path.Value) - if err == nil && importPath == uq { - return true - } - } - } - return false -} - -// srcLine returns the complete line at p, including the terminating newline. -func srcLine(src []byte, p token.Position) string { - // Run to end of line in both directions if not at line start/end. - lo, hi := p.Offset, p.Offset+1 - for lo > 0 && src[lo-1] != '\n' { - lo-- - } - for hi < len(src) && src[hi-1] != '\n' { - hi++ - } - return string(src[lo:hi]) -} diff --git a/vendor/golang.org/x/tools/AUTHORS b/vendor/golang.org/x/tools/AUTHORS deleted file mode 100644 index 15167cd..0000000 --- a/vendor/golang.org/x/tools/AUTHORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code refers to The Go Authors for copyright purposes. -# The master list of authors is in the main Go distribution, -# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/golang.org/x/tools/CONTRIBUTORS b/vendor/golang.org/x/tools/CONTRIBUTORS deleted file mode 100644 index 1c4577e..0000000 --- a/vendor/golang.org/x/tools/CONTRIBUTORS +++ /dev/null @@ -1,3 +0,0 @@ -# This source code was written by the Go contributors. -# The master list of contributors is in the main Go distribution, -# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/golang.org/x/tools/LICENSE b/vendor/golang.org/x/tools/LICENSE deleted file mode 100644 index 6a66aea..0000000 --- a/vendor/golang.org/x/tools/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/tools/PATENTS b/vendor/golang.org/x/tools/PATENTS deleted file mode 100644 index 7330990..0000000 --- a/vendor/golang.org/x/tools/PATENTS +++ /dev/null @@ -1,22 +0,0 @@ -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/tools/cmd/getgo/LICENSE b/vendor/golang.org/x/tools/cmd/getgo/LICENSE deleted file mode 100644 index 32017f8..0000000 --- a/vendor/golang.org/x/tools/cmd/getgo/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2017 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go deleted file mode 100644 index 6b7052b..0000000 --- a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go +++ /dev/null @@ -1,627 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package astutil - -// This file defines utilities for working with source positions. - -import ( - "fmt" - "go/ast" - "go/token" - "sort" -) - -// PathEnclosingInterval returns the node that encloses the source -// interval [start, end), and all its ancestors up to the AST root. -// -// The definition of "enclosing" used by this function considers -// additional whitespace abutting a node to be enclosed by it. -// In this example: -// -// z := x + y // add them -// <-A-> -// <----B-----> -// -// the ast.BinaryExpr(+) node is considered to enclose interval B -// even though its [Pos()..End()) is actually only interval A. -// This behaviour makes user interfaces more tolerant of imperfect -// input. -// -// This function treats tokens as nodes, though they are not included -// in the result. e.g. PathEnclosingInterval("+") returns the -// enclosing ast.BinaryExpr("x + y"). -// -// If start==end, the 1-char interval following start is used instead. -// -// The 'exact' result is true if the interval contains only path[0] -// and perhaps some adjacent whitespace. It is false if the interval -// overlaps multiple children of path[0], or if it contains only -// interior whitespace of path[0]. -// In this example: -// -// z := x + y // add them -// <--C--> <---E--> -// ^ -// D -// -// intervals C, D and E are inexact. C is contained by the -// z-assignment statement, because it spans three of its children (:=, -// x, +). So too is the 1-char interval D, because it contains only -// interior whitespace of the assignment. E is considered interior -// whitespace of the BlockStmt containing the assignment. -// -// Precondition: [start, end) both lie within the same file as root. -// TODO(adonovan): return (nil, false) in this case and remove precond. -// Requires FileSet; see loader.tokenFileContainsPos. -// -// Postcondition: path is never nil; it always contains at least 'root'. -// -func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) { - // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging - - // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end). - var visit func(node ast.Node) bool - visit = func(node ast.Node) bool { - path = append(path, node) - - nodePos := node.Pos() - nodeEnd := node.End() - - // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging - - // Intersect [start, end) with interval of node. - if start < nodePos { - start = nodePos - } - if end > nodeEnd { - end = nodeEnd - } - - // Find sole child that contains [start, end). 
- children := childrenOf(node) - l := len(children) - for i, child := range children { - // [childPos, childEnd) is unaugmented interval of child. - childPos := child.Pos() - childEnd := child.End() - - // [augPos, augEnd) is whitespace-augmented interval of child. - augPos := childPos - augEnd := childEnd - if i > 0 { - augPos = children[i-1].End() // start of preceding whitespace - } - if i < l-1 { - nextChildPos := children[i+1].Pos() - // Does [start, end) lie between child and next child? - if start >= augEnd && end <= nextChildPos { - return false // inexact match - } - augEnd = nextChildPos // end of following whitespace - } - - // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n", - // i, augPos, augEnd, start, end) // debugging - - // Does augmented child strictly contain [start, end)? - if augPos <= start && end <= augEnd { - _, isToken := child.(tokenNode) - return isToken || visit(child) - } - - // Does [start, end) overlap multiple children? - // i.e. left-augmented child contains start - // but LR-augmented child does not contain end. - if start < childEnd && end > augEnd { - break - } - } - - // No single child contained [start, end), - // so node is the result. Is it exact? - - // (It's tempting to put this condition before the - // child loop, but it gives the wrong result in the - // case where a node (e.g. ExprStmt) and its sole - // child have equal intervals.) - if start == nodePos && end == nodeEnd { - return true // exact match - } - - return false // inexact: overlaps multiple children - } - - if start > end { - start, end = end, start - } - - if start < root.End() && end > root.Pos() { - if start == end { - end = start + 1 // empty interval => interval of size 1 - } - exact = visit(root) - - // Reverse the path: - for i, l := 0, len(path); i < l/2; i++ { - path[i], path[l-1-i] = path[l-1-i], path[i] - } - } else { - // Selection lies within whitespace preceding the - // first (or following the last) declaration in the file. - // The result nonetheless always includes the ast.File. - path = append(path, root) - } - - return -} - -// tokenNode is a dummy implementation of ast.Node for a single token. -// They are used transiently by PathEnclosingInterval but never escape -// this package. -// -type tokenNode struct { - pos token.Pos - end token.Pos -} - -func (n tokenNode) Pos() token.Pos { - return n.pos -} - -func (n tokenNode) End() token.Pos { - return n.end -} - -func tok(pos token.Pos, len int) ast.Node { - return tokenNode{pos, pos + token.Pos(len)} -} - -// childrenOf returns the direct non-nil children of ast.Node n. -// It may include fake ast.Node implementations for bare tokens. -// it is not safe to call (e.g.) ast.Walk on such nodes. -// -func childrenOf(n ast.Node) []ast.Node { - var children []ast.Node - - // First add nodes for all true subtrees. - ast.Inspect(n, func(node ast.Node) bool { - if node == n { // push n - return true // recur - } - if node != nil { // push child - children = append(children, node) - } - return false // no recursion - }) - - // Then add fake Nodes for bare tokens. 
- switch n := n.(type) { - case *ast.ArrayType: - children = append(children, - tok(n.Lbrack, len("[")), - tok(n.Elt.End(), len("]"))) - - case *ast.AssignStmt: - children = append(children, - tok(n.TokPos, len(n.Tok.String()))) - - case *ast.BasicLit: - children = append(children, - tok(n.ValuePos, len(n.Value))) - - case *ast.BinaryExpr: - children = append(children, tok(n.OpPos, len(n.Op.String()))) - - case *ast.BlockStmt: - children = append(children, - tok(n.Lbrace, len("{")), - tok(n.Rbrace, len("}"))) - - case *ast.BranchStmt: - children = append(children, - tok(n.TokPos, len(n.Tok.String()))) - - case *ast.CallExpr: - children = append(children, - tok(n.Lparen, len("(")), - tok(n.Rparen, len(")"))) - if n.Ellipsis != 0 { - children = append(children, tok(n.Ellipsis, len("..."))) - } - - case *ast.CaseClause: - if n.List == nil { - children = append(children, - tok(n.Case, len("default"))) - } else { - children = append(children, - tok(n.Case, len("case"))) - } - children = append(children, tok(n.Colon, len(":"))) - - case *ast.ChanType: - switch n.Dir { - case ast.RECV: - children = append(children, tok(n.Begin, len("<-chan"))) - case ast.SEND: - children = append(children, tok(n.Begin, len("chan<-"))) - case ast.RECV | ast.SEND: - children = append(children, tok(n.Begin, len("chan"))) - } - - case *ast.CommClause: - if n.Comm == nil { - children = append(children, - tok(n.Case, len("default"))) - } else { - children = append(children, - tok(n.Case, len("case"))) - } - children = append(children, tok(n.Colon, len(":"))) - - case *ast.Comment: - // nop - - case *ast.CommentGroup: - // nop - - case *ast.CompositeLit: - children = append(children, - tok(n.Lbrace, len("{")), - tok(n.Rbrace, len("{"))) - - case *ast.DeclStmt: - // nop - - case *ast.DeferStmt: - children = append(children, - tok(n.Defer, len("defer"))) - - case *ast.Ellipsis: - children = append(children, - tok(n.Ellipsis, len("..."))) - - case *ast.EmptyStmt: - // nop - - case *ast.ExprStmt: - // nop - - case *ast.Field: - // TODO(adonovan): Field.{Doc,Comment,Tag}? - - case *ast.FieldList: - children = append(children, - tok(n.Opening, len("(")), - tok(n.Closing, len(")"))) - - case *ast.File: - // TODO test: Doc - children = append(children, - tok(n.Package, len("package"))) - - case *ast.ForStmt: - children = append(children, - tok(n.For, len("for"))) - - case *ast.FuncDecl: - // TODO(adonovan): FuncDecl.Comment? - - // Uniquely, FuncDecl breaks the invariant that - // preorder traversal yields tokens in lexical order: - // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func. - // - // As a workaround, we inline the case for FuncType - // here and order things correctly. 
- // - children = nil // discard ast.Walk(FuncDecl) info subtrees - children = append(children, tok(n.Type.Func, len("func"))) - if n.Recv != nil { - children = append(children, n.Recv) - } - children = append(children, n.Name) - if n.Type.Params != nil { - children = append(children, n.Type.Params) - } - if n.Type.Results != nil { - children = append(children, n.Type.Results) - } - if n.Body != nil { - children = append(children, n.Body) - } - - case *ast.FuncLit: - // nop - - case *ast.FuncType: - if n.Func != 0 { - children = append(children, - tok(n.Func, len("func"))) - } - - case *ast.GenDecl: - children = append(children, - tok(n.TokPos, len(n.Tok.String()))) - if n.Lparen != 0 { - children = append(children, - tok(n.Lparen, len("(")), - tok(n.Rparen, len(")"))) - } - - case *ast.GoStmt: - children = append(children, - tok(n.Go, len("go"))) - - case *ast.Ident: - children = append(children, - tok(n.NamePos, len(n.Name))) - - case *ast.IfStmt: - children = append(children, - tok(n.If, len("if"))) - - case *ast.ImportSpec: - // TODO(adonovan): ImportSpec.{Doc,EndPos}? - - case *ast.IncDecStmt: - children = append(children, - tok(n.TokPos, len(n.Tok.String()))) - - case *ast.IndexExpr: - children = append(children, - tok(n.Lbrack, len("{")), - tok(n.Rbrack, len("}"))) - - case *ast.InterfaceType: - children = append(children, - tok(n.Interface, len("interface"))) - - case *ast.KeyValueExpr: - children = append(children, - tok(n.Colon, len(":"))) - - case *ast.LabeledStmt: - children = append(children, - tok(n.Colon, len(":"))) - - case *ast.MapType: - children = append(children, - tok(n.Map, len("map"))) - - case *ast.ParenExpr: - children = append(children, - tok(n.Lparen, len("(")), - tok(n.Rparen, len(")"))) - - case *ast.RangeStmt: - children = append(children, - tok(n.For, len("for")), - tok(n.TokPos, len(n.Tok.String()))) - - case *ast.ReturnStmt: - children = append(children, - tok(n.Return, len("return"))) - - case *ast.SelectStmt: - children = append(children, - tok(n.Select, len("select"))) - - case *ast.SelectorExpr: - // nop - - case *ast.SendStmt: - children = append(children, - tok(n.Arrow, len("<-"))) - - case *ast.SliceExpr: - children = append(children, - tok(n.Lbrack, len("[")), - tok(n.Rbrack, len("]"))) - - case *ast.StarExpr: - children = append(children, tok(n.Star, len("*"))) - - case *ast.StructType: - children = append(children, tok(n.Struct, len("struct"))) - - case *ast.SwitchStmt: - children = append(children, tok(n.Switch, len("switch"))) - - case *ast.TypeAssertExpr: - children = append(children, - tok(n.Lparen-1, len(".")), - tok(n.Lparen, len("(")), - tok(n.Rparen, len(")"))) - - case *ast.TypeSpec: - // TODO(adonovan): TypeSpec.{Doc,Comment}? - - case *ast.TypeSwitchStmt: - children = append(children, tok(n.Switch, len("switch"))) - - case *ast.UnaryExpr: - children = append(children, tok(n.OpPos, len(n.Op.String()))) - - case *ast.ValueSpec: - // TODO(adonovan): ValueSpec.{Doc,Comment}? - - case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt: - // nop - } - - // TODO(adonovan): opt: merge the logic of ast.Inspect() into - // the switch above so we can make interleaved callbacks for - // both Nodes and Tokens in the right order and avoid the need - // to sort. 
- sort.Sort(byPos(children)) - - return children -} - -type byPos []ast.Node - -func (sl byPos) Len() int { - return len(sl) -} -func (sl byPos) Less(i, j int) bool { - return sl[i].Pos() < sl[j].Pos() -} -func (sl byPos) Swap(i, j int) { - sl[i], sl[j] = sl[j], sl[i] -} - -// NodeDescription returns a description of the concrete type of n suitable -// for a user interface. -// -// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident, -// StarExpr) we could be much more specific given the path to the AST -// root. Perhaps we should do that. -// -func NodeDescription(n ast.Node) string { - switch n := n.(type) { - case *ast.ArrayType: - return "array type" - case *ast.AssignStmt: - return "assignment" - case *ast.BadDecl: - return "bad declaration" - case *ast.BadExpr: - return "bad expression" - case *ast.BadStmt: - return "bad statement" - case *ast.BasicLit: - return "basic literal" - case *ast.BinaryExpr: - return fmt.Sprintf("binary %s operation", n.Op) - case *ast.BlockStmt: - return "block" - case *ast.BranchStmt: - switch n.Tok { - case token.BREAK: - return "break statement" - case token.CONTINUE: - return "continue statement" - case token.GOTO: - return "goto statement" - case token.FALLTHROUGH: - return "fall-through statement" - } - case *ast.CallExpr: - if len(n.Args) == 1 && !n.Ellipsis.IsValid() { - return "function call (or conversion)" - } - return "function call" - case *ast.CaseClause: - return "case clause" - case *ast.ChanType: - return "channel type" - case *ast.CommClause: - return "communication clause" - case *ast.Comment: - return "comment" - case *ast.CommentGroup: - return "comment group" - case *ast.CompositeLit: - return "composite literal" - case *ast.DeclStmt: - return NodeDescription(n.Decl) + " statement" - case *ast.DeferStmt: - return "defer statement" - case *ast.Ellipsis: - return "ellipsis" - case *ast.EmptyStmt: - return "empty statement" - case *ast.ExprStmt: - return "expression statement" - case *ast.Field: - // Can be any of these: - // struct {x, y int} -- struct field(s) - // struct {T} -- anon struct field - // interface {I} -- interface embedding - // interface {f()} -- interface method - // func (A) func(B) C -- receiver, param(s), result(s) - return "field/method/parameter" - case *ast.FieldList: - return "field/method/parameter list" - case *ast.File: - return "source file" - case *ast.ForStmt: - return "for loop" - case *ast.FuncDecl: - return "function declaration" - case *ast.FuncLit: - return "function literal" - case *ast.FuncType: - return "function type" - case *ast.GenDecl: - switch n.Tok { - case token.IMPORT: - return "import declaration" - case token.CONST: - return "constant declaration" - case token.TYPE: - return "type declaration" - case token.VAR: - return "variable declaration" - } - case *ast.GoStmt: - return "go statement" - case *ast.Ident: - return "identifier" - case *ast.IfStmt: - return "if statement" - case *ast.ImportSpec: - return "import specification" - case *ast.IncDecStmt: - if n.Tok == token.INC { - return "increment statement" - } - return "decrement statement" - case *ast.IndexExpr: - return "index expression" - case *ast.InterfaceType: - return "interface type" - case *ast.KeyValueExpr: - return "key/value association" - case *ast.LabeledStmt: - return "statement label" - case *ast.MapType: - return "map type" - case *ast.Package: - return "package" - case *ast.ParenExpr: - return "parenthesized " + NodeDescription(n.X) - case *ast.RangeStmt: - return "range loop" - case *ast.ReturnStmt: - return 
"return statement" - case *ast.SelectStmt: - return "select statement" - case *ast.SelectorExpr: - return "selector" - case *ast.SendStmt: - return "channel send" - case *ast.SliceExpr: - return "slice expression" - case *ast.StarExpr: - return "*-operation" // load/store expr or pointer type - case *ast.StructType: - return "struct type" - case *ast.SwitchStmt: - return "switch statement" - case *ast.TypeAssertExpr: - return "type assertion" - case *ast.TypeSpec: - return "type specification" - case *ast.TypeSwitchStmt: - return "type switch" - case *ast.UnaryExpr: - return fmt.Sprintf("unary %s operation", n.Op) - case *ast.ValueSpec: - return "value specification" - - } - panic(fmt.Sprintf("unexpected node type: %T", n)) -} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go deleted file mode 100644 index 83f196c..0000000 --- a/vendor/golang.org/x/tools/go/ast/astutil/imports.go +++ /dev/null @@ -1,470 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package astutil contains common utilities for working with the Go AST. -package astutil // import "golang.org/x/tools/go/ast/astutil" - -import ( - "fmt" - "go/ast" - "go/token" - "strconv" - "strings" -) - -// AddImport adds the import path to the file f, if absent. -func AddImport(fset *token.FileSet, f *ast.File, ipath string) (added bool) { - return AddNamedImport(fset, f, "", ipath) -} - -// AddNamedImport adds the import path to the file f, if absent. -// If name is not empty, it is used to rename the import. -// -// For example, calling -// AddNamedImport(fset, f, "pathpkg", "path") -// adds -// import pathpkg "path" -func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added bool) { - if imports(f, ipath) { - return false - } - - newImport := &ast.ImportSpec{ - Path: &ast.BasicLit{ - Kind: token.STRING, - Value: strconv.Quote(ipath), - }, - } - if name != "" { - newImport.Name = &ast.Ident{Name: name} - } - - // Find an import decl to add to. - // The goal is to find an existing import - // whose import path has the longest shared - // prefix with ipath. - var ( - bestMatch = -1 // length of longest shared prefix - lastImport = -1 // index in f.Decls of the file's final import decl - impDecl *ast.GenDecl // import decl containing the best match - impIndex = -1 // spec index in impDecl containing the best match - - isThirdPartyPath = isThirdParty(ipath) - ) - for i, decl := range f.Decls { - gen, ok := decl.(*ast.GenDecl) - if ok && gen.Tok == token.IMPORT { - lastImport = i - // Do not add to import "C", to avoid disrupting the - // association with its doc comment, breaking cgo. - if declImports(gen, "C") { - continue - } - - // Match an empty import decl if that's all that is available. - if len(gen.Specs) == 0 && bestMatch == -1 { - impDecl = gen - } - - // Compute longest shared prefix with imports in this group and find best - // matched import spec. - // 1. Always prefer import spec with longest shared prefix. - // 2. While match length is 0, - // - for stdlib package: prefer first import spec. - // - for third party package: prefer first third party import spec. - // We cannot use last import spec as best match for third party package - // because grouped imports are usually placed last by goimports -local - // flag. - // See issue #19190. 
- seenAnyThirdParty := false - for j, spec := range gen.Specs { - impspec := spec.(*ast.ImportSpec) - p := importPath(impspec) - n := matchLen(p, ipath) - if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) { - bestMatch = n - impDecl = gen - impIndex = j - } - seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p) - } - } - } - - // If no import decl found, add one after the last import. - if impDecl == nil { - impDecl = &ast.GenDecl{ - Tok: token.IMPORT, - } - if lastImport >= 0 { - impDecl.TokPos = f.Decls[lastImport].End() - } else { - // There are no existing imports. - // Our new import goes after the package declaration and after - // the comment, if any, that starts on the same line as the - // package declaration. - impDecl.TokPos = f.Package - - file := fset.File(f.Package) - pkgLine := file.Line(f.Package) - for _, c := range f.Comments { - if file.Line(c.Pos()) > pkgLine { - break - } - impDecl.TokPos = c.End() - } - } - f.Decls = append(f.Decls, nil) - copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:]) - f.Decls[lastImport+1] = impDecl - } - - // Insert new import at insertAt. - insertAt := 0 - if impIndex >= 0 { - // insert after the found import - insertAt = impIndex + 1 - } - impDecl.Specs = append(impDecl.Specs, nil) - copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:]) - impDecl.Specs[insertAt] = newImport - pos := impDecl.Pos() - if insertAt > 0 { - // If there is a comment after an existing import, preserve the comment - // position by adding the new import after the comment. - if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil { - pos = spec.Comment.End() - } else { - // Assign same position as the previous import, - // so that the sorter sees it as being in the same block. - pos = impDecl.Specs[insertAt-1].Pos() - } - } - if newImport.Name != nil { - newImport.Name.NamePos = pos - } - newImport.Path.ValuePos = pos - newImport.EndPos = pos - - // Clean up parens. impDecl contains at least one spec. - if len(impDecl.Specs) == 1 { - // Remove unneeded parens. - impDecl.Lparen = token.NoPos - } else if !impDecl.Lparen.IsValid() { - // impDecl needs parens added. - impDecl.Lparen = impDecl.Specs[0].Pos() - } - - f.Imports = append(f.Imports, newImport) - - if len(f.Decls) <= 1 { - return true - } - - // Merge all the import declarations into the first one. - var first *ast.GenDecl - for i := 0; i < len(f.Decls); i++ { - decl := f.Decls[i] - gen, ok := decl.(*ast.GenDecl) - if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") { - continue - } - if first == nil { - first = gen - continue // Don't touch the first one. - } - // We now know there is more than one package in this import - // declaration. Ensure that it ends up parenthesized. - first.Lparen = first.Pos() - // Move the imports of the other import declaration to the first one. - for _, spec := range gen.Specs { - spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() - first.Specs = append(first.Specs, spec) - } - f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) - i-- - } - - return true -} - -func isThirdParty(importPath string) bool { - // Third party package import path usually contains "." (".com", ".org", ...) - // This logic is taken from golang.org/x/tools/imports package. - return strings.Contains(importPath, ".") -} - -// DeleteImport deletes the import path from the file f, if present. 
-func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) { - return DeleteNamedImport(fset, f, "", path) -} - -// DeleteNamedImport deletes the import with the given name and path from the file f, if present. -func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) { - var delspecs []*ast.ImportSpec - var delcomments []*ast.CommentGroup - - // Find the import nodes that import path, if any. - for i := 0; i < len(f.Decls); i++ { - decl := f.Decls[i] - gen, ok := decl.(*ast.GenDecl) - if !ok || gen.Tok != token.IMPORT { - continue - } - for j := 0; j < len(gen.Specs); j++ { - spec := gen.Specs[j] - impspec := spec.(*ast.ImportSpec) - if impspec.Name == nil && name != "" { - continue - } - if impspec.Name != nil && impspec.Name.Name != name { - continue - } - if importPath(impspec) != path { - continue - } - - // We found an import spec that imports path. - // Delete it. - delspecs = append(delspecs, impspec) - deleted = true - copy(gen.Specs[j:], gen.Specs[j+1:]) - gen.Specs = gen.Specs[:len(gen.Specs)-1] - - // If this was the last import spec in this decl, - // delete the decl, too. - if len(gen.Specs) == 0 { - copy(f.Decls[i:], f.Decls[i+1:]) - f.Decls = f.Decls[:len(f.Decls)-1] - i-- - break - } else if len(gen.Specs) == 1 { - if impspec.Doc != nil { - delcomments = append(delcomments, impspec.Doc) - } - if impspec.Comment != nil { - delcomments = append(delcomments, impspec.Comment) - } - for _, cg := range f.Comments { - // Found comment on the same line as the import spec. - if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line { - delcomments = append(delcomments, cg) - break - } - } - - spec := gen.Specs[0].(*ast.ImportSpec) - - // Move the documentation right after the import decl. - if spec.Doc != nil { - for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line { - fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) - } - } - for _, cg := range f.Comments { - if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line { - for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line { - fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) - } - break - } - } - } - if j > 0 { - lastImpspec := gen.Specs[j-1].(*ast.ImportSpec) - lastLine := fset.Position(lastImpspec.Path.ValuePos).Line - line := fset.Position(impspec.Path.ValuePos).Line - - // We deleted an entry but now there may be - // a blank line-sized hole where the import was. - if line-lastLine > 1 { - // There was a blank line immediately preceding the deleted import, - // so there's no need to close the hole. - // Do nothing. - } else if line != fset.File(gen.Rparen).LineCount() { - // There was no blank line. Close the hole. - fset.File(gen.Rparen).MergeLine(line) - } - } - j-- - } - } - - // Delete imports from f.Imports. - for i := 0; i < len(f.Imports); i++ { - imp := f.Imports[i] - for j, del := range delspecs { - if imp == del { - copy(f.Imports[i:], f.Imports[i+1:]) - f.Imports = f.Imports[:len(f.Imports)-1] - copy(delspecs[j:], delspecs[j+1:]) - delspecs = delspecs[:len(delspecs)-1] - i-- - break - } - } - } - - // Delete comments from f.Comments. 
- for i := 0; i < len(f.Comments); i++ { - cg := f.Comments[i] - for j, del := range delcomments { - if cg == del { - copy(f.Comments[i:], f.Comments[i+1:]) - f.Comments = f.Comments[:len(f.Comments)-1] - copy(delcomments[j:], delcomments[j+1:]) - delcomments = delcomments[:len(delcomments)-1] - i-- - break - } - } - } - - if len(delspecs) > 0 { - panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs)) - } - - return -} - -// RewriteImport rewrites any import of path oldPath to path newPath. -func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) { - for _, imp := range f.Imports { - if importPath(imp) == oldPath { - rewrote = true - // record old End, because the default is to compute - // it using the length of imp.Path.Value. - imp.EndPos = imp.End() - imp.Path.Value = strconv.Quote(newPath) - } - } - return -} - -// UsesImport reports whether a given import is used. -func UsesImport(f *ast.File, path string) (used bool) { - spec := importSpec(f, path) - if spec == nil { - return - } - - name := spec.Name.String() - switch name { - case "": - // If the package name is not explicitly specified, - // make an educated guess. This is not guaranteed to be correct. - lastSlash := strings.LastIndex(path, "/") - if lastSlash == -1 { - name = path - } else { - name = path[lastSlash+1:] - } - case "_", ".": - // Not sure if this import is used - err on the side of caution. - return true - } - - ast.Walk(visitFn(func(n ast.Node) { - sel, ok := n.(*ast.SelectorExpr) - if ok && isTopName(sel.X, name) { - used = true - } - }), f) - - return -} - -type visitFn func(node ast.Node) - -func (fn visitFn) Visit(node ast.Node) ast.Visitor { - fn(node) - return fn -} - -// imports returns true if f imports path. -func imports(f *ast.File, path string) bool { - return importSpec(f, path) != nil -} - -// importSpec returns the import spec if f imports path, -// or nil otherwise. -func importSpec(f *ast.File, path string) *ast.ImportSpec { - for _, s := range f.Imports { - if importPath(s) == path { - return s - } - } - return nil -} - -// importPath returns the unquoted import path of s, -// or "" if the path is not properly quoted. -func importPath(s *ast.ImportSpec) string { - t, err := strconv.Unquote(s.Path.Value) - if err == nil { - return t - } - return "" -} - -// declImports reports whether gen contains an import of path. -func declImports(gen *ast.GenDecl, path string) bool { - if gen.Tok != token.IMPORT { - return false - } - for _, spec := range gen.Specs { - impspec := spec.(*ast.ImportSpec) - if importPath(impspec) == path { - return true - } - } - return false -} - -// matchLen returns the length of the longest path segment prefix shared by x and y. -func matchLen(x, y string) int { - n := 0 - for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ { - if x[i] == '/' { - n++ - } - } - return n -} - -// isTopName returns true if n is a top-level unresolved identifier with the given name. -func isTopName(n ast.Expr, name string) bool { - id, ok := n.(*ast.Ident) - return ok && id.Name == name && id.Obj == nil -} - -// Imports returns the file imports grouped by paragraph. 
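For reference, a minimal sketch of the import-editing helpers defined in the deleted imports.go above (AddNamedImport, UsesImport, DeleteImport), written against the upstream golang.org/x/tools/go/ast/astutil package; the demo source text and file name are placeholders.

package main

import (
	"go/format"
	"go/parser"
	"go/token"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	src := `package demo

import "fmt"

func demo() { fmt.Println("hi") }
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Add a named import, then drop "fmt" only if it is no longer referenced.
	astutil.AddNamedImport(fset, f, "pathpkg", "path")
	if !astutil.UsesImport(f, "fmt") {
		astutil.DeleteImport(fset, f, "fmt")
	}

	// Print the rewritten file.
	format.Node(os.Stdout, fset, f)
}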
-func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec { - var groups [][]*ast.ImportSpec - - for _, decl := range f.Decls { - genDecl, ok := decl.(*ast.GenDecl) - if !ok || genDecl.Tok != token.IMPORT { - break - } - - group := []*ast.ImportSpec{} - - var lastLine int - for _, spec := range genDecl.Specs { - importSpec := spec.(*ast.ImportSpec) - pos := importSpec.Path.ValuePos - line := fset.Position(pos).Line - if lastLine > 0 && pos > 0 && line-lastLine > 1 { - groups = append(groups, group) - group = []*ast.ImportSpec{} - } - group = append(group, importSpec) - lastLine = line - } - groups = append(groups, group) - } - - return groups -} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go deleted file mode 100644 index cf72ea9..0000000 --- a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go +++ /dev/null @@ -1,477 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package astutil - -import ( - "fmt" - "go/ast" - "reflect" - "sort" -) - -// An ApplyFunc is invoked by Apply for each node n, even if n is nil, -// before and/or after the node's children, using a Cursor describing -// the current node and providing operations on it. -// -// The return value of ApplyFunc controls the syntax tree traversal. -// See Apply for details. -type ApplyFunc func(*Cursor) bool - -// Apply traverses a syntax tree recursively, starting with root, -// and calling pre and post for each node as described below. -// Apply returns the syntax tree, possibly modified. -// -// If pre is not nil, it is called for each node before the node's -// children are traversed (pre-order). If pre returns false, no -// children are traversed, and post is not called for that node. -// -// If post is not nil, and a prior call of pre didn't return false, -// post is called for each node after its children are traversed -// (post-order). If post returns false, traversal is terminated and -// Apply returns immediately. -// -// Only fields that refer to AST nodes are considered children; -// i.e., token.Pos, Scopes, Objects, and fields of basic types -// (strings, etc.) are ignored. -// -// Children are traversed in the order in which they appear in the -// respective node's struct definition. A package's files are -// traversed in the filenames' alphabetical order. -// -func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) { - parent := &struct{ ast.Node }{root} - defer func() { - if r := recover(); r != nil && r != abort { - panic(r) - } - result = parent.Node - }() - a := &application{pre: pre, post: post} - a.apply(parent, "Node", nil, root) - return -} - -var abort = new(int) // singleton, to signal termination of Apply - -// A Cursor describes a node encountered during Apply. -// Information about the node and its parent is available -// from the Node, Parent, Name, and Index methods. -// -// If p is a variable of type and value of the current parent node -// c.Parent(), and f is the field identifier with name c.Name(), -// the following invariants hold: -// -// p.f == c.Node() if c.Index() < 0 -// p.f[c.Index()] == c.Node() if c.Index() >= 0 -// -// The methods Replace, Delete, InsertBefore, and InsertAfter -// can be used to change the AST without disrupting Apply. -type Cursor struct { - parent ast.Node - name string - iter *iterator // valid if non-nil - node ast.Node -} - -// Node returns the current Node. 
-func (c *Cursor) Node() ast.Node { return c.node } - -// Parent returns the parent of the current Node. -func (c *Cursor) Parent() ast.Node { return c.parent } - -// Name returns the name of the parent Node field that contains the current Node. -// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns -// the filename for the current Node. -func (c *Cursor) Name() string { return c.name } - -// Index reports the index >= 0 of the current Node in the slice of Nodes that -// contains it, or a value < 0 if the current Node is not part of a slice. -// The index of the current node changes if InsertBefore is called while -// processing the current node. -func (c *Cursor) Index() int { - if c.iter != nil { - return c.iter.index - } - return -1 -} - -// field returns the current node's parent field value. -func (c *Cursor) field() reflect.Value { - return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name) -} - -// Replace replaces the current Node with n. -// The replacement node is not walked by Apply. -func (c *Cursor) Replace(n ast.Node) { - if _, ok := c.node.(*ast.File); ok { - file, ok := n.(*ast.File) - if !ok { - panic("attempt to replace *ast.File with non-*ast.File") - } - c.parent.(*ast.Package).Files[c.name] = file - return - } - - v := c.field() - if i := c.Index(); i >= 0 { - v = v.Index(i) - } - v.Set(reflect.ValueOf(n)) -} - -// Delete deletes the current Node from its containing slice. -// If the current Node is not part of a slice, Delete panics. -// As a special case, if the current node is a package file, -// Delete removes it from the package's Files map. -func (c *Cursor) Delete() { - if _, ok := c.node.(*ast.File); ok { - delete(c.parent.(*ast.Package).Files, c.name) - return - } - - i := c.Index() - if i < 0 { - panic("Delete node not contained in slice") - } - v := c.field() - l := v.Len() - reflect.Copy(v.Slice(i, l), v.Slice(i+1, l)) - v.Index(l - 1).Set(reflect.Zero(v.Type().Elem())) - v.SetLen(l - 1) - c.iter.step-- -} - -// InsertAfter inserts n after the current Node in its containing slice. -// If the current Node is not part of a slice, InsertAfter panics. -// Apply does not walk n. -func (c *Cursor) InsertAfter(n ast.Node) { - i := c.Index() - if i < 0 { - panic("InsertAfter node not contained in slice") - } - v := c.field() - v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) - l := v.Len() - reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l)) - v.Index(i + 1).Set(reflect.ValueOf(n)) - c.iter.step++ -} - -// InsertBefore inserts n before the current Node in its containing slice. -// If the current Node is not part of a slice, InsertBefore panics. -// Apply will not walk n. -func (c *Cursor) InsertBefore(n ast.Node) { - i := c.Index() - if i < 0 { - panic("InsertBefore node not contained in slice") - } - v := c.field() - v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) - l := v.Len() - reflect.Copy(v.Slice(i+1, l), v.Slice(i, l)) - v.Index(i).Set(reflect.ValueOf(n)) - c.iter.index++ -} - -// application carries all the shared data so we can pass it around cheaply. 
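A short sketch of the Apply/Cursor traversal described above, against the upstream golang.org/x/tools/go/ast/astutil package; it strips redundant parentheses with a post-order Cursor.Replace. The input snippet is made up for illustration.

package main

import (
	"go/ast"
	"go/format"
	"go/parser"
	"go/token"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "x.go", `package p
func f() int { return (((1 + 2))) }
`, 0)
	if err != nil {
		panic(err)
	}

	// Post-order rewrite: every ParenExpr is replaced by its operand,
	// so nested parentheses collapse from the inside out.
	astutil.Apply(f, nil, func(c *astutil.Cursor) bool {
		if p, ok := c.Node().(*ast.ParenExpr); ok {
			c.Replace(p.X)
		}
		return true
	})

	format.Node(os.Stdout, fset, f) // prints: return 1 + 2
}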
-type application struct { - pre, post ApplyFunc - cursor Cursor - iter iterator -} - -func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) { - // convert typed nil into untyped nil - if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() { - n = nil - } - - // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead - saved := a.cursor - a.cursor.parent = parent - a.cursor.name = name - a.cursor.iter = iter - a.cursor.node = n - - if a.pre != nil && !a.pre(&a.cursor) { - a.cursor = saved - return - } - - // walk children - // (the order of the cases matches the order of the corresponding node types in go/ast) - switch n := n.(type) { - case nil: - // nothing to do - - // Comments and fields - case *ast.Comment: - // nothing to do - - case *ast.CommentGroup: - if n != nil { - a.applyList(n, "List") - } - - case *ast.Field: - a.apply(n, "Doc", nil, n.Doc) - a.applyList(n, "Names") - a.apply(n, "Type", nil, n.Type) - a.apply(n, "Tag", nil, n.Tag) - a.apply(n, "Comment", nil, n.Comment) - - case *ast.FieldList: - a.applyList(n, "List") - - // Expressions - case *ast.BadExpr, *ast.Ident, *ast.BasicLit: - // nothing to do - - case *ast.Ellipsis: - a.apply(n, "Elt", nil, n.Elt) - - case *ast.FuncLit: - a.apply(n, "Type", nil, n.Type) - a.apply(n, "Body", nil, n.Body) - - case *ast.CompositeLit: - a.apply(n, "Type", nil, n.Type) - a.applyList(n, "Elts") - - case *ast.ParenExpr: - a.apply(n, "X", nil, n.X) - - case *ast.SelectorExpr: - a.apply(n, "X", nil, n.X) - a.apply(n, "Sel", nil, n.Sel) - - case *ast.IndexExpr: - a.apply(n, "X", nil, n.X) - a.apply(n, "Index", nil, n.Index) - - case *ast.SliceExpr: - a.apply(n, "X", nil, n.X) - a.apply(n, "Low", nil, n.Low) - a.apply(n, "High", nil, n.High) - a.apply(n, "Max", nil, n.Max) - - case *ast.TypeAssertExpr: - a.apply(n, "X", nil, n.X) - a.apply(n, "Type", nil, n.Type) - - case *ast.CallExpr: - a.apply(n, "Fun", nil, n.Fun) - a.applyList(n, "Args") - - case *ast.StarExpr: - a.apply(n, "X", nil, n.X) - - case *ast.UnaryExpr: - a.apply(n, "X", nil, n.X) - - case *ast.BinaryExpr: - a.apply(n, "X", nil, n.X) - a.apply(n, "Y", nil, n.Y) - - case *ast.KeyValueExpr: - a.apply(n, "Key", nil, n.Key) - a.apply(n, "Value", nil, n.Value) - - // Types - case *ast.ArrayType: - a.apply(n, "Len", nil, n.Len) - a.apply(n, "Elt", nil, n.Elt) - - case *ast.StructType: - a.apply(n, "Fields", nil, n.Fields) - - case *ast.FuncType: - a.apply(n, "Params", nil, n.Params) - a.apply(n, "Results", nil, n.Results) - - case *ast.InterfaceType: - a.apply(n, "Methods", nil, n.Methods) - - case *ast.MapType: - a.apply(n, "Key", nil, n.Key) - a.apply(n, "Value", nil, n.Value) - - case *ast.ChanType: - a.apply(n, "Value", nil, n.Value) - - // Statements - case *ast.BadStmt: - // nothing to do - - case *ast.DeclStmt: - a.apply(n, "Decl", nil, n.Decl) - - case *ast.EmptyStmt: - // nothing to do - - case *ast.LabeledStmt: - a.apply(n, "Label", nil, n.Label) - a.apply(n, "Stmt", nil, n.Stmt) - - case *ast.ExprStmt: - a.apply(n, "X", nil, n.X) - - case *ast.SendStmt: - a.apply(n, "Chan", nil, n.Chan) - a.apply(n, "Value", nil, n.Value) - - case *ast.IncDecStmt: - a.apply(n, "X", nil, n.X) - - case *ast.AssignStmt: - a.applyList(n, "Lhs") - a.applyList(n, "Rhs") - - case *ast.GoStmt: - a.apply(n, "Call", nil, n.Call) - - case *ast.DeferStmt: - a.apply(n, "Call", nil, n.Call) - - case *ast.ReturnStmt: - a.applyList(n, "Results") - - case *ast.BranchStmt: - a.apply(n, "Label", nil, n.Label) - - case *ast.BlockStmt: - 
a.applyList(n, "List") - - case *ast.IfStmt: - a.apply(n, "Init", nil, n.Init) - a.apply(n, "Cond", nil, n.Cond) - a.apply(n, "Body", nil, n.Body) - a.apply(n, "Else", nil, n.Else) - - case *ast.CaseClause: - a.applyList(n, "List") - a.applyList(n, "Body") - - case *ast.SwitchStmt: - a.apply(n, "Init", nil, n.Init) - a.apply(n, "Tag", nil, n.Tag) - a.apply(n, "Body", nil, n.Body) - - case *ast.TypeSwitchStmt: - a.apply(n, "Init", nil, n.Init) - a.apply(n, "Assign", nil, n.Assign) - a.apply(n, "Body", nil, n.Body) - - case *ast.CommClause: - a.apply(n, "Comm", nil, n.Comm) - a.applyList(n, "Body") - - case *ast.SelectStmt: - a.apply(n, "Body", nil, n.Body) - - case *ast.ForStmt: - a.apply(n, "Init", nil, n.Init) - a.apply(n, "Cond", nil, n.Cond) - a.apply(n, "Post", nil, n.Post) - a.apply(n, "Body", nil, n.Body) - - case *ast.RangeStmt: - a.apply(n, "Key", nil, n.Key) - a.apply(n, "Value", nil, n.Value) - a.apply(n, "X", nil, n.X) - a.apply(n, "Body", nil, n.Body) - - // Declarations - case *ast.ImportSpec: - a.apply(n, "Doc", nil, n.Doc) - a.apply(n, "Name", nil, n.Name) - a.apply(n, "Path", nil, n.Path) - a.apply(n, "Comment", nil, n.Comment) - - case *ast.ValueSpec: - a.apply(n, "Doc", nil, n.Doc) - a.applyList(n, "Names") - a.apply(n, "Type", nil, n.Type) - a.applyList(n, "Values") - a.apply(n, "Comment", nil, n.Comment) - - case *ast.TypeSpec: - a.apply(n, "Doc", nil, n.Doc) - a.apply(n, "Name", nil, n.Name) - a.apply(n, "Type", nil, n.Type) - a.apply(n, "Comment", nil, n.Comment) - - case *ast.BadDecl: - // nothing to do - - case *ast.GenDecl: - a.apply(n, "Doc", nil, n.Doc) - a.applyList(n, "Specs") - - case *ast.FuncDecl: - a.apply(n, "Doc", nil, n.Doc) - a.apply(n, "Recv", nil, n.Recv) - a.apply(n, "Name", nil, n.Name) - a.apply(n, "Type", nil, n.Type) - a.apply(n, "Body", nil, n.Body) - - // Files and packages - case *ast.File: - a.apply(n, "Doc", nil, n.Doc) - a.apply(n, "Name", nil, n.Name) - a.applyList(n, "Decls") - // Don't walk n.Comments; they have either been walked already if - // they are Doc comments, or they can be easily walked explicitly. - - case *ast.Package: - // collect and sort names for reproducible behavior - var names []string - for name := range n.Files { - names = append(names, name) - } - sort.Strings(names) - for _, name := range names { - a.apply(n, name, nil, n.Files[name]) - } - - default: - panic(fmt.Sprintf("Apply: unexpected node type %T", n)) - } - - if a.post != nil && !a.post(&a.cursor) { - panic(abort) - } - - a.cursor = saved -} - -// An iterator controls iteration over a slice of nodes. 
-type iterator struct { - index, step int -} - -func (a *application) applyList(parent ast.Node, name string) { - // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead - saved := a.iter - a.iter.index = 0 - for { - // must reload parent.name each time, since cursor modifications might change it - v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name) - if a.iter.index >= v.Len() { - break - } - - // element x may be nil in a bad AST - be cautious - var x ast.Node - if e := v.Index(a.iter.index); e.IsValid() { - x = e.Interface().(ast.Node) - } - - a.iter.step = 1 - a.apply(parent, name, &a.iter, x) - a.iter.index += a.iter.step - } - a.iter = saved -} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go deleted file mode 100644 index 7630629..0000000 --- a/vendor/golang.org/x/tools/go/ast/astutil/util.go +++ /dev/null @@ -1,14 +0,0 @@ -package astutil - -import "go/ast" - -// Unparen returns e with any enclosing parentheses stripped. -func Unparen(e ast.Expr) ast.Expr { - for { - p, ok := e.(*ast.ParenExpr) - if !ok { - return e - } - e = p.X - } -} diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/golang.org/x/tools/go/buildutil/allpackages.go deleted file mode 100644 index c0cb03e..0000000 --- a/vendor/golang.org/x/tools/go/buildutil/allpackages.go +++ /dev/null @@ -1,198 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package buildutil provides utilities related to the go/build -// package in the standard library. -// -// All I/O is done via the build.Context file system interface, which must -// be concurrency-safe. -package buildutil // import "golang.org/x/tools/go/buildutil" - -import ( - "go/build" - "os" - "path/filepath" - "sort" - "strings" - "sync" -) - -// AllPackages returns the package path of each Go package in any source -// directory of the specified build context (e.g. $GOROOT or an element -// of $GOPATH). Errors are ignored. The results are sorted. -// All package paths are canonical, and thus may contain "/vendor/". -// -// The result may include import paths for directories that contain no -// *.go files, such as "archive" (in $GOROOT/src). -// -// All I/O is done via the build.Context file system interface, -// which must be concurrency-safe. -// -func AllPackages(ctxt *build.Context) []string { - var list []string - ForEachPackage(ctxt, func(pkg string, _ error) { - list = append(list, pkg) - }) - sort.Strings(list) - return list -} - -// ForEachPackage calls the found function with the package path of -// each Go package it finds in any source directory of the specified -// build context (e.g. $GOROOT or an element of $GOPATH). -// All package paths are canonical, and thus may contain "/vendor/". -// -// If the package directory exists but could not be read, the second -// argument to the found function provides the error. -// -// All I/O is done via the build.Context file system interface, -// which must be concurrency-safe. -// -func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) { - ch := make(chan item) - - var wg sync.WaitGroup - for _, root := range ctxt.SrcDirs() { - root := root - wg.Add(1) - go func() { - allPackages(ctxt, root, ch) - wg.Done() - }() - } - go func() { - wg.Wait() - close(ch) - }() - - // All calls to found occur in the caller's goroutine. 
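A compact sketch of AllPackages as documented above, run against build.Default (upstream golang.org/x/tools/go/buildutil); the output depends on the local GOROOT/GOPATH layout.

package main

import (
	"fmt"
	"go/build"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	// Enumerates every package under the context's source directories;
	// errors are ignored and the result is sorted, per the doc comment above.
	for _, pkg := range buildutil.AllPackages(&build.Default) {
		fmt.Println(pkg)
	}
}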
- for i := range ch { - found(i.importPath, i.err) - } -} - -type item struct { - importPath string - err error // (optional) -} - -// We use a process-wide counting semaphore to limit -// the number of parallel calls to ReadDir. -var ioLimit = make(chan bool, 20) - -func allPackages(ctxt *build.Context, root string, ch chan<- item) { - root = filepath.Clean(root) + string(os.PathSeparator) - - var wg sync.WaitGroup - - var walkDir func(dir string) - walkDir = func(dir string) { - // Avoid .foo, _foo, and testdata directory trees. - base := filepath.Base(dir) - if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" { - return - } - - pkg := filepath.ToSlash(strings.TrimPrefix(dir, root)) - - // Prune search if we encounter any of these import paths. - switch pkg { - case "builtin": - return - } - - ioLimit <- true - files, err := ReadDir(ctxt, dir) - <-ioLimit - if pkg != "" || err != nil { - ch <- item{pkg, err} - } - for _, fi := range files { - fi := fi - if fi.IsDir() { - wg.Add(1) - go func() { - walkDir(filepath.Join(dir, fi.Name())) - wg.Done() - }() - } - } - } - - walkDir(root) - wg.Wait() -} - -// ExpandPatterns returns the set of packages matched by patterns, -// which may have the following forms: -// -// golang.org/x/tools/cmd/guru # a single package -// golang.org/x/tools/... # all packages beneath dir -// ... # the entire workspace. -// -// Order is significant: a pattern preceded by '-' removes matching -// packages from the set. For example, these patterns match all encoding -// packages except encoding/xml: -// -// encoding/... -encoding/xml -// -// A trailing slash in a pattern is ignored. (Path components of Go -// package names are separated by slash, not the platform's path separator.) -// -func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool { - // TODO(adonovan): support other features of 'go list': - // - "std"/"cmd"/"all" meta-packages - // - "..." not at the end of a pattern - // - relative patterns using "./" or "../" prefix - - pkgs := make(map[string]bool) - doPkg := func(pkg string, neg bool) { - if neg { - delete(pkgs, pkg) - } else { - pkgs[pkg] = true - } - } - - // Scan entire workspace if wildcards are present. - // TODO(adonovan): opt: scan only the necessary subtrees of the workspace. - var all []string - for _, arg := range patterns { - if strings.HasSuffix(arg, "...") { - all = AllPackages(ctxt) - break - } - } - - for _, arg := range patterns { - if arg == "" { - continue - } - - neg := arg[0] == '-' - if neg { - arg = arg[1:] - } - - if arg == "..." { - // ... matches all packages - for _, pkg := range all { - doPkg(pkg, neg) - } - } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg { - // dir/... 
matches all packages beneath dir - for _, pkg := range all { - if strings.HasPrefix(pkg, dir) && - (len(pkg) == len(dir) || pkg[len(dir)] == '/') { - doPkg(pkg, neg) - } - } - } else { - // single package - doPkg(strings.TrimSuffix(arg, "/"), neg) - } - } - - return pkgs -} diff --git a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go deleted file mode 100644 index 24cbcbe..0000000 --- a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go +++ /dev/null @@ -1,108 +0,0 @@ -package buildutil - -import ( - "fmt" - "go/build" - "io" - "io/ioutil" - "os" - "path" - "path/filepath" - "sort" - "strings" - "time" -) - -// FakeContext returns a build.Context for the fake file tree specified -// by pkgs, which maps package import paths to a mapping from file base -// names to contents. -// -// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides -// the necessary file access methods to read from memory instead of the -// real file system. -// -// Unlike a real file tree, the fake one has only two levels---packages -// and files---so ReadDir("/go/src/") returns all packages under -// /go/src/ including, for instance, "math" and "math/big". -// ReadDir("/go/src/math/big") would return all the files in the -// "math/big" package. -// -func FakeContext(pkgs map[string]map[string]string) *build.Context { - clean := func(filename string) string { - f := path.Clean(filepath.ToSlash(filename)) - // Removing "/go/src" while respecting segment - // boundaries has this unfortunate corner case: - if f == "/go/src" { - return "" - } - return strings.TrimPrefix(f, "/go/src/") - } - - ctxt := build.Default // copy - ctxt.GOROOT = "/go" - ctxt.GOPATH = "" - ctxt.IsDir = func(dir string) bool { - dir = clean(dir) - if dir == "" { - return true // needed by (*build.Context).SrcDirs - } - return pkgs[dir] != nil - } - ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) { - dir = clean(dir) - var fis []os.FileInfo - if dir == "" { - // enumerate packages - for importPath := range pkgs { - fis = append(fis, fakeDirInfo(importPath)) - } - } else { - // enumerate files of package - for basename := range pkgs[dir] { - fis = append(fis, fakeFileInfo(basename)) - } - } - sort.Sort(byName(fis)) - return fis, nil - } - ctxt.OpenFile = func(filename string) (io.ReadCloser, error) { - filename = clean(filename) - dir, base := path.Split(filename) - content, ok := pkgs[path.Clean(dir)][base] - if !ok { - return nil, fmt.Errorf("file not found: %s", filename) - } - return ioutil.NopCloser(strings.NewReader(content)), nil - } - ctxt.IsAbsPath = func(path string) bool { - path = filepath.ToSlash(path) - // Don't rely on the default (filepath.Path) since on - // Windows, it reports virtual paths as non-absolute. 
- return strings.HasPrefix(path, "/") - } - return &ctxt -} - -type byName []os.FileInfo - -func (s byName) Len() int { return len(s) } -func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] } -func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() } - -type fakeFileInfo string - -func (fi fakeFileInfo) Name() string { return string(fi) } -func (fakeFileInfo) Sys() interface{} { return nil } -func (fakeFileInfo) ModTime() time.Time { return time.Time{} } -func (fakeFileInfo) IsDir() bool { return false } -func (fakeFileInfo) Size() int64 { return 0 } -func (fakeFileInfo) Mode() os.FileMode { return 0644 } - -type fakeDirInfo string - -func (fd fakeDirInfo) Name() string { return string(fd) } -func (fakeDirInfo) Sys() interface{} { return nil } -func (fakeDirInfo) ModTime() time.Time { return time.Time{} } -func (fakeDirInfo) IsDir() bool { return true } -func (fakeDirInfo) Size() int64 { return 0 } -func (fakeDirInfo) Mode() os.FileMode { return 0755 } diff --git a/vendor/golang.org/x/tools/go/buildutil/overlay.go b/vendor/golang.org/x/tools/go/buildutil/overlay.go deleted file mode 100644 index 3f71c4f..0000000 --- a/vendor/golang.org/x/tools/go/buildutil/overlay.go +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package buildutil - -import ( - "bufio" - "bytes" - "fmt" - "go/build" - "io" - "io/ioutil" - "path/filepath" - "strconv" - "strings" -) - -// OverlayContext overlays a build.Context with additional files from -// a map. Files in the map take precedence over other files. -// -// In addition to plain string comparison, two file names are -// considered equal if their base names match and their directory -// components point at the same directory on the file system. That is, -// symbolic links are followed for directories, but not files. -// -// A common use case for OverlayContext is to allow editors to pass in -// a set of unsaved, modified files. -// -// Currently, only the Context.OpenFile function will respect the -// overlay. This may change in the future. -func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Context { - // TODO(dominikh): Implement IsDir, HasSubdir and ReadDir - - rc := func(data []byte) (io.ReadCloser, error) { - return ioutil.NopCloser(bytes.NewBuffer(data)), nil - } - - copy := *orig // make a copy - ctxt := © - ctxt.OpenFile = func(path string) (io.ReadCloser, error) { - // Fast path: names match exactly. - if content, ok := overlay[path]; ok { - return rc(content) - } - - // Slow path: check for same file under a different - // alias, perhaps due to a symbolic link. - for filename, content := range overlay { - if sameFile(path, filename) { - return rc(content) - } - } - - return OpenFile(orig, path) - } - return ctxt -} - -// ParseOverlayArchive parses an archive containing Go files and their -// contents. The result is intended to be used with OverlayContext. -// -// -// Archive format -// -// The archive consists of a series of files. Each file consists of a -// name, a decimal file size and the file contents, separated by -// newlinews. No newline follows after the file contents. -func ParseOverlayArchive(archive io.Reader) (map[string][]byte, error) { - overlay := make(map[string][]byte) - r := bufio.NewReader(archive) - for { - // Read file name. 
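A small sketch of OverlayContext as documented above (upstream golang.org/x/tools/go/buildutil); the overlay path and file contents are placeholders, and only OpenFile consults the overlay.

package main

import (
	"fmt"
	"go/build"
	"io/ioutil"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	overlay := map[string][]byte{
		"/work/p/p.go": []byte("package p\n\nconst Answer = 42\n"),
	}
	ctxt := buildutil.OverlayContext(&build.Default, overlay)

	// Reads come from the in-memory overlay instead of the real file system.
	rc, err := ctxt.OpenFile("/work/p/p.go")
	if err != nil {
		panic(err)
	}
	defer rc.Close()

	data, _ := ioutil.ReadAll(rc)
	fmt.Printf("%s", data)
}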
- filename, err := r.ReadString('\n') - if err != nil { - if err == io.EOF { - break // OK - } - return nil, fmt.Errorf("reading archive file name: %v", err) - } - filename = filepath.Clean(strings.TrimSpace(filename)) - - // Read file size. - sz, err := r.ReadString('\n') - if err != nil { - return nil, fmt.Errorf("reading size of archive file %s: %v", filename, err) - } - sz = strings.TrimSpace(sz) - size, err := strconv.ParseUint(sz, 10, 32) - if err != nil { - return nil, fmt.Errorf("parsing size of archive file %s: %v", filename, err) - } - - // Read file content. - content := make([]byte, size) - if _, err := io.ReadFull(r, content); err != nil { - return nil, fmt.Errorf("reading archive file %s: %v", filename, err) - } - overlay[filename] = content - } - - return overlay, nil -} diff --git a/vendor/golang.org/x/tools/go/buildutil/tags.go b/vendor/golang.org/x/tools/go/buildutil/tags.go deleted file mode 100644 index 486606f..0000000 --- a/vendor/golang.org/x/tools/go/buildutil/tags.go +++ /dev/null @@ -1,75 +0,0 @@ -package buildutil - -// This logic was copied from stringsFlag from $GOROOT/src/cmd/go/build.go. - -import "fmt" - -const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " + - "For more information about build tags, see the description of " + - "build constraints in the documentation for the go/build package" - -// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses -// a flag value in the same manner as go build's -tags flag and -// populates a []string slice. -// -// See $GOROOT/src/go/build/doc.go for description of build tags. -// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag. -// -// Example: -// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc) -type TagsFlag []string - -func (v *TagsFlag) Set(s string) error { - var err error - *v, err = splitQuotedFields(s) - if *v == nil { - *v = []string{} - } - return err -} - -func (v *TagsFlag) Get() interface{} { return *v } - -func splitQuotedFields(s string) ([]string, error) { - // Split fields allowing '' or "" around elements. - // Quotes further inside the string do not count. - var f []string - for len(s) > 0 { - for len(s) > 0 && isSpaceByte(s[0]) { - s = s[1:] - } - if len(s) == 0 { - break - } - // Accepted quoted string. No unescaping inside. - if s[0] == '"' || s[0] == '\'' { - quote := s[0] - s = s[1:] - i := 0 - for i < len(s) && s[i] != quote { - i++ - } - if i >= len(s) { - return nil, fmt.Errorf("unterminated %c string", quote) - } - f = append(f, s[:i]) - s = s[i+1:] - continue - } - i := 0 - for i < len(s) && !isSpaceByte(s[i]) { - i++ - } - f = append(f, s[:i]) - s = s[i:] - } - return f, nil -} - -func (v *TagsFlag) String() string { - return "" -} - -func isSpaceByte(c byte) bool { - return c == ' ' || c == '\t' || c == '\n' || c == '\r' -} diff --git a/vendor/golang.org/x/tools/go/buildutil/util.go b/vendor/golang.org/x/tools/go/buildutil/util.go deleted file mode 100644 index fc923d7..0000000 --- a/vendor/golang.org/x/tools/go/buildutil/util.go +++ /dev/null @@ -1,212 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
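The flag registration shown in the TagsFlag doc comment above, expanded into a runnable sketch (upstream golang.org/x/tools/go/buildutil).

package main

import (
	"flag"
	"fmt"
	"go/build"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	// Parses -tags the same way `go build -tags` does and stores the result
	// in the default build context's BuildTags.
	flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
	flag.Parse() // e.g. -tags "integration linux"
	fmt.Println("build tags:", build.Default.BuildTags)
}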
- -package buildutil - -import ( - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "io" - "io/ioutil" - "os" - "path" - "path/filepath" - "strings" -) - -// ParseFile behaves like parser.ParseFile, -// but uses the build context's file system interface, if any. -// -// If file is not absolute (as defined by IsAbsPath), the (dir, file) -// components are joined using JoinPath; dir must be absolute. -// -// The displayPath function, if provided, is used to transform the -// filename that will be attached to the ASTs. -// -// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws. -// -func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) { - if !IsAbsPath(ctxt, file) { - file = JoinPath(ctxt, dir, file) - } - rd, err := OpenFile(ctxt, file) - if err != nil { - return nil, err - } - defer rd.Close() // ignore error - if displayPath != nil { - file = displayPath(file) - } - return parser.ParseFile(fset, file, rd, mode) -} - -// ContainingPackage returns the package containing filename. -// -// If filename is not absolute, it is interpreted relative to working directory dir. -// All I/O is via the build context's file system interface, if any. -// -// The '...Files []string' fields of the resulting build.Package are not -// populated (build.FindOnly mode). -// -func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) { - if !IsAbsPath(ctxt, filename) { - filename = JoinPath(ctxt, dir, filename) - } - - // We must not assume the file tree uses - // "/" always, - // `\` always, - // or os.PathSeparator (which varies by platform), - // but to make any progress, we are forced to assume that - // paths will not use `\` unless the PathSeparator - // is also `\`, thus we can rely on filepath.ToSlash for some sanity. - - dirSlash := path.Dir(filepath.ToSlash(filename)) + "/" - - // We assume that no source root (GOPATH[i] or GOROOT) contains any other. - for _, srcdir := range ctxt.SrcDirs() { - srcdirSlash := filepath.ToSlash(srcdir) + "/" - if importPath, ok := HasSubdir(ctxt, srcdirSlash, dirSlash); ok { - return ctxt.Import(importPath, dir, build.FindOnly) - } - } - - return nil, fmt.Errorf("can't find package containing %s", filename) -} - -// -- Effective methods of file system interface ------------------------- - -// (go/build.Context defines these as methods, but does not export them.) - -// hasSubdir calls ctxt.HasSubdir (if not nil) or else uses -// the local file system to answer the question. -func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) { - if f := ctxt.HasSubdir; f != nil { - return f(root, dir) - } - - // Try using paths we received. - if rel, ok = hasSubdir(root, dir); ok { - return - } - - // Try expanding symlinks and comparing - // expanded against unexpanded and - // expanded against expanded. 
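A sketch of ParseFile and ContainingPackage from the util.go deletion above (upstream golang.org/x/tools/go/buildutil); the directory and file names are placeholders and assume such a file exists on disk.

package main

import (
	"fmt"
	"go/build"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	fset := token.NewFileSet()
	// "hello.go" is resolved relative to /tmp/demo through the context's
	// file system hooks; a nil displayPath leaves the filename untouched.
	f, err := buildutil.ParseFile(fset, &build.Default, nil, "/tmp/demo", "hello.go", parser.PackageClauseOnly)
	if err != nil {
		panic(err)
	}
	fmt.Println("package", f.Name.Name)

	pkg, err := buildutil.ContainingPackage(&build.Default, "/tmp/demo", "hello.go")
	if err != nil {
		panic(err)
	}
	fmt.Println("import path:", pkg.ImportPath)
}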
- rootSym, _ := filepath.EvalSymlinks(root) - dirSym, _ := filepath.EvalSymlinks(dir) - - if rel, ok = hasSubdir(rootSym, dir); ok { - return - } - if rel, ok = hasSubdir(root, dirSym); ok { - return - } - return hasSubdir(rootSym, dirSym) -} - -func hasSubdir(root, dir string) (rel string, ok bool) { - const sep = string(filepath.Separator) - root = filepath.Clean(root) - if !strings.HasSuffix(root, sep) { - root += sep - } - - dir = filepath.Clean(dir) - if !strings.HasPrefix(dir, root) { - return "", false - } - - return filepath.ToSlash(dir[len(root):]), true -} - -// FileExists returns true if the specified file exists, -// using the build context's file system interface. -func FileExists(ctxt *build.Context, path string) bool { - if ctxt.OpenFile != nil { - r, err := ctxt.OpenFile(path) - if err != nil { - return false - } - r.Close() // ignore error - return true - } - _, err := os.Stat(path) - return err == nil -} - -// OpenFile behaves like os.Open, -// but uses the build context's file system interface, if any. -func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) { - if ctxt.OpenFile != nil { - return ctxt.OpenFile(path) - } - return os.Open(path) -} - -// IsAbsPath behaves like filepath.IsAbs, -// but uses the build context's file system interface, if any. -func IsAbsPath(ctxt *build.Context, path string) bool { - if ctxt.IsAbsPath != nil { - return ctxt.IsAbsPath(path) - } - return filepath.IsAbs(path) -} - -// JoinPath behaves like filepath.Join, -// but uses the build context's file system interface, if any. -func JoinPath(ctxt *build.Context, path ...string) string { - if ctxt.JoinPath != nil { - return ctxt.JoinPath(path...) - } - return filepath.Join(path...) -} - -// IsDir behaves like os.Stat plus IsDir, -// but uses the build context's file system interface, if any. -func IsDir(ctxt *build.Context, path string) bool { - if ctxt.IsDir != nil { - return ctxt.IsDir(path) - } - fi, err := os.Stat(path) - return err == nil && fi.IsDir() -} - -// ReadDir behaves like ioutil.ReadDir, -// but uses the build context's file system interface, if any. -func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) { - if ctxt.ReadDir != nil { - return ctxt.ReadDir(path) - } - return ioutil.ReadDir(path) -} - -// SplitPathList behaves like filepath.SplitList, -// but uses the build context's file system interface, if any. -func SplitPathList(ctxt *build.Context, s string) []string { - if ctxt.SplitPathList != nil { - return ctxt.SplitPathList(s) - } - return filepath.SplitList(s) -} - -// sameFile returns true if x and y have the same basename and denote -// the same file. -// -func sameFile(x, y string) bool { - if path.Clean(x) == path.Clean(y) { - return true - } - if filepath.Base(x) == filepath.Base(y) { // (optimisation) - if xi, err := os.Stat(x); err == nil { - if yi, err := os.Stat(y); err == nil { - return os.SameFile(xi, yi) - } - } - } - return false -} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go deleted file mode 100644 index e53270e..0000000 --- a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package gcexportdata provides functions for locating, reading, and -// writing export data files containing type information produced by the -// gc compiler. 
This package supports go1.7 export data format and all -// later versions. -// -// This package replaces the deprecated golang.org/x/tools/go/gcimporter15 -// package, which will be deleted in October 2017. -// -// Although it might seem convenient for this package to live alongside -// go/types in the standard library, this would cause version skew -// problems for developer tools that use it, since they must be able to -// consume the outputs of the gc compiler both before and after a Go -// update such as from Go 1.7 to Go 1.8. Because this package lives in -// golang.org/x/tools, sites can update their version of this repo some -// time before the Go 1.8 release and rebuild and redeploy their -// developer tools, which will then be able to consume both Go 1.7 and -// Go 1.8 export data files, so they will work before and after the -// Go update. (See discussion at https://github.com/golang/go/issues/15651.) -// -package gcexportdata // import "golang.org/x/tools/go/gcexportdata" - -import ( - "bufio" - "bytes" - "fmt" - "go/token" - "go/types" - "io" - "io/ioutil" - - gcimporter "golang.org/x/tools/go/gcimporter15" -) - -// Find returns the name of an object (.o) or archive (.a) file -// containing type information for the specified import path, -// using the workspace layout conventions of go/build. -// If no file was found, an empty filename is returned. -// -// A relative srcDir is interpreted relative to the current working directory. -// -// Find also returns the package's resolved (canonical) import path, -// reflecting the effects of srcDir and vendoring on importPath. -func Find(importPath, srcDir string) (filename, path string) { - return gcimporter.FindPkg(importPath, srcDir) -} - -// NewReader returns a reader for the export data section of an object -// (.o) or archive (.a) file read from r. The new reader may provide -// additional trailing data beyond the end of the export data. -func NewReader(r io.Reader) (io.Reader, error) { - buf := bufio.NewReader(r) - _, err := gcimporter.FindExportData(buf) - // If we ever switch to a zip-like archive format with the ToC - // at the end, we can return the correct portion of export data, - // but for now we must return the entire rest of the file. - return buf, err -} - -// Read reads export data from in, decodes it, and returns type -// information for the package. -// The package name is specified by path. -// File position information is added to fset. -// -// Read may inspect and add to the imports map to ensure that references -// within the export data to other packages are consistent. The caller -// must ensure that imports[path] does not exist, or exists but is -// incomplete (see types.Package.Complete), and Read inserts the -// resulting package into this map entry. -// -// On return, the state of the reader is undefined. -func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) { - data, err := ioutil.ReadAll(in) - if err != nil { - return nil, fmt.Errorf("reading export data for %q: %v", path, err) - } - - if bytes.HasPrefix(data, []byte("!")) { - return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path) - } - - // The App Engine Go runtime v1.6 uses the old export data format. - // TODO(adonovan): delete once v1.7 has been around for a while. 
- if bytes.HasPrefix(data, []byte("package ")) { - return gcimporter.ImportData(imports, path, path, bytes.NewReader(data)) - } - - _, pkg, err := gcimporter.BImportData(fset, imports, data, path) - return pkg, err -} - -// Write writes encoded type information for the specified package to out. -// The FileSet provides file position information for named objects. -func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error { - _, err := out.Write(gcimporter.BExportData(fset, pkg)) - return err -} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/importer.go b/vendor/golang.org/x/tools/go/gcexportdata/importer.go deleted file mode 100644 index efe221e..0000000 --- a/vendor/golang.org/x/tools/go/gcexportdata/importer.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gcexportdata - -import ( - "fmt" - "go/token" - "go/types" - "os" -) - -// NewImporter returns a new instance of the types.Importer interface -// that reads type information from export data files written by gc. -// The Importer also satisfies types.ImporterFrom. -// -// Export data files are located using "go build" workspace conventions -// and the build.Default context. -// -// Use this importer instead of go/importer.For("gc", ...) to avoid the -// version-skew problems described in the documentation of this package, -// or to control the FileSet or access the imports map populated during -// package loading. -// -func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom { - return importer{fset, imports} -} - -type importer struct { - fset *token.FileSet - imports map[string]*types.Package -} - -func (imp importer) Import(importPath string) (*types.Package, error) { - return imp.ImportFrom(importPath, "", 0) -} - -func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) { - filename, path := Find(importPath, srcDir) - if filename == "" { - if importPath == "unsafe" { - // Even for unsafe, call Find first in case - // the package was vendored. - return types.Unsafe, nil - } - return nil, fmt.Errorf("can't find import: %s", importPath) - } - - if pkg, ok := imp.imports[path]; ok && pkg.Complete() { - return pkg, nil // cache hit - } - - // open file - f, err := os.Open(filename) - if err != nil { - return nil, err - } - defer func() { - f.Close() - if err != nil { - // add file name to error - err = fmt.Errorf("reading export data: %s: %v", filename, err) - } - }() - - r, err := NewReader(f) - if err != nil { - return nil, err - } - - return Read(r, imp.fset, imp.imports, path) -} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/main.go b/vendor/golang.org/x/tools/go/gcexportdata/main.go deleted file mode 100644 index 106046c..0000000 --- a/vendor/golang.org/x/tools/go/gcexportdata/main.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build ignore - -// The gcexportdata command is a diagnostic tool that displays the -// contents of gc export data files. 
-package main - -import ( - "flag" - "fmt" - "go/token" - "go/types" - "log" - "os" - - "golang.org/x/tools/go/gcexportdata" - "golang.org/x/tools/go/types/typeutil" -) - -func main() { - log.SetPrefix("gcexportdata: ") - log.SetFlags(0) - flag.Usage = func() { - fmt.Fprintln(os.Stderr, "usage: gcexportdata file.a") - } - flag.Parse() - if flag.NArg() != 1 { - flag.Usage() - os.Exit(2) - } - filename := flag.Args()[0] - - f, err := os.Open(filename) - if err != nil { - log.Fatal(err) - } - - r, err := gcexportdata.NewReader(f) - if err != nil { - log.Fatalf("%s: %s", filename, err) - } - - // Decode the package. - imports := make(map[string]*types.Package) - fset := token.NewFileSet() - pkg, err := gcexportdata.Read(r, fset, imports, "dummy") - if err != nil { - log.Fatal("%s: %s", filename, err) - } - - // Print all package-level declarations, including non-exported ones. - fmt.Printf("package %s\n", pkg.Name()) - for _, imp := range pkg.Imports() { - fmt.Printf("import %q\n", imp.Path()) - } - qual := func(p *types.Package) string { - if pkg == p { - return "" - } - return p.Name() - } - scope := pkg.Scope() - for _, name := range scope.Names() { - obj := scope.Lookup(name) - fmt.Printf("%s: %s\n", - fset.Position(obj.Pos()), - types.ObjectString(obj, qual)) - - // For types, print each method. - if _, ok := obj.(*types.TypeName); ok { - for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) { - fmt.Printf("%s: %s\n", - fset.Position(method.Obj().Pos()), - types.SelectionString(method, qual)) - } - } - } -} diff --git a/vendor/golang.org/x/tools/go/gcimporter15/bexport.go b/vendor/golang.org/x/tools/go/gcimporter15/bexport.go deleted file mode 100644 index cbf8bc0..0000000 --- a/vendor/golang.org/x/tools/go/gcimporter15/bexport.go +++ /dev/null @@ -1,828 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Binary package export. -// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go; -// see that file for specification of the format. - -package gcimporter - -import ( - "bytes" - "encoding/binary" - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "log" - "math" - "math/big" - "sort" - "strings" -) - -// If debugFormat is set, each integer and string value is preceded by a marker -// and position information in the encoding. This mechanism permits an importer -// to recognize immediately when it is out of sync. The importer recognizes this -// mode automatically (i.e., it can import export data produced with debugging -// support even if debugFormat is not set at the time of import). This mode will -// lead to massively larger export data (by a factor of 2 to 3) and should only -// be enabled during development and debugging. -// -// NOTE: This flag is the first flag to enable if importing dies because of -// (suspected) format errors, and whenever a change is made to the format. -const debugFormat = false // default: false - -// If trace is set, debugging output is printed to std out. -const trace = false // default: false - -// Current export format version. Increase with each format change. 
-// 4: type name objects support type aliases, uses aliasTag -// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used) -// 2: removed unused bool in ODCL export (compiler only) -// 1: header format change (more regular), export package for _ struct fields -// 0: Go1.7 encoding -const exportVersion = 4 - -// trackAllTypes enables cycle tracking for all types, not just named -// types. The existing compiler invariants assume that unnamed types -// that are not completely set up are not used, or else there are spurious -// errors. -// If disabled, only named types are tracked, possibly leading to slightly -// less efficient encoding in rare cases. It also prevents the export of -// some corner-case type declarations (but those are not handled correctly -// with with the textual export format either). -// TODO(gri) enable and remove once issues caused by it are fixed -const trackAllTypes = false - -type exporter struct { - fset *token.FileSet - out bytes.Buffer - - // object -> index maps, indexed in order of serialization - strIndex map[string]int - pkgIndex map[*types.Package]int - typIndex map[types.Type]int - - // position encoding - posInfoFormat bool - prevFile string - prevLine int - - // debugging support - written int // bytes written - indent int // for trace -} - -// BExportData returns binary export data for pkg. -// If no file set is provided, position info will be missing. -func BExportData(fset *token.FileSet, pkg *types.Package) []byte { - p := exporter{ - fset: fset, - strIndex: map[string]int{"": 0}, // empty string is mapped to 0 - pkgIndex: make(map[*types.Package]int), - typIndex: make(map[types.Type]int), - posInfoFormat: true, // TODO(gri) might become a flag, eventually - } - - // write version info - // The version string must start with "version %d" where %d is the version - // number. Additional debugging information may follow after a blank; that - // text is ignored by the importer. 
- p.rawStringln(fmt.Sprintf("version %d", exportVersion)) - var debug string - if debugFormat { - debug = "debug" - } - p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly - p.bool(trackAllTypes) - p.bool(p.posInfoFormat) - - // --- generic export data --- - - // populate type map with predeclared "known" types - for index, typ := range predeclared { - p.typIndex[typ] = index - } - if len(p.typIndex) != len(predeclared) { - log.Fatalf("gcimporter: duplicate entries in type map?") - } - - // write package data - p.pkg(pkg, true) - if trace { - p.tracef("\n") - } - - // write objects - objcount := 0 - scope := pkg.Scope() - for _, name := range scope.Names() { - if !ast.IsExported(name) { - continue - } - if trace { - p.tracef("\n") - } - p.obj(scope.Lookup(name)) - objcount++ - } - - // indicate end of list - if trace { - p.tracef("\n") - } - p.tag(endTag) - - // for self-verification only (redundant) - p.int(objcount) - - if trace { - p.tracef("\n") - } - - // --- end of export data --- - - return p.out.Bytes() -} - -func (p *exporter) pkg(pkg *types.Package, emptypath bool) { - if pkg == nil { - log.Fatalf("gcimporter: unexpected nil pkg") - } - - // if we saw the package before, write its index (>= 0) - if i, ok := p.pkgIndex[pkg]; ok { - p.index('P', i) - return - } - - // otherwise, remember the package, write the package tag (< 0) and package data - if trace { - p.tracef("P%d = { ", len(p.pkgIndex)) - defer p.tracef("} ") - } - p.pkgIndex[pkg] = len(p.pkgIndex) - - p.tag(packageTag) - p.string(pkg.Name()) - if emptypath { - p.string("") - } else { - p.string(pkg.Path()) - } -} - -func (p *exporter) obj(obj types.Object) { - switch obj := obj.(type) { - case *types.Const: - p.tag(constTag) - p.pos(obj) - p.qualifiedName(obj) - p.typ(obj.Type()) - p.value(obj.Val()) - - case *types.TypeName: - if isAlias(obj) { - p.tag(aliasTag) - p.pos(obj) - p.qualifiedName(obj) - } else { - p.tag(typeTag) - } - p.typ(obj.Type()) - - case *types.Var: - p.tag(varTag) - p.pos(obj) - p.qualifiedName(obj) - p.typ(obj.Type()) - - case *types.Func: - p.tag(funcTag) - p.pos(obj) - p.qualifiedName(obj) - sig := obj.Type().(*types.Signature) - p.paramList(sig.Params(), sig.Variadic()) - p.paramList(sig.Results(), false) - - default: - log.Fatalf("gcimporter: unexpected object %v (%T)", obj, obj) - } -} - -func (p *exporter) pos(obj types.Object) { - if !p.posInfoFormat { - return - } - - file, line := p.fileLine(obj) - if file == p.prevFile { - // common case: write line delta - // delta == 0 means different file or no line change - delta := line - p.prevLine - p.int(delta) - if delta == 0 { - p.int(-1) // -1 means no file change - } - } else { - // different file - p.int(0) - // Encode filename as length of common prefix with previous - // filename, followed by (possibly empty) suffix. Filenames - // frequently share path prefixes, so this can save a lot - // of space and make export data size less dependent on file - // path length. The suffix is unlikely to be empty because - // file names tend to end in ".go". 
- n := commonPrefixLen(p.prevFile, file) - p.int(n) // n >= 0 - p.string(file[n:]) // write suffix only - p.prevFile = file - p.int(line) - } - p.prevLine = line -} - -func (p *exporter) fileLine(obj types.Object) (file string, line int) { - if p.fset != nil { - pos := p.fset.Position(obj.Pos()) - file = pos.Filename - line = pos.Line - } - return -} - -func commonPrefixLen(a, b string) int { - if len(a) > len(b) { - a, b = b, a - } - // len(a) <= len(b) - i := 0 - for i < len(a) && a[i] == b[i] { - i++ - } - return i -} - -func (p *exporter) qualifiedName(obj types.Object) { - p.string(obj.Name()) - p.pkg(obj.Pkg(), false) -} - -func (p *exporter) typ(t types.Type) { - if t == nil { - log.Fatalf("gcimporter: nil type") - } - - // Possible optimization: Anonymous pointer types *T where - // T is a named type are common. We could canonicalize all - // such types *T to a single type PT = *T. This would lead - // to at most one *T entry in typIndex, and all future *T's - // would be encoded as the respective index directly. Would - // save 1 byte (pointerTag) per *T and reduce the typIndex - // size (at the cost of a canonicalization map). We can do - // this later, without encoding format change. - - // if we saw the type before, write its index (>= 0) - if i, ok := p.typIndex[t]; ok { - p.index('T', i) - return - } - - // otherwise, remember the type, write the type tag (< 0) and type data - if trackAllTypes { - if trace { - p.tracef("T%d = {>\n", len(p.typIndex)) - defer p.tracef("<\n} ") - } - p.typIndex[t] = len(p.typIndex) - } - - switch t := t.(type) { - case *types.Named: - if !trackAllTypes { - // if we don't track all types, track named types now - p.typIndex[t] = len(p.typIndex) - } - - p.tag(namedTag) - p.pos(t.Obj()) - p.qualifiedName(t.Obj()) - p.typ(t.Underlying()) - if !types.IsInterface(t) { - p.assocMethods(t) - } - - case *types.Array: - p.tag(arrayTag) - p.int64(t.Len()) - p.typ(t.Elem()) - - case *types.Slice: - p.tag(sliceTag) - p.typ(t.Elem()) - - case *dddSlice: - p.tag(dddTag) - p.typ(t.elem) - - case *types.Struct: - p.tag(structTag) - p.fieldList(t) - - case *types.Pointer: - p.tag(pointerTag) - p.typ(t.Elem()) - - case *types.Signature: - p.tag(signatureTag) - p.paramList(t.Params(), t.Variadic()) - p.paramList(t.Results(), false) - - case *types.Interface: - p.tag(interfaceTag) - p.iface(t) - - case *types.Map: - p.tag(mapTag) - p.typ(t.Key()) - p.typ(t.Elem()) - - case *types.Chan: - p.tag(chanTag) - p.int(int(3 - t.Dir())) // hack - p.typ(t.Elem()) - - default: - log.Fatalf("gcimporter: unexpected type %T: %s", t, t) - } -} - -func (p *exporter) assocMethods(named *types.Named) { - // Sort methods (for determinism). 
- var methods []*types.Func - for i := 0; i < named.NumMethods(); i++ { - methods = append(methods, named.Method(i)) - } - sort.Sort(methodsByName(methods)) - - p.int(len(methods)) - - if trace && methods != nil { - p.tracef("associated methods {>\n") - } - - for i, m := range methods { - if trace && i > 0 { - p.tracef("\n") - } - - p.pos(m) - name := m.Name() - p.string(name) - if !exported(name) { - p.pkg(m.Pkg(), false) - } - - sig := m.Type().(*types.Signature) - p.paramList(types.NewTuple(sig.Recv()), false) - p.paramList(sig.Params(), sig.Variadic()) - p.paramList(sig.Results(), false) - p.int(0) // dummy value for go:nointerface pragma - ignored by importer - } - - if trace && methods != nil { - p.tracef("<\n} ") - } -} - -type methodsByName []*types.Func - -func (x methodsByName) Len() int { return len(x) } -func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } -func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() } - -func (p *exporter) fieldList(t *types.Struct) { - if trace && t.NumFields() > 0 { - p.tracef("fields {>\n") - defer p.tracef("<\n} ") - } - - p.int(t.NumFields()) - for i := 0; i < t.NumFields(); i++ { - if trace && i > 0 { - p.tracef("\n") - } - p.field(t.Field(i)) - p.string(t.Tag(i)) - } -} - -func (p *exporter) field(f *types.Var) { - if !f.IsField() { - log.Fatalf("gcimporter: field expected") - } - - p.pos(f) - p.fieldName(f) - p.typ(f.Type()) -} - -func (p *exporter) iface(t *types.Interface) { - // TODO(gri): enable importer to load embedded interfaces, - // then emit Embeddeds and ExplicitMethods separately here. - p.int(0) - - n := t.NumMethods() - if trace && n > 0 { - p.tracef("methods {>\n") - defer p.tracef("<\n} ") - } - p.int(n) - for i := 0; i < n; i++ { - if trace && i > 0 { - p.tracef("\n") - } - p.method(t.Method(i)) - } -} - -func (p *exporter) method(m *types.Func) { - sig := m.Type().(*types.Signature) - if sig.Recv() == nil { - log.Fatalf("gcimporter: method expected") - } - - p.pos(m) - p.string(m.Name()) - if m.Name() != "_" && !ast.IsExported(m.Name()) { - p.pkg(m.Pkg(), false) - } - - // interface method; no need to encode receiver. - p.paramList(sig.Params(), sig.Variadic()) - p.paramList(sig.Results(), false) -} - -func (p *exporter) fieldName(f *types.Var) { - name := f.Name() - - if f.Anonymous() { - // anonymous field - we distinguish between 3 cases: - // 1) field name matches base type name and is exported - // 2) field name matches base type name and is not exported - // 3) field name doesn't match base type name (alias name) - bname := basetypeName(f.Type()) - if name == bname { - if ast.IsExported(name) { - name = "" // 1) we don't need to know the field name or package - } else { - name = "?" // 2) use unexported name "?" 
to force package export - } - } else { - // 3) indicate alias and export name as is - // (this requires an extra "@" but this is a rare case) - p.string("@") - } - } - - p.string(name) - if name != "" && !ast.IsExported(name) { - p.pkg(f.Pkg(), false) - } -} - -func basetypeName(typ types.Type) string { - switch typ := deref(typ).(type) { - case *types.Basic: - return typ.Name() - case *types.Named: - return typ.Obj().Name() - default: - return "" // unnamed type - } -} - -func (p *exporter) paramList(params *types.Tuple, variadic bool) { - // use negative length to indicate unnamed parameters - // (look at the first parameter only since either all - // names are present or all are absent) - n := params.Len() - if n > 0 && params.At(0).Name() == "" { - n = -n - } - p.int(n) - for i := 0; i < params.Len(); i++ { - q := params.At(i) - t := q.Type() - if variadic && i == params.Len()-1 { - t = &dddSlice{t.(*types.Slice).Elem()} - } - p.typ(t) - if n > 0 { - name := q.Name() - p.string(name) - if name != "_" { - p.pkg(q.Pkg(), false) - } - } - p.string("") // no compiler-specific info - } -} - -func (p *exporter) value(x constant.Value) { - if trace { - p.tracef("= ") - } - - switch x.Kind() { - case constant.Bool: - tag := falseTag - if constant.BoolVal(x) { - tag = trueTag - } - p.tag(tag) - - case constant.Int: - if v, exact := constant.Int64Val(x); exact { - // common case: x fits into an int64 - use compact encoding - p.tag(int64Tag) - p.int64(v) - return - } - // uncommon case: large x - use float encoding - // (powers of 2 will be encoded efficiently with exponent) - p.tag(floatTag) - p.float(constant.ToFloat(x)) - - case constant.Float: - p.tag(floatTag) - p.float(x) - - case constant.Complex: - p.tag(complexTag) - p.float(constant.Real(x)) - p.float(constant.Imag(x)) - - case constant.String: - p.tag(stringTag) - p.string(constant.StringVal(x)) - - case constant.Unknown: - // package contains type errors - p.tag(unknownTag) - - default: - log.Fatalf("gcimporter: unexpected value %v (%T)", x, x) - } -} - -func (p *exporter) float(x constant.Value) { - if x.Kind() != constant.Float { - log.Fatalf("gcimporter: unexpected constant %v, want float", x) - } - // extract sign (there is no -0) - sign := constant.Sign(x) - if sign == 0 { - // x == 0 - p.int(0) - return - } - // x != 0 - - var f big.Float - if v, exact := constant.Float64Val(x); exact { - // float64 - f.SetFloat64(v) - } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { - // TODO(gri): add big.Rat accessor to constant.Value. - r := valueToRat(num) - f.SetRat(r.Quo(r, valueToRat(denom))) - } else { - // Value too large to represent as a fraction => inaccessible. - // TODO(gri): add big.Float accessor to constant.Value. - f.SetFloat64(math.MaxFloat64) // FIXME - } - - // extract exponent such that 0.5 <= m < 1.0 - var m big.Float - exp := f.MantExp(&m) - - // extract mantissa as *big.Int - // - set exponent large enough so mant satisfies mant.IsInt() - // - get *big.Int from mant - m.SetMantExp(&m, int(m.MinPrec())) - mant, acc := m.Int(nil) - if acc != big.Exact { - log.Fatalf("gcimporter: internal error") - } - - p.int(sign) - p.int(exp) - p.string(string(mant.Bytes())) -} - -func valueToRat(x constant.Value) *big.Rat { - // Convert little-endian to big-endian. - // I can't believe this is necessary. 
- bytes := constant.Bytes(x) - for i := 0; i < len(bytes)/2; i++ { - bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] - } - return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) -} - -func (p *exporter) bool(b bool) bool { - if trace { - p.tracef("[") - defer p.tracef("= %v] ", b) - } - - x := 0 - if b { - x = 1 - } - p.int(x) - return b -} - -// ---------------------------------------------------------------------------- -// Low-level encoders - -func (p *exporter) index(marker byte, index int) { - if index < 0 { - log.Fatalf("gcimporter: invalid index < 0") - } - if debugFormat { - p.marker('t') - } - if trace { - p.tracef("%c%d ", marker, index) - } - p.rawInt64(int64(index)) -} - -func (p *exporter) tag(tag int) { - if tag >= 0 { - log.Fatalf("gcimporter: invalid tag >= 0") - } - if debugFormat { - p.marker('t') - } - if trace { - p.tracef("%s ", tagString[-tag]) - } - p.rawInt64(int64(tag)) -} - -func (p *exporter) int(x int) { - p.int64(int64(x)) -} - -func (p *exporter) int64(x int64) { - if debugFormat { - p.marker('i') - } - if trace { - p.tracef("%d ", x) - } - p.rawInt64(x) -} - -func (p *exporter) string(s string) { - if debugFormat { - p.marker('s') - } - if trace { - p.tracef("%q ", s) - } - // if we saw the string before, write its index (>= 0) - // (the empty string is mapped to 0) - if i, ok := p.strIndex[s]; ok { - p.rawInt64(int64(i)) - return - } - // otherwise, remember string and write its negative length and bytes - p.strIndex[s] = len(p.strIndex) - p.rawInt64(-int64(len(s))) - for i := 0; i < len(s); i++ { - p.rawByte(s[i]) - } -} - -// marker emits a marker byte and position information which makes -// it easy for a reader to detect if it is "out of sync". Used for -// debugFormat format only. -func (p *exporter) marker(m byte) { - p.rawByte(m) - // Enable this for help tracking down the location - // of an incorrect marker when running in debugFormat. - if false && trace { - p.tracef("#%d ", p.written) - } - p.rawInt64(int64(p.written)) -} - -// rawInt64 should only be used by low-level encoders. -func (p *exporter) rawInt64(x int64) { - var tmp [binary.MaxVarintLen64]byte - n := binary.PutVarint(tmp[:], x) - for i := 0; i < n; i++ { - p.rawByte(tmp[i]) - } -} - -// rawStringln should only be used to emit the initial version string. -func (p *exporter) rawStringln(s string) { - for i := 0; i < len(s); i++ { - p.rawByte(s[i]) - } - p.rawByte('\n') -} - -// rawByte is the bottleneck interface to write to p.out. -// rawByte escapes b as follows (any encoding does that -// hides '$'): -// -// '$' => '|' 'S' -// '|' => '|' '|' -// -// Necessary so other tools can find the end of the -// export data by searching for "$$". -// rawByte should only be used by low-level encoders. -func (p *exporter) rawByte(b byte) { - switch b { - case '$': - // write '$' as '|' 'S' - b = 'S' - fallthrough - case '|': - // write '|' as '|' '|' - p.out.WriteByte('|') - p.written++ - } - p.out.WriteByte(b) - p.written++ -} - -// tracef is like fmt.Printf but it rewrites the format string -// to take care of indentation. -func (p *exporter) tracef(format string, args ...interface{}) { - if strings.ContainsAny(format, "<>\n") { - var buf bytes.Buffer - for i := 0; i < len(format); i++ { - // no need to deal with runes - ch := format[i] - switch ch { - case '>': - p.indent++ - continue - case '<': - p.indent-- - continue - } - buf.WriteByte(ch) - if ch == '\n' { - for j := p.indent; j > 0; j-- { - buf.WriteString(". 
") - } - } - } - format = buf.String() - } - fmt.Printf(format, args...) -} - -// Debugging support. -// (tagString is only used when tracing is enabled) -var tagString = [...]string{ - // Packages - -packageTag: "package", - - // Types - -namedTag: "named type", - -arrayTag: "array", - -sliceTag: "slice", - -dddTag: "ddd", - -structTag: "struct", - -pointerTag: "pointer", - -signatureTag: "signature", - -interfaceTag: "interface", - -mapTag: "map", - -chanTag: "chan", - - // Values - -falseTag: "false", - -trueTag: "true", - -int64Tag: "int64", - -floatTag: "float", - -fractionTag: "fraction", - -complexTag: "complex", - -stringTag: "string", - -unknownTag: "unknown", - - // Type aliases - -aliasTag: "alias", -} diff --git a/vendor/golang.org/x/tools/go/gcimporter15/bimport.go b/vendor/golang.org/x/tools/go/gcimporter15/bimport.go deleted file mode 100644 index 1936a7f..0000000 --- a/vendor/golang.org/x/tools/go/gcimporter15/bimport.go +++ /dev/null @@ -1,993 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go. - -package gcimporter - -import ( - "encoding/binary" - "fmt" - "go/constant" - "go/token" - "go/types" - "sort" - "strconv" - "strings" - "sync" - "unicode" - "unicode/utf8" -) - -type importer struct { - imports map[string]*types.Package - data []byte - importpath string - buf []byte // for reading strings - version int // export format version - - // object lists - strList []string // in order of appearance - pathList []string // in order of appearance - pkgList []*types.Package // in order of appearance - typList []types.Type // in order of appearance - interfaceList []*types.Interface // for delayed completion only - trackAllTypes bool - - // position encoding - posInfoFormat bool - prevFile string - prevLine int - fset *token.FileSet - files map[string]*token.File - - // debugging support - debugFormat bool - read int // bytes read -} - -// BImportData imports a package from the serialized package data -// and returns the number of bytes consumed and a reference to the package. -// If the export data version is not recognized or the format is otherwise -// compromised, an error is returned. -func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { - // catch panics and return them as errors - defer func() { - if e := recover(); e != nil { - // The package (filename) causing the problem is added to this - // error by a wrapper in the caller (Import in gcimporter.go). - // Return a (possibly nil or incomplete) package unchanged (see #16088). - err = fmt.Errorf("cannot import, possibly version skew (%v) - reinstall package", e) - } - }() - - p := importer{ - imports: imports, - data: data, - importpath: path, - version: -1, // unknown version - strList: []string{""}, // empty string is mapped to 0 - pathList: []string{""}, // empty string is mapped to 0 - fset: fset, - files: make(map[string]*token.File), - } - - // read version info - var versionstr string - if b := p.rawByte(); b == 'c' || b == 'd' { - // Go1.7 encoding; first byte encodes low-level - // encoding format (compact vs debug). - // For backward-compatibility only (avoid problems with - // old installed packages). Newly compiled packages use - // the extensible format string. - // TODO(gri) Remove this support eventually; after Go1.8. 
- if b == 'd' { - p.debugFormat = true - } - p.trackAllTypes = p.rawByte() == 'a' - p.posInfoFormat = p.int() != 0 - versionstr = p.string() - if versionstr == "v1" { - p.version = 0 - } - } else { - // Go1.8 extensible encoding - // read version string and extract version number (ignore anything after the version number) - versionstr = p.rawStringln(b) - if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" { - if v, err := strconv.Atoi(s[1]); err == nil && v > 0 { - p.version = v - } - } - } - - // read version specific flags - extend as necessary - switch p.version { - // case 6: - // ... - // fallthrough - case 5, 4, 3, 2, 1: - p.debugFormat = p.rawStringln(p.rawByte()) == "debug" - p.trackAllTypes = p.int() != 0 - p.posInfoFormat = p.int() != 0 - case 0: - // Go1.7 encoding format - nothing to do here - default: - errorf("unknown export format version %d (%q)", p.version, versionstr) - } - - // --- generic export data --- - - // populate typList with predeclared "known" types - p.typList = append(p.typList, predeclared...) - - // read package data - pkg = p.pkg() - - // read objects of phase 1 only (see cmd/compiler/internal/gc/bexport.go) - objcount := 0 - for { - tag := p.tagOrIndex() - if tag == endTag { - break - } - p.obj(tag) - objcount++ - } - - // self-verification - if count := p.int(); count != objcount { - errorf("got %d objects; want %d", objcount, count) - } - - // ignore compiler-specific import data - - // complete interfaces - // TODO(gri) re-investigate if we still need to do this in a delayed fashion - for _, typ := range p.interfaceList { - typ.Complete() - } - - // record all referenced packages as imports - list := append(([]*types.Package)(nil), p.pkgList[1:]...) - sort.Sort(byPath(list)) - pkg.SetImports(list) - - // package was imported completely and without errors - pkg.MarkComplete() - - return p.read, pkg, nil -} - -func errorf(format string, args ...interface{}) { - panic(fmt.Sprintf(format, args...)) -} - -func (p *importer) pkg() *types.Package { - // if the package was seen before, i is its index (>= 0) - i := p.tagOrIndex() - if i >= 0 { - return p.pkgList[i] - } - - // otherwise, i is the package tag (< 0) - if i != packageTag { - errorf("unexpected package tag %d version %d", i, p.version) - } - - // read package data - name := p.string() - var path string - if p.version >= 5 { - path = p.path() - } else { - path = p.string() - } - - // we should never see an empty package name - if name == "" { - errorf("empty package name in import") - } - - // an empty path denotes the package we are currently importing; - // it must be the first package we see - if (path == "") != (len(p.pkgList) == 0) { - errorf("package path %q for pkg index %d", path, len(p.pkgList)) - } - - // if the package was imported before, use that one; otherwise create a new one - if path == "" { - path = p.importpath - } - pkg := p.imports[path] - if pkg == nil { - pkg = types.NewPackage(path, name) - p.imports[path] = pkg - } else if pkg.Name() != name { - errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path) - } - p.pkgList = append(p.pkgList, pkg) - - return pkg -} - -// objTag returns the tag value for each object kind. 
-func objTag(obj types.Object) int { - switch obj.(type) { - case *types.Const: - return constTag - case *types.TypeName: - return typeTag - case *types.Var: - return varTag - case *types.Func: - return funcTag - default: - errorf("unexpected object: %v (%T)", obj, obj) // panics - panic("unreachable") - } -} - -func sameObj(a, b types.Object) bool { - // Because unnamed types are not canonicalized, we cannot simply compare types for - // (pointer) identity. - // Ideally we'd check equality of constant values as well, but this is good enough. - return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type()) -} - -func (p *importer) declare(obj types.Object) { - pkg := obj.Pkg() - if alt := pkg.Scope().Insert(obj); alt != nil { - // This can only trigger if we import a (non-type) object a second time. - // Excluding type aliases, this cannot happen because 1) we only import a package - // once; and b) we ignore compiler-specific export data which may contain - // functions whose inlined function bodies refer to other functions that - // were already imported. - // However, type aliases require reexporting the original type, so we need - // to allow it (see also the comment in cmd/compile/internal/gc/bimport.go, - // method importer.obj, switch case importing functions). - // TODO(gri) review/update this comment once the gc compiler handles type aliases. - if !sameObj(obj, alt) { - errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt) - } - } -} - -func (p *importer) obj(tag int) { - switch tag { - case constTag: - pos := p.pos() - pkg, name := p.qualifiedName() - typ := p.typ(nil) - val := p.value() - p.declare(types.NewConst(pos, pkg, name, typ, val)) - - case aliasTag: - // TODO(gri) verify type alias hookup is correct - pos := p.pos() - pkg, name := p.qualifiedName() - typ := p.typ(nil) - p.declare(types.NewTypeName(pos, pkg, name, typ)) - - case typeTag: - p.typ(nil) - - case varTag: - pos := p.pos() - pkg, name := p.qualifiedName() - typ := p.typ(nil) - p.declare(types.NewVar(pos, pkg, name, typ)) - - case funcTag: - pos := p.pos() - pkg, name := p.qualifiedName() - params, isddd := p.paramList() - result, _ := p.paramList() - sig := types.NewSignature(nil, params, result, isddd) - p.declare(types.NewFunc(pos, pkg, name, sig)) - - default: - errorf("unexpected object tag %d", tag) - } -} - -const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go - -func (p *importer) pos() token.Pos { - if !p.posInfoFormat { - return token.NoPos - } - - file := p.prevFile - line := p.prevLine - delta := p.int() - line += delta - if p.version >= 5 { - if delta == deltaNewFile { - if n := p.int(); n >= 0 { - // file changed - file = p.path() - line = n - } - } - } else { - if delta == 0 { - if n := p.int(); n >= 0 { - // file changed - file = p.prevFile[:n] + p.string() - line = p.int() - } - } - } - p.prevFile = file - p.prevLine = line - - // Synthesize a token.Pos - - // Since we don't know the set of needed file positions, we - // reserve maxlines positions per file. - const maxlines = 64 * 1024 - f := p.files[file] - if f == nil { - f = p.fset.AddFile(file, -1, maxlines) - p.files[file] = f - // Allocate the fake linebreak indices on first use. - // TODO(adonovan): opt: save ~512KB using a more complex scheme? 
- fakeLinesOnce.Do(func() { - fakeLines = make([]int, maxlines) - for i := range fakeLines { - fakeLines[i] = i - } - }) - f.SetLines(fakeLines) - } - - if line > maxlines { - line = 1 - } - - // Treat the file as if it contained only newlines - // and column=1: use the line number as the offset. - return f.Pos(line - 1) -} - -var ( - fakeLines []int - fakeLinesOnce sync.Once -) - -func (p *importer) qualifiedName() (pkg *types.Package, name string) { - name = p.string() - pkg = p.pkg() - return -} - -func (p *importer) record(t types.Type) { - p.typList = append(p.typList, t) -} - -// A dddSlice is a types.Type representing ...T parameters. -// It only appears for parameter types and does not escape -// the importer. -type dddSlice struct { - elem types.Type -} - -func (t *dddSlice) Underlying() types.Type { return t } -func (t *dddSlice) String() string { return "..." + t.elem.String() } - -// parent is the package which declared the type; parent == nil means -// the package currently imported. The parent package is needed for -// exported struct fields and interface methods which don't contain -// explicit package information in the export data. -func (p *importer) typ(parent *types.Package) types.Type { - // if the type was seen before, i is its index (>= 0) - i := p.tagOrIndex() - if i >= 0 { - return p.typList[i] - } - - // otherwise, i is the type tag (< 0) - switch i { - case namedTag: - // read type object - pos := p.pos() - parent, name := p.qualifiedName() - scope := parent.Scope() - obj := scope.Lookup(name) - - // if the object doesn't exist yet, create and insert it - if obj == nil { - obj = types.NewTypeName(pos, parent, name, nil) - scope.Insert(obj) - } - - if _, ok := obj.(*types.TypeName); !ok { - errorf("pkg = %s, name = %s => %s", parent, name, obj) - } - - // associate new named type with obj if it doesn't exist yet - t0 := types.NewNamed(obj.(*types.TypeName), nil, nil) - - // but record the existing type, if any - t := obj.Type().(*types.Named) - p.record(t) - - // read underlying type - t0.SetUnderlying(p.typ(parent)) - - // interfaces don't have associated methods - if types.IsInterface(t0) { - return t - } - - // read associated methods - for i := p.int(); i > 0; i-- { - // TODO(gri) replace this with something closer to fieldName - pos := p.pos() - name := p.string() - if !exported(name) { - p.pkg() - } - - recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver? 
- params, isddd := p.paramList() - result, _ := p.paramList() - p.int() // go:nointerface pragma - discarded - - sig := types.NewSignature(recv.At(0), params, result, isddd) - t0.AddMethod(types.NewFunc(pos, parent, name, sig)) - } - - return t - - case arrayTag: - t := new(types.Array) - if p.trackAllTypes { - p.record(t) - } - - n := p.int64() - *t = *types.NewArray(p.typ(parent), n) - return t - - case sliceTag: - t := new(types.Slice) - if p.trackAllTypes { - p.record(t) - } - - *t = *types.NewSlice(p.typ(parent)) - return t - - case dddTag: - t := new(dddSlice) - if p.trackAllTypes { - p.record(t) - } - - t.elem = p.typ(parent) - return t - - case structTag: - t := new(types.Struct) - if p.trackAllTypes { - p.record(t) - } - - *t = *types.NewStruct(p.fieldList(parent)) - return t - - case pointerTag: - t := new(types.Pointer) - if p.trackAllTypes { - p.record(t) - } - - *t = *types.NewPointer(p.typ(parent)) - return t - - case signatureTag: - t := new(types.Signature) - if p.trackAllTypes { - p.record(t) - } - - params, isddd := p.paramList() - result, _ := p.paramList() - *t = *types.NewSignature(nil, params, result, isddd) - return t - - case interfaceTag: - // Create a dummy entry in the type list. This is safe because we - // cannot expect the interface type to appear in a cycle, as any - // such cycle must contain a named type which would have been - // first defined earlier. - n := len(p.typList) - if p.trackAllTypes { - p.record(nil) - } - - var embeddeds []*types.Named - for n := p.int(); n > 0; n-- { - p.pos() - embeddeds = append(embeddeds, p.typ(parent).(*types.Named)) - } - - t := types.NewInterface(p.methodList(parent), embeddeds) - p.interfaceList = append(p.interfaceList, t) - if p.trackAllTypes { - p.typList[n] = t - } - return t - - case mapTag: - t := new(types.Map) - if p.trackAllTypes { - p.record(t) - } - - key := p.typ(parent) - val := p.typ(parent) - *t = *types.NewMap(key, val) - return t - - case chanTag: - t := new(types.Chan) - if p.trackAllTypes { - p.record(t) - } - - var dir types.ChanDir - // tag values must match the constants in cmd/compile/internal/gc/go.go - switch d := p.int(); d { - case 1 /* Crecv */ : - dir = types.RecvOnly - case 2 /* Csend */ : - dir = types.SendOnly - case 3 /* Cboth */ : - dir = types.SendRecv - default: - errorf("unexpected channel dir %d", d) - } - val := p.typ(parent) - *t = *types.NewChan(dir, val) - return t - - default: - errorf("unexpected type tag %d", i) // panics - panic("unreachable") - } -} - -func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) { - if n := p.int(); n > 0 { - fields = make([]*types.Var, n) - tags = make([]string, n) - for i := range fields { - fields[i], tags[i] = p.field(parent) - } - } - return -} - -func (p *importer) field(parent *types.Package) (*types.Var, string) { - pos := p.pos() - pkg, name, alias := p.fieldName(parent) - typ := p.typ(parent) - tag := p.string() - - anonymous := false - if name == "" { - // anonymous field - typ must be T or *T and T must be a type name - switch typ := deref(typ).(type) { - case *types.Basic: // basic types are named types - pkg = nil // // objects defined in Universe scope have no package - name = typ.Name() - case *types.Named: - name = typ.Obj().Name() - default: - errorf("named base type expected") - } - anonymous = true - } else if alias { - // anonymous field: we have an explicit name because it's an alias - anonymous = true - } - - return types.NewField(pos, pkg, name, typ, anonymous), tag -} - -func (p 
*importer) methodList(parent *types.Package) (methods []*types.Func) { - if n := p.int(); n > 0 { - methods = make([]*types.Func, n) - for i := range methods { - methods[i] = p.method(parent) - } - } - return -} - -func (p *importer) method(parent *types.Package) *types.Func { - pos := p.pos() - pkg, name, _ := p.fieldName(parent) - params, isddd := p.paramList() - result, _ := p.paramList() - sig := types.NewSignature(nil, params, result, isddd) - return types.NewFunc(pos, pkg, name, sig) -} - -func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) { - name = p.string() - pkg = parent - if pkg == nil { - // use the imported package instead - pkg = p.pkgList[0] - } - if p.version == 0 && name == "_" { - // version 0 didn't export a package for _ fields - return - } - switch name { - case "": - // 1) field name matches base type name and is exported: nothing to do - case "?": - // 2) field name matches base type name and is not exported: need package - name = "" - pkg = p.pkg() - case "@": - // 3) field name doesn't match type name (alias) - name = p.string() - alias = true - fallthrough - default: - if !exported(name) { - pkg = p.pkg() - } - } - return -} - -func (p *importer) paramList() (*types.Tuple, bool) { - n := p.int() - if n == 0 { - return nil, false - } - // negative length indicates unnamed parameters - named := true - if n < 0 { - n = -n - named = false - } - // n > 0 - params := make([]*types.Var, n) - isddd := false - for i := range params { - params[i], isddd = p.param(named) - } - return types.NewTuple(params...), isddd -} - -func (p *importer) param(named bool) (*types.Var, bool) { - t := p.typ(nil) - td, isddd := t.(*dddSlice) - if isddd { - t = types.NewSlice(td.elem) - } - - var pkg *types.Package - var name string - if named { - name = p.string() - if name == "" { - errorf("expected named parameter") - } - if name != "_" { - pkg = p.pkg() - } - if i := strings.Index(name, "·"); i > 0 { - name = name[:i] // cut off gc-specific parameter numbering - } - } - - // read and discard compiler-specific info - p.string() - - return types.NewVar(token.NoPos, pkg, name, t), isddd -} - -func exported(name string) bool { - ch, _ := utf8.DecodeRuneInString(name) - return unicode.IsUpper(ch) -} - -func (p *importer) value() constant.Value { - switch tag := p.tagOrIndex(); tag { - case falseTag: - return constant.MakeBool(false) - case trueTag: - return constant.MakeBool(true) - case int64Tag: - return constant.MakeInt64(p.int64()) - case floatTag: - return p.float() - case complexTag: - re := p.float() - im := p.float() - return constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) - case stringTag: - return constant.MakeString(p.string()) - case unknownTag: - return constant.MakeUnknown() - default: - errorf("unexpected value tag %d", tag) // panics - panic("unreachable") - } -} - -func (p *importer) float() constant.Value { - sign := p.int() - if sign == 0 { - return constant.MakeInt64(0) - } - - exp := p.int() - mant := []byte(p.string()) // big endian - - // remove leading 0's if any - for len(mant) > 0 && mant[0] == 0 { - mant = mant[1:] - } - - // convert to little endian - // TODO(gri) go/constant should have a more direct conversion function - // (e.g., once it supports a big.Float based implementation) - for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 { - mant[i], mant[j] = mant[j], mant[i] - } - - // adjust exponent (constant.MakeFromBytes creates an integer value, - // but mant represents the mantissa bits such that 0.5 <= 
mant < 1.0) - exp -= len(mant) << 3 - if len(mant) > 0 { - for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 { - exp++ - } - } - - x := constant.MakeFromBytes(mant) - switch { - case exp < 0: - d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) - x = constant.BinaryOp(x, token.QUO, d) - case exp > 0: - x = constant.Shift(x, token.SHL, uint(exp)) - } - - if sign < 0 { - x = constant.UnaryOp(token.SUB, x, 0) - } - return x -} - -// ---------------------------------------------------------------------------- -// Low-level decoders - -func (p *importer) tagOrIndex() int { - if p.debugFormat { - p.marker('t') - } - - return int(p.rawInt64()) -} - -func (p *importer) int() int { - x := p.int64() - if int64(int(x)) != x { - errorf("exported integer too large") - } - return int(x) -} - -func (p *importer) int64() int64 { - if p.debugFormat { - p.marker('i') - } - - return p.rawInt64() -} - -func (p *importer) path() string { - if p.debugFormat { - p.marker('p') - } - // if the path was seen before, i is its index (>= 0) - // (the empty string is at index 0) - i := p.rawInt64() - if i >= 0 { - return p.pathList[i] - } - // otherwise, i is the negative path length (< 0) - a := make([]string, -i) - for n := range a { - a[n] = p.string() - } - s := strings.Join(a, "/") - p.pathList = append(p.pathList, s) - return s -} - -func (p *importer) string() string { - if p.debugFormat { - p.marker('s') - } - // if the string was seen before, i is its index (>= 0) - // (the empty string is at index 0) - i := p.rawInt64() - if i >= 0 { - return p.strList[i] - } - // otherwise, i is the negative string length (< 0) - if n := int(-i); n <= cap(p.buf) { - p.buf = p.buf[:n] - } else { - p.buf = make([]byte, n) - } - for i := range p.buf { - p.buf[i] = p.rawByte() - } - s := string(p.buf) - p.strList = append(p.strList, s) - return s -} - -func (p *importer) marker(want byte) { - if got := p.rawByte(); got != want { - errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read) - } - - pos := p.read - if n := int(p.rawInt64()); n != pos { - errorf("incorrect position: got %d; want %d", n, pos) - } -} - -// rawInt64 should only be used by low-level decoders. -func (p *importer) rawInt64() int64 { - i, err := binary.ReadVarint(p) - if err != nil { - errorf("read error: %v", err) - } - return i -} - -// rawStringln should only be used to read the initial version string. -func (p *importer) rawStringln(b byte) string { - p.buf = p.buf[:0] - for b != '\n' { - p.buf = append(p.buf, b) - b = p.rawByte() - } - return string(p.buf) -} - -// needed for binary.ReadVarint in rawInt64 -func (p *importer) ReadByte() (byte, error) { - return p.rawByte(), nil -} - -// byte is the bottleneck interface for reading p.data. -// It unescapes '|' 'S' to '$' and '|' '|' to '|'. -// rawByte should only be used by low-level decoders. -func (p *importer) rawByte() byte { - b := p.data[0] - r := 1 - if b == '|' { - b = p.data[1] - r = 2 - switch b { - case 'S': - b = '$' - case '|': - // nothing to do - default: - errorf("unexpected escape sequence in export data") - } - } - p.data = p.data[r:] - p.read += r - return b - -} - -// ---------------------------------------------------------------------------- -// Export format - -// Tags. Must be < 0. 
-const ( - // Objects - packageTag = -(iota + 1) - constTag - typeTag - varTag - funcTag - endTag - - // Types - namedTag - arrayTag - sliceTag - dddTag - structTag - pointerTag - signatureTag - interfaceTag - mapTag - chanTag - - // Values - falseTag - trueTag - int64Tag - floatTag - fractionTag // not used by gc - complexTag - stringTag - nilTag // only used by gc (appears in exported inlined function bodies) - unknownTag // not used by gc (only appears in packages with errors) - - // Type aliases - aliasTag -) - -var predeclared = []types.Type{ - // basic types - types.Typ[types.Bool], - types.Typ[types.Int], - types.Typ[types.Int8], - types.Typ[types.Int16], - types.Typ[types.Int32], - types.Typ[types.Int64], - types.Typ[types.Uint], - types.Typ[types.Uint8], - types.Typ[types.Uint16], - types.Typ[types.Uint32], - types.Typ[types.Uint64], - types.Typ[types.Uintptr], - types.Typ[types.Float32], - types.Typ[types.Float64], - types.Typ[types.Complex64], - types.Typ[types.Complex128], - types.Typ[types.String], - - // basic type aliases - types.Universe.Lookup("byte").Type(), - types.Universe.Lookup("rune").Type(), - - // error - types.Universe.Lookup("error").Type(), - - // untyped types - types.Typ[types.UntypedBool], - types.Typ[types.UntypedInt], - types.Typ[types.UntypedRune], - types.Typ[types.UntypedFloat], - types.Typ[types.UntypedComplex], - types.Typ[types.UntypedString], - types.Typ[types.UntypedNil], - - // package unsafe - types.Typ[types.UnsafePointer], - - // invalid type - types.Typ[types.Invalid], // only appears in packages with errors - - // used internally by gc; never used by this package or in .a files - anyType{}, -} - -type anyType struct{} - -func (t anyType) Underlying() types.Type { return t } -func (t anyType) String() string { return "any" } diff --git a/vendor/golang.org/x/tools/go/gcimporter15/exportdata.go b/vendor/golang.org/x/tools/go/gcimporter15/exportdata.go deleted file mode 100644 index f33dc56..0000000 --- a/vendor/golang.org/x/tools/go/gcimporter15/exportdata.go +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go. - -// This file implements FindExportData. - -package gcimporter - -import ( - "bufio" - "fmt" - "io" - "strconv" - "strings" -) - -func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { - // See $GOROOT/include/ar.h. - hdr := make([]byte, 16+12+6+6+8+10+2) - _, err = io.ReadFull(r, hdr) - if err != nil { - return - } - // leave for debugging - if false { - fmt.Printf("header: %s", hdr) - } - s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10])) - size, err = strconv.Atoi(s) - if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' { - err = fmt.Errorf("invalid archive header") - return - } - name = strings.TrimSpace(string(hdr[:16])) - return -} - -// FindExportData positions the reader r at the beginning of the -// export data section of an underlying GC-created object/archive -// file by reading from it. The reader must be positioned at the -// start of the file before calling this function. The hdr result -// is the string before the export data, either "$$" or "$$B". -// -func FindExportData(r *bufio.Reader) (hdr string, err error) { - // Read first line to make sure this is an object file. 
- line, err := r.ReadSlice('\n') - if err != nil { - err = fmt.Errorf("can't find export data (%v)", err) - return - } - - if string(line) == "!\n" { - // Archive file. Scan to __.PKGDEF. - var name string - if name, _, err = readGopackHeader(r); err != nil { - return - } - - // First entry should be __.PKGDEF. - if name != "__.PKGDEF" { - err = fmt.Errorf("go archive is missing __.PKGDEF") - return - } - - // Read first line of __.PKGDEF data, so that line - // is once again the first line of the input. - if line, err = r.ReadSlice('\n'); err != nil { - err = fmt.Errorf("can't find export data (%v)", err) - return - } - } - - // Now at __.PKGDEF in archive or still at beginning of file. - // Either way, line should begin with "go object ". - if !strings.HasPrefix(string(line), "go object ") { - err = fmt.Errorf("not a Go object file") - return - } - - // Skip over object header to export data. - // Begins after first line starting with $$. - for line[0] != '$' { - if line, err = r.ReadSlice('\n'); err != nil { - err = fmt.Errorf("can't find export data (%v)", err) - return - } - } - hdr = string(line) - - return -} diff --git a/vendor/golang.org/x/tools/go/gcimporter15/gcimporter.go b/vendor/golang.org/x/tools/go/gcimporter15/gcimporter.go deleted file mode 100644 index 6fbc9d7..0000000 --- a/vendor/golang.org/x/tools/go/gcimporter15/gcimporter.go +++ /dev/null @@ -1,1041 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file is a copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go, -// but it also contains the original source-based importer code for Go1.6. -// Once we stop supporting 1.6, we can remove that code. - -// Package gcimporter15 provides various functions for reading -// gc-generated object files that can be used to implement the -// Importer interface defined by the Go 1.5 standard library package. -// -// Deprecated: this package will be deleted in October 2017. -// New code should use golang.org/x/tools/go/gcexportdata. -// -package gcimporter // import "golang.org/x/tools/go/gcimporter15" - -import ( - "bufio" - "errors" - "fmt" - "go/build" - exact "go/constant" - "go/token" - "go/types" - "io" - "io/ioutil" - "os" - "path/filepath" - "sort" - "strconv" - "strings" - "text/scanner" -) - -// debugging/development support -const debug = false - -var pkgExts = [...]string{".a", ".o"} - -// FindPkg returns the filename and unique package id for an import -// path based on package information provided by build.Import (using -// the build.Default build.Context). A relative srcDir is interpreted -// relative to the current working directory. -// If no file was found, an empty filename is returned. -// -func FindPkg(path, srcDir string) (filename, id string) { - if path == "" { - return - } - - var noext string - switch { - default: - // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x" - // Don't require the source files to be present. 
- if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282 - srcDir = abs - } - bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary) - if bp.PkgObj == "" { - return - } - noext = strings.TrimSuffix(bp.PkgObj, ".a") - id = bp.ImportPath - - case build.IsLocalImport(path): - // "./x" -> "/this/directory/x.ext", "/this/directory/x" - noext = filepath.Join(srcDir, path) - id = noext - - case filepath.IsAbs(path): - // for completeness only - go/build.Import - // does not support absolute imports - // "/x" -> "/x.ext", "/x" - noext = path - id = path - } - - if false { // for debugging - if path != id { - fmt.Printf("%s -> %s\n", path, id) - } - } - - // try extensions - for _, ext := range pkgExts { - filename = noext + ext - if f, err := os.Stat(filename); err == nil && !f.IsDir() { - return - } - } - - filename = "" // not found - return -} - -// ImportData imports a package by reading the gc-generated export data, -// adds the corresponding package object to the packages map indexed by id, -// and returns the object. -// -// The packages map must contains all packages already imported. The data -// reader position must be the beginning of the export data section. The -// filename is only used in error messages. -// -// If packages[id] contains the completely imported package, that package -// can be used directly, and there is no need to call this function (but -// there is also no harm but for extra time used). -// -func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) { - // support for parser error handling - defer func() { - switch r := recover().(type) { - case nil: - // nothing to do - case importError: - err = r - default: - panic(r) // internal error - } - }() - - var p parser - p.init(filename, id, data, packages) - pkg = p.parseExport() - - return -} - -// Import imports a gc-generated package given its import path and srcDir, adds -// the corresponding package object to the packages map, and returns the object. -// The packages map must contain all packages already imported. -// -func Import(packages map[string]*types.Package, path, srcDir string) (pkg *types.Package, err error) { - filename, id := FindPkg(path, srcDir) - if filename == "" { - if path == "unsafe" { - return types.Unsafe, nil - } - err = fmt.Errorf("can't find import: %s", id) - return - } - - // no need to re-import if the package was imported completely before - if pkg = packages[id]; pkg != nil && pkg.Complete() { - return - } - - // open file - f, err := os.Open(filename) - if err != nil { - return - } - defer func() { - f.Close() - if err != nil { - // add file name to error - err = fmt.Errorf("reading export data: %s: %v", filename, err) - } - }() - - var hdr string - buf := bufio.NewReader(f) - if hdr, err = FindExportData(buf); err != nil { - return - } - - switch hdr { - case "$$\n": - return ImportData(packages, filename, id, buf) - case "$$B\n": - var data []byte - data, err = ioutil.ReadAll(buf) - if err == nil { - fset := token.NewFileSet() - _, pkg, err = BImportData(fset, packages, data, id) - return - } - default: - err = fmt.Errorf("unknown export data header: %q", hdr) - } - - return -} - -// ---------------------------------------------------------------------------- -// Parser - -// TODO(gri) Imported objects don't have position information. -// Ideally use the debug table line info; alternatively -// create some fake position (or the position of the -// import). 
That way error messages referring to imported -// objects can print meaningful information. - -// parser parses the exports inside a gc compiler-produced -// object/archive file and populates its scope with the results. -type parser struct { - scanner scanner.Scanner - tok rune // current token - lit string // literal string; only valid for Ident, Int, String tokens - id string // package id of imported package - sharedPkgs map[string]*types.Package // package id -> package object (across importer) - localPkgs map[string]*types.Package // package id -> package object (just this package) -} - -func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) { - p.scanner.Init(src) - p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) } - p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments - p.scanner.Whitespace = 1<<'\t' | 1<<' ' - p.scanner.Filename = filename // for good error messages - p.next() - p.id = id - p.sharedPkgs = packages - if debug { - // check consistency of packages map - for _, pkg := range packages { - if pkg.Name() == "" { - fmt.Printf("no package name for %s\n", pkg.Path()) - } - } - } -} - -func (p *parser) next() { - p.tok = p.scanner.Scan() - switch p.tok { - case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·': - p.lit = p.scanner.TokenText() - default: - p.lit = "" - } - if debug { - fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit) - } -} - -func declTypeName(pkg *types.Package, name string) *types.TypeName { - scope := pkg.Scope() - if obj := scope.Lookup(name); obj != nil { - return obj.(*types.TypeName) - } - obj := types.NewTypeName(token.NoPos, pkg, name, nil) - // a named type may be referred to before the underlying type - // is known - set it up - types.NewNamed(obj, nil, nil) - scope.Insert(obj) - return obj -} - -// ---------------------------------------------------------------------------- -// Error handling - -// Internal errors are boxed as importErrors. -type importError struct { - pos scanner.Position - err error -} - -func (e importError) Error() string { - return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err) -} - -func (p *parser) error(err interface{}) { - if s, ok := err.(string); ok { - err = errors.New(s) - } - // panic with a runtime.Error if err is not an error - panic(importError{p.scanner.Pos(), err.(error)}) -} - -func (p *parser) errorf(format string, args ...interface{}) { - p.error(fmt.Sprintf(format, args...)) -} - -func (p *parser) expect(tok rune) string { - lit := p.lit - if p.tok != tok { - p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit) - } - p.next() - return lit -} - -func (p *parser) expectSpecial(tok string) { - sep := 'x' // not white space - i := 0 - for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' { - sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token - p.next() - i++ - } - if i < len(tok) { - p.errorf("expected %q, got %q", tok, tok[0:i]) - } -} - -func (p *parser) expectKeyword(keyword string) { - lit := p.expect(scanner.Ident) - if lit != keyword { - p.errorf("expected keyword %s, got %q", keyword, lit) - } -} - -// ---------------------------------------------------------------------------- -// Qualified and unqualified names - -// PackageId = string_lit . 
-// -func (p *parser) parsePackageId() string { - id, err := strconv.Unquote(p.expect(scanner.String)) - if err != nil { - p.error(err) - } - // id == "" stands for the imported package id - // (only known at time of package installation) - if id == "" { - id = p.id - } - return id -} - -// PackageName = ident . -// -func (p *parser) parsePackageName() string { - return p.expect(scanner.Ident) -} - -// dotIdentifier = ( ident | '·' ) { ident | int | '·' } . -func (p *parser) parseDotIdent() string { - ident := "" - if p.tok != scanner.Int { - sep := 'x' // not white space - for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' { - ident += p.lit - sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token - p.next() - } - } - if ident == "" { - p.expect(scanner.Ident) // use expect() for error handling - } - return ident -} - -// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) . -// -func (p *parser) parseQualifiedName() (id, name string) { - p.expect('@') - id = p.parsePackageId() - p.expect('.') - // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields. - if p.tok == '?' { - p.next() - } else { - name = p.parseDotIdent() - } - return -} - -// getPkg returns the package for a given id. If the package is -// not found, create the package and add it to the p.localPkgs -// and p.sharedPkgs maps. name is the (expected) name of the -// package. If name == "", the package name is expected to be -// set later via an import clause in the export data. -// -// id identifies a package, usually by a canonical package path like -// "encoding/json" but possibly by a non-canonical import path like -// "./json". -// -func (p *parser) getPkg(id, name string) *types.Package { - // package unsafe is not in the packages maps - handle explicitly - if id == "unsafe" { - return types.Unsafe - } - - pkg := p.localPkgs[id] - if pkg == nil { - // first import of id from this package - pkg = p.sharedPkgs[id] - if pkg == nil { - // first import of id by this importer; - // add (possibly unnamed) pkg to shared packages - pkg = types.NewPackage(id, name) - p.sharedPkgs[id] = pkg - } - // add (possibly unnamed) pkg to local packages - if p.localPkgs == nil { - p.localPkgs = make(map[string]*types.Package) - } - p.localPkgs[id] = pkg - } else if name != "" { - // package exists already and we have an expected package name; - // make sure names match or set package name if necessary - if pname := pkg.Name(); pname == "" { - pkg.SetName(name) - } else if pname != name { - p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name) - } - } - return pkg -} - -// parseExportedName is like parseQualifiedName, but -// the package id is resolved to an imported *types.Package. -// -func (p *parser) parseExportedName() (pkg *types.Package, name string) { - id, name := p.parseQualifiedName() - pkg = p.getPkg(id, "") - return -} - -// ---------------------------------------------------------------------------- -// Types - -// BasicType = identifier . -// -func (p *parser) parseBasicType() types.Type { - id := p.expect(scanner.Ident) - obj := types.Universe.Lookup(id) - if obj, ok := obj.(*types.TypeName); ok { - return obj.Type() - } - p.errorf("not a basic type: %s", id) - return nil -} - -// ArrayType = "[" int_lit "]" Type . 
-// -func (p *parser) parseArrayType(parent *types.Package) types.Type { - // "[" already consumed and lookahead known not to be "]" - lit := p.expect(scanner.Int) - p.expect(']') - elem := p.parseType(parent) - n, err := strconv.ParseInt(lit, 10, 64) - if err != nil { - p.error(err) - } - return types.NewArray(elem, n) -} - -// MapType = "map" "[" Type "]" Type . -// -func (p *parser) parseMapType(parent *types.Package) types.Type { - p.expectKeyword("map") - p.expect('[') - key := p.parseType(parent) - p.expect(']') - elem := p.parseType(parent) - return types.NewMap(key, elem) -} - -// Name = identifier | "?" | QualifiedName . -// -// For unqualified and anonymous names, the returned package is the parent -// package unless parent == nil, in which case the returned package is the -// package being imported. (The parent package is not nil if the the name -// is an unqualified struct field or interface method name belonging to a -// type declared in another package.) -// -// For qualified names, the returned package is nil (and not created if -// it doesn't exist yet) unless materializePkg is set (which creates an -// unnamed package with valid package path). In the latter case, a -// subsequent import clause is expected to provide a name for the package. -// -func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) { - pkg = parent - if pkg == nil { - pkg = p.sharedPkgs[p.id] - } - switch p.tok { - case scanner.Ident: - name = p.lit - p.next() - case '?': - // anonymous - p.next() - case '@': - // exported name prefixed with package path - pkg = nil - var id string - id, name = p.parseQualifiedName() - if materializePkg { - pkg = p.getPkg(id, "") - } - default: - p.error("name expected") - } - return -} - -func deref(typ types.Type) types.Type { - if p, _ := typ.(*types.Pointer); p != nil { - return p.Elem() - } - return typ -} - -// Field = Name Type [ string_lit ] . -// -func (p *parser) parseField(parent *types.Package) (*types.Var, string) { - pkg, name := p.parseName(parent, true) - - if name == "_" { - // Blank fields should be package-qualified because they - // are unexported identifiers, but gc does not qualify them. - // Assuming that the ident belongs to the current package - // causes types to change during re-exporting, leading - // to spurious "can't assign A to B" errors from go/types. - // As a workaround, pretend all blank fields belong - // to the same unique dummy package. - const blankpkg = "<_>" - pkg = p.getPkg(blankpkg, blankpkg) - } - - typ := p.parseType(parent) - anonymous := false - if name == "" { - // anonymous field - typ must be T or *T and T must be a type name - switch typ := deref(typ).(type) { - case *types.Basic: // basic types are named types - pkg = nil // objects defined in Universe scope have no package - name = typ.Name() - case *types.Named: - name = typ.Obj().Name() - default: - p.errorf("anonymous field expected") - } - anonymous = true - } - tag := "" - if p.tok == scanner.String { - s := p.expect(scanner.String) - var err error - tag, err = strconv.Unquote(s) - if err != nil { - p.errorf("invalid struct tag %s: %s", s, err) - } - } - return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag -} - -// StructType = "struct" "{" [ FieldList ] "}" . -// FieldList = Field { ";" Field } . 
-// -func (p *parser) parseStructType(parent *types.Package) types.Type { - var fields []*types.Var - var tags []string - - p.expectKeyword("struct") - p.expect('{') - for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { - if i > 0 { - p.expect(';') - } - fld, tag := p.parseField(parent) - if tag != "" && tags == nil { - tags = make([]string, i) - } - if tags != nil { - tags = append(tags, tag) - } - fields = append(fields, fld) - } - p.expect('}') - - return types.NewStruct(fields, tags) -} - -// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] . -// -func (p *parser) parseParameter() (par *types.Var, isVariadic bool) { - _, name := p.parseName(nil, false) - // remove gc-specific parameter numbering - if i := strings.Index(name, "·"); i >= 0 { - name = name[:i] - } - if p.tok == '.' { - p.expectSpecial("...") - isVariadic = true - } - typ := p.parseType(nil) - if isVariadic { - typ = types.NewSlice(typ) - } - // ignore argument tag (e.g. "noescape") - if p.tok == scanner.String { - p.next() - } - // TODO(gri) should we provide a package? - par = types.NewVar(token.NoPos, nil, name, typ) - return -} - -// Parameters = "(" [ ParameterList ] ")" . -// ParameterList = { Parameter "," } Parameter . -// -func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) { - p.expect('(') - for p.tok != ')' && p.tok != scanner.EOF { - if len(list) > 0 { - p.expect(',') - } - par, variadic := p.parseParameter() - list = append(list, par) - if variadic { - if isVariadic { - p.error("... not on final argument") - } - isVariadic = true - } - } - p.expect(')') - - return -} - -// Signature = Parameters [ Result ] . -// Result = Type | Parameters . -// -func (p *parser) parseSignature(recv *types.Var) *types.Signature { - params, isVariadic := p.parseParameters() - - // optional result type - var results []*types.Var - if p.tok == '(' { - var variadic bool - results, variadic = p.parseParameters() - if variadic { - p.error("... not permitted on result type") - } - } - - return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic) -} - -// InterfaceType = "interface" "{" [ MethodList ] "}" . -// MethodList = Method { ";" Method } . -// Method = Name Signature . -// -// The methods of embedded interfaces are always "inlined" -// by the compiler and thus embedded interfaces are never -// visible in the export data. -// -func (p *parser) parseInterfaceType(parent *types.Package) types.Type { - var methods []*types.Func - - p.expectKeyword("interface") - p.expect('{') - for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { - if i > 0 { - p.expect(';') - } - pkg, name := p.parseName(parent, true) - sig := p.parseSignature(nil) - methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig)) - } - p.expect('}') - - // Complete requires the type's embedded interfaces to be fully defined, - // but we do not define any - return types.NewInterface(methods, nil).Complete() -} - -// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type . 
-// -func (p *parser) parseChanType(parent *types.Package) types.Type { - dir := types.SendRecv - if p.tok == scanner.Ident { - p.expectKeyword("chan") - if p.tok == '<' { - p.expectSpecial("<-") - dir = types.SendOnly - } - } else { - p.expectSpecial("<-") - p.expectKeyword("chan") - dir = types.RecvOnly - } - elem := p.parseType(parent) - return types.NewChan(dir, elem) -} - -// Type = -// BasicType | TypeName | ArrayType | SliceType | StructType | -// PointerType | FuncType | InterfaceType | MapType | ChanType | -// "(" Type ")" . -// -// BasicType = ident . -// TypeName = ExportedName . -// SliceType = "[" "]" Type . -// PointerType = "*" Type . -// FuncType = "func" Signature . -// -func (p *parser) parseType(parent *types.Package) types.Type { - switch p.tok { - case scanner.Ident: - switch p.lit { - default: - return p.parseBasicType() - case "struct": - return p.parseStructType(parent) - case "func": - // FuncType - p.next() - return p.parseSignature(nil) - case "interface": - return p.parseInterfaceType(parent) - case "map": - return p.parseMapType(parent) - case "chan": - return p.parseChanType(parent) - } - case '@': - // TypeName - pkg, name := p.parseExportedName() - return declTypeName(pkg, name).Type() - case '[': - p.next() // look ahead - if p.tok == ']' { - // SliceType - p.next() - return types.NewSlice(p.parseType(parent)) - } - return p.parseArrayType(parent) - case '*': - // PointerType - p.next() - return types.NewPointer(p.parseType(parent)) - case '<': - return p.parseChanType(parent) - case '(': - // "(" Type ")" - p.next() - typ := p.parseType(parent) - p.expect(')') - return typ - } - p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit) - return nil -} - -// ---------------------------------------------------------------------------- -// Declarations - -// ImportDecl = "import" PackageName PackageId . -// -func (p *parser) parseImportDecl() { - p.expectKeyword("import") - name := p.parsePackageName() - p.getPkg(p.parsePackageId(), name) -} - -// int_lit = [ "+" | "-" ] { "0" ... "9" } . -// -func (p *parser) parseInt() string { - s := "" - switch p.tok { - case '-': - s = "-" - p.next() - case '+': - p.next() - } - return s + p.expect(scanner.Int) -} - -// number = int_lit [ "p" int_lit ] . -// -func (p *parser) parseNumber() (typ *types.Basic, val exact.Value) { - // mantissa - mant := exact.MakeFromLiteral(p.parseInt(), token.INT, 0) - if mant == nil { - panic("invalid mantissa") - } - - if p.lit == "p" { - // exponent (base 2) - p.next() - exp, err := strconv.ParseInt(p.parseInt(), 10, 0) - if err != nil { - p.error(err) - } - if exp < 0 { - denom := exact.MakeInt64(1) - denom = exact.Shift(denom, token.SHL, uint(-exp)) - typ = types.Typ[types.UntypedFloat] - val = exact.BinaryOp(mant, token.QUO, denom) - return - } - if exp > 0 { - mant = exact.Shift(mant, token.SHL, uint(exp)) - } - typ = types.Typ[types.UntypedFloat] - val = mant - return - } - - typ = types.Typ[types.UntypedInt] - val = mant - return -} - -// ConstDecl = "const" ExportedName [ Type ] "=" Literal . -// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit . -// bool_lit = "true" | "false" . -// complex_lit = "(" float_lit "+" float_lit "i" ")" . -// rune_lit = "(" int_lit "+" int_lit ")" . -// string_lit = `"` { unicode_char } `"` . 
-// -func (p *parser) parseConstDecl() { - p.expectKeyword("const") - pkg, name := p.parseExportedName() - - var typ0 types.Type - if p.tok != '=' { - // constant types are never structured - no need for parent type - typ0 = p.parseType(nil) - } - - p.expect('=') - var typ types.Type - var val exact.Value - switch p.tok { - case scanner.Ident: - // bool_lit - if p.lit != "true" && p.lit != "false" { - p.error("expected true or false") - } - typ = types.Typ[types.UntypedBool] - val = exact.MakeBool(p.lit == "true") - p.next() - - case '-', scanner.Int: - // int_lit - typ, val = p.parseNumber() - - case '(': - // complex_lit or rune_lit - p.next() - if p.tok == scanner.Char { - p.next() - p.expect('+') - typ = types.Typ[types.UntypedRune] - _, val = p.parseNumber() - p.expect(')') - break - } - _, re := p.parseNumber() - p.expect('+') - _, im := p.parseNumber() - p.expectKeyword("i") - p.expect(')') - typ = types.Typ[types.UntypedComplex] - val = exact.BinaryOp(re, token.ADD, exact.MakeImag(im)) - - case scanner.Char: - // rune_lit - typ = types.Typ[types.UntypedRune] - val = exact.MakeFromLiteral(p.lit, token.CHAR, 0) - p.next() - - case scanner.String: - // string_lit - typ = types.Typ[types.UntypedString] - val = exact.MakeFromLiteral(p.lit, token.STRING, 0) - p.next() - - default: - p.errorf("expected literal got %s", scanner.TokenString(p.tok)) - } - - if typ0 == nil { - typ0 = typ - } - - pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val)) -} - -// TypeDecl = "type" ExportedName Type . -// -func (p *parser) parseTypeDecl() { - p.expectKeyword("type") - pkg, name := p.parseExportedName() - obj := declTypeName(pkg, name) - - // The type object may have been imported before and thus already - // have a type associated with it. We still need to parse the type - // structure, but throw it away if the object already has a type. - // This ensures that all imports refer to the same type object for - // a given type declaration. - typ := p.parseType(pkg) - - if name := obj.Type().(*types.Named); name.Underlying() == nil { - name.SetUnderlying(typ) - } -} - -// VarDecl = "var" ExportedName Type . -// -func (p *parser) parseVarDecl() { - p.expectKeyword("var") - pkg, name := p.parseExportedName() - typ := p.parseType(pkg) - pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ)) -} - -// Func = Signature [ Body ] . -// Body = "{" ... "}" . -// -func (p *parser) parseFunc(recv *types.Var) *types.Signature { - sig := p.parseSignature(recv) - if p.tok == '{' { - p.next() - for i := 1; i > 0; p.next() { - switch p.tok { - case '{': - i++ - case '}': - i-- - } - } - } - return sig -} - -// MethodDecl = "func" Receiver Name Func . -// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" . -// -func (p *parser) parseMethodDecl() { - // "func" already consumed - p.expect('(') - recv, _ := p.parseParameter() // receiver - p.expect(')') - - // determine receiver base type object - base := deref(recv.Type()).(*types.Named) - - // parse method name, signature, and possibly inlined body - _, name := p.parseName(nil, false) - sig := p.parseFunc(recv) - - // methods always belong to the same package as the base type object - pkg := base.Obj().Pkg() - - // add method to type unless type was imported before - // and method exists already - // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small. - base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig)) -} - -// FuncDecl = "func" ExportedName Func . 
-// -func (p *parser) parseFuncDecl() { - // "func" already consumed - pkg, name := p.parseExportedName() - typ := p.parseFunc(nil) - pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ)) -} - -// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" . -// -func (p *parser) parseDecl() { - if p.tok == scanner.Ident { - switch p.lit { - case "import": - p.parseImportDecl() - case "const": - p.parseConstDecl() - case "type": - p.parseTypeDecl() - case "var": - p.parseVarDecl() - case "func": - p.next() // look ahead - if p.tok == '(' { - p.parseMethodDecl() - } else { - p.parseFuncDecl() - } - } - } - p.expect('\n') -} - -// ---------------------------------------------------------------------------- -// Export - -// Export = "PackageClause { Decl } "$$" . -// PackageClause = "package" PackageName [ "safe" ] "\n" . -// -func (p *parser) parseExport() *types.Package { - p.expectKeyword("package") - name := p.parsePackageName() - if p.tok == scanner.Ident && p.lit == "safe" { - // package was compiled with -u option - ignore - p.next() - } - p.expect('\n') - - pkg := p.getPkg(p.id, name) - - for p.tok != '$' && p.tok != scanner.EOF { - p.parseDecl() - } - - if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' { - // don't call next()/expect() since reading past the - // export data may cause scanner errors (e.g. NUL chars) - p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch) - } - - if n := p.scanner.ErrorCount; n != 0 { - p.errorf("expected no scanner errors, got %d", n) - } - - // Record all locally referenced packages as imports. - var imports []*types.Package - for id, pkg2 := range p.localPkgs { - if pkg2.Name() == "" { - p.errorf("%s package has no name", id) - } - if id == p.id { - continue // avoid self-edge - } - imports = append(imports, pkg2) - } - sort.Sort(byPath(imports)) - pkg.SetImports(imports) - - // package was imported completely and without errors - pkg.MarkComplete() - - return pkg -} - -type byPath []*types.Package - -func (a byPath) Len() int { return len(a) } -func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() } diff --git a/vendor/golang.org/x/tools/go/gcimporter15/isAlias18.go b/vendor/golang.org/x/tools/go/gcimporter15/isAlias18.go deleted file mode 100644 index 225ffee..0000000 --- a/vendor/golang.org/x/tools/go/gcimporter15/isAlias18.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build !go1.9 - -package gcimporter - -import "go/types" - -func isAlias(obj *types.TypeName) bool { - return false // there are no type aliases before Go 1.9 -} diff --git a/vendor/golang.org/x/tools/go/gcimporter15/isAlias19.go b/vendor/golang.org/x/tools/go/gcimporter15/isAlias19.go deleted file mode 100644 index c2025d8..0000000 --- a/vendor/golang.org/x/tools/go/gcimporter15/isAlias19.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// +build go1.9 - -package gcimporter - -import "go/types" - -func isAlias(obj *types.TypeName) bool { - return obj.IsAlias() -} diff --git a/vendor/golang.org/x/tools/go/loader/cgo.go b/vendor/golang.org/x/tools/go/loader/cgo.go deleted file mode 100644 index 72c6f50..0000000 --- a/vendor/golang.org/x/tools/go/loader/cgo.go +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package loader - -// This file handles cgo preprocessing of files containing `import "C"`. -// -// DESIGN -// -// The approach taken is to run the cgo processor on the package's -// CgoFiles and parse the output, faking the filenames of the -// resulting ASTs so that the synthetic file containing the C types is -// called "C" (e.g. "~/go/src/net/C") and the preprocessed files -// have their original names (e.g. "~/go/src/net/cgo_unix.go"), -// not the names of the actual temporary files. -// -// The advantage of this approach is its fidelity to 'go build'. The -// downside is that the token.Position.Offset for each AST node is -// incorrect, being an offset within the temporary file. Line numbers -// should still be correct because of the //line comments. -// -// The logic of this file is mostly plundered from the 'go build' -// tool, which also invokes the cgo preprocessor. -// -// -// REJECTED ALTERNATIVE -// -// An alternative approach that we explored is to extend go/types' -// Importer mechanism to provide the identity of the importing package -// so that each time `import "C"` appears it resolves to a different -// synthetic package containing just the objects needed in that case. -// The loader would invoke cgo but parse only the cgo_types.go file -// defining the package-level objects, discarding the other files -// resulting from preprocessing. -// -// The benefit of this approach would have been that source-level -// syntax information would correspond exactly to the original cgo -// file, with no preprocessing involved, making source tools like -// godoc, guru, and eg happy. However, the approach was rejected -// due to the additional complexity it would impose on go/types. (It -// made for a beautiful demo, though.) -// -// cgo files, despite their *.go extension, are not legal Go source -// files per the specification since they may refer to unexported -// members of package "C" such as C.int. Also, a function such as -// C.getpwent has in effect two types, one matching its C type and one -// which additionally returns (errno C.int). The cgo preprocessor -// uses name mangling to distinguish these two functions in the -// processed code, but go/types would need to duplicate this logic in -// its handling of function calls, analogous to the treatment of map -// lookups in which y=m[k] and y,ok=m[k] are both legal. - -import ( - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "io/ioutil" - "log" - "os" - "os/exec" - "path/filepath" - "regexp" - "strings" -) - -// processCgoFiles invokes the cgo preprocessor on bp.CgoFiles, parses -// the output and returns the resulting ASTs. 
-// -func processCgoFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) { - tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C") - if err != nil { - return nil, err - } - defer os.RemoveAll(tmpdir) - - pkgdir := bp.Dir - if DisplayPath != nil { - pkgdir = DisplayPath(pkgdir) - } - - cgoFiles, cgoDisplayFiles, err := runCgo(bp, pkgdir, tmpdir) - if err != nil { - return nil, err - } - var files []*ast.File - for i := range cgoFiles { - rd, err := os.Open(cgoFiles[i]) - if err != nil { - return nil, err - } - display := filepath.Join(bp.Dir, cgoDisplayFiles[i]) - f, err := parser.ParseFile(fset, display, rd, mode) - rd.Close() - if err != nil { - return nil, err - } - files = append(files, f) - } - return files, nil -} - -var cgoRe = regexp.MustCompile(`[/\\:]`) - -// runCgo invokes the cgo preprocessor on bp.CgoFiles and returns two -// lists of files: the resulting processed files (in temporary -// directory tmpdir) and the corresponding names of the unprocessed files. -// -// runCgo is adapted from (*builder).cgo in -// $GOROOT/src/cmd/go/build.go, but these features are unsupported: -// Objective C, CGOPKGPATH, CGO_FLAGS. -// -func runCgo(bp *build.Package, pkgdir, tmpdir string) (files, displayFiles []string, err error) { - cgoCPPFLAGS, _, _, _ := cflags(bp, true) - _, cgoexeCFLAGS, _, _ := cflags(bp, false) - - if len(bp.CgoPkgConfig) > 0 { - pcCFLAGS, err := pkgConfigFlags(bp) - if err != nil { - return nil, nil, err - } - cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...) - } - - // Allows including _cgo_export.h from .[ch] files in the package. - cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir) - - // _cgo_gotypes.go (displayed "C") contains the type definitions. - files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go")) - displayFiles = append(displayFiles, "C") - for _, fn := range bp.CgoFiles { - // "foo.cgo1.go" (displayed "foo.go") is the processed Go source. - f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_") - files = append(files, filepath.Join(tmpdir, f+"cgo1.go")) - displayFiles = append(displayFiles, fn) - } - - var cgoflags []string - if bp.Goroot && bp.ImportPath == "runtime/cgo" { - cgoflags = append(cgoflags, "-import_runtime_cgo=false") - } - if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" { - cgoflags = append(cgoflags, "-import_syscall=false") - } - - args := stringList( - "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--", - cgoCPPFLAGS, cgoexeCFLAGS, bp.CgoFiles, - ) - if false { - log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir) - } - cmd := exec.Command(args[0], args[1:]...) - cmd.Dir = pkgdir - cmd.Stdout = os.Stderr - cmd.Stderr = os.Stderr - if err := cmd.Run(); err != nil { - return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err) - } - - return files, displayFiles, nil -} - -// -- unmodified from 'go build' --------------------------------------- - -// Return the flags to use when invoking the C or C++ compilers, or cgo. 
-func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) { - var defaults string - if def { - defaults = "-g -O2" - } - - cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS) - cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS) - cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS) - ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS) - return -} - -// envList returns the value of the given environment variable broken -// into fields, using the default value when the variable is empty. -func envList(key, def string) []string { - v := os.Getenv(key) - if v == "" { - v = def - } - return strings.Fields(v) -} - -// stringList's arguments should be a sequence of string or []string values. -// stringList flattens them into a single []string. -func stringList(args ...interface{}) []string { - var x []string - for _, arg := range args { - switch arg := arg.(type) { - case []string: - x = append(x, arg...) - case string: - x = append(x, arg) - default: - panic("stringList: invalid argument") - } - } - return x -} diff --git a/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go deleted file mode 100644 index de57422..0000000 --- a/vendor/golang.org/x/tools/go/loader/cgo_pkgconfig.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package loader - -import ( - "errors" - "fmt" - "go/build" - "os/exec" - "strings" -) - -// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints. -func pkgConfig(mode string, pkgs []string) (flags []string, err error) { - cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...) - out, err := cmd.CombinedOutput() - if err != nil { - s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err) - if len(out) > 0 { - s = fmt.Sprintf("%s: %s", s, out) - } - return nil, errors.New(s) - } - if len(out) > 0 { - flags = strings.Fields(string(out)) - } - return -} - -// pkgConfigFlags calls pkg-config if needed and returns the cflags -// needed to build the package. -func pkgConfigFlags(p *build.Package) (cflags []string, err error) { - if len(p.CgoPkgConfig) == 0 { - return nil, nil - } - return pkgConfig("--cflags", p.CgoPkgConfig) -} diff --git a/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/golang.org/x/tools/go/loader/doc.go deleted file mode 100644 index 9b51c9e..0000000 --- a/vendor/golang.org/x/tools/go/loader/doc.go +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package loader loads a complete Go program from source code, parsing -// and type-checking the initial packages plus their transitive closure -// of dependencies. The ASTs and the derived facts are retained for -// later use. -// -// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE. -// -// The package defines two primary types: Config, which specifies a -// set of initial packages to load and various other options; and -// Program, which is the result of successfully loading the packages -// specified by a configuration. -// -// The configuration can be set directly, but *Config provides various -// convenience methods to simplify the common cases, each of which can -// be called any number of times. 
Finally, these are followed by a -// call to Load() to actually load and type-check the program. -// -// var conf loader.Config -// -// // Use the command-line arguments to specify -// // a set of initial packages to load from source. -// // See FromArgsUsage for help. -// rest, err := conf.FromArgs(os.Args[1:], wantTests) -// -// // Parse the specified files and create an ad hoc package with path "foo". -// // All files must have the same 'package' declaration. -// conf.CreateFromFilenames("foo", "foo.go", "bar.go") -// -// // Create an ad hoc package with path "foo" from -// // the specified already-parsed files. -// // All ASTs must have the same 'package' declaration. -// conf.CreateFromFiles("foo", parsedFiles) -// -// // Add "runtime" to the set of packages to be loaded. -// conf.Import("runtime") -// -// // Adds "fmt" and "fmt_test" to the set of packages -// // to be loaded. "fmt" will include *_test.go files. -// conf.ImportWithTests("fmt") -// -// // Finally, load all the packages specified by the configuration. -// prog, err := conf.Load() -// -// See examples_test.go for examples of API usage. -// -// -// CONCEPTS AND TERMINOLOGY -// -// The WORKSPACE is the set of packages accessible to the loader. The -// workspace is defined by Config.Build, a *build.Context. The -// default context treats subdirectories of $GOROOT and $GOPATH as -// packages, but this behavior may be overridden. -// -// An AD HOC package is one specified as a set of source files on the -// command line. In the simplest case, it may consist of a single file -// such as $GOROOT/src/net/http/triv.go. -// -// EXTERNAL TEST packages are those comprised of a set of *_test.go -// files all with the same 'package foo_test' declaration, all in the -// same directory. (go/build.Package calls these files XTestFiles.) -// -// An IMPORTABLE package is one that can be referred to by some import -// spec. Every importable package is uniquely identified by its -// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json", -// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path -// typically denotes a subdirectory of the workspace. -// -// An import declaration uses an IMPORT PATH to refer to a package. -// Most import declarations use the package path as the import path. -// -// Due to VENDORING (https://golang.org/s/go15vendor), the -// interpretation of an import path may depend on the directory in which -// it appears. To resolve an import path to a package path, go/build -// must search the enclosing directories for a subdirectory named -// "vendor". -// -// ad hoc packages and external test packages are NON-IMPORTABLE. The -// path of an ad hoc package is inferred from the package -// declarations of its files and is therefore not a unique package key. -// For example, Config.CreatePkgs may specify two initial ad hoc -// packages, both with path "main". -// -// An AUGMENTED package is an importable package P plus all the -// *_test.go files with same 'package foo' declaration as P. -// (go/build.Package calls these files TestFiles.) -// -// The INITIAL packages are those specified in the configuration. A -// DEPENDENCY is a package loaded to satisfy an import in an initial -// package or another dependency. 
-// -package loader - -// IMPLEMENTATION NOTES -// -// 'go test', in-package test files, and import cycles -// --------------------------------------------------- -// -// An external test package may depend upon members of the augmented -// package that are not in the unaugmented package, such as functions -// that expose internals. (See bufio/export_test.go for an example.) -// So, the loader must ensure that for each external test package -// it loads, it also augments the corresponding non-test package. -// -// The import graph over n unaugmented packages must be acyclic; the -// import graph over n-1 unaugmented packages plus one augmented -// package must also be acyclic. ('go test' relies on this.) But the -// import graph over n augmented packages may contain cycles. -// -// First, all the (unaugmented) non-test packages and their -// dependencies are imported in the usual way; the loader reports an -// error if it detects an import cycle. -// -// Then, each package P for which testing is desired is augmented by -// the list P' of its in-package test files, by calling -// (*types.Checker).Files. This arrangement ensures that P' may -// reference definitions within P, but P may not reference definitions -// within P'. Furthermore, P' may import any other package, including -// ones that depend upon P, without an import cycle error. -// -// Consider two packages A and B, both of which have lists of -// in-package test files we'll call A' and B', and which have the -// following import graph edges: -// B imports A -// B' imports A -// A' imports B -// This last edge would be expected to create an error were it not -// for the special type-checking discipline above. -// Cycles of size greater than two are possible. For example: -// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil" -// io/ioutil/tempfile_test.go (package ioutil) imports "regexp" -// regexp/exec_test.go (package regexp) imports "compress/bzip2" -// -// -// Concurrency -// ----------- -// -// Let us define the import dependency graph as follows. Each node is a -// list of files passed to (Checker).Files at once. Many of these lists -// are the production code of an importable Go package, so those nodes -// are labelled by the package's path. The remaining nodes are -// ad hoc packages and lists of in-package *_test.go files that augment -// an importable package; those nodes have no label. -// -// The edges of the graph represent import statements appearing within a -// file. An edge connects a node (a list of files) to the node it -// imports, which is importable and thus always labelled. -// -// Loading is controlled by this dependency graph. -// -// To reduce I/O latency, we start loading a package's dependencies -// asynchronously as soon as we've parsed its files and enumerated its -// imports (scanImports). This performs a preorder traversal of the -// import dependency graph. -// -// To exploit hardware parallelism, we type-check unrelated packages in -// parallel, where "unrelated" means not ordered by the partial order of -// the import dependency graph. -// -// We use a concurrency-safe non-blocking cache (importer.imported) to -// record the results of type-checking, whether success or failure. An -// entry is created in this cache by startLoad the first time the -// package is imported. The first goroutine to request an entry becomes -// responsible for completing the task and broadcasting completion to -// subsequent requestors, which block until then. 
-// -// Type checking occurs in (parallel) postorder: we cannot type-check a -// set of files until we have loaded and type-checked all of their -// immediate dependencies (and thus all of their transitive -// dependencies). If the input were guaranteed free of import cycles, -// this would be trivial: we could simply wait for completion of the -// dependencies and then invoke the typechecker. -// -// But as we saw in the 'go test' section above, some cycles in the -// import graph over packages are actually legal, so long as the -// cycle-forming edge originates in the in-package test files that -// augment the package. This explains why the nodes of the import -// dependency graph are not packages, but lists of files: the unlabelled -// nodes avoid the cycles. Consider packages A and B where B imports A -// and A's in-package tests AT import B. The naively constructed import -// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but -// the graph over lists of files is AT --> B --> A, where AT is an -// unlabelled node. -// -// Awaiting completion of the dependencies in a cyclic graph would -// deadlock, so we must materialize the import dependency graph (as -// importer.graph) and check whether each import edge forms a cycle. If -// x imports y, and the graph already contains a path from y to x, then -// there is an import cycle, in which case the processing of x must not -// wait for the completion of processing of y. -// -// When the type-checker makes a callback (doImport) to the loader for a -// given import edge, there are two possible cases. In the normal case, -// the dependency has already been completely type-checked; doImport -// does a cache lookup and returns it. In the cyclic case, the entry in -// the cache is still necessarily incomplete, indicating a cycle. We -// perform the cycle check again to obtain the error message, and return -// the error. -// -// The result of using concurrency is about a 2.5x speedup for stdlib_test. - -// TODO(adonovan): overhaul the package documentation. diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go deleted file mode 100644 index de756f7..0000000 --- a/vendor/golang.org/x/tools/go/loader/loader.go +++ /dev/null @@ -1,1077 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package loader - -// See doc.go for package documentation and implementation notes. - -import ( - "errors" - "fmt" - "go/ast" - "go/build" - "go/parser" - "go/token" - "go/types" - "os" - "path/filepath" - "sort" - "strings" - "sync" - "time" - - "golang.org/x/tools/go/ast/astutil" -) - -var ignoreVendor build.ImportMode - -const trace = false // show timing info for type-checking - -// Config specifies the configuration for loading a whole program from -// Go source code. -// The zero value for Config is a ready-to-use default configuration. -type Config struct { - // Fset is the file set for the parser to use when loading the - // program. If nil, it may be lazily initialized by any - // method of Config. - Fset *token.FileSet - - // ParserMode specifies the mode to be used by the parser when - // loading source packages. - ParserMode parser.Mode - - // TypeChecker contains options relating to the type checker. - // - // The supplied IgnoreFuncBodies is not used; the effective - // value comes from the TypeCheckFuncBodies func below. 
- // The supplied Import function is not used either. - TypeChecker types.Config - - // TypeCheckFuncBodies is a predicate over package paths. - // A package for which the predicate is false will - // have its package-level declarations type checked, but not - // its function bodies; this can be used to quickly load - // dependencies from source. If nil, all func bodies are type - // checked. - TypeCheckFuncBodies func(path string) bool - - // If Build is non-nil, it is used to locate source packages. - // Otherwise &build.Default is used. - // - // By default, cgo is invoked to preprocess Go files that - // import the fake package "C". This behaviour can be - // disabled by setting CGO_ENABLED=0 in the environment prior - // to startup, or by setting Build.CgoEnabled=false. - Build *build.Context - - // The current directory, used for resolving relative package - // references such as "./go/loader". If empty, os.Getwd will be - // used instead. - Cwd string - - // If DisplayPath is non-nil, it is used to transform each - // file name obtained from Build.Import(). This can be used - // to prevent a virtualized build.Config's file names from - // leaking into the user interface. - DisplayPath func(path string) string - - // If AllowErrors is true, Load will return a Program even - // if some of the its packages contained I/O, parser or type - // errors; such errors are accessible via PackageInfo.Errors. If - // false, Load will fail if any package had an error. - AllowErrors bool - - // CreatePkgs specifies a list of non-importable initial - // packages to create. The resulting packages will appear in - // the corresponding elements of the Program.Created slice. - CreatePkgs []PkgSpec - - // ImportPkgs specifies a set of initial packages to load. - // The map keys are package paths. - // - // The map value indicates whether to load tests. If true, Load - // will add and type-check two lists of files to the package: - // non-test files followed by in-package *_test.go files. In - // addition, it will append the external test package (if any) - // to Program.Created. - ImportPkgs map[string]bool - - // FindPackage is called during Load to create the build.Package - // for a given import path from a given directory. - // If FindPackage is nil, (*build.Context).Import is used. - // A client may use this hook to adapt to a proprietary build - // system that does not follow the "go build" layout - // conventions, for example. - // - // It must be safe to call concurrently from multiple goroutines. - FindPackage func(ctxt *build.Context, importPath, fromDir string, mode build.ImportMode) (*build.Package, error) - - // AfterTypeCheck is called immediately after a list of files - // has been type-checked and appended to info.Files. - // - // This optional hook function is the earliest opportunity for - // the client to observe the output of the type checker, - // which may be useful to reduce analysis latency when loading - // a large program. - // - // The function is permitted to modify info.Info, for instance - // to clear data structures that are no longer needed, which can - // dramatically reduce peak memory consumption. - // - // The function may be called twice for the same PackageInfo: - // once for the files of the package and again for the - // in-package test files. - // - // It must be safe to call concurrently from multiple goroutines. - AfterTypeCheck func(info *PackageInfo, files []*ast.File) -} - -// A PkgSpec specifies a non-importable package to be created by Load. 
-// Files are processed first, but typically only one of Files and -// Filenames is provided. The path needn't be globally unique. -// -// For vendoring purposes, the package's directory is the one that -// contains the first file. -type PkgSpec struct { - Path string // package path ("" => use package declaration) - Files []*ast.File // ASTs of already-parsed files - Filenames []string // names of files to be parsed -} - -// A Program is a Go program loaded from source as specified by a Config. -type Program struct { - Fset *token.FileSet // the file set for this program - - // Created[i] contains the initial package whose ASTs or - // filenames were supplied by Config.CreatePkgs[i], followed by - // the external test package, if any, of each package in - // Config.ImportPkgs ordered by ImportPath. - // - // NOTE: these files must not import "C". Cgo preprocessing is - // only performed on imported packages, not ad hoc packages. - // - // TODO(adonovan): we need to copy and adapt the logic of - // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make - // Config.Import and Config.Create methods return the same kind - // of entity, essentially a build.Package. - // Perhaps we can even reuse that type directly. - Created []*PackageInfo - - // Imported contains the initially imported packages, - // as specified by Config.ImportPkgs. - Imported map[string]*PackageInfo - - // AllPackages contains the PackageInfo of every package - // encountered by Load: all initial packages and all - // dependencies, including incomplete ones. - AllPackages map[*types.Package]*PackageInfo - - // importMap is the canonical mapping of package paths to - // packages. It contains all Imported initial packages, but not - // Created ones, and all imported dependencies. - importMap map[string]*types.Package -} - -// PackageInfo holds the ASTs and facts derived by the type-checker -// for a single package. -// -// Not mutated once exposed via the API. -// -type PackageInfo struct { - Pkg *types.Package - Importable bool // true if 'import "Pkg.Path()"' would resolve to this - TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors - Files []*ast.File // syntax trees for the package's files - Errors []error // non-nil if the package had errors - types.Info // type-checker deductions. - dir string // package directory - - checker *types.Checker // transient type-checker state - errorFunc func(error) -} - -func (info *PackageInfo) String() string { return info.Pkg.Path() } - -func (info *PackageInfo) appendError(err error) { - if info.errorFunc != nil { - info.errorFunc(err) - } else { - fmt.Fprintln(os.Stderr, err) - } - info.Errors = append(info.Errors, err) -} - -func (conf *Config) fset() *token.FileSet { - if conf.Fset == nil { - conf.Fset = token.NewFileSet() - } - return conf.Fset -} - -// ParseFile is a convenience function (intended for testing) that invokes -// the parser using the Config's FileSet, which is initialized if nil. -// -// src specifies the parser input as a string, []byte, or io.Reader, and -// filename is its apparent name. If src is nil, the contents of -// filename are read from the file system. -// -func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) { - // TODO(adonovan): use conf.build() etc like parseFiles does. - return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode) -} - -// FromArgsUsage is a partial usage message that applications calling -// FromArgs may wish to include in their -help output. 
-const FromArgsUsage = ` - is a list of arguments denoting a set of initial packages. -It may take one of two forms: - -1. A list of *.go source files. - - All of the specified files are loaded, parsed and type-checked - as a single package. All the files must belong to the same directory. - -2. A list of import paths, each denoting a package. - - The package's directory is found relative to the $GOROOT and - $GOPATH using similar logic to 'go build', and the *.go files in - that directory are loaded, parsed and type-checked as a single - package. - - In addition, all *_test.go files in the directory are then loaded - and parsed. Those files whose package declaration equals that of - the non-*_test.go files are included in the primary package. Test - files whose package declaration ends with "_test" are type-checked - as another package, the 'external' test package, so that a single - import path may denote two packages. (Whether this behaviour is - enabled is tool-specific, and may depend on additional flags.) - -A '--' argument terminates the list of packages. -` - -// FromArgs interprets args as a set of initial packages to load from -// source and updates the configuration. It returns the list of -// unconsumed arguments. -// -// It is intended for use in command-line interfaces that require a -// set of initial packages to be specified; see FromArgsUsage message -// for details. -// -// Only superficial errors are reported at this stage; errors dependent -// on I/O are detected during Load. -// -func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) { - var rest []string - for i, arg := range args { - if arg == "--" { - rest = args[i+1:] - args = args[:i] - break // consume "--" and return the remaining args - } - } - - if len(args) > 0 && strings.HasSuffix(args[0], ".go") { - // Assume args is a list of a *.go files - // denoting a single ad hoc package. - for _, arg := range args { - if !strings.HasSuffix(arg, ".go") { - return nil, fmt.Errorf("named files must be .go files: %s", arg) - } - } - conf.CreateFromFilenames("", args...) - } else { - // Assume args are directories each denoting a - // package and (perhaps) an external test, iff xtest. - for _, arg := range args { - if xtest { - conf.ImportWithTests(arg) - } else { - conf.Import(arg) - } - } - } - - return rest, nil -} - -// CreateFromFilenames is a convenience function that adds -// a conf.CreatePkgs entry to create a package of the specified *.go -// files. -// -func (conf *Config) CreateFromFilenames(path string, filenames ...string) { - conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames}) -} - -// CreateFromFiles is a convenience function that adds a conf.CreatePkgs -// entry to create package of the specified path and parsed files. -// -func (conf *Config) CreateFromFiles(path string, files ...*ast.File) { - conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files}) -} - -// ImportWithTests is a convenience function that adds path to -// ImportPkgs, the set of initial source packages located relative to -// $GOPATH. The package will be augmented by any *_test.go files in -// its directory that contain a "package x" (not "package x_test") -// declaration. -// -// In addition, if any *_test.go files contain a "package x_test" -// declaration, an additional package comprising just those files will -// be added to CreatePkgs. 
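// For illustration only: a rough sketch of how a command-line tool might wire
// FromArgs and Load together, per the documentation above. The use of os.Args
// and the printed fields are arbitrary examples.
package main

import (
	"fmt"
	"log"
	"os"

	"golang.org/x/tools/go/loader"
)

func main() {
	var conf loader.Config

	// Interpret the arguments as initial packages: either *.go files or
	// import paths (xtest=false: do not add external test packages).
	if _, err := conf.FromArgs(os.Args[1:], false); err != nil {
		log.Fatal(err)
	}

	// Load parses and type-checks the initial packages and their dependencies.
	prog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}

	for _, info := range prog.InitialPackages() {
		fmt.Printf("%s: %d files, %d errors\n",
			info.Pkg.Path(), len(info.Files), len(info.Errors))
	}
}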
-// -func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) } - -// Import is a convenience function that adds path to ImportPkgs, the -// set of initial packages that will be imported from source. -// -func (conf *Config) Import(path string) { conf.addImport(path, false) } - -func (conf *Config) addImport(path string, tests bool) { - if path == "C" { - return // ignore; not a real package - } - if conf.ImportPkgs == nil { - conf.ImportPkgs = make(map[string]bool) - } - conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests -} - -// PathEnclosingInterval returns the PackageInfo and ast.Node that -// contain source interval [start, end), and all the node's ancestors -// up to the AST root. It searches all ast.Files of all packages in prog. -// exact is defined as for astutil.PathEnclosingInterval. -// -// The zero value is returned if not found. -// -func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) { - for _, info := range prog.AllPackages { - for _, f := range info.Files { - if f.Pos() == token.NoPos { - // This can happen if the parser saw - // too many errors and bailed out. - // (Use parser.AllErrors to prevent that.) - continue - } - if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) { - continue - } - if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil { - return info, path, exact - } - } - } - return nil, nil, false -} - -// InitialPackages returns a new slice containing the set of initial -// packages (Created + Imported) in unspecified order. -// -func (prog *Program) InitialPackages() []*PackageInfo { - infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported)) - infos = append(infos, prog.Created...) - for _, info := range prog.Imported { - infos = append(infos, info) - } - return infos -} - -// Package returns the ASTs and results of type checking for the -// specified package. -func (prog *Program) Package(path string) *PackageInfo { - if info, ok := prog.AllPackages[prog.importMap[path]]; ok { - return info - } - for _, info := range prog.Created { - if path == info.Pkg.Path() { - return info - } - } - return nil -} - -// ---------- Implementation ---------- - -// importer holds the working state of the algorithm. -type importer struct { - conf *Config // the client configuration - start time.Time // for logging - - progMu sync.Mutex // guards prog - prog *Program // the resulting program - - // findpkg is a memoization of FindPackage. - findpkgMu sync.Mutex // guards findpkg - findpkg map[findpkgKey]*findpkgValue - - importedMu sync.Mutex // guards imported - imported map[string]*importInfo // all imported packages (incl. failures) by import path - - // import dependency graph: graph[x][y] => x imports y - // - // Since non-importable packages cannot be cyclic, we ignore - // their imports, thus we only need the subgraph over importable - // packages. Nodes are identified by their import paths. - graphMu sync.Mutex - graph map[string]map[string]bool -} - -type findpkgKey struct { - importPath string - fromDir string - mode build.ImportMode -} - -type findpkgValue struct { - ready chan struct{} // closed to broadcast readiness - bp *build.Package - err error -} - -// importInfo tracks the success or failure of a single import. -// -// Upon completion, exactly one of info and err is non-nil: -// info on successful creation of a package, err otherwise. -// A successful package may still contain type errors. 
-// -type importInfo struct { - path string // import path - info *PackageInfo // results of typechecking (including errors) - complete chan struct{} // closed to broadcast that info is set. -} - -// awaitCompletion blocks until ii is complete, -// i.e. the info field is safe to inspect. -func (ii *importInfo) awaitCompletion() { - <-ii.complete // wait for close -} - -// Complete marks ii as complete. -// Its info and err fields will not be subsequently updated. -func (ii *importInfo) Complete(info *PackageInfo) { - if info == nil { - panic("info == nil") - } - ii.info = info - close(ii.complete) -} - -type importError struct { - path string // import path - err error // reason for failure to create a package -} - -// Load creates the initial packages specified by conf.{Create,Import}Pkgs, -// loading their dependencies packages as needed. -// -// On success, Load returns a Program containing a PackageInfo for -// each package. On failure, it returns an error. -// -// If AllowErrors is true, Load will return a Program even if some -// packages contained I/O, parser or type errors, or if dependencies -// were missing. (Such errors are accessible via PackageInfo.Errors. If -// false, Load will fail if any package had an error. -// -// It is an error if no packages were loaded. -// -func (conf *Config) Load() (*Program, error) { - // Create a simple default error handler for parse/type errors. - if conf.TypeChecker.Error == nil { - conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) } - } - - // Set default working directory for relative package references. - if conf.Cwd == "" { - var err error - conf.Cwd, err = os.Getwd() - if err != nil { - return nil, err - } - } - - // Install default FindPackage hook using go/build logic. - if conf.FindPackage == nil { - conf.FindPackage = (*build.Context).Import - } - - prog := &Program{ - Fset: conf.fset(), - Imported: make(map[string]*PackageInfo), - importMap: make(map[string]*types.Package), - AllPackages: make(map[*types.Package]*PackageInfo), - } - - imp := importer{ - conf: conf, - prog: prog, - findpkg: make(map[findpkgKey]*findpkgValue), - imported: make(map[string]*importInfo), - start: time.Now(), - graph: make(map[string]map[string]bool), - } - - // -- loading proper (concurrent phase) -------------------------------- - - var errpkgs []string // packages that contained errors - - // Load the initially imported packages and their dependencies, - // in parallel. - // No vendor check on packages imported from the command line. - infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor) - for _, ie := range importErrors { - conf.TypeChecker.Error(ie.err) // failed to create package - errpkgs = append(errpkgs, ie.path) - } - for _, info := range infos { - prog.Imported[info.Pkg.Path()] = info - } - - // Augment the designated initial packages by their tests. - // Dependencies are loaded in parallel. - var xtestPkgs []*build.Package - for importPath, augment := range conf.ImportPkgs { - if !augment { - continue - } - - // No vendor check on packages imported from command line. - bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor) - if err != nil { - // Package not found, or can't even parse package declaration. - // Already reported by previous loop; ignore it. - continue - } - - // Needs external test package? - if len(bp.XTestGoFiles) > 0 { - xtestPkgs = append(xtestPkgs, bp) - } - - // Consult the cache using the canonical package path. 
- path := bp.ImportPath - imp.importedMu.Lock() // (unnecessary, we're sequential here) - ii, ok := imp.imported[path] - // Paranoid checks added due to issue #11012. - if !ok { - // Unreachable. - // The previous loop called importAll and thus - // startLoad for each path in ImportPkgs, which - // populates imp.imported[path] with a non-zero value. - panic(fmt.Sprintf("imported[%q] not found", path)) - } - if ii == nil { - // Unreachable. - // The ii values in this loop are the same as in - // the previous loop, which enforced the invariant - // that at least one of ii.err and ii.info is non-nil. - panic(fmt.Sprintf("imported[%q] == nil", path)) - } - if ii.info == nil { - // Unreachable. - // awaitCompletion has the postcondition - // ii.info != nil. - panic(fmt.Sprintf("imported[%q].info = nil", path)) - } - info := ii.info - imp.importedMu.Unlock() - - // Parse the in-package test files. - files, errs := imp.conf.parsePackageFiles(bp, 't') - for _, err := range errs { - info.appendError(err) - } - - // The test files augmenting package P cannot be imported, - // but may import packages that import P, - // so we must disable the cycle check. - imp.addFiles(info, files, false) - } - - createPkg := func(path, dir string, files []*ast.File, errs []error) { - info := imp.newPackageInfo(path, dir) - for _, err := range errs { - info.appendError(err) - } - - // Ad hoc packages are non-importable, - // so no cycle check is needed. - // addFiles loads dependencies in parallel. - imp.addFiles(info, files, false) - prog.Created = append(prog.Created, info) - } - - // Create packages specified by conf.CreatePkgs. - for _, cp := range conf.CreatePkgs { - files, errs := parseFiles(conf.fset(), conf.build(), nil, conf.Cwd, cp.Filenames, conf.ParserMode) - files = append(files, cp.Files...) - - path := cp.Path - if path == "" { - if len(files) > 0 { - path = files[0].Name.Name - } else { - path = "(unnamed)" - } - } - - dir := conf.Cwd - if len(files) > 0 && files[0].Pos().IsValid() { - dir = filepath.Dir(conf.fset().File(files[0].Pos()).Name()) - } - createPkg(path, dir, files, errs) - } - - // Create external test packages. - sort.Sort(byImportPath(xtestPkgs)) - for _, bp := range xtestPkgs { - files, errs := imp.conf.parsePackageFiles(bp, 'x') - createPkg(bp.ImportPath+"_test", bp.Dir, files, errs) - } - - // -- finishing up (sequential) ---------------------------------------- - - if len(prog.Imported)+len(prog.Created) == 0 { - return nil, errors.New("no initial packages were loaded") - } - - // Create infos for indirectly imported packages. - // e.g. incomplete packages without syntax, loaded from export data. - for _, obj := range prog.importMap { - info := prog.AllPackages[obj] - if info == nil { - prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true} - } else { - // finished - info.checker = nil - info.errorFunc = nil - } - } - - if !conf.AllowErrors { - // Report errors in indirectly imported packages. 
- for _, info := range prog.AllPackages { - if len(info.Errors) > 0 { - errpkgs = append(errpkgs, info.Pkg.Path()) - } - } - if errpkgs != nil { - var more string - if len(errpkgs) > 3 { - more = fmt.Sprintf(" and %d more", len(errpkgs)-3) - errpkgs = errpkgs[:3] - } - return nil, fmt.Errorf("couldn't load packages due to errors: %s%s", - strings.Join(errpkgs, ", "), more) - } - } - - markErrorFreePackages(prog.AllPackages) - - return prog, nil -} - -type byImportPath []*build.Package - -func (b byImportPath) Len() int { return len(b) } -func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath } -func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] } - -// markErrorFreePackages sets the TransitivelyErrorFree flag on all -// applicable packages. -func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) { - // Build the transpose of the import graph. - importedBy := make(map[*types.Package]map[*types.Package]bool) - for P := range allPackages { - for _, Q := range P.Imports() { - clients, ok := importedBy[Q] - if !ok { - clients = make(map[*types.Package]bool) - importedBy[Q] = clients - } - clients[P] = true - } - } - - // Find all packages reachable from some error package. - reachable := make(map[*types.Package]bool) - var visit func(*types.Package) - visit = func(p *types.Package) { - if !reachable[p] { - reachable[p] = true - for q := range importedBy[p] { - visit(q) - } - } - } - for _, info := range allPackages { - if len(info.Errors) > 0 { - visit(info.Pkg) - } - } - - // Mark the others as "transitively error-free". - for _, info := range allPackages { - if !reachable[info.Pkg] { - info.TransitivelyErrorFree = true - } - } -} - -// build returns the effective build context. -func (conf *Config) build() *build.Context { - if conf.Build != nil { - return conf.Build - } - return &build.Default -} - -// parsePackageFiles enumerates the files belonging to package path, -// then loads, parses and returns them, plus a list of I/O or parse -// errors that were encountered. -// -// 'which' indicates which files to include: -// 'g': include non-test *.go source files (GoFiles + processed CgoFiles) -// 't': include in-package *_test.go source files (TestGoFiles) -// 'x': include external *_test.go source files. (XTestGoFiles) -// -func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) { - if bp.ImportPath == "unsafe" { - return nil, nil - } - var filenames []string - switch which { - case 'g': - filenames = bp.GoFiles - case 't': - filenames = bp.TestGoFiles - case 'x': - filenames = bp.XTestGoFiles - default: - panic(which) - } - - files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode) - - // Preprocess CgoFiles and parse the outputs (sequentially). - if which == 'g' && bp.CgoFiles != nil { - cgofiles, err := processCgoFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode) - if err != nil { - errs = append(errs, err) - } else { - files = append(files, cgofiles...) - } - } - - return files, errs -} - -// doImport imports the package denoted by path. -// It implements the types.Importer signature. -// -// It returns an error if a package could not be created -// (e.g. go/build or parse error), but type errors are reported via -// the types.Config.Error callback (the first of which is also saved -// in the package's PackageInfo). -// -// Idempotent. 
-// -func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) { - if to == "C" { - // This should be unreachable, but ad hoc packages are - // not currently subject to cgo preprocessing. - // See https://github.com/golang/go/issues/11627. - return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`, - from.Pkg.Path()) - } - - bp, err := imp.findPackage(to, from.dir, 0) - if err != nil { - return nil, err - } - - // The standard unsafe package is handled specially, - // and has no PackageInfo. - if bp.ImportPath == "unsafe" { - return types.Unsafe, nil - } - - // Look for the package in the cache using its canonical path. - path := bp.ImportPath - imp.importedMu.Lock() - ii := imp.imported[path] - imp.importedMu.Unlock() - if ii == nil { - panic("internal error: unexpected import: " + path) - } - if ii.info != nil { - return ii.info.Pkg, nil - } - - // Import of incomplete package: this indicates a cycle. - fromPath := from.Pkg.Path() - if cycle := imp.findPath(path, fromPath); cycle != nil { - cycle = append([]string{fromPath}, cycle...) - return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> ")) - } - - panic("internal error: import of incomplete (yet acyclic) package: " + fromPath) -} - -// findPackage locates the package denoted by the importPath in the -// specified directory. -func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) { - // We use a non-blocking duplicate-suppressing cache (gopl.io §9.7) - // to avoid holding the lock around FindPackage. - key := findpkgKey{importPath, fromDir, mode} - imp.findpkgMu.Lock() - v, ok := imp.findpkg[key] - if ok { - // cache hit - imp.findpkgMu.Unlock() - - <-v.ready // wait for entry to become ready - } else { - // Cache miss: this goroutine becomes responsible for - // populating the map entry and broadcasting its readiness. - v = &findpkgValue{ready: make(chan struct{})} - imp.findpkg[key] = v - imp.findpkgMu.Unlock() - - ioLimit <- true - v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode) - <-ioLimit - - if _, ok := v.err.(*build.NoGoError); ok { - v.err = nil // empty directory is not an error - } - - close(v.ready) // broadcast ready condition - } - return v.bp, v.err -} - -// importAll loads, parses, and type-checks the specified packages in -// parallel and returns their completed importInfos in unspecified order. -// -// fromPath is the package path of the importing package, if it is -// importable, "" otherwise. It is used for cycle detection. -// -// fromDir is the directory containing the import declaration that -// caused these imports. -// -func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) { - // TODO(adonovan): opt: do the loop in parallel once - // findPackage is non-blocking. - var pending []*importInfo - for importPath := range imports { - bp, err := imp.findPackage(importPath, fromDir, mode) - if err != nil { - errors = append(errors, importError{ - path: importPath, - err: err, - }) - continue - } - pending = append(pending, imp.startLoad(bp)) - } - - if fromPath != "" { - // We're loading a set of imports. - // - // We must record graph edges from the importing package - // to its dependencies, and check for cycles. 
- imp.graphMu.Lock() - deps, ok := imp.graph[fromPath] - if !ok { - deps = make(map[string]bool) - imp.graph[fromPath] = deps - } - for _, ii := range pending { - deps[ii.path] = true - } - imp.graphMu.Unlock() - } - - for _, ii := range pending { - if fromPath != "" { - if cycle := imp.findPath(ii.path, fromPath); cycle != nil { - // Cycle-forming import: we must not await its - // completion since it would deadlock. - // - // We don't record the error in ii since - // the error is really associated with the - // cycle-forming edge, not the package itself. - // (Also it would complicate the - // invariants of importPath completion.) - if trace { - fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle) - } - continue - } - } - ii.awaitCompletion() - infos = append(infos, ii.info) - } - - return infos, errors -} - -// findPath returns an arbitrary path from 'from' to 'to' in the import -// graph, or nil if there was none. -func (imp *importer) findPath(from, to string) []string { - imp.graphMu.Lock() - defer imp.graphMu.Unlock() - - seen := make(map[string]bool) - var search func(stack []string, importPath string) []string - search = func(stack []string, importPath string) []string { - if !seen[importPath] { - seen[importPath] = true - stack = append(stack, importPath) - if importPath == to { - return stack - } - for x := range imp.graph[importPath] { - if p := search(stack, x); p != nil { - return p - } - } - } - return nil - } - return search(make([]string, 0, 20), from) -} - -// startLoad initiates the loading, parsing and type-checking of the -// specified package and its dependencies, if it has not already begun. -// -// It returns an importInfo, not necessarily in a completed state. The -// caller must call awaitCompletion() before accessing its info field. -// -// startLoad is concurrency-safe and idempotent. -// -func (imp *importer) startLoad(bp *build.Package) *importInfo { - path := bp.ImportPath - imp.importedMu.Lock() - ii, ok := imp.imported[path] - if !ok { - ii = &importInfo{path: path, complete: make(chan struct{})} - imp.imported[path] = ii - go func() { - info := imp.load(bp) - ii.Complete(info) - }() - } - imp.importedMu.Unlock() - - return ii -} - -// load implements package loading by parsing Go source files -// located by go/build. -func (imp *importer) load(bp *build.Package) *PackageInfo { - info := imp.newPackageInfo(bp.ImportPath, bp.Dir) - info.Importable = true - files, errs := imp.conf.parsePackageFiles(bp, 'g') - for _, err := range errs { - info.appendError(err) - } - - imp.addFiles(info, files, true) - - imp.progMu.Lock() - imp.prog.importMap[bp.ImportPath] = info.Pkg - imp.progMu.Unlock() - - return info -} - -// addFiles adds and type-checks the specified files to info, loading -// their dependencies if needed. The order of files determines the -// package initialization order. It may be called multiple times on the -// same package. Errors are appended to the info.Errors field. -// -// cycleCheck determines whether the imports within files create -// dependency edges that should be checked for potential cycles. -// -func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) { - // Ensure the dependencies are loaded, in parallel. - var fromPath string - if cycleCheck { - fromPath = info.Pkg.Path() - } - // TODO(adonovan): opt: make the caller do scanImports. - // Callers with a build.Package can skip it. 
- imp.importAll(fromPath, info.dir, scanImports(files), 0) - - if trace { - fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n", - time.Since(imp.start), info.Pkg.Path(), len(files)) - } - - // Don't call checker.Files on Unsafe, even with zero files, - // because it would mutate the package, which is a global. - if info.Pkg == types.Unsafe { - if len(files) > 0 { - panic(`"unsafe" package contains unexpected files`) - } - } else { - // Ignore the returned (first) error since we - // already collect them all in the PackageInfo. - info.checker.Files(files) - info.Files = append(info.Files, files...) - } - - if imp.conf.AfterTypeCheck != nil { - imp.conf.AfterTypeCheck(info, files) - } - - if trace { - fmt.Fprintf(os.Stderr, "%s: stop %q\n", - time.Since(imp.start), info.Pkg.Path()) - } -} - -func (imp *importer) newPackageInfo(path, dir string) *PackageInfo { - var pkg *types.Package - if path == "unsafe" { - pkg = types.Unsafe - } else { - pkg = types.NewPackage(path, "") - } - info := &PackageInfo{ - Pkg: pkg, - Info: types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - }, - errorFunc: imp.conf.TypeChecker.Error, - dir: dir, - } - - // Copy the types.Config so we can vary it across PackageInfos. - tc := imp.conf.TypeChecker - tc.IgnoreFuncBodies = false - if f := imp.conf.TypeCheckFuncBodies; f != nil { - tc.IgnoreFuncBodies = !f(path) - } - tc.Importer = closure{imp, info} - tc.Error = info.appendError // appendError wraps the user's Error function - - info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info) - imp.progMu.Lock() - imp.prog.AllPackages[pkg] = info - imp.progMu.Unlock() - return info -} - -type closure struct { - imp *importer - info *PackageInfo -} - -func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) } diff --git a/vendor/golang.org/x/tools/go/loader/util.go b/vendor/golang.org/x/tools/go/loader/util.go deleted file mode 100644 index 7f38dd7..0000000 --- a/vendor/golang.org/x/tools/go/loader/util.go +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package loader - -import ( - "go/ast" - "go/build" - "go/parser" - "go/token" - "io" - "os" - "strconv" - "sync" - - "golang.org/x/tools/go/buildutil" -) - -// We use a counting semaphore to limit -// the number of parallel I/O calls per process. -var ioLimit = make(chan bool, 10) - -// parseFiles parses the Go source files within directory dir and -// returns the ASTs of the ones that could be at least partially parsed, -// along with a list of I/O and parse errors encountered. -// -// I/O is done via ctxt, which may specify a virtual file system. -// displayPath is used to transform the filenames attached to the ASTs. 
-// -func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) { - if displayPath == nil { - displayPath = func(path string) string { return path } - } - var wg sync.WaitGroup - n := len(files) - parsed := make([]*ast.File, n) - errors := make([]error, n) - for i, file := range files { - if !buildutil.IsAbsPath(ctxt, file) { - file = buildutil.JoinPath(ctxt, dir, file) - } - wg.Add(1) - go func(i int, file string) { - ioLimit <- true // wait - defer func() { - wg.Done() - <-ioLimit // signal - }() - var rd io.ReadCloser - var err error - if ctxt.OpenFile != nil { - rd, err = ctxt.OpenFile(file) - } else { - rd, err = os.Open(file) - } - if err != nil { - errors[i] = err // open failed - return - } - - // ParseFile may return both an AST and an error. - parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode) - rd.Close() - }(i, file) - } - wg.Wait() - - // Eliminate nils, preserving order. - var o int - for _, f := range parsed { - if f != nil { - parsed[o] = f - o++ - } - } - parsed = parsed[:o] - - o = 0 - for _, err := range errors { - if err != nil { - errors[o] = err - o++ - } - } - errors = errors[:o] - - return parsed, errors -} - -// scanImports returns the set of all import paths from all -// import specs in the specified files. -func scanImports(files []*ast.File) map[string]bool { - imports := make(map[string]bool) - for _, f := range files { - for _, decl := range f.Decls { - if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT { - for _, spec := range decl.Specs { - spec := spec.(*ast.ImportSpec) - - // NB: do not assume the program is well-formed! - path, err := strconv.Unquote(spec.Path.Value) - if err != nil { - continue // quietly ignore the error - } - if path == "C" { - continue // skip pseudopackage - } - imports[path] = true - } - } - } - } - return imports -} - -// ---------- Internal helpers ---------- - -// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos) -func tokenFileContainsPos(f *token.File, pos token.Pos) bool { - p := int(pos) - base := f.Base() - return base <= p && p < base+f.Size() -} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/imports.go b/vendor/golang.org/x/tools/go/types/typeutil/imports.go deleted file mode 100644 index 9c441db..0000000 --- a/vendor/golang.org/x/tools/go/types/typeutil/imports.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package typeutil - -import "go/types" - -// Dependencies returns all dependencies of the specified packages. -// -// Dependent packages appear in topological order: if package P imports -// package Q, Q appears earlier than P in the result. -// The algorithm follows import statements in the order they -// appear in the source code, so the result is a total order. 
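// For illustration only: a minimal sketch of Dependencies as documented above.
// The pkg argument would typically be info.Pkg from a loaded PackageInfo; the
// package name "example" is arbitrary.
package example

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func printDeps(pkg *types.Package) {
	// Every dependency is printed before any package that imports it.
	for _, dep := range typeutil.Dependencies(pkg) {
		fmt.Println(dep.Path())
	}
}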
-// -func Dependencies(pkgs ...*types.Package) []*types.Package { - var result []*types.Package - seen := make(map[*types.Package]bool) - var visit func(pkgs []*types.Package) - visit = func(pkgs []*types.Package) { - for _, p := range pkgs { - if !seen[p] { - seen[p] = true - visit(p.Imports()) - result = append(result, p) - } - } - } - visit(pkgs) - return result -} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go deleted file mode 100644 index c7f7545..0000000 --- a/vendor/golang.org/x/tools/go/types/typeutil/map.go +++ /dev/null @@ -1,313 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package typeutil defines various utilities for types, such as Map, -// a mapping from types.Type to interface{} values. -package typeutil // import "golang.org/x/tools/go/types/typeutil" - -import ( - "bytes" - "fmt" - "go/types" - "reflect" -) - -// Map is a hash-table-based mapping from types (types.Type) to -// arbitrary interface{} values. The concrete types that implement -// the Type interface are pointers. Since they are not canonicalized, -// == cannot be used to check for equivalence, and thus we cannot -// simply use a Go map. -// -// Just as with map[K]V, a nil *Map is a valid empty map. -// -// Not thread-safe. -// -type Map struct { - hasher Hasher // shared by many Maps - table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused - length int // number of map entries -} - -// entry is an entry (key/value association) in a hash bucket. -type entry struct { - key types.Type - value interface{} -} - -// SetHasher sets the hasher used by Map. -// -// All Hashers are functionally equivalent but contain internal state -// used to cache the results of hashing previously seen types. -// -// A single Hasher created by MakeHasher() may be shared among many -// Maps. This is recommended if the instances have many keys in -// common, as it will amortize the cost of hash computation. -// -// A Hasher may grow without bound as new types are seen. Even when a -// type is deleted from the map, the Hasher never shrinks, since other -// types in the map may reference the deleted type indirectly. -// -// Hashers are not thread-safe, and read-only operations such as -// Map.Lookup require updates to the hasher, so a full Mutex lock (not a -// read-lock) is require around all Map operations if a shared -// hasher is accessed from multiple threads. -// -// If SetHasher is not called, the Map will create a private hasher at -// the first call to Insert. -// -func (m *Map) SetHasher(hasher Hasher) { - m.hasher = hasher -} - -// Delete removes the entry with the given key, if any. -// It returns true if the entry was found. -// -func (m *Map) Delete(key types.Type) bool { - if m != nil && m.table != nil { - hash := m.hasher.Hash(key) - bucket := m.table[hash] - for i, e := range bucket { - if e.key != nil && types.Identical(key, e.key) { - // We can't compact the bucket as it - // would disturb iterators. - bucket[i] = entry{} - m.length-- - return true - } - } - } - return false -} - -// At returns the map entry for the given key. -// The result is nil if the entry is not present. 
-// -func (m *Map) At(key types.Type) interface{} { - if m != nil && m.table != nil { - for _, e := range m.table[m.hasher.Hash(key)] { - if e.key != nil && types.Identical(key, e.key) { - return e.value - } - } - } - return nil -} - -// Set sets the map entry for key to val, -// and returns the previous entry, if any. -func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) { - if m.table != nil { - hash := m.hasher.Hash(key) - bucket := m.table[hash] - var hole *entry - for i, e := range bucket { - if e.key == nil { - hole = &bucket[i] - } else if types.Identical(key, e.key) { - prev = e.value - bucket[i].value = value - return - } - } - - if hole != nil { - *hole = entry{key, value} // overwrite deleted entry - } else { - m.table[hash] = append(bucket, entry{key, value}) - } - } else { - if m.hasher.memo == nil { - m.hasher = MakeHasher() - } - hash := m.hasher.Hash(key) - m.table = map[uint32][]entry{hash: {entry{key, value}}} - } - - m.length++ - return -} - -// Len returns the number of map entries. -func (m *Map) Len() int { - if m != nil { - return m.length - } - return 0 -} - -// Iterate calls function f on each entry in the map in unspecified order. -// -// If f should mutate the map, Iterate provides the same guarantees as -// Go maps: if f deletes a map entry that Iterate has not yet reached, -// f will not be invoked for it, but if f inserts a map entry that -// Iterate has not yet reached, whether or not f will be invoked for -// it is unspecified. -// -func (m *Map) Iterate(f func(key types.Type, value interface{})) { - if m != nil { - for _, bucket := range m.table { - for _, e := range bucket { - if e.key != nil { - f(e.key, e.value) - } - } - } - } -} - -// Keys returns a new slice containing the set of map keys. -// The order is unspecified. -func (m *Map) Keys() []types.Type { - keys := make([]types.Type, 0, m.Len()) - m.Iterate(func(key types.Type, _ interface{}) { - keys = append(keys, key) - }) - return keys -} - -func (m *Map) toString(values bool) string { - if m == nil { - return "{}" - } - var buf bytes.Buffer - fmt.Fprint(&buf, "{") - sep := "" - m.Iterate(func(key types.Type, value interface{}) { - fmt.Fprint(&buf, sep) - sep = ", " - fmt.Fprint(&buf, key) - if values { - fmt.Fprintf(&buf, ": %q", value) - } - }) - fmt.Fprint(&buf, "}") - return buf.String() -} - -// String returns a string representation of the map's entries. -// Values are printed using fmt.Sprintf("%v", v). -// Order is unspecified. -// -func (m *Map) String() string { - return m.toString(true) -} - -// KeysString returns a string representation of the map's key set. -// Order is unspecified. -// -func (m *Map) KeysString() string { - return m.toString(false) -} - -//////////////////////////////////////////////////////////////////////// -// Hasher - -// A Hasher maps each type to its hash value. -// For efficiency, a hasher uses memoization; thus its memory -// footprint grows monotonically over time. -// Hashers are not thread-safe. -// Hashers have reference semantics. -// Call MakeHasher to create a Hasher. -type Hasher struct { - memo map[types.Type]uint32 -} - -// MakeHasher returns a new Hasher instance. -func MakeHasher() Hasher { - return Hasher{make(map[types.Type]uint32)} -} - -// Hash computes a hash value for the given type t such that -// Identical(t, t') => Hash(t) == Hash(t'). 
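// For illustration only: a minimal sketch of Map used as a mapping keyed by
// type identity rather than pointer equality, per the documentation above.
// The key types and values are arbitrary examples.
package example

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func demoMap() {
	var m typeutil.Map // the zero value is an empty map

	intSlice := types.NewSlice(types.Typ[types.Int])
	m.Set(intSlice, "info about []int")

	// A structurally identical but distinct Slice value finds the same entry,
	// because keys are compared with types.Identical, not ==.
	other := types.NewSlice(types.Typ[types.Int])
	fmt.Println(m.At(other)) // "info about []int"
	fmt.Println(m.Len())     // 1
}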
-func (h Hasher) Hash(t types.Type) uint32 { - hash, ok := h.memo[t] - if !ok { - hash = h.hashFor(t) - h.memo[t] = hash - } - return hash -} - -// hashString computes the Fowler–Noll–Vo hash of s. -func hashString(s string) uint32 { - var h uint32 - for i := 0; i < len(s); i++ { - h ^= uint32(s[i]) - h *= 16777619 - } - return h -} - -// hashFor computes the hash of t. -func (h Hasher) hashFor(t types.Type) uint32 { - // See Identical for rationale. - switch t := t.(type) { - case *types.Basic: - return uint32(t.Kind()) - - case *types.Array: - return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem()) - - case *types.Slice: - return 9049 + 2*h.Hash(t.Elem()) - - case *types.Struct: - var hash uint32 = 9059 - for i, n := 0, t.NumFields(); i < n; i++ { - f := t.Field(i) - if f.Anonymous() { - hash += 8861 - } - hash += hashString(t.Tag(i)) - hash += hashString(f.Name()) // (ignore f.Pkg) - hash += h.Hash(f.Type()) - } - return hash - - case *types.Pointer: - return 9067 + 2*h.Hash(t.Elem()) - - case *types.Signature: - var hash uint32 = 9091 - if t.Variadic() { - hash *= 8863 - } - return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results()) - - case *types.Interface: - var hash uint32 = 9103 - for i, n := 0, t.NumMethods(); i < n; i++ { - // See go/types.identicalMethods for rationale. - // Method order is not significant. - // Ignore m.Pkg(). - m := t.Method(i) - hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type()) - } - return hash - - case *types.Map: - return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem()) - - case *types.Chan: - return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem()) - - case *types.Named: - // Not safe with a copying GC; objects may move. - return uint32(reflect.ValueOf(t.Obj()).Pointer()) - - case *types.Tuple: - return h.hashTuple(t) - } - panic(t) -} - -func (h Hasher) hashTuple(tuple *types.Tuple) uint32 { - // See go/types.identicalTypes for rationale. - n := tuple.Len() - var hash uint32 = 9137 + 2*uint32(n) - for i := 0; i < n; i++ { - hash += 3 * h.Hash(tuple.At(i).Type()) - } - return hash -} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go deleted file mode 100644 index 3208461..0000000 --- a/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go +++ /dev/null @@ -1,72 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file implements a cache of method sets. - -package typeutil - -import ( - "go/types" - "sync" -) - -// A MethodSetCache records the method set of each type T for which -// MethodSet(T) is called so that repeat queries are fast. -// The zero value is a ready-to-use cache instance. -type MethodSetCache struct { - mu sync.Mutex - named map[*types.Named]struct{ value, pointer *types.MethodSet } // method sets for named N and *N - others map[types.Type]*types.MethodSet // all other types -} - -// MethodSet returns the method set of type T. It is thread-safe. -// -// If cache is nil, this function is equivalent to types.NewMethodSet(T). -// Utility functions can thus expose an optional *MethodSetCache -// parameter to clients that care about performance. 
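// For illustration only: a minimal sketch of MethodSetCache as documented
// above. The zero value is ready to use; a nil *MethodSetCache also works and
// simply falls back to types.NewMethodSet on every call.
package example

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func listMethods(cache *typeutil.MethodSetCache, T types.Type) {
	mset := cache.MethodSet(T) // memoized equivalent of types.NewMethodSet(T)
	for i := 0; i < mset.Len(); i++ {
		fmt.Println(mset.At(i))
	}
}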
-// -func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet { - if cache == nil { - return types.NewMethodSet(T) - } - cache.mu.Lock() - defer cache.mu.Unlock() - - switch T := T.(type) { - case *types.Named: - return cache.lookupNamed(T).value - - case *types.Pointer: - if N, ok := T.Elem().(*types.Named); ok { - return cache.lookupNamed(N).pointer - } - } - - // all other types - // (The map uses pointer equivalence, not type identity.) - mset := cache.others[T] - if mset == nil { - mset = types.NewMethodSet(T) - if cache.others == nil { - cache.others = make(map[types.Type]*types.MethodSet) - } - cache.others[T] = mset - } - return mset -} - -func (cache *MethodSetCache) lookupNamed(named *types.Named) struct{ value, pointer *types.MethodSet } { - if cache.named == nil { - cache.named = make(map[*types.Named]struct{ value, pointer *types.MethodSet }) - } - // Avoid recomputing mset(*T) for each distinct Pointer - // instance whose underlying type is a named type. - msets, ok := cache.named[named] - if !ok { - msets.value = types.NewMethodSet(named) - msets.pointer = types.NewMethodSet(types.NewPointer(named)) - cache.named[named] = msets - } - return msets -} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/ui.go b/vendor/golang.org/x/tools/go/types/typeutil/ui.go deleted file mode 100644 index 9849c24..0000000 --- a/vendor/golang.org/x/tools/go/types/typeutil/ui.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package typeutil - -// This file defines utilities for user interfaces that display types. - -import "go/types" - -// IntuitiveMethodSet returns the intuitive method set of a type T, -// which is the set of methods you can call on an addressable value of -// that type. -// -// The result always contains MethodSet(T), and is exactly MethodSet(T) -// for interface types and for pointer-to-concrete types. -// For all other concrete types T, the result additionally -// contains each method belonging to *T if there is no identically -// named method on T itself. -// -// This corresponds to user intuition about method sets; -// this function is intended only for user interfaces. -// -// The order of the result is as for types.MethodSet(T). -// -func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection { - isPointerToConcrete := func(T types.Type) bool { - ptr, ok := T.(*types.Pointer) - return ok && !types.IsInterface(ptr.Elem()) - } - - var result []*types.Selection - mset := msets.MethodSet(T) - if types.IsInterface(T) || isPointerToConcrete(T) { - for i, n := 0, mset.Len(); i < n; i++ { - result = append(result, mset.At(i)) - } - } else { - // T is some other concrete type. - // Report methods of T and *T, preferring those of T. 
- pmset := msets.MethodSet(types.NewPointer(T)) - for i, n := 0, pmset.Len(); i < n; i++ { - meth := pmset.At(i) - if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil { - meth = m - } - result = append(result, meth) - } - - } - return result -} diff --git a/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE b/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE deleted file mode 100644 index 1723a22..0000000 --- a/vendor/golang.org/x/tools/third_party/moduleloader/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2013-2016 Guy Bedford, Luke Hoban, Addy Osmani - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/golang.org/x/tools/third_party/typescript/LICENSE b/vendor/golang.org/x/tools/third_party/typescript/LICENSE deleted file mode 100644 index e7259f8..0000000 --- a/vendor/golang.org/x/tools/third_party/typescript/LICENSE +++ /dev/null @@ -1,55 +0,0 @@ -Apache License - -Version 2.0, January 2004 - -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - -"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. - -"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. - -"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. - -"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. - -"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. - -"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 
- -"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). - -"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. - -"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." - -"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: - -You must give any other recipients of the Work or Derivative Works a copy of this License; and - -You must cause any modified files to carry prominent notices stating that You changed the files; and - -You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and - -If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE b/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE deleted file mode 100644 index e648283..0000000 --- a/vendor/golang.org/x/tools/third_party/webcomponents/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2015 The Polymer Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\ No newline at end of file diff --git a/vendor/honnef.co/go/tools/LICENSE b/vendor/honnef.co/go/tools/LICENSE deleted file mode 100644 index dfd0314..0000000 --- a/vendor/honnef.co/go/tools/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2016 Dominik Honnef - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/honnef.co/go/tools/callgraph/callgraph.go b/vendor/honnef.co/go/tools/callgraph/callgraph.go deleted file mode 100644 index d93a20a..0000000 --- a/vendor/honnef.co/go/tools/callgraph/callgraph.go +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* - -Package callgraph defines the call graph and various algorithms -and utilities to operate on it. - -A call graph is a labelled directed graph whose nodes represent -functions and whose edge labels represent syntactic function call -sites. The presence of a labelled edge (caller, site, callee) -indicates that caller may call callee at the specified call site. - -A call graph is a multigraph: it may contain multiple edges (caller, -*, callee) connecting the same pair of nodes, so long as the edges -differ by label; this occurs when one function calls another function -from multiple call sites. Also, it may contain multiple edges -(caller, site, *) that differ only by callee; this indicates a -polymorphic call. - -A SOUND call graph is one that overapproximates the dynamic calling -behaviors of the program in all possible executions. One call graph -is more PRECISE than another if it is a smaller overapproximation of -the dynamic behavior. - -All call graphs have a synthetic root node which is responsible for -calling main() and init(). - -Calls to built-in functions (e.g. panic, println) are not represented -in the call graph; they are treated like built-in operators of the -language. - -*/ -package callgraph // import "honnef.co/go/tools/callgraph" - -// TODO(adonovan): add a function to eliminate wrappers from the -// callgraph, preserving topology. -// More generally, we could eliminate "uninteresting" nodes such as -// nodes from packages we don't care about. - -import ( - "fmt" - "go/token" - - "honnef.co/go/tools/ssa" -) - -// A Graph represents a call graph. -// -// A graph may contain nodes that are not reachable from the root. -// If the call graph is sound, such nodes indicate unreachable -// functions. 
-// -type Graph struct { - Root *Node // the distinguished root node - Nodes map[*ssa.Function]*Node // all nodes by function -} - -// New returns a new Graph with the specified root node. -func New(root *ssa.Function) *Graph { - g := &Graph{Nodes: make(map[*ssa.Function]*Node)} - g.Root = g.CreateNode(root) - return g -} - -// CreateNode returns the Node for fn, creating it if not present. -func (g *Graph) CreateNode(fn *ssa.Function) *Node { - n, ok := g.Nodes[fn] - if !ok { - n = &Node{Func: fn, ID: len(g.Nodes)} - g.Nodes[fn] = n - } - return n -} - -// A Node represents a node in a call graph. -type Node struct { - Func *ssa.Function // the function this node represents - ID int // 0-based sequence number - In []*Edge // unordered set of incoming call edges (n.In[*].Callee == n) - Out []*Edge // unordered set of outgoing call edges (n.Out[*].Caller == n) -} - -func (n *Node) String() string { - return fmt.Sprintf("n%d:%s", n.ID, n.Func) -} - -// A Edge represents an edge in the call graph. -// -// Site is nil for edges originating in synthetic or intrinsic -// functions, e.g. reflect.Call or the root of the call graph. -type Edge struct { - Caller *Node - Site ssa.CallInstruction - Callee *Node -} - -func (e Edge) String() string { - return fmt.Sprintf("%s --> %s", e.Caller, e.Callee) -} - -func (e Edge) Description() string { - var prefix string - switch e.Site.(type) { - case nil: - return "synthetic call" - case *ssa.Go: - prefix = "concurrent " - case *ssa.Defer: - prefix = "deferred " - } - return prefix + e.Site.Common().Description() -} - -func (e Edge) Pos() token.Pos { - if e.Site == nil { - return token.NoPos - } - return e.Site.Pos() -} - -// AddEdge adds the edge (caller, site, callee) to the call graph. -// Elimination of duplicate edges is the caller's responsibility. -func AddEdge(caller *Node, site ssa.CallInstruction, callee *Node) { - e := &Edge{caller, site, callee} - callee.In = append(callee.In, e) - caller.Out = append(caller.Out, e) -} diff --git a/vendor/honnef.co/go/tools/callgraph/static/static.go b/vendor/honnef.co/go/tools/callgraph/static/static.go deleted file mode 100644 index 5444e84..0000000 --- a/vendor/honnef.co/go/tools/callgraph/static/static.go +++ /dev/null @@ -1,35 +0,0 @@ -// Package static computes the call graph of a Go program containing -// only static call edges. -package static // import "honnef.co/go/tools/callgraph/static" - -import ( - "honnef.co/go/tools/callgraph" - "honnef.co/go/tools/ssa" - "honnef.co/go/tools/ssa/ssautil" -) - -// CallGraph computes the call graph of the specified program -// considering only static calls. -// -func CallGraph(prog *ssa.Program) *callgraph.Graph { - cg := callgraph.New(nil) // TODO(adonovan) eliminate concept of rooted callgraph - - // TODO(adonovan): opt: use only a single pass over the ssa.Program. - // TODO(adonovan): opt: this is slower than RTA (perhaps because - // the lower precision means so many edges are allocated)! 
- for f := range ssautil.AllFunctions(prog) { - fnode := cg.CreateNode(f) - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - if site, ok := instr.(ssa.CallInstruction); ok { - if g := site.Common().StaticCallee(); g != nil { - gnode := cg.CreateNode(g) - callgraph.AddEdge(fnode, site, gnode) - } - } - } - } - } - - return cg -} diff --git a/vendor/honnef.co/go/tools/callgraph/util.go b/vendor/honnef.co/go/tools/callgraph/util.go deleted file mode 100644 index 7aeda96..0000000 --- a/vendor/honnef.co/go/tools/callgraph/util.go +++ /dev/null @@ -1,181 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package callgraph - -import "honnef.co/go/tools/ssa" - -// This file provides various utilities over call graphs, such as -// visitation and path search. - -// CalleesOf returns a new set containing all direct callees of the -// caller node. -// -func CalleesOf(caller *Node) map[*Node]bool { - callees := make(map[*Node]bool) - for _, e := range caller.Out { - callees[e.Callee] = true - } - return callees -} - -// GraphVisitEdges visits all the edges in graph g in depth-first order. -// The edge function is called for each edge in postorder. If it -// returns non-nil, visitation stops and GraphVisitEdges returns that -// value. -// -func GraphVisitEdges(g *Graph, edge func(*Edge) error) error { - seen := make(map[*Node]bool) - var visit func(n *Node) error - visit = func(n *Node) error { - if !seen[n] { - seen[n] = true - for _, e := range n.Out { - if err := visit(e.Callee); err != nil { - return err - } - if err := edge(e); err != nil { - return err - } - } - } - return nil - } - for _, n := range g.Nodes { - if err := visit(n); err != nil { - return err - } - } - return nil -} - -// PathSearch finds an arbitrary path starting at node start and -// ending at some node for which isEnd() returns true. On success, -// PathSearch returns the path as an ordered list of edges; on -// failure, it returns nil. -// -func PathSearch(start *Node, isEnd func(*Node) bool) []*Edge { - stack := make([]*Edge, 0, 32) - seen := make(map[*Node]bool) - var search func(n *Node) []*Edge - search = func(n *Node) []*Edge { - if !seen[n] { - seen[n] = true - if isEnd(n) { - return stack - } - for _, e := range n.Out { - stack = append(stack, e) // push - if found := search(e.Callee); found != nil { - return found - } - stack = stack[:len(stack)-1] // pop - } - } - return nil - } - return search(start) -} - -// DeleteSyntheticNodes removes from call graph g all nodes for -// synthetic functions (except g.Root and package initializers), -// preserving the topology. In effect, calls to synthetic wrappers -// are "inlined". -// -func (g *Graph) DeleteSyntheticNodes() { - // Measurements on the standard library and go.tools show that - // resulting graph has ~15% fewer nodes and 4-8% fewer edges - // than the input. - // - // Inlining a wrapper of in-degree m, out-degree n adds m*n - // and removes m+n edges. Since most wrappers are monomorphic - // (n=1) this results in a slight reduction. Polymorphic - // wrappers (n>1), e.g. from embedding an interface value - // inside a struct to satisfy some interface, cause an - // increase in the graph, but they seem to be uncommon. - - // Hash all existing edges to avoid creating duplicates. 
- edges := make(map[Edge]bool) - for _, cgn := range g.Nodes { - for _, e := range cgn.Out { - edges[*e] = true - } - } - for fn, cgn := range g.Nodes { - if cgn == g.Root || fn.Synthetic == "" || isInit(cgn.Func) { - continue // keep - } - for _, eIn := range cgn.In { - for _, eOut := range cgn.Out { - newEdge := Edge{eIn.Caller, eIn.Site, eOut.Callee} - if edges[newEdge] { - continue // don't add duplicate - } - AddEdge(eIn.Caller, eIn.Site, eOut.Callee) - edges[newEdge] = true - } - } - g.DeleteNode(cgn) - } -} - -func isInit(fn *ssa.Function) bool { - return fn.Pkg != nil && fn.Pkg.Func("init") == fn -} - -// DeleteNode removes node n and its edges from the graph g. -// (NB: not efficient for batch deletion.) -func (g *Graph) DeleteNode(n *Node) { - n.deleteIns() - n.deleteOuts() - delete(g.Nodes, n.Func) -} - -// deleteIns deletes all incoming edges to n. -func (n *Node) deleteIns() { - for _, e := range n.In { - removeOutEdge(e) - } - n.In = nil -} - -// deleteOuts deletes all outgoing edges from n. -func (n *Node) deleteOuts() { - for _, e := range n.Out { - removeInEdge(e) - } - n.Out = nil -} - -// removeOutEdge removes edge.Caller's outgoing edge 'edge'. -func removeOutEdge(edge *Edge) { - caller := edge.Caller - n := len(caller.Out) - for i, e := range caller.Out { - if e == edge { - // Replace it with the final element and shrink the slice. - caller.Out[i] = caller.Out[n-1] - caller.Out[n-1] = nil // aid GC - caller.Out = caller.Out[:n-1] - return - } - } - panic("edge not found: " + edge.String()) -} - -// removeInEdge removes edge.Callee's incoming edge 'edge'. -func removeInEdge(edge *Edge) { - caller := edge.Callee - n := len(caller.In) - for i, e := range caller.In { - if e == edge { - // Replace it with the final element and shrink the slice. - caller.In[i] = caller.In[n-1] - caller.In[n-1] = nil // aid GC - caller.In = caller.In[:n-1] - return - } - } - panic("edge not found: " + edge.String()) -} diff --git a/vendor/honnef.co/go/tools/cmd/megacheck/megacheck.go b/vendor/honnef.co/go/tools/cmd/megacheck/megacheck.go deleted file mode 100644 index 4c0b97c..0000000 --- a/vendor/honnef.co/go/tools/cmd/megacheck/megacheck.go +++ /dev/null @@ -1,122 +0,0 @@ -// megacheck runs staticcheck, gosimple and unused. 
-package main // import "honnef.co/go/tools/cmd/megacheck" - -import ( - "os" - - "honnef.co/go/tools/lint/lintutil" - "honnef.co/go/tools/simple" - "honnef.co/go/tools/staticcheck" - "honnef.co/go/tools/unused" -) - -func main() { - var flags struct { - staticcheck struct { - enabled bool - generated bool - exitNonZero bool - } - gosimple struct { - enabled bool - generated bool - exitNonZero bool - } - unused struct { - enabled bool - constants bool - fields bool - functions bool - types bool - variables bool - debug string - wholeProgram bool - reflection bool - exitNonZero bool - } - } - fs := lintutil.FlagSet("megacheck") - fs.BoolVar(&flags.gosimple.enabled, - "simple.enabled", true, "Run gosimple") - fs.BoolVar(&flags.gosimple.generated, - "simple.generated", false, "Check generated code") - fs.BoolVar(&flags.gosimple.exitNonZero, - "simple.exit-non-zero", false, "Exit non-zero if any problems were found") - - fs.BoolVar(&flags.staticcheck.enabled, - "staticcheck.enabled", true, "Run staticcheck") - fs.BoolVar(&flags.staticcheck.generated, - "staticcheck.generated", false, "Check generated code (only applies to a subset of checks)") - fs.BoolVar(&flags.staticcheck.exitNonZero, - "staticcheck.exit-non-zero", true, "Exit non-zero if any problems were found") - - fs.BoolVar(&flags.unused.enabled, - "unused.enabled", true, "Run unused") - fs.BoolVar(&flags.unused.constants, - "unused.consts", true, "Report unused constants") - fs.BoolVar(&flags.unused.fields, - "unused.fields", true, "Report unused fields") - fs.BoolVar(&flags.unused.functions, - "unused.funcs", true, "Report unused functions and methods") - fs.BoolVar(&flags.unused.types, - "unused.types", true, "Report unused types") - fs.BoolVar(&flags.unused.variables, - "unused.vars", true, "Report unused variables") - fs.BoolVar(&flags.unused.wholeProgram, - "unused.exported", false, "Treat arguments as a program and report unused exported identifiers") - fs.BoolVar(&flags.unused.reflection, - "unused.reflect", true, "Consider identifiers as used when it's likely they'll be accessed via reflection") - fs.BoolVar(&flags.unused.exitNonZero, - "unused.exit-non-zero", true, "Exit non-zero if any problems were found") - - fs.Parse(os.Args[1:]) - - var checkers []lintutil.CheckerConfig - - if flags.staticcheck.enabled { - sac := staticcheck.NewChecker() - sac.CheckGenerated = flags.staticcheck.generated - checkers = append(checkers, lintutil.CheckerConfig{ - Checker: sac, - ExitNonZero: flags.staticcheck.exitNonZero, - }) - } - - if flags.gosimple.enabled { - sc := simple.NewChecker() - sc.CheckGenerated = flags.gosimple.generated - checkers = append(checkers, lintutil.CheckerConfig{ - Checker: sc, - ExitNonZero: flags.gosimple.exitNonZero, - }) - } - - if flags.unused.enabled { - var mode unused.CheckMode - if flags.unused.constants { - mode |= unused.CheckConstants - } - if flags.unused.fields { - mode |= unused.CheckFields - } - if flags.unused.functions { - mode |= unused.CheckFunctions - } - if flags.unused.types { - mode |= unused.CheckTypes - } - if flags.unused.variables { - mode |= unused.CheckVariables - } - uc := unused.NewChecker(mode) - uc.WholeProgram = flags.unused.wholeProgram - uc.ConsiderReflection = flags.unused.reflection - checkers = append(checkers, lintutil.CheckerConfig{ - Checker: unused.NewLintChecker(uc), - ExitNonZero: flags.unused.exitNonZero, - }) - - } - - lintutil.ProcessFlagSet(checkers, fs) -} diff --git a/vendor/honnef.co/go/tools/deprecated/stdlib.go b/vendor/honnef.co/go/tools/deprecated/stdlib.go 
deleted file mode 100644 index b6b217c..0000000 --- a/vendor/honnef.co/go/tools/deprecated/stdlib.go +++ /dev/null @@ -1,54 +0,0 @@ -package deprecated - -type Deprecation struct { - DeprecatedSince int - AlternativeAvailableSince int -} - -var Stdlib = map[string]Deprecation{ - "image/jpeg.Reader": {4, 0}, - // FIXME(dh): AllowBinary isn't being detected as deprecated - // because the comment has a newline right after "Deprecated:" - "go/build.AllowBinary": {7, 7}, - "(archive/zip.FileHeader).CompressedSize": {1, 1}, - "(archive/zip.FileHeader).UncompressedSize": {1, 1}, - "(go/doc.Package).Bugs": {1, 1}, - "os.SEEK_SET": {7, 7}, - "os.SEEK_CUR": {7, 7}, - "os.SEEK_END": {7, 7}, - "(net.Dialer).Cancel": {7, 7}, - "runtime.CPUProfile": {9, 0}, - "compress/flate.ReadError": {6, 6}, - "compress/flate.WriteError": {6, 6}, - "path/filepath.HasPrefix": {0, 0}, - "(net/http.Transport).Dial": {7, 7}, - "(*net/http.Transport).CancelRequest": {6, 5}, - "net/http.ErrWriteAfterFlush": {7, 0}, - "net/http.ErrHeaderTooLong": {8, 0}, - "net/http.ErrShortBody": {8, 0}, - "net/http.ErrMissingContentLength": {8, 0}, - "net/http/httputil.ErrPersistEOF": {0, 0}, - "net/http/httputil.ErrClosed": {0, 0}, - "net/http/httputil.ErrPipeline": {0, 0}, - "net/http/httputil.ServerConn": {0, 0}, - "net/http/httputil.NewServerConn": {0, 0}, - "net/http/httputil.ClientConn": {0, 0}, - "net/http/httputil.NewClientConn": {0, 0}, - "net/http/httputil.NewProxyClientConn": {0, 0}, - "(net/http.Request).Cancel": {7, 7}, - "(text/template/parse.PipeNode).Line": {1, 1}, - "(text/template/parse.ActionNode).Line": {1, 1}, - "(text/template/parse.BranchNode).Line": {1, 1}, - "(text/template/parse.TemplateNode).Line": {1, 1}, - "database/sql/driver.ColumnConverter": {9, 9}, - "database/sql/driver.Execer": {8, 8}, - "database/sql/driver.Queryer": {8, 8}, - "(database/sql/driver.Conn).Begin": {8, 8}, - "(database/sql/driver.Stmt).Exec": {8, 8}, - "(database/sql/driver.Stmt).Query": {8, 8}, - "syscall.StringByteSlice": {1, 1}, - "syscall.StringBytePtr": {1, 1}, - "syscall.StringSlicePtr": {1, 1}, - "syscall.StringToUTF16": {1, 1}, - "syscall.StringToUTF16Ptr": {1, 1}, -} diff --git a/vendor/honnef.co/go/tools/functions/concrete.go b/vendor/honnef.co/go/tools/functions/concrete.go deleted file mode 100644 index 932acd0..0000000 --- a/vendor/honnef.co/go/tools/functions/concrete.go +++ /dev/null @@ -1,56 +0,0 @@ -package functions - -import ( - "go/token" - "go/types" - - "honnef.co/go/tools/ssa" -) - -func concreteReturnTypes(fn *ssa.Function) []*types.Tuple { - res := fn.Signature.Results() - if res == nil { - return nil - } - ifaces := make([]bool, res.Len()) - any := false - for i := 0; i < res.Len(); i++ { - _, ifaces[i] = res.At(i).Type().Underlying().(*types.Interface) - any = any || ifaces[i] - } - if !any { - return []*types.Tuple{res} - } - var out []*types.Tuple - for _, block := range fn.Blocks { - if len(block.Instrs) == 0 { - continue - } - ret, ok := block.Instrs[len(block.Instrs)-1].(*ssa.Return) - if !ok { - continue - } - vars := make([]*types.Var, res.Len()) - for i, v := range ret.Results { - var typ types.Type - if !ifaces[i] { - typ = res.At(i).Type() - } else if mi, ok := v.(*ssa.MakeInterface); ok { - // TODO(dh): if mi.X is a function call that returns - // an interface, call concreteReturnTypes on that - // function (or, really, go through Descriptions, - // avoid infinite recursion etc, just like nil error - // detection) - - // TODO(dh): support Phi nodes - typ = mi.X.Type() - } else { - typ = 
res.At(i).Type() - } - vars[i] = types.NewParam(token.NoPos, nil, "", typ) - } - out = append(out, types.NewTuple(vars...)) - } - // TODO(dh): deduplicate out - return out -} diff --git a/vendor/honnef.co/go/tools/functions/functions.go b/vendor/honnef.co/go/tools/functions/functions.go deleted file mode 100644 index c5fe2d7..0000000 --- a/vendor/honnef.co/go/tools/functions/functions.go +++ /dev/null @@ -1,150 +0,0 @@ -package functions - -import ( - "go/types" - "sync" - - "honnef.co/go/tools/callgraph" - "honnef.co/go/tools/callgraph/static" - "honnef.co/go/tools/ssa" - "honnef.co/go/tools/staticcheck/vrp" -) - -var stdlibDescs = map[string]Description{ - "errors.New": Description{Pure: true}, - - "fmt.Errorf": Description{Pure: true}, - "fmt.Sprintf": Description{Pure: true}, - "fmt.Sprint": Description{Pure: true}, - - "sort.Reverse": Description{Pure: true}, - - "strings.Map": Description{Pure: true}, - "strings.Repeat": Description{Pure: true}, - "strings.Replace": Description{Pure: true}, - "strings.Title": Description{Pure: true}, - "strings.ToLower": Description{Pure: true}, - "strings.ToLowerSpecial": Description{Pure: true}, - "strings.ToTitle": Description{Pure: true}, - "strings.ToTitleSpecial": Description{Pure: true}, - "strings.ToUpper": Description{Pure: true}, - "strings.ToUpperSpecial": Description{Pure: true}, - "strings.Trim": Description{Pure: true}, - "strings.TrimFunc": Description{Pure: true}, - "strings.TrimLeft": Description{Pure: true}, - "strings.TrimLeftFunc": Description{Pure: true}, - "strings.TrimPrefix": Description{Pure: true}, - "strings.TrimRight": Description{Pure: true}, - "strings.TrimRightFunc": Description{Pure: true}, - "strings.TrimSpace": Description{Pure: true}, - "strings.TrimSuffix": Description{Pure: true}, - - "(*net/http.Request).WithContext": Description{Pure: true}, - - "math/rand.Read": Description{NilError: true}, - "(*math/rand.Rand).Read": Description{NilError: true}, -} - -type Description struct { - // The function is known to be pure - Pure bool - // The function is known to be a stub - Stub bool - // The function is known to never return (panics notwithstanding) - Infinite bool - // Variable ranges - Ranges vrp.Ranges - Loops []Loop - // Function returns an error as its last argument, but it is - // always nil - NilError bool - ConcreteReturnTypes []*types.Tuple -} - -type descriptionEntry struct { - ready chan struct{} - result Description -} - -type Descriptions struct { - CallGraph *callgraph.Graph - mu sync.Mutex - cache map[*ssa.Function]*descriptionEntry -} - -func NewDescriptions(prog *ssa.Program) *Descriptions { - return &Descriptions{ - CallGraph: static.CallGraph(prog), - cache: map[*ssa.Function]*descriptionEntry{}, - } -} - -func (d *Descriptions) Get(fn *ssa.Function) Description { - d.mu.Lock() - fd := d.cache[fn] - if fd == nil { - fd = &descriptionEntry{ - ready: make(chan struct{}), - } - d.cache[fn] = fd - d.mu.Unlock() - - { - fd.result = stdlibDescs[fn.RelString(nil)] - fd.result.Pure = fd.result.Pure || d.IsPure(fn) - fd.result.Stub = fd.result.Stub || d.IsStub(fn) - fd.result.Infinite = fd.result.Infinite || !terminates(fn) - fd.result.Ranges = vrp.BuildGraph(fn).Solve() - fd.result.Loops = findLoops(fn) - fd.result.NilError = fd.result.NilError || IsNilError(fn) - fd.result.ConcreteReturnTypes = concreteReturnTypes(fn) - } - - close(fd.ready) - } else { - d.mu.Unlock() - <-fd.ready - } - return fd.result -} - -func IsNilError(fn *ssa.Function) bool { - // TODO(dh): This is very simplistic, as we only look 
for constant - // nil returns. A more advanced approach would work transitively. - // An even more advanced approach would be context-aware and - // determine nil errors based on inputs (e.g. io.WriteString to a - // bytes.Buffer will always return nil, but an io.WriteString to - // an os.File might not). Similarly, an os.File opened for reading - // won't error on Close, but other files will. - res := fn.Signature.Results() - if res.Len() == 0 { - return false - } - last := res.At(res.Len() - 1) - if types.TypeString(last.Type(), nil) != "error" { - return false - } - - if fn.Blocks == nil { - return false - } - for _, block := range fn.Blocks { - if len(block.Instrs) == 0 { - continue - } - ins := block.Instrs[len(block.Instrs)-1] - ret, ok := ins.(*ssa.Return) - if !ok { - continue - } - v := ret.Results[len(ret.Results)-1] - c, ok := v.(*ssa.Const) - if !ok { - return false - } - if !c.IsNil() { - return false - } - } - return true -} diff --git a/vendor/honnef.co/go/tools/functions/loops.go b/vendor/honnef.co/go/tools/functions/loops.go deleted file mode 100644 index 63011cf..0000000 --- a/vendor/honnef.co/go/tools/functions/loops.go +++ /dev/null @@ -1,50 +0,0 @@ -package functions - -import "honnef.co/go/tools/ssa" - -type Loop map[*ssa.BasicBlock]bool - -func findLoops(fn *ssa.Function) []Loop { - if fn.Blocks == nil { - return nil - } - tree := fn.DomPreorder() - var sets []Loop - for _, h := range tree { - for _, n := range h.Preds { - if !h.Dominates(n) { - continue - } - // n is a back-edge to h - // h is the loop header - if n == h { - sets = append(sets, Loop{n: true}) - continue - } - set := Loop{h: true, n: true} - for _, b := range allPredsBut(n, h, nil) { - set[b] = true - } - sets = append(sets, set) - } - } - return sets -} - -func allPredsBut(b, but *ssa.BasicBlock, list []*ssa.BasicBlock) []*ssa.BasicBlock { -outer: - for _, pred := range b.Preds { - if pred == but { - continue - } - for _, p := range list { - // TODO improve big-o complexity of this function - if pred == p { - continue outer - } - } - list = append(list, pred) - list = allPredsBut(pred, but, list) - } - return list -} diff --git a/vendor/honnef.co/go/tools/functions/pure.go b/vendor/honnef.co/go/tools/functions/pure.go deleted file mode 100644 index d1c4d03..0000000 --- a/vendor/honnef.co/go/tools/functions/pure.go +++ /dev/null @@ -1,123 +0,0 @@ -package functions - -import ( - "go/token" - "go/types" - - "honnef.co/go/tools/callgraph" - "honnef.co/go/tools/lint" - "honnef.co/go/tools/ssa" -) - -// IsStub reports whether a function is a stub. A function is -// considered a stub if it has no instructions or exactly one -// instruction, which must be either returning only constant values or -// a panic. -func (d *Descriptions) IsStub(fn *ssa.Function) bool { - if len(fn.Blocks) == 0 { - return true - } - if len(fn.Blocks) > 1 { - return false - } - instrs := lint.FilterDebug(fn.Blocks[0].Instrs) - if len(instrs) != 1 { - return false - } - - switch instrs[0].(type) { - case *ssa.Return: - // Since this is the only instruction, the return value must - // be a constant. We consider all constants as stubs, not just - // the zero value. This does not, unfortunately, cover zero - // initialised structs, as these cause additional - // instructions. 
- return true - case *ssa.Panic: - return true - default: - return false - } -} - -func (d *Descriptions) IsPure(fn *ssa.Function) bool { - if fn.Signature.Results().Len() == 0 { - // A function with no return values is empty or is doing some - // work we cannot see (for example because of build tags); - // don't consider it pure. - return false - } - - for _, param := range fn.Params { - if _, ok := param.Type().Underlying().(*types.Basic); !ok { - return false - } - } - - if fn.Blocks == nil { - return false - } - checkCall := func(common *ssa.CallCommon) bool { - if common.IsInvoke() { - return false - } - builtin, ok := common.Value.(*ssa.Builtin) - if !ok { - if common.StaticCallee() != fn { - if common.StaticCallee() == nil { - return false - } - // TODO(dh): ideally, IsPure wouldn't be responsible - // for avoiding infinite recursion, but - // FunctionDescriptions would be. - node := d.CallGraph.CreateNode(common.StaticCallee()) - if callgraph.PathSearch(node, func(other *callgraph.Node) bool { - return other.Func == fn - }) != nil { - return false - } - if !d.Get(common.StaticCallee()).Pure { - return false - } - } - } else { - switch builtin.Name() { - case "len", "cap", "make", "new": - default: - return false - } - } - return true - } - for _, b := range fn.Blocks { - for _, ins := range b.Instrs { - switch ins := ins.(type) { - case *ssa.Call: - if !checkCall(ins.Common()) { - return false - } - case *ssa.Defer: - if !checkCall(&ins.Call) { - return false - } - case *ssa.Select: - return false - case *ssa.Send: - return false - case *ssa.Go: - return false - case *ssa.Panic: - return false - case *ssa.Store: - return false - case *ssa.FieldAddr: - return false - case *ssa.UnOp: - if ins.Op == token.MUL || ins.Op == token.AND { - return false - } - } - } - } - return true -} diff --git a/vendor/honnef.co/go/tools/functions/terminates.go b/vendor/honnef.co/go/tools/functions/terminates.go deleted file mode 100644 index 65f9e16..0000000 --- a/vendor/honnef.co/go/tools/functions/terminates.go +++ /dev/null @@ -1,24 +0,0 @@ -package functions - -import "honnef.co/go/tools/ssa" - -// terminates reports whether fn is supposed to return, that is if it -// has at least one theoretic path that returns from the function. -// Explicit panics do not count as terminating. -func terminates(fn *ssa.Function) bool { - if fn.Blocks == nil { - // assuming that a function terminates is the conservative - // choice - return true - } - - for _, block := range fn.Blocks { - if len(block.Instrs) == 0 { - continue - } - if _, ok := block.Instrs[len(block.Instrs)-1].(*ssa.Return); ok { - return true - } - } - return false -} diff --git a/vendor/honnef.co/go/tools/gcsizes/LICENSE b/vendor/honnef.co/go/tools/gcsizes/LICENSE deleted file mode 100644 index 6a66aea..0000000 --- a/vendor/honnef.co/go/tools/gcsizes/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. 
nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go deleted file mode 100644 index 5f62fc2..0000000 --- a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go +++ /dev/null @@ -1,70 +0,0 @@ -package sharedcheck - -import ( - "go/ast" - "go/types" - - "honnef.co/go/tools/lint" - "honnef.co/go/tools/ssa" -) - -func CheckRangeStringRunes(nodeFns map[ast.Node]*ssa.Function, j *lint.Job) { - fn := func(node ast.Node) bool { - rng, ok := node.(*ast.RangeStmt) - if !ok || !lint.IsBlank(rng.Key) { - return true - } - ssafn := nodeFns[rng] - if ssafn == nil { - return true - } - v, _ := ssafn.ValueForExpr(rng.X) - - // Check that we're converting from string to []rune - val, _ := v.(*ssa.Convert) - if val == nil { - return true - } - Tsrc, ok := val.X.Type().(*types.Basic) - if !ok || Tsrc.Kind() != types.String { - return true - } - Tdst, ok := val.Type().(*types.Slice) - if !ok { - return true - } - TdstElem, ok := Tdst.Elem().(*types.Basic) - if !ok || TdstElem.Kind() != types.Int32 { - return true - } - - // Check that the result of the conversion is only used to - // range over - refs := val.Referrers() - if refs == nil { - return true - } - - // Expect two refs: one for obtaining the length of the slice, - // one for accessing the elements - if len(lint.FilterDebug(*refs)) != 2 { - // TODO(dh): right now, we check that only one place - // refers to our slice. This will miss cases such as - // ranging over the slice twice. Ideally, we'd ensure that - // the slice is only used for ranging over (without - // accessing the key), but that is harder to do because in - // SSA form, ranging over a slice looks like an ordinary - // loop with index increments and slice accesses. We'd - // have to look at the associated AST node to check that - // it's a range statement. - return true - } - - j.Errorf(rng, "should range over string, not []rune(string)") - - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} diff --git a/vendor/honnef.co/go/tools/lint/LICENSE b/vendor/honnef.co/go/tools/lint/LICENSE deleted file mode 100644 index 796130a..0000000 --- a/vendor/honnef.co/go/tools/lint/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (c) 2013 The Go Authors. All rights reserved. -Copyright (c) 2016 Dominik Honnef. All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/honnef.co/go/tools/lint/lint.go b/vendor/honnef.co/go/tools/lint/lint.go deleted file mode 100644 index 75a5198..0000000 --- a/vendor/honnef.co/go/tools/lint/lint.go +++ /dev/null @@ -1,844 +0,0 @@ -// Copyright (c) 2013 The Go Authors. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file or at -// https://developers.google.com/open-source/licenses/bsd. - -// Package lint provides the foundation for tools like gosimple. 
-package lint // import "honnef.co/go/tools/lint" - -import ( - "bytes" - "fmt" - "go/ast" - "go/build" - "go/constant" - "go/printer" - "go/token" - "go/types" - "path/filepath" - "runtime" - "sort" - "strings" - "sync" - "unicode" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/loader" - "honnef.co/go/tools/ssa" - "honnef.co/go/tools/ssa/ssautil" -) - -type Job struct { - Program *Program - - checker string - check string - problems []Problem -} - -type Ignore interface { - Match(p Problem) bool -} - -type LineIgnore struct { - File string - Line int - Checks []string - matched bool - pos token.Pos -} - -func (li *LineIgnore) Match(p Problem) bool { - if p.Position.Filename != li.File || p.Position.Line != li.Line { - return false - } - for _, c := range li.Checks { - if m, _ := filepath.Match(c, p.Check); m { - li.matched = true - return true - } - } - return false -} - -func (li *LineIgnore) String() string { - matched := "not matched" - if li.matched { - matched = "matched" - } - return fmt.Sprintf("%s:%d %s (%s)", li.File, li.Line, strings.Join(li.Checks, ", "), matched) -} - -type FileIgnore struct { - File string - Checks []string -} - -func (fi *FileIgnore) Match(p Problem) bool { - if p.Position.Filename != fi.File { - return false - } - for _, c := range fi.Checks { - if m, _ := filepath.Match(c, p.Check); m { - return true - } - } - return false -} - -type GlobIgnore struct { - Pattern string - Checks []string -} - -func (gi *GlobIgnore) Match(p Problem) bool { - if gi.Pattern != "*" { - pkgpath := p.Package.Path() - if strings.HasSuffix(pkgpath, "_test") { - pkgpath = pkgpath[:len(pkgpath)-len("_test")] - } - name := filepath.Join(pkgpath, filepath.Base(p.Position.Filename)) - if m, _ := filepath.Match(gi.Pattern, name); !m { - return false - } - } - for _, c := range gi.Checks { - if m, _ := filepath.Match(c, p.Check); m { - return true - } - } - return false -} - -type Program struct { - SSA *ssa.Program - Prog *loader.Program - // TODO(dh): Rename to InitialPackages? - Packages []*Pkg - InitialFunctions []*ssa.Function - AllFunctions []*ssa.Function - Files []*ast.File - Info *types.Info - GoVersion int - - tokenFileMap map[*token.File]*ast.File - astFileMap map[*ast.File]*Pkg -} - -type Func func(*Job) - -// Problem represents a problem in some source code. -type Problem struct { - pos token.Pos - Position token.Position // position in source file - Text string // the prose that describes the problem - Check string - Checker string - Package *types.Package - Ignored bool -} - -func (p *Problem) String() string { - if p.Check == "" { - return p.Text - } - return fmt.Sprintf("%s (%s)", p.Text, p.Check) -} - -type Checker interface { - Name() string - Prefix() string - Init(*Program) - Funcs() map[string]Func -} - -// A Linter lints Go source code. -type Linter struct { - Checker Checker - Ignores []Ignore - GoVersion int - ReturnIgnored bool - - automaticIgnores []Ignore -} - -func (l *Linter) ignore(p Problem) bool { - ignored := false - for _, ig := range l.automaticIgnores { - // We cannot short-circuit these, as we want to record, for - // each ignore, whether it matched or not. - if ig.Match(p) { - ignored = true - } - } - if ignored { - // no need to execute other ignores if we've already had a - // match. - return true - } - for _, ig := range l.Ignores { - // We can short-circuit here, as we aren't tracking any - // information. 
- if ig.Match(p) { - return true - } - } - - return false -} - -func (prog *Program) File(node Positioner) *ast.File { - return prog.tokenFileMap[prog.SSA.Fset.File(node.Pos())] -} - -func (j *Job) File(node Positioner) *ast.File { - return j.Program.File(node) -} - -// TODO(dh): switch to sort.Slice when Go 1.9 lands. -type byPosition struct { - fset *token.FileSet - ps []Problem -} - -func (ps byPosition) Len() int { - return len(ps.ps) -} - -func (ps byPosition) Less(i int, j int) bool { - pi, pj := ps.ps[i].Position, ps.ps[j].Position - - if pi.Filename != pj.Filename { - return pi.Filename < pj.Filename - } - if pi.Line != pj.Line { - return pi.Line < pj.Line - } - if pi.Column != pj.Column { - return pi.Column < pj.Column - } - - return ps.ps[i].Text < ps.ps[j].Text -} - -func (ps byPosition) Swap(i int, j int) { - ps.ps[i], ps.ps[j] = ps.ps[j], ps.ps[i] -} - -func parseDirective(s string) (cmd string, args []string) { - if !strings.HasPrefix(s, "//lint:") { - return "", nil - } - s = strings.TrimPrefix(s, "//lint:") - fields := strings.Split(s, " ") - return fields[0], fields[1:] -} - -func (l *Linter) Lint(lprog *loader.Program, conf *loader.Config) []Problem { - ssaprog := ssautil.CreateProgram(lprog, ssa.GlobalDebug) - ssaprog.Build() - pkgMap := map[*ssa.Package]*Pkg{} - var pkgs []*Pkg - for _, pkginfo := range lprog.InitialPackages() { - ssapkg := ssaprog.Package(pkginfo.Pkg) - var bp *build.Package - if len(pkginfo.Files) != 0 { - path := lprog.Fset.Position(pkginfo.Files[0].Pos()).Filename - dir := filepath.Dir(path) - var err error - ctx := conf.Build - if ctx == nil { - ctx = &build.Default - } - bp, err = ctx.ImportDir(dir, 0) - if err != nil { - // shouldn't happen - } - } - pkg := &Pkg{ - Package: ssapkg, - Info: pkginfo, - BuildPkg: bp, - } - pkgMap[ssapkg] = pkg - pkgs = append(pkgs, pkg) - } - prog := &Program{ - SSA: ssaprog, - Prog: lprog, - Packages: pkgs, - Info: &types.Info{}, - GoVersion: l.GoVersion, - tokenFileMap: map[*token.File]*ast.File{}, - astFileMap: map[*ast.File]*Pkg{}, - } - - initial := map[*types.Package]struct{}{} - for _, pkg := range pkgs { - initial[pkg.Info.Pkg] = struct{}{} - } - for fn := range ssautil.AllFunctions(ssaprog) { - if fn.Pkg == nil { - continue - } - prog.AllFunctions = append(prog.AllFunctions, fn) - if _, ok := initial[fn.Pkg.Pkg]; ok { - prog.InitialFunctions = append(prog.InitialFunctions, fn) - } - } - for _, pkg := range pkgs { - prog.Files = append(prog.Files, pkg.Info.Files...) 
- - ssapkg := ssaprog.Package(pkg.Info.Pkg) - for _, f := range pkg.Info.Files { - prog.astFileMap[f] = pkgMap[ssapkg] - } - } - - for _, pkginfo := range lprog.AllPackages { - for _, f := range pkginfo.Files { - tf := lprog.Fset.File(f.Pos()) - prog.tokenFileMap[tf] = f - } - } - - var out []Problem - l.automaticIgnores = nil - for _, pkginfo := range lprog.InitialPackages() { - for _, f := range pkginfo.Files { - cm := ast.NewCommentMap(lprog.Fset, f, f.Comments) - for node, cgs := range cm { - for _, cg := range cgs { - for _, c := range cg.List { - if !strings.HasPrefix(c.Text, "//lint:") { - continue - } - cmd, args := parseDirective(c.Text) - switch cmd { - case "ignore", "file-ignore": - if len(args) < 2 { - // FIXME(dh): this causes duplicated warnings when using megacheck - p := Problem{ - pos: c.Pos(), - Position: prog.DisplayPosition(c.Pos()), - Text: "malformed linter directive; missing the required reason field?", - Check: "", - Checker: l.Checker.Name(), - Package: nil, - } - out = append(out, p) - continue - } - default: - // unknown directive, ignore - continue - } - checks := strings.Split(args[0], ",") - pos := prog.DisplayPosition(node.Pos()) - var ig Ignore - switch cmd { - case "ignore": - ig = &LineIgnore{ - File: pos.Filename, - Line: pos.Line, - Checks: checks, - pos: c.Pos(), - } - case "file-ignore": - ig = &FileIgnore{ - File: pos.Filename, - Checks: checks, - } - } - l.automaticIgnores = append(l.automaticIgnores, ig) - } - } - } - } - } - - sizes := struct { - types int - defs int - uses int - implicits int - selections int - scopes int - }{} - for _, pkg := range pkgs { - sizes.types += len(pkg.Info.Info.Types) - sizes.defs += len(pkg.Info.Info.Defs) - sizes.uses += len(pkg.Info.Info.Uses) - sizes.implicits += len(pkg.Info.Info.Implicits) - sizes.selections += len(pkg.Info.Info.Selections) - sizes.scopes += len(pkg.Info.Info.Scopes) - } - prog.Info.Types = make(map[ast.Expr]types.TypeAndValue, sizes.types) - prog.Info.Defs = make(map[*ast.Ident]types.Object, sizes.defs) - prog.Info.Uses = make(map[*ast.Ident]types.Object, sizes.uses) - prog.Info.Implicits = make(map[ast.Node]types.Object, sizes.implicits) - prog.Info.Selections = make(map[*ast.SelectorExpr]*types.Selection, sizes.selections) - prog.Info.Scopes = make(map[ast.Node]*types.Scope, sizes.scopes) - for _, pkg := range pkgs { - for k, v := range pkg.Info.Info.Types { - prog.Info.Types[k] = v - } - for k, v := range pkg.Info.Info.Defs { - prog.Info.Defs[k] = v - } - for k, v := range pkg.Info.Info.Uses { - prog.Info.Uses[k] = v - } - for k, v := range pkg.Info.Info.Implicits { - prog.Info.Implicits[k] = v - } - for k, v := range pkg.Info.Info.Selections { - prog.Info.Selections[k] = v - } - for k, v := range pkg.Info.Info.Scopes { - prog.Info.Scopes[k] = v - } - } - l.Checker.Init(prog) - - funcs := l.Checker.Funcs() - var keys []string - for k := range funcs { - keys = append(keys, k) - } - sort.Strings(keys) - - var jobs []*Job - for _, k := range keys { - j := &Job{ - Program: prog, - checker: l.Checker.Name(), - check: k, - } - jobs = append(jobs, j) - } - wg := &sync.WaitGroup{} - for _, j := range jobs { - wg.Add(1) - go func(j *Job) { - defer wg.Done() - fn := funcs[j.check] - if fn == nil { - return - } - fn(j) - }(j) - } - wg.Wait() - - for _, j := range jobs { - for _, p := range j.problems { - p.Ignored = l.ignore(p) - if l.ReturnIgnored || !p.Ignored { - out = append(out, p) - } - } - } - - for _, ig := range l.automaticIgnores { - ig, ok := ig.(*LineIgnore) - if !ok { - continue - } - if 
ig.matched { - continue - } - for _, c := range ig.Checks { - idx := strings.IndexFunc(c, func(r rune) bool { - return unicode.IsNumber(r) - }) - if idx == -1 { - // malformed check name, backing out - continue - } - if c[:idx] != l.Checker.Prefix() { - // not for this checker - continue - } - p := Problem{ - pos: ig.pos, - Position: prog.DisplayPosition(ig.pos), - Text: "this linter directive didn't match anything; should it be removed?", - Check: "", - Checker: l.Checker.Name(), - Package: nil, - } - out = append(out, p) - } - } - - sort.Sort(byPosition{lprog.Fset, out}) - return out -} - -// Pkg represents a package being linted. -type Pkg struct { - *ssa.Package - Info *loader.PackageInfo - BuildPkg *build.Package -} - -type packager interface { - Package() *ssa.Package -} - -func IsExample(fn *ssa.Function) bool { - if !strings.HasPrefix(fn.Name(), "Example") { - return false - } - f := fn.Prog.Fset.File(fn.Pos()) - if f == nil { - return false - } - return strings.HasSuffix(f.Name(), "_test.go") -} - -func (j *Job) IsInTest(node Positioner) bool { - f := j.Program.SSA.Fset.File(node.Pos()) - return f != nil && strings.HasSuffix(f.Name(), "_test.go") -} - -func (j *Job) IsInMain(node Positioner) bool { - if node, ok := node.(packager); ok { - return node.Package().Pkg.Name() == "main" - } - pkg := j.NodePackage(node) - if pkg == nil { - return false - } - return pkg.Pkg.Name() == "main" -} - -type Positioner interface { - Pos() token.Pos -} - -func (prog *Program) DisplayPosition(p token.Pos) token.Position { - // The //line compiler directive can be used to change the file - // name and line numbers associated with code. This can, for - // example, be used by code generation tools. The most prominent - // example is 'go tool cgo', which uses //line directives to refer - // back to the original source code. - // - // In the context of our linters, we need to treat these - // directives differently depending on context. For cgo files, we - // want to honour the directives, so that line numbers are - // adjusted correctly. For all other files, we want to ignore the - // directives, so that problems are reported at their actual - // position and not, for example, a yacc grammar file. This also - // affects the ignore mechanism, since it operates on the position - // information stored within problems. With this implementation, a - // user will ignore foo.go, not foo.y - - pkg := prog.astFileMap[prog.tokenFileMap[prog.Prog.Fset.File(p)]] - bp := pkg.BuildPkg - adjPos := prog.Prog.Fset.Position(p) - if bp == nil { - // couldn't find the package for some reason (deleted? faulty - // file system?) 
- return adjPos - } - base := filepath.Base(adjPos.Filename) - for _, f := range bp.CgoFiles { - if f == base { - // this is a cgo file, use the adjusted position - return adjPos - } - } - // not a cgo file, ignore //line directives - return prog.Prog.Fset.PositionFor(p, false) -} - -func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem { - tf := j.Program.SSA.Fset.File(n.Pos()) - f := j.Program.tokenFileMap[tf] - pkg := j.Program.astFileMap[f].Pkg - - pos := j.Program.DisplayPosition(n.Pos()) - problem := Problem{ - pos: n.Pos(), - Position: pos, - Text: fmt.Sprintf(format, args...), - Check: j.check, - Checker: j.checker, - Package: pkg, - } - j.problems = append(j.problems, problem) - return &j.problems[len(j.problems)-1] -} - -func (j *Job) Render(x interface{}) string { - fset := j.Program.SSA.Fset - var buf bytes.Buffer - if err := printer.Fprint(&buf, fset, x); err != nil { - panic(err) - } - return buf.String() -} - -func (j *Job) RenderArgs(args []ast.Expr) string { - var ss []string - for _, arg := range args { - ss = append(ss, j.Render(arg)) - } - return strings.Join(ss, ", ") -} - -func IsIdent(expr ast.Expr, ident string) bool { - id, ok := expr.(*ast.Ident) - return ok && id.Name == ident -} - -// isBlank returns whether id is the blank identifier "_". -// If id == nil, the answer is false. -func IsBlank(id ast.Expr) bool { - ident, ok := id.(*ast.Ident) - return ok && ident.Name == "_" -} - -func IsZero(expr ast.Expr) bool { - lit, ok := expr.(*ast.BasicLit) - return ok && lit.Kind == token.INT && lit.Value == "0" -} - -func (j *Job) IsNil(expr ast.Expr) bool { - return j.Program.Info.Types[expr].IsNil() -} - -func (j *Job) BoolConst(expr ast.Expr) bool { - val := j.Program.Info.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() - return constant.BoolVal(val) -} - -func (j *Job) IsBoolConst(expr ast.Expr) bool { - // We explicitly don't support typed bools because more often than - // not, custom bool types are used as binary enums and the - // explicit comparison is desired. 
- - ident, ok := expr.(*ast.Ident) - if !ok { - return false - } - obj := j.Program.Info.ObjectOf(ident) - c, ok := obj.(*types.Const) - if !ok { - return false - } - basic, ok := c.Type().(*types.Basic) - if !ok { - return false - } - if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool { - return false - } - return true -} - -func (j *Job) ExprToInt(expr ast.Expr) (int64, bool) { - tv := j.Program.Info.Types[expr] - if tv.Value == nil { - return 0, false - } - if tv.Value.Kind() != constant.Int { - return 0, false - } - return constant.Int64Val(tv.Value) -} - -func (j *Job) ExprToString(expr ast.Expr) (string, bool) { - val := j.Program.Info.Types[expr].Value - if val == nil { - return "", false - } - if val.Kind() != constant.String { - return "", false - } - return constant.StringVal(val), true -} - -func (j *Job) NodePackage(node Positioner) *Pkg { - f := j.File(node) - return j.Program.astFileMap[f] -} - -func IsGenerated(f *ast.File) bool { - comments := f.Comments - if len(comments) > 0 { - comment := comments[0].Text() - return strings.Contains(comment, "Code generated by") || - strings.Contains(comment, "DO NOT EDIT") - } - return false -} - -func Preamble(f *ast.File) string { - cutoff := f.Package - if f.Doc != nil { - cutoff = f.Doc.Pos() - } - var out []string - for _, cmt := range f.Comments { - if cmt.Pos() >= cutoff { - break - } - out = append(out, cmt.Text()) - } - return strings.Join(out, "\n") -} - -func IsPointerLike(T types.Type) bool { - switch T := T.Underlying().(type) { - case *types.Interface, *types.Chan, *types.Map, *types.Pointer: - return true - case *types.Basic: - return T.Kind() == types.UnsafePointer - } - return false -} - -func (j *Job) IsGoVersion(minor int) bool { - return j.Program.GoVersion >= minor -} - -func (j *Job) IsCallToAST(node ast.Node, name string) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return false - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func) - return ok && fn.FullName() == name -} - -func (j *Job) IsCallToAnyAST(node ast.Node, names ...string) bool { - for _, name := range names { - if j.IsCallToAST(node, name) { - return true - } - } - return false -} - -func CallName(call *ssa.CallCommon) string { - if call.IsInvoke() { - return "" - } - switch v := call.Value.(type) { - case *ssa.Function: - fn, ok := v.Object().(*types.Func) - if !ok { - return "" - } - return fn.FullName() - case *ssa.Builtin: - return v.Name() - } - return "" -} - -func IsCallTo(call *ssa.CallCommon, name string) bool { - return CallName(call) == name -} - -func FilterDebug(instr []ssa.Instruction) []ssa.Instruction { - var out []ssa.Instruction - for _, ins := range instr { - if _, ok := ins.(*ssa.DebugRef); !ok { - out = append(out, ins) - } - } - return out -} - -func NodeFns(pkgs []*Pkg) map[ast.Node]*ssa.Function { - out := map[ast.Node]*ssa.Function{} - - wg := &sync.WaitGroup{} - chNodeFns := make(chan map[ast.Node]*ssa.Function, runtime.NumCPU()*2) - for _, pkg := range pkgs { - pkg := pkg - wg.Add(1) - go func() { - m := map[ast.Node]*ssa.Function{} - for _, f := range pkg.Info.Files { - ast.Walk(&globalVisitor{m, pkg, f}, f) - } - chNodeFns <- m - wg.Done() - }() - } - go func() { - wg.Wait() - close(chNodeFns) - }() - - for nodeFns := range chNodeFns { - for k, v := range nodeFns { - out[k] = v - } - } - - return out -} - -type globalVisitor struct { - m map[ast.Node]*ssa.Function - pkg *Pkg - f *ast.File -} - -func (v *globalVisitor) 
Visit(node ast.Node) ast.Visitor { - switch node := node.(type) { - case *ast.CallExpr: - v.m[node] = v.pkg.Func("init") - return v - case *ast.FuncDecl, *ast.FuncLit: - nv := &fnVisitor{v.m, v.f, v.pkg, nil} - return nv.Visit(node) - default: - return v - } -} - -type fnVisitor struct { - m map[ast.Node]*ssa.Function - f *ast.File - pkg *Pkg - ssafn *ssa.Function -} - -func (v *fnVisitor) Visit(node ast.Node) ast.Visitor { - switch node := node.(type) { - case *ast.FuncDecl: - var ssafn *ssa.Function - ssafn = v.pkg.Prog.FuncValue(v.pkg.Info.ObjectOf(node.Name).(*types.Func)) - v.m[node] = ssafn - if ssafn == nil { - return nil - } - return &fnVisitor{v.m, v.f, v.pkg, ssafn} - case *ast.FuncLit: - var ssafn *ssa.Function - path, _ := astutil.PathEnclosingInterval(v.f, node.Pos(), node.Pos()) - ssafn = ssa.EnclosingFunction(v.pkg.Package, path) - v.m[node] = ssafn - if ssafn == nil { - return nil - } - return &fnVisitor{v.m, v.f, v.pkg, ssafn} - case nil: - return nil - default: - v.m[node] = v.ssafn - return v - } -} diff --git a/vendor/honnef.co/go/tools/lint/lintutil/util.go b/vendor/honnef.co/go/tools/lint/lintutil/util.go deleted file mode 100644 index 0bb1426..0000000 --- a/vendor/honnef.co/go/tools/lint/lintutil/util.go +++ /dev/null @@ -1,349 +0,0 @@ -// Copyright (c) 2013 The Go Authors. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file or at -// https://developers.google.com/open-source/licenses/bsd. - -// Package lintutil provides helpers for writing linter command lines. -package lintutil // import "honnef.co/go/tools/lint/lintutil" - -import ( - "encoding/json" - "errors" - "flag" - "fmt" - "go/build" - "go/parser" - "go/token" - "go/types" - "io" - "os" - "path/filepath" - "strconv" - "strings" - - "honnef.co/go/tools/lint" - "honnef.co/go/tools/version" - - "github.com/kisielk/gotool" - "golang.org/x/tools/go/loader" -) - -type OutputFormatter interface { - Format(p lint.Problem) -} - -type TextOutput struct { - w io.Writer -} - -func (o TextOutput) Format(p lint.Problem) { - fmt.Fprintf(o.w, "%v: %s\n", relativePositionString(p.Position), p.String()) -} - -type JSONOutput struct { - w io.Writer -} - -func (o JSONOutput) Format(p lint.Problem) { - type location struct { - File string `json:"file"` - Line int `json:"line"` - Column int `json:"column"` - } - jp := struct { - Checker string `json:"checker"` - Code string `json:"code"` - Severity string `json:"severity,omitempty"` - Location location `json:"location"` - Message string `json:"message"` - Ignored bool `json:"ignored"` - }{ - p.Checker, - p.Check, - "", // TODO(dh): support severity - location{ - p.Position.Filename, - p.Position.Line, - p.Position.Column, - }, - p.Text, - p.Ignored, - } - _ = json.NewEncoder(o.w).Encode(jp) -} -func usage(name string, flags *flag.FlagSet) func() { - return func() { - fmt.Fprintf(os.Stderr, "Usage of %s:\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] # runs on package in current directory\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] packages\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] directory\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] files... 
# must be a single package\n", name) - fmt.Fprintf(os.Stderr, "Flags:\n") - flags.PrintDefaults() - } -} - -type runner struct { - checker lint.Checker - tags []string - ignores []lint.Ignore - version int - returnIgnored bool -} - -func resolveRelative(importPaths []string, tags []string) (goFiles bool, err error) { - if len(importPaths) == 0 { - return false, nil - } - if strings.HasSuffix(importPaths[0], ".go") { - // User is specifying a package in terms of .go files, don't resolve - return true, nil - } - wd, err := os.Getwd() - if err != nil { - return false, err - } - ctx := build.Default - ctx.BuildTags = tags - for i, path := range importPaths { - bpkg, err := ctx.Import(path, wd, build.FindOnly) - if err != nil { - return false, fmt.Errorf("can't load package %q: %v", path, err) - } - importPaths[i] = bpkg.ImportPath - } - return false, nil -} - -func parseIgnore(s string) ([]lint.Ignore, error) { - var out []lint.Ignore - if len(s) == 0 { - return nil, nil - } - for _, part := range strings.Fields(s) { - p := strings.Split(part, ":") - if len(p) != 2 { - return nil, errors.New("malformed ignore string") - } - path := p[0] - checks := strings.Split(p[1], ",") - out = append(out, &lint.GlobIgnore{Pattern: path, Checks: checks}) - } - return out, nil -} - -type versionFlag int - -func (v *versionFlag) String() string { - return fmt.Sprintf("1.%d", *v) -} - -func (v *versionFlag) Set(s string) error { - if len(s) < 3 { - return errors.New("invalid Go version") - } - if s[0] != '1' { - return errors.New("invalid Go version") - } - if s[1] != '.' { - return errors.New("invalid Go version") - } - i, err := strconv.Atoi(s[2:]) - *v = versionFlag(i) - return err -} - -func (v *versionFlag) Get() interface{} { - return int(*v) -} - -func FlagSet(name string) *flag.FlagSet { - flags := flag.NewFlagSet("", flag.ExitOnError) - flags.Usage = usage(name, flags) - flags.Float64("min_confidence", 0, "Deprecated; use -ignore instead") - flags.String("tags", "", "List of `build tags`") - flags.String("ignore", "", "Space separated list of checks to ignore, in the following format: 'import/path/file.go:Check1,Check2,...' Both the import path and file name sections support globbing, e.g. 
'os/exec/*_test.go'") - flags.Bool("tests", true, "Include tests") - flags.Bool("version", false, "Print version and exit") - flags.Bool("show-ignored", false, "Don't filter ignored problems") - flags.String("f", "text", "Output `format` (valid choices are 'text' and 'json')") - - tags := build.Default.ReleaseTags - v := tags[len(tags)-1][2:] - version := new(versionFlag) - if err := version.Set(v); err != nil { - panic(fmt.Sprintf("internal error: %s", err)) - } - - flags.Var(version, "go", "Target Go `version` in the format '1.x'") - return flags -} - -type CheckerConfig struct { - Checker lint.Checker - ExitNonZero bool -} - -func ProcessFlagSet(confs []CheckerConfig, fs *flag.FlagSet) { - tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) - ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string) - tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) - goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int) - format := fs.Lookup("f").Value.(flag.Getter).Get().(string) - printVersion := fs.Lookup("version").Value.(flag.Getter).Get().(bool) - showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool) - - if printVersion { - version.Print() - os.Exit(0) - } - - var cs []lint.Checker - for _, conf := range confs { - cs = append(cs, conf.Checker) - } - pss, err := Lint(cs, fs.Args(), &Options{ - Tags: strings.Fields(tags), - LintTests: tests, - Ignores: ignore, - GoVersion: goVersion, - ReturnIgnored: showIgnored, - }) - if err != nil { - fmt.Fprintln(os.Stderr, err) - os.Exit(1) - } - - var ps []lint.Problem - for _, p := range pss { - ps = append(ps, p...) - } - - var f OutputFormatter - switch format { - case "text": - f = TextOutput{os.Stdout} - case "json": - f = JSONOutput{os.Stdout} - default: - fmt.Fprintf(os.Stderr, "unsupported output format %q\n", format) - os.Exit(2) - } - - for _, p := range ps { - f.Format(p) - } - for i, p := range pss { - if len(p) != 0 && confs[i].ExitNonZero { - os.Exit(1) - } - } -} - -type Options struct { - Tags []string - LintTests bool - Ignores string - GoVersion int - ReturnIgnored bool -} - -func Lint(cs []lint.Checker, pkgs []string, opt *Options) ([][]lint.Problem, error) { - if opt == nil { - opt = &Options{} - } - ignores, err := parseIgnore(opt.Ignores) - if err != nil { - return nil, err - } - paths := gotool.ImportPaths(pkgs) - goFiles, err := resolveRelative(paths, opt.Tags) - if err != nil { - return nil, err - } - ctx := build.Default - ctx.BuildTags = opt.Tags - hadError := false - conf := &loader.Config{ - Build: &ctx, - ParserMode: parser.ParseComments, - ImportPkgs: map[string]bool{}, - TypeChecker: types.Config{ - Error: func(err error) { - // Only print the first error found - if hadError { - return - } - hadError = true - fmt.Fprintln(os.Stderr, err) - }, - }, - } - if goFiles { - conf.CreateFromFilenames("adhoc", paths...) 
- } else { - for _, path := range paths { - conf.ImportPkgs[path] = opt.LintTests - } - } - lprog, err := conf.Load() - if err != nil { - return nil, err - } - - var problems [][]lint.Problem - for _, c := range cs { - runner := &runner{ - checker: c, - tags: opt.Tags, - ignores: ignores, - version: opt.GoVersion, - returnIgnored: opt.ReturnIgnored, - } - problems = append(problems, runner.lint(lprog, conf)) - } - return problems, nil -} - -func shortPath(path string) string { - cwd, err := os.Getwd() - if err != nil { - return path - } - if rel, err := filepath.Rel(cwd, path); err == nil && len(rel) < len(path) { - return rel - } - return path -} - -func relativePositionString(pos token.Position) string { - s := shortPath(pos.Filename) - if pos.IsValid() { - if s != "" { - s += ":" - } - s += fmt.Sprintf("%d:%d", pos.Line, pos.Column) - } - if s == "" { - s = "-" - } - return s -} - -func ProcessArgs(name string, cs []CheckerConfig, args []string) { - flags := FlagSet(name) - flags.Parse(args) - - ProcessFlagSet(cs, flags) -} - -func (runner *runner) lint(lprog *loader.Program, conf *loader.Config) []lint.Problem { - l := &lint.Linter{ - Checker: runner.checker, - Ignores: runner.ignores, - GoVersion: runner.version, - ReturnIgnored: runner.returnIgnored, - } - return l.Lint(lprog, conf) -} diff --git a/vendor/honnef.co/go/tools/simple/lint.go b/vendor/honnef.co/go/tools/simple/lint.go deleted file mode 100644 index 47ee690..0000000 --- a/vendor/honnef.co/go/tools/simple/lint.go +++ /dev/null @@ -1,1771 +0,0 @@ -// Package simple contains a linter for Go source code. -package simple // import "honnef.co/go/tools/simple" - -import ( - "go/ast" - "go/constant" - "go/token" - "go/types" - "reflect" - "strings" - - "honnef.co/go/tools/internal/sharedcheck" - "honnef.co/go/tools/lint" - "honnef.co/go/tools/ssa" - - "golang.org/x/tools/go/types/typeutil" -) - -type Checker struct { - CheckGenerated bool - MS *typeutil.MethodSetCache - - nodeFns map[ast.Node]*ssa.Function -} - -func NewChecker() *Checker { - return &Checker{ - MS: &typeutil.MethodSetCache{}, - } -} - -func (*Checker) Name() string { return "gosimple" } -func (*Checker) Prefix() string { return "S" } - -func (c *Checker) Init(prog *lint.Program) { - c.nodeFns = lint.NodeFns(prog.Packages) -} - -func (c *Checker) Funcs() map[string]lint.Func { - return map[string]lint.Func{ - "S1000": c.LintSingleCaseSelect, - "S1001": c.LintLoopCopy, - "S1002": c.LintIfBoolCmp, - "S1003": c.LintStringsContains, - "S1004": c.LintBytesCompare, - "S1005": c.LintUnnecessaryBlank, - "S1006": c.LintForTrue, - "S1007": c.LintRegexpRaw, - "S1008": c.LintIfReturn, - "S1009": c.LintRedundantNilCheckWithLen, - "S1010": c.LintSlicing, - "S1011": c.LintLoopAppend, - "S1012": c.LintTimeSince, - "S1013": c.LintSimplerReturn, - "S1014": nil, - "S1015": nil, - "S1016": c.LintSimplerStructConversion, - "S1017": c.LintTrim, - "S1018": c.LintLoopSlide, - "S1019": c.LintMakeLenCap, - "S1020": c.LintAssertNotNil, - "S1021": c.LintDeclareAssign, - "S1022": nil, - "S1023": c.LintRedundantBreak, - "S1024": c.LintTimeUntil, - "S1025": c.LintRedundantSprintf, - "S1026": nil, - "S1027": nil, - "S1028": c.LintErrorsNewSprintf, - "S1029": c.LintRangeStringRunes, - "S1030": c.LintBytesBufferConversions, - "S1031": c.LintNilCheckAroundRange, - } -} - -func (c *Checker) filterGenerated(files []*ast.File) []*ast.File { - if c.CheckGenerated { - return files - } - var out []*ast.File - for _, f := range files { - if !lint.IsGenerated(f) { - out = append(out, f) - } - } - return 
out -} - -func (c *Checker) LintSingleCaseSelect(j *lint.Job) { - isSingleSelect := func(node ast.Node) bool { - v, ok := node.(*ast.SelectStmt) - if !ok { - return false - } - return len(v.Body.List) == 1 - } - - seen := map[ast.Node]struct{}{} - fn := func(node ast.Node) bool { - switch v := node.(type) { - case *ast.ForStmt: - if len(v.Body.List) != 1 { - return true - } - if !isSingleSelect(v.Body.List[0]) { - return true - } - if _, ok := v.Body.List[0].(*ast.SelectStmt).Body.List[0].(*ast.CommClause).Comm.(*ast.SendStmt); ok { - // Don't suggest using range for channel sends - return true - } - seen[v.Body.List[0]] = struct{}{} - j.Errorf(node, "should use for range instead of for { select {} }") - case *ast.SelectStmt: - if _, ok := seen[v]; ok { - return true - } - if !isSingleSelect(v) { - return true - } - j.Errorf(node, "should use a simple channel send/receive instead of select with a single case") - return true - } - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintLoopCopy(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.RangeStmt) - if !ok { - return true - } - - if loop.Key == nil { - return true - } - if len(loop.Body.List) != 1 { - return true - } - stmt, ok := loop.Body.List[0].(*ast.AssignStmt) - if !ok { - return true - } - if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 { - return true - } - lhs, ok := stmt.Lhs[0].(*ast.IndexExpr) - if !ok { - return true - } - if _, ok := j.Program.Info.TypeOf(lhs.X).(*types.Slice); !ok { - return true - } - lidx, ok := lhs.Index.(*ast.Ident) - if !ok { - return true - } - key, ok := loop.Key.(*ast.Ident) - if !ok { - return true - } - if j.Program.Info.TypeOf(lhs) == nil || j.Program.Info.TypeOf(stmt.Rhs[0]) == nil { - return true - } - if j.Program.Info.ObjectOf(lidx) != j.Program.Info.ObjectOf(key) { - return true - } - if !types.Identical(j.Program.Info.TypeOf(lhs), j.Program.Info.TypeOf(stmt.Rhs[0])) { - return true - } - if _, ok := j.Program.Info.TypeOf(loop.X).(*types.Slice); !ok { - return true - } - - if rhs, ok := stmt.Rhs[0].(*ast.IndexExpr); ok { - rx, ok := rhs.X.(*ast.Ident) - _ = rx - if !ok { - return true - } - ridx, ok := rhs.Index.(*ast.Ident) - if !ok { - return true - } - if j.Program.Info.ObjectOf(ridx) != j.Program.Info.ObjectOf(key) { - return true - } - } else if rhs, ok := stmt.Rhs[0].(*ast.Ident); ok { - value, ok := loop.Value.(*ast.Ident) - if !ok { - return true - } - if j.Program.Info.ObjectOf(rhs) != j.Program.Info.ObjectOf(value) { - return true - } - } else { - return true - } - j.Errorf(loop, "should use copy() instead of a loop") - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintIfBoolCmp(j *lint.Job) { - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok || (expr.Op != token.EQL && expr.Op != token.NEQ) { - return true - } - x := j.IsBoolConst(expr.X) - y := j.IsBoolConst(expr.Y) - if !x && !y { - return true - } - var other ast.Expr - var val bool - if x { - val = j.BoolConst(expr.X) - other = expr.Y - } else { - val = j.BoolConst(expr.Y) - other = expr.X - } - basic, ok := j.Program.Info.TypeOf(other).Underlying().(*types.Basic) - if !ok || basic.Kind() != types.Bool { - return true - } - op := "" - if (expr.Op == token.EQL && !val) || (expr.Op == token.NEQ && val) { - op = "!" 
- } - r := op + j.Render(other) - l1 := len(r) - r = strings.TrimLeft(r, "!") - if (l1-len(r))%2 == 1 { - r = "!" + r - } - j.Errorf(expr, "should omit comparison to bool constant, can be simplified to %s", r) - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintBytesBufferConversions(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok || len(call.Args) != 1 { - return true - } - - argCall, ok := call.Args[0].(*ast.CallExpr) - if !ok { - return true - } - sel, ok := argCall.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - - typ := j.Program.Info.TypeOf(call.Fun) - if typ == types.Universe.Lookup("string").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).Bytes") { - j.Errorf(call, "should use %v.String() instead of %v", j.Render(sel.X), j.Render(call)) - } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && j.IsCallToAST(call.Args[0], "(*bytes.Buffer).String") { - j.Errorf(call, "should use %v.Bytes() instead of %v", j.Render(sel.X), j.Render(call)) - } - - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintStringsContains(j *lint.Job) { - // map of value to token to bool value - allowed := map[int64]map[token.Token]bool{ - -1: {token.GTR: true, token.NEQ: true, token.EQL: false}, - 0: {token.GEQ: true, token.LSS: false}, - } - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } - switch expr.Op { - case token.GEQ, token.GTR, token.NEQ, token.LSS, token.EQL: - default: - return true - } - - value, ok := j.ExprToInt(expr.Y) - if !ok { - return true - } - - allowedOps, ok := allowed[value] - if !ok { - return true - } - b, ok := allowedOps[expr.Op] - if !ok { - return true - } - - call, ok := expr.X.(*ast.CallExpr) - if !ok { - return true - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - pkgIdent, ok := sel.X.(*ast.Ident) - if !ok { - return true - } - funIdent := sel.Sel - if pkgIdent.Name != "strings" && pkgIdent.Name != "bytes" { - return true - } - newFunc := "" - switch funIdent.Name { - case "IndexRune": - newFunc = "ContainsRune" - case "IndexAny": - newFunc = "ContainsAny" - case "Index": - newFunc = "Contains" - default: - return true - } - - prefix := "" - if !b { - prefix = "!" - } - j.Errorf(node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, j.RenderArgs(call.Args)) - - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintBytesCompare(j *lint.Job) { - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } - if expr.Op != token.NEQ && expr.Op != token.EQL { - return true - } - call, ok := expr.X.(*ast.CallExpr) - if !ok { - return true - } - if !j.IsCallToAST(call, "bytes.Compare") { - return true - } - value, ok := j.ExprToInt(expr.Y) - if !ok || value != 0 { - return true - } - args := j.RenderArgs(call.Args) - prefix := "" - if expr.Op == token.NEQ { - prefix = "!" 
- } - j.Errorf(node, "should use %sbytes.Equal(%s) instead", prefix, args) - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintForTrue(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.ForStmt) - if !ok { - return true - } - if loop.Init != nil || loop.Post != nil { - return true - } - if !j.IsBoolConst(loop.Cond) || !j.BoolConst(loop.Cond) { - return true - } - j.Errorf(loop, "should use for {} instead of for true {}") - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintRegexpRaw(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - if !j.IsCallToAST(call, "regexp.MustCompile") && - !j.IsCallToAST(call, "regexp.Compile") { - return true - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - if len(call.Args) != 1 { - // invalid function call - return true - } - lit, ok := call.Args[0].(*ast.BasicLit) - if !ok { - // TODO(dominikh): support string concat, maybe support constants - return true - } - if lit.Kind != token.STRING { - // invalid function call - return true - } - if lit.Value[0] != '"' { - // already a raw string - return true - } - val := lit.Value - if !strings.Contains(val, `\\`) { - return true - } - - bs := false - for _, c := range val { - if !bs && c == '\\' { - bs = true - continue - } - if bs && c == '\\' { - bs = false - continue - } - if bs { - // backslash followed by non-backslash -> escape sequence - return true - } - } - - j.Errorf(call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintIfReturn(j *lint.Job) { - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } - l := len(block.List) - if l < 2 { - return true - } - n1, n2 := block.List[l-2], block.List[l-1] - - if len(block.List) >= 3 { - if _, ok := block.List[l-3].(*ast.IfStmt); ok { - // Do not flag a series of if statements - return true - } - } - // if statement with no init, no else, a single condition - // checking an identifier or function call and just a return - // statement in the body, that returns a boolean constant - ifs, ok := n1.(*ast.IfStmt) - if !ok { - return true - } - if ifs.Else != nil || ifs.Init != nil { - return true - } - if len(ifs.Body.List) != 1 { - return true - } - if op, ok := ifs.Cond.(*ast.BinaryExpr); ok { - switch op.Op { - case token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ: - default: - return true - } - } - ret1, ok := ifs.Body.List[0].(*ast.ReturnStmt) - if !ok { - return true - } - if len(ret1.Results) != 1 { - return true - } - if !j.IsBoolConst(ret1.Results[0]) { - return true - } - - ret2, ok := n2.(*ast.ReturnStmt) - if !ok { - return true - } - if len(ret2.Results) != 1 { - return true - } - if !j.IsBoolConst(ret2.Results[0]) { - return true - } - j.Errorf(n1, "should use 'return ' instead of 'if { return }; return '") - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -// LintRedundantNilCheckWithLen checks for the following reduntant nil-checks: -// -// if x == nil || len(x) == 0 {} -// if x != nil && len(x) != 0 {} -// if x != nil && len(x) == N {} (where N != 0) -// if x != nil && len(x) > N {} -// if x != nil && len(x) 
>= N {} (where N != 0) -// -func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { - isConstZero := func(expr ast.Expr) (isConst bool, isZero bool) { - _, ok := expr.(*ast.BasicLit) - if ok { - return true, lint.IsZero(expr) - } - id, ok := expr.(*ast.Ident) - if !ok { - return false, false - } - c, ok := j.Program.Info.ObjectOf(id).(*types.Const) - if !ok { - return false, false - } - return true, c.Val().Kind() == constant.Int && c.Val().String() == "0" - } - - fn := func(node ast.Node) bool { - // check that expr is "x || y" or "x && y" - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } - if expr.Op != token.LOR && expr.Op != token.LAND { - return true - } - eqNil := expr.Op == token.LOR - - // check that x is "xx == nil" or "xx != nil" - x, ok := expr.X.(*ast.BinaryExpr) - if !ok { - return true - } - if eqNil && x.Op != token.EQL { - return true - } - if !eqNil && x.Op != token.NEQ { - return true - } - xx, ok := x.X.(*ast.Ident) - if !ok { - return true - } - if !j.IsNil(x.Y) { - return true - } - - // check that y is "len(xx) == 0" or "len(xx) ... " - y, ok := expr.Y.(*ast.BinaryExpr) - if !ok { - return true - } - if eqNil && y.Op != token.EQL { // must be len(xx) *==* 0 - return false - } - yx, ok := y.X.(*ast.CallExpr) - if !ok { - return true - } - yxFun, ok := yx.Fun.(*ast.Ident) - if !ok || yxFun.Name != "len" || len(yx.Args) != 1 { - return true - } - yxArg, ok := yx.Args[0].(*ast.Ident) - if !ok { - return true - } - if yxArg.Name != xx.Name { - return true - } - - if eqNil && !lint.IsZero(y.Y) { // must be len(x) == *0* - return true - } - - if !eqNil { - isConst, isZero := isConstZero(y.Y) - if !isConst { - return true - } - switch y.Op { - case token.EQL: - // avoid false positive for "xx != nil && len(xx) == 0" - if isZero { - return true - } - case token.GEQ: - // avoid false positive for "xx != nil && len(xx) >= 0" - if isZero { - return true - } - case token.NEQ: - // avoid false positive for "xx != nil && len(xx) != " - if !isZero { - return true - } - case token.GTR: - // ok - default: - return true - } - } - - // finally check that xx type is one of array, slice, map or chan - // this is to prevent false positive in case if xx is a pointer to an array - var nilType string - switch j.Program.Info.TypeOf(xx).(type) { - case *types.Slice: - nilType = "nil slices" - case *types.Map: - nilType = "nil maps" - case *types.Chan: - nilType = "nil channels" - default: - return true - } - j.Errorf(expr, "should omit nil check; len() for %s is defined as zero", nilType) - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintSlicing(j *lint.Job) { - fn := func(node ast.Node) bool { - n, ok := node.(*ast.SliceExpr) - if !ok { - return true - } - if n.Max != nil { - return true - } - s, ok := n.X.(*ast.Ident) - if !ok || s.Obj == nil { - return true - } - call, ok := n.High.(*ast.CallExpr) - if !ok || len(call.Args) != 1 || call.Ellipsis.IsValid() { - return true - } - fun, ok := call.Fun.(*ast.Ident) - if !ok || fun.Name != "len" { - return true - } - if _, ok := j.Program.Info.ObjectOf(fun).(*types.Builtin); !ok { - return true - } - arg, ok := call.Args[0].(*ast.Ident) - if !ok || arg.Obj != s.Obj { - return true - } - j.Errorf(n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]") - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func refersTo(info *types.Info, expr ast.Expr, ident *ast.Ident) bool { - found 
:= false - fn := func(node ast.Node) bool { - ident2, ok := node.(*ast.Ident) - if !ok { - return true - } - if info.ObjectOf(ident) == info.ObjectOf(ident2) { - found = true - return false - } - return true - } - ast.Inspect(expr, fn) - return found -} - -func (c *Checker) LintLoopAppend(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.RangeStmt) - if !ok { - return true - } - if !lint.IsBlank(loop.Key) { - return true - } - val, ok := loop.Value.(*ast.Ident) - if !ok { - return true - } - if len(loop.Body.List) != 1 { - return true - } - stmt, ok := loop.Body.List[0].(*ast.AssignStmt) - if !ok { - return true - } - if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 { - return true - } - if refersTo(j.Program.Info, stmt.Lhs[0], val) { - return true - } - call, ok := stmt.Rhs[0].(*ast.CallExpr) - if !ok { - return true - } - if len(call.Args) != 2 || call.Ellipsis.IsValid() { - return true - } - fun, ok := call.Fun.(*ast.Ident) - if !ok { - return true - } - obj := j.Program.Info.ObjectOf(fun) - fn, ok := obj.(*types.Builtin) - if !ok || fn.Name() != "append" { - return true - } - - src := j.Program.Info.TypeOf(loop.X) - dst := j.Program.Info.TypeOf(call.Args[0]) - // TODO(dominikh) remove nil check once Go issue #15173 has - // been fixed - if src == nil { - return true - } - if !types.Identical(src, dst) { - return true - } - - if j.Render(stmt.Lhs[0]) != j.Render(call.Args[0]) { - return true - } - - el, ok := call.Args[1].(*ast.Ident) - if !ok { - return true - } - if j.Program.Info.ObjectOf(val) != j.Program.Info.ObjectOf(el) { - return true - } - j.Errorf(loop, "should replace loop with %s = append(%s, %s...)", - j.Render(stmt.Lhs[0]), j.Render(call.Args[0]), j.Render(loop.X)) - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintTimeSince(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - if !j.IsCallToAST(sel.X, "time.Now") { - return true - } - if sel.Sel.Name != "Sub" { - return true - } - j.Errorf(call, "should use time.Since instead of time.Now().Sub") - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintTimeUntil(j *lint.Job) { - if !j.IsGoVersion(8) { - return - } - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - if !j.IsCallToAST(call, "(time.Time).Sub") { - return true - } - if !j.IsCallToAST(call.Args[0], "time.Now") { - return true - } - j.Errorf(call, "should use time.Until instead of t.Sub(time.Now())") - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintSimplerReturn(j *lint.Job) { - fn1 := func(node ast.Node) bool { - var ret *ast.FieldList - switch x := node.(type) { - case *ast.FuncDecl: - ret = x.Type.Results - case *ast.FuncLit: - ret = x.Type.Results - default: - return true - } - if ret == nil { - return true - } - - fn2 := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } - if len(block.List) < 2 { - return true - } - - outer: - for i, stmt := range block.List { - if i == len(block.List)-1 { - break - } - if i > 0 { - // don't flag an if in a series of ifs - if _, ok := block.List[i-1].(*ast.IfStmt); ok { - continue - } - } - - // if != nil - ifs, ok := 
stmt.(*ast.IfStmt) - if !ok || len(ifs.Body.List) != 1 || ifs.Else != nil { - continue - } - expr, ok := ifs.Cond.(*ast.BinaryExpr) - if !ok || expr.Op != token.NEQ || !j.IsNil(expr.Y) { - continue - } - id1, ok := expr.X.(*ast.Ident) - if !ok { - continue - } - - // return ..., - ret1, ok := ifs.Body.List[0].(*ast.ReturnStmt) - if !ok || len(ret1.Results) == 0 { - continue - } - var results1 []types.Object - for _, res := range ret1.Results { - ident, ok := res.(*ast.Ident) - if !ok { - continue outer - } - results1 = append(results1, j.Program.Info.ObjectOf(ident)) - } - if results1[len(results1)-1] != j.Program.Info.ObjectOf(id1) { - continue - } - - // return ..., [ | nil] - ret2, ok := block.List[i+1].(*ast.ReturnStmt) - if !ok || len(ret2.Results) == 0 { - continue - } - var results2 []types.Object - for _, res := range ret2.Results { - ident, ok := res.(*ast.Ident) - if !ok { - continue outer - } - results2 = append(results2, j.Program.Info.ObjectOf(ident)) - } - _, isNil := results2[len(results2)-1].(*types.Nil) - if results2[len(results2)-1] != j.Program.Info.ObjectOf(id1) && - !isNil { - continue - } - for i, v := range results1[:len(results1)-1] { - if v != results2[i] { - continue outer - } - } - - id1Obj := j.Program.Info.ObjectOf(id1) - if id1Obj == nil { - continue - } - _, idIface := id1Obj.Type().Underlying().(*types.Interface) - _, retIface := j.Program.Info.TypeOf(ret.List[len(ret.List)-1].Type).Underlying().(*types.Interface) - - if retIface && !idIface { - // When the return value is an interface, but the - // identifier is not, an explicit check for nil is - // required to return an untyped nil. - continue - } - - j.Errorf(ifs, "'if %s != nil { return %s }; return %s' can be simplified to 'return %s'", - j.Render(expr.X), j.RenderArgs(ret1.Results), - j.RenderArgs(ret2.Results), j.RenderArgs(ret1.Results)) - } - return true - } - ast.Inspect(node, fn2) - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn1) - } -} - -func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { - fn1 := func(node ast.Node) { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return - } - if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 { - return - } - if !lint.IsBlank(assign.Lhs[1]) { - return - } - switch rhs := assign.Rhs[0].(type) { - case *ast.IndexExpr: - // The type-checker should make sure that it's a map, but - // let's be safe. 
- if _, ok := j.Program.Info.TypeOf(rhs.X).Underlying().(*types.Map); !ok { - return - } - case *ast.UnaryExpr: - if rhs.Op != token.ARROW { - return - } - default: - return - } - cp := *assign - cp.Lhs = cp.Lhs[0:1] - j.Errorf(assign, "should write %s instead of %s", j.Render(&cp), j.Render(assign)) - } - - fn2 := func(node ast.Node) { - stmt, ok := node.(*ast.AssignStmt) - if !ok { - return - } - if len(stmt.Lhs) != len(stmt.Rhs) { - return - } - for i, lh := range stmt.Lhs { - rh := stmt.Rhs[i] - if !lint.IsBlank(lh) { - continue - } - expr, ok := rh.(*ast.UnaryExpr) - if !ok { - continue - } - if expr.Op != token.ARROW { - continue - } - j.Errorf(lh, "'_ = <-ch' can be simplified to '<-ch'") - } - } - - fn3 := func(node ast.Node) { - rs, ok := node.(*ast.RangeStmt) - if !ok { - return - } - if lint.IsBlank(rs.Key) && (rs.Value == nil || lint.IsBlank(rs.Value)) { - j.Errorf(rs.Key, "should omit values from range; this loop is equivalent to `for range ...`") - } - } - - fn := func(node ast.Node) bool { - fn1(node) - fn2(node) - if j.IsGoVersion(4) { - fn3(node) - } - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintSimplerStructConversion(j *lint.Job) { - var skip ast.Node - fn := func(node ast.Node) bool { - // Do not suggest type conversion between pointers - if unary, ok := node.(*ast.UnaryExpr); ok && unary.Op == token.AND { - if lit, ok := unary.X.(*ast.CompositeLit); ok { - skip = lit - } - return true - } - - if node == skip { - return true - } - - lit, ok := node.(*ast.CompositeLit) - if !ok { - return true - } - typ1, _ := j.Program.Info.TypeOf(lit.Type).(*types.Named) - if typ1 == nil { - return true - } - s1, ok := typ1.Underlying().(*types.Struct) - if !ok { - return true - } - - var typ2 *types.Named - var ident *ast.Ident - getSelType := func(expr ast.Expr) (types.Type, *ast.Ident, bool) { - sel, ok := expr.(*ast.SelectorExpr) - if !ok { - return nil, nil, false - } - ident, ok := sel.X.(*ast.Ident) - if !ok { - return nil, nil, false - } - typ := j.Program.Info.TypeOf(sel.X) - return typ, ident, typ != nil - } - if len(lit.Elts) == 0 { - return true - } - if s1.NumFields() != len(lit.Elts) { - return true - } - for i, elt := range lit.Elts { - var t types.Type - var id *ast.Ident - var ok bool - switch elt := elt.(type) { - case *ast.SelectorExpr: - t, id, ok = getSelType(elt) - if !ok { - return true - } - if i >= s1.NumFields() || s1.Field(i).Name() != elt.Sel.Name { - return true - } - case *ast.KeyValueExpr: - var sel *ast.SelectorExpr - sel, ok = elt.Value.(*ast.SelectorExpr) - if !ok { - return true - } - - if elt.Key.(*ast.Ident).Name != sel.Sel.Name { - return true - } - t, id, ok = getSelType(elt.Value) - } - if !ok { - return true - } - // All fields must be initialized from the same object - if ident != nil && ident.Obj != id.Obj { - return true - } - typ2, _ = t.(*types.Named) - if typ2 == nil { - return true - } - ident = id - } - - if typ2 == nil { - return true - } - - if typ1.Obj().Pkg() != typ2.Obj().Pkg() { - // Do not suggest type conversions between different - // packages. Types in different packages might only match - // by coincidence. Furthermore, if the dependency ever - // adds more fields to its type, it could break the code - // that relies on the type conversion to work. 
- return true - } - - s2, ok := typ2.Underlying().(*types.Struct) - if !ok { - return true - } - if typ1 == typ2 { - return true - } - if !structsIdentical(s1, s2) { - return true - } - j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal", - ident.Name, typ2.Obj().Name(), typ1.Obj().Name()) - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintTrim(j *lint.Job) { - sameNonDynamic := func(node1, node2 ast.Node) bool { - if reflect.TypeOf(node1) != reflect.TypeOf(node2) { - return false - } - - switch node1 := node1.(type) { - case *ast.Ident: - return node1.Obj == node2.(*ast.Ident).Obj - case *ast.SelectorExpr: - return j.Render(node1) == j.Render(node2) - case *ast.IndexExpr: - return j.Render(node1) == j.Render(node2) - } - return false - } - - isLenOnIdent := func(fn ast.Expr, ident ast.Expr) bool { - call, ok := fn.(*ast.CallExpr) - if !ok { - return false - } - if fn, ok := call.Fun.(*ast.Ident); !ok || fn.Name != "len" { - return false - } - if len(call.Args) != 1 { - return false - } - return sameNonDynamic(call.Args[0], ident) - } - - fn := func(node ast.Node) bool { - var pkg string - var fun string - - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } - if ifstmt.Init != nil { - return true - } - if ifstmt.Else != nil { - return true - } - if len(ifstmt.Body.List) != 1 { - return true - } - condCall, ok := ifstmt.Cond.(*ast.CallExpr) - if !ok { - return true - } - call, ok := condCall.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - if lint.IsIdent(call.X, "strings") { - pkg = "strings" - } else if lint.IsIdent(call.X, "bytes") { - pkg = "bytes" - } else { - return true - } - if lint.IsIdent(call.Sel, "HasPrefix") { - fun = "HasPrefix" - } else if lint.IsIdent(call.Sel, "HasSuffix") { - fun = "HasSuffix" - } else { - return true - } - - assign, ok := ifstmt.Body.List[0].(*ast.AssignStmt) - if !ok { - return true - } - if assign.Tok != token.ASSIGN { - return true - } - if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { - return true - } - if !sameNonDynamic(condCall.Args[0], assign.Lhs[0]) { - return true - } - slice, ok := assign.Rhs[0].(*ast.SliceExpr) - if !ok { - return true - } - if slice.Slice3 { - return true - } - if !sameNonDynamic(slice.X, condCall.Args[0]) { - return true - } - var index ast.Expr - switch fun { - case "HasPrefix": - // TODO(dh) We could detect a High that is len(s), but another - // rule will already flag that, anyway. 
- if slice.High != nil { - return true - } - index = slice.Low - case "HasSuffix": - if slice.Low != nil { - n, ok := j.ExprToInt(slice.Low) - if !ok || n != 0 { - return true - } - } - index = slice.High - } - - switch index := index.(type) { - case *ast.CallExpr: - if fun != "HasPrefix" { - return true - } - if fn, ok := index.Fun.(*ast.Ident); !ok || fn.Name != "len" { - return true - } - if len(index.Args) != 1 { - return true - } - id3 := index.Args[0] - switch oid3 := condCall.Args[1].(type) { - case *ast.BasicLit: - if pkg != "strings" { - return false - } - lit, ok := id3.(*ast.BasicLit) - if !ok { - return true - } - s1, ok1 := j.ExprToString(lit) - s2, ok2 := j.ExprToString(condCall.Args[1]) - if !ok1 || !ok2 || s1 != s2 { - return true - } - default: - if !sameNonDynamic(id3, oid3) { - return true - } - } - case *ast.BasicLit, *ast.Ident: - if fun != "HasPrefix" { - return true - } - if pkg != "strings" { - return true - } - string, ok1 := j.ExprToString(condCall.Args[1]) - int, ok2 := j.ExprToInt(slice.Low) - if !ok1 || !ok2 || int != int64(len(string)) { - return true - } - case *ast.BinaryExpr: - if fun != "HasSuffix" { - return true - } - if index.Op != token.SUB { - return true - } - if !isLenOnIdent(index.X, condCall.Args[0]) || - !isLenOnIdent(index.Y, condCall.Args[1]) { - return true - } - default: - return true - } - - var replacement string - switch fun { - case "HasPrefix": - replacement = "TrimPrefix" - case "HasSuffix": - replacement = "TrimSuffix" - } - j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintLoopSlide(j *lint.Job) { - // TODO(dh): detect bs[i+offset] in addition to bs[offset+i] - // TODO(dh): consider merging this function with LintLoopCopy - // TODO(dh): detect length that is an expression, not a variable name - // TODO(dh): support sliding to a different offset than the beginning of the slice - - fn := func(node ast.Node) bool { - /* - for i := 0; i < n; i++ { - bs[i] = bs[offset+i] - } - - ↓ - - copy(bs[:n], bs[offset:offset+n]) - */ - - loop, ok := node.(*ast.ForStmt) - if !ok || len(loop.Body.List) != 1 || loop.Init == nil || loop.Cond == nil || loop.Post == nil { - return true - } - assign, ok := loop.Init.(*ast.AssignStmt) - if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || !lint.IsZero(assign.Rhs[0]) { - return true - } - initvar, ok := assign.Lhs[0].(*ast.Ident) - if !ok { - return true - } - post, ok := loop.Post.(*ast.IncDecStmt) - if !ok || post.Tok != token.INC { - return true - } - postvar, ok := post.X.(*ast.Ident) - if !ok || j.Program.Info.ObjectOf(postvar) != j.Program.Info.ObjectOf(initvar) { - return true - } - bin, ok := loop.Cond.(*ast.BinaryExpr) - if !ok || bin.Op != token.LSS { - return true - } - binx, ok := bin.X.(*ast.Ident) - if !ok || j.Program.Info.ObjectOf(binx) != j.Program.Info.ObjectOf(initvar) { - return true - } - biny, ok := bin.Y.(*ast.Ident) - if !ok { - return true - } - - assign, ok = loop.Body.List[0].(*ast.AssignStmt) - if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || assign.Tok != token.ASSIGN { - return true - } - lhs, ok := assign.Lhs[0].(*ast.IndexExpr) - if !ok { - return true - } - rhs, ok := assign.Rhs[0].(*ast.IndexExpr) - if !ok { - return true - } - - bs1, ok := lhs.X.(*ast.Ident) - if !ok { - return true - } - bs2, ok := rhs.X.(*ast.Ident) - if !ok { - return true - } - obj1 := j.Program.Info.ObjectOf(bs1) - 
obj2 := j.Program.Info.ObjectOf(bs2) - if obj1 != obj2 { - return true - } - if _, ok := obj1.Type().Underlying().(*types.Slice); !ok { - return true - } - - index1, ok := lhs.Index.(*ast.Ident) - if !ok || j.Program.Info.ObjectOf(index1) != j.Program.Info.ObjectOf(initvar) { - return true - } - index2, ok := rhs.Index.(*ast.BinaryExpr) - if !ok || index2.Op != token.ADD { - return true - } - add1, ok := index2.X.(*ast.Ident) - if !ok { - return true - } - add2, ok := index2.Y.(*ast.Ident) - if !ok || j.Program.Info.ObjectOf(add2) != j.Program.Info.ObjectOf(initvar) { - return true - } - - j.Errorf(loop, "should use copy(%s[:%s], %s[%s:]) instead", j.Render(bs1), j.Render(biny), j.Render(bs1), j.Render(add1)) - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintMakeLenCap(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - if fn, ok := call.Fun.(*ast.Ident); !ok || fn.Name != "make" { - // FIXME check whether make is indeed the built-in function - return true - } - switch len(call.Args) { - case 2: - // make(T, len) - if _, ok := j.Program.Info.TypeOf(call.Args[0]).Underlying().(*types.Slice); ok { - break - } - if lint.IsZero(call.Args[1]) { - j.Errorf(call.Args[1], "should use make(%s) instead", j.Render(call.Args[0])) - } - case 3: - // make(T, len, cap) - if j.Render(call.Args[1]) == j.Render(call.Args[2]) { - j.Errorf(call.Args[1], "should use make(%s, %s) instead", j.Render(call.Args[0]), j.Render(call.Args[1])) - } - } - return false - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintAssertNotNil(j *lint.Job) { - isNilCheck := func(ident *ast.Ident, expr ast.Expr) bool { - xbinop, ok := expr.(*ast.BinaryExpr) - if !ok || xbinop.Op != token.NEQ { - return false - } - xident, ok := xbinop.X.(*ast.Ident) - if !ok || xident.Obj != ident.Obj { - return false - } - if !j.IsNil(xbinop.Y) { - return false - } - return true - } - isOKCheck := func(ident *ast.Ident, expr ast.Expr) bool { - yident, ok := expr.(*ast.Ident) - if !ok || yident.Obj != ident.Obj { - return false - } - return true - } - fn := func(node ast.Node) bool { - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } - assign, ok := ifstmt.Init.(*ast.AssignStmt) - if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !lint.IsBlank(assign.Lhs[0]) { - return true - } - assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr) - if !ok { - return true - } - binop, ok := ifstmt.Cond.(*ast.BinaryExpr) - if !ok || binop.Op != token.LAND { - return true - } - assertIdent, ok := assert.X.(*ast.Ident) - if !ok { - return true - } - assignIdent, ok := assign.Lhs[1].(*ast.Ident) - if !ok { - return true - } - if !(isNilCheck(assertIdent, binop.X) && isOKCheck(assignIdent, binop.Y)) && - !(isNilCheck(assertIdent, binop.Y) && isOKCheck(assignIdent, binop.X)) { - return true - } - j.Errorf(ifstmt, "when %s is true, %s can't be nil", j.Render(assignIdent), j.Render(assertIdent)) - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintDeclareAssign(j *lint.Job) { - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } - if len(block.List) < 2 { - return true - } - for i, stmt := range block.List[:len(block.List)-1] { - _ = i - decl, ok := stmt.(*ast.DeclStmt) - if !ok { - continue - } - gdecl, ok := decl.Decl.(*ast.GenDecl) 
- if !ok || gdecl.Tok != token.VAR || len(gdecl.Specs) != 1 { - continue - } - vspec, ok := gdecl.Specs[0].(*ast.ValueSpec) - if !ok || len(vspec.Names) != 1 || len(vspec.Values) != 0 { - continue - } - - assign, ok := block.List[i+1].(*ast.AssignStmt) - if !ok || assign.Tok != token.ASSIGN { - continue - } - if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { - continue - } - ident, ok := assign.Lhs[0].(*ast.Ident) - if !ok { - continue - } - if vspec.Names[0].Obj != ident.Obj { - continue - } - - if refersTo(j.Program.Info, assign.Rhs[0], ident) { - continue - } - j.Errorf(decl, "should merge variable declaration with assignment on next line") - } - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintRedundantBreak(j *lint.Job) { - fn1 := func(node ast.Node) { - clause, ok := node.(*ast.CaseClause) - if !ok { - return - } - if len(clause.Body) < 2 { - return - } - branch, ok := clause.Body[len(clause.Body)-1].(*ast.BranchStmt) - if !ok || branch.Tok != token.BREAK || branch.Label != nil { - return - } - j.Errorf(branch, "redundant break statement") - return - } - fn2 := func(node ast.Node) { - var ret *ast.FieldList - var body *ast.BlockStmt - switch x := node.(type) { - case *ast.FuncDecl: - ret = x.Type.Results - body = x.Body - case *ast.FuncLit: - ret = x.Type.Results - body = x.Body - default: - return - } - // if the func has results, a return can't be redundant. - // similarly, if there are no statements, there can be - // no return. - if ret != nil || body == nil || len(body.List) < 1 { - return - } - rst, ok := body.List[len(body.List)-1].(*ast.ReturnStmt) - if !ok { - return - } - // we don't need to check rst.Results as we already - // checked x.Type.Results to be nil. 
- j.Errorf(rst, "redundant return statement") - } - fn := func(node ast.Node) bool { - fn1(node) - fn2(node) - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) Implements(j *lint.Job, typ types.Type, iface string) bool { - // OPT(dh): we can cache the type lookup - idx := strings.IndexRune(iface, '.') - var scope *types.Scope - var ifaceName string - if idx == -1 { - scope = types.Universe - ifaceName = iface - } else { - pkgName := iface[:idx] - pkg := j.Program.Prog.Package(pkgName) - if pkg == nil { - return false - } - scope = pkg.Pkg.Scope() - ifaceName = iface[idx+1:] - } - - obj := scope.Lookup(ifaceName) - if obj == nil { - return false - } - i, ok := obj.Type().Underlying().(*types.Interface) - if !ok { - return false - } - return types.Implements(typ, i) -} - -func (c *Checker) LintRedundantSprintf(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - if !j.IsCallToAST(call, "fmt.Sprintf") { - return true - } - if len(call.Args) != 2 { - return true - } - if s, ok := j.ExprToString(call.Args[0]); !ok || s != "%s" { - return true - } - pkg := j.NodePackage(call) - arg := call.Args[1] - typ := pkg.Info.TypeOf(arg) - - if c.Implements(j, typ, "fmt.Stringer") { - j.Errorf(call, "should use String() instead of fmt.Sprintf") - return true - } - - if typ.Underlying() == types.Universe.Lookup("string").Type() { - if typ == types.Universe.Lookup("string").Type() { - j.Errorf(call, "the argument is already a string, there's no need to use fmt.Sprintf") - } else { - j.Errorf(call, "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf") - } - } - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { - fn := func(node ast.Node) bool { - if !j.IsCallToAST(node, "errors.New") { - return true - } - call := node.(*ast.CallExpr) - if !j.IsCallToAST(call.Args[0], "fmt.Sprintf") { - return true - } - j.Errorf(node, "should use fmt.Errorf(...) 
instead of errors.New(fmt.Sprintf(...))") - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) LintRangeStringRunes(j *lint.Job) { - sharedcheck.CheckRangeStringRunes(c.nodeFns, j) -} - -func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { - fn := func(node ast.Node) bool { - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } - - cond, ok := ifstmt.Cond.(*ast.BinaryExpr) - if !ok { - return true - } - - if cond.Op != token.NEQ || !j.IsNil(cond.Y) || len(ifstmt.Body.List) != 1 { - return true - } - - loop, ok := ifstmt.Body.List[0].(*ast.RangeStmt) - if !ok { - return true - } - ifXIdent, ok := cond.X.(*ast.Ident) - if !ok { - return true - } - rangeXIdent, ok := loop.X.(*ast.Ident) - if !ok { - return true - } - if ifXIdent.Obj != rangeXIdent.Obj { - return true - } - switch j.Program.Info.TypeOf(rangeXIdent).(type) { - case *types.Slice, *types.Map: - j.Errorf(node, "unnecessary nil check around range") - } - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} diff --git a/vendor/honnef.co/go/tools/simple/lint17.go b/vendor/honnef.co/go/tools/simple/lint17.go deleted file mode 100644 index 53f529c..0000000 --- a/vendor/honnef.co/go/tools/simple/lint17.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build !go1.8 - -package simple - -import "go/types" - -var structsIdentical = types.Identical diff --git a/vendor/honnef.co/go/tools/simple/lint18.go b/vendor/honnef.co/go/tools/simple/lint18.go deleted file mode 100644 index ab9ea72..0000000 --- a/vendor/honnef.co/go/tools/simple/lint18.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build go1.8 - -package simple - -import "go/types" - -var structsIdentical = types.IdenticalIgnoreTags diff --git a/vendor/honnef.co/go/tools/ssa/LICENSE b/vendor/honnef.co/go/tools/ssa/LICENSE deleted file mode 100644 index aee4804..0000000 --- a/vendor/honnef.co/go/tools/ssa/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. -Copyright (c) 2016 Dominik Honnef. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/honnef.co/go/tools/ssa/blockopt.go b/vendor/honnef.co/go/tools/ssa/blockopt.go deleted file mode 100644 index 22c9a4c..0000000 --- a/vendor/honnef.co/go/tools/ssa/blockopt.go +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// Simple block optimizations to simplify the control flow graph. - -// TODO(adonovan): opt: instead of creating several "unreachable" blocks -// per function in the Builder, reuse a single one (e.g. at Blocks[1]) -// to reduce garbage. - -import ( - "fmt" - "os" -) - -// If true, perform sanity checking and show progress at each -// successive iteration of optimizeBlocks. Very verbose. -const debugBlockOpt = false - -// markReachable sets Index=-1 for all blocks reachable from b. -func markReachable(b *BasicBlock) { - b.Index = -1 - for _, succ := range b.Succs { - if succ.Index == 0 { - markReachable(succ) - } - } -} - -func DeleteUnreachableBlocks(f *Function) { - deleteUnreachableBlocks(f) -} - -// deleteUnreachableBlocks marks all reachable blocks of f and -// eliminates (nils) all others, including possibly cyclic subgraphs. -// -func deleteUnreachableBlocks(f *Function) { - const white, black = 0, -1 - // We borrow b.Index temporarily as the mark bit. - for _, b := range f.Blocks { - b.Index = white - } - markReachable(f.Blocks[0]) - if f.Recover != nil { - markReachable(f.Recover) - } - for i, b := range f.Blocks { - if b.Index == white { - for _, c := range b.Succs { - if c.Index == black { - c.removePred(b) // delete white->black edge - } - } - if debugBlockOpt { - fmt.Fprintln(os.Stderr, "unreachable", b) - } - f.Blocks[i] = nil // delete b - } - } - f.removeNilBlocks() -} - -// jumpThreading attempts to apply simple jump-threading to block b, -// in which a->b->c become a->c if b is just a Jump. -// The result is true if the optimization was applied. -// -func jumpThreading(f *Function, b *BasicBlock) bool { - if b.Index == 0 { - return false // don't apply to entry block - } - if b.Instrs == nil { - return false - } - if _, ok := b.Instrs[0].(*Jump); !ok { - return false // not just a jump - } - c := b.Succs[0] - if c == b { - return false // don't apply to degenerate jump-to-self. - } - if c.hasPhi() { - return false // not sound without more effort - } - for j, a := range b.Preds { - a.replaceSucc(b, c) - - // If a now has two edges to c, replace its degenerate If by Jump. - if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c { - jump := new(Jump) - jump.setBlock(a) - a.Instrs[len(a.Instrs)-1] = jump - a.Succs = a.Succs[:1] - c.removePred(b) - } else { - if j == 0 { - c.replacePred(b, a) - } else { - c.Preds = append(c.Preds, a) - } - } - - if debugBlockOpt { - fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c) - } - } - f.Blocks[b.Index] = nil // delete b - return true -} - -// fuseBlocks attempts to apply the block fusion optimization to block -// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1. -// The result is true if the optimization was applied. -// -func fuseBlocks(f *Function, a *BasicBlock) bool { - if len(a.Succs) != 1 { - return false - } - b := a.Succs[0] - if len(b.Preds) != 1 { - return false - } - - // Degenerate &&/|| ops may result in a straight-line CFG - // containing φ-nodes. (Ideally we'd replace such them with - // their sole operand but that requires Referrers, built later.) 
- if b.hasPhi() { - return false // not sound without further effort - } - - // Eliminate jump at end of A, then copy all of B across. - a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...) - for _, instr := range b.Instrs { - instr.setBlock(a) - } - - // A inherits B's successors - a.Succs = append(a.succs2[:0], b.Succs...) - - // Fix up Preds links of all successors of B. - for _, c := range b.Succs { - c.replacePred(b, a) - } - - if debugBlockOpt { - fmt.Fprintln(os.Stderr, "fuseBlocks", a, b) - } - - f.Blocks[b.Index] = nil // delete b - return true -} - -func OptimizeBlocks(f *Function) { - optimizeBlocks(f) -} - -// optimizeBlocks() performs some simple block optimizations on a -// completed function: dead block elimination, block fusion, jump -// threading. -// -func optimizeBlocks(f *Function) { - deleteUnreachableBlocks(f) - - // Loop until no further progress. - changed := true - for changed { - changed = false - - if debugBlockOpt { - f.WriteTo(os.Stderr) - mustSanityCheck(f, nil) - } - - for _, b := range f.Blocks { - // f.Blocks will temporarily contain nils to indicate - // deleted blocks; we remove them at the end. - if b == nil { - continue - } - - // Fuse blocks. b->c becomes bc. - if fuseBlocks(f, b) { - changed = true - } - - // a->b->c becomes a->c if b contains only a Jump. - if jumpThreading(f, b) { - changed = true - continue // (b was disconnected) - } - } - } - f.removeNilBlocks() -} diff --git a/vendor/honnef.co/go/tools/ssa/builder.go b/vendor/honnef.co/go/tools/ssa/builder.go deleted file mode 100644 index bfb7a2b..0000000 --- a/vendor/honnef.co/go/tools/ssa/builder.go +++ /dev/null @@ -1,2383 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file implements the BUILD phase of SSA construction. -// -// SSA construction has two phases, CREATE and BUILD. In the CREATE phase -// (create.go), all packages are constructed and type-checked and -// definitions of all package members are created, method-sets are -// computed, and wrapper methods are synthesized. -// ssa.Packages are created in arbitrary order. -// -// In the BUILD phase (builder.go), the builder traverses the AST of -// each Go source function and generates SSA instructions for the -// function body. Initializer expressions for package-level variables -// are emitted to the package's init() function in the order specified -// by go/types.Info.InitOrder, then code for each function in the -// package is generated in lexical order. -// The BUILD phases for distinct packages are independent and are -// executed in parallel. -// -// TODO(adonovan): indeed, building functions is now embarrassingly parallel. -// Audit for concurrency then benchmark using more goroutines. -// -// The builder's and Program's indices (maps) are populated and -// mutated during the CREATE phase, but during the BUILD phase they -// remain constant. The sole exception is Prog.methodSets and its -// related maps, which are protected by a dedicated mutex. - -import ( - "fmt" - "go/ast" - exact "go/constant" - "go/token" - "go/types" - "os" - "sync" -) - -type opaqueType struct { - types.Type - name string -} - -func (t *opaqueType) String() string { return t.name } - -var ( - varOk = newVar("ok", tBool) - varIndex = newVar("index", tInt) - - // Type constants. 
- tBool = types.Typ[types.Bool] - tByte = types.Typ[types.Byte] - tInt = types.Typ[types.Int] - tInvalid = types.Typ[types.Invalid] - tString = types.Typ[types.String] - tUntypedNil = types.Typ[types.UntypedNil] - tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators - tEface = new(types.Interface) - - // SSA Value constants. - vZero = intConst(0) - vOne = intConst(1) - vTrue = NewConst(exact.MakeBool(true), tBool) -) - -// builder holds state associated with the package currently being built. -// Its methods contain all the logic for AST-to-SSA conversion. -type builder struct{} - -// cond emits to fn code to evaluate boolean condition e and jump -// to t or f depending on its value, performing various simplifications. -// -// Postcondition: fn.currentBlock is nil. -// -func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) { - switch e := e.(type) { - case *ast.ParenExpr: - b.cond(fn, e.X, t, f) - return - - case *ast.BinaryExpr: - switch e.Op { - case token.LAND: - ltrue := fn.newBasicBlock("cond.true") - b.cond(fn, e.X, ltrue, f) - fn.currentBlock = ltrue - b.cond(fn, e.Y, t, f) - return - - case token.LOR: - lfalse := fn.newBasicBlock("cond.false") - b.cond(fn, e.X, t, lfalse) - fn.currentBlock = lfalse - b.cond(fn, e.Y, t, f) - return - } - - case *ast.UnaryExpr: - if e.Op == token.NOT { - b.cond(fn, e.X, f, t) - return - } - } - - // A traditional compiler would simplify "if false" (etc) here - // but we do not, for better fidelity to the source code. - // - // The value of a constant condition may be platform-specific, - // and may cause blocks that are reachable in some configuration - // to be hidden from subsequent analyses such as bug-finding tools. - emitIf(fn, b.expr(fn, e), t, f) -} - -// logicalBinop emits code to fn to evaluate e, a &&- or -// ||-expression whose reified boolean value is wanted. -// The value is returned. -// -func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { - rhs := fn.newBasicBlock("binop.rhs") - done := fn.newBasicBlock("binop.done") - - // T(e) = T(e.X) = T(e.Y) after untyped constants have been - // eliminated. - // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. - t := fn.Pkg.typeOf(e) - - var short Value // value of the short-circuit path - switch e.Op { - case token.LAND: - b.cond(fn, e.X, rhs, done) - short = NewConst(exact.MakeBool(false), t) - - case token.LOR: - b.cond(fn, e.X, done, rhs) - short = NewConst(exact.MakeBool(true), t) - } - - // Is rhs unreachable? - if rhs.Preds == nil { - // Simplify false&&y to false, true||y to true. - fn.currentBlock = done - return short - } - - // Is done unreachable? - if done.Preds == nil { - // Simplify true&&y (or false||y) to y. - fn.currentBlock = rhs - return b.expr(fn, e.Y) - } - - // All edges from e.X to done carry the short-circuit value. - var edges []Value - for _ = range done.Preds { - edges = append(edges, short) - } - - // The edge from e.Y to done carries the value of e.Y. - fn.currentBlock = rhs - edges = append(edges, b.expr(fn, e.Y)) - emitJump(fn, done) - fn.currentBlock = done - - phi := &Phi{Edges: edges, Comment: e.Op.String()} - phi.pos = e.OpPos - phi.typ = t - return done.emit(phi) -} - -// exprN lowers a multi-result expression e to SSA form, emitting code -// to fn and returning a single Value whose type is a *types.Tuple. -// The caller must access the components via Extract. 
-// -// Multi-result expressions include CallExprs in a multi-value -// assignment or return statement, and "value,ok" uses of -// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op -// is token.ARROW). -// -func (b *builder) exprN(fn *Function, e ast.Expr) Value { - typ := fn.Pkg.typeOf(e).(*types.Tuple) - switch e := e.(type) { - case *ast.ParenExpr: - return b.exprN(fn, e.X) - - case *ast.CallExpr: - // Currently, no built-in function nor type conversion - // has multiple results, so we can avoid some of the - // cases for single-valued CallExpr. - var c Call - b.setCall(fn, e, &c.Call) - c.typ = typ - return fn.emit(&c) - - case *ast.IndexExpr: - mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) - lookup := &Lookup{ - X: b.expr(fn, e.X), - Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), - CommaOk: true, - } - lookup.setType(typ) - lookup.setPos(e.Lbrack) - return fn.emit(lookup) - - case *ast.TypeAssertExpr: - return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen) - - case *ast.UnaryExpr: // must be receive <- - unop := &UnOp{ - Op: token.ARROW, - X: b.expr(fn, e.X), - CommaOk: true, - } - unop.setType(typ) - unop.setPos(e.OpPos) - return fn.emit(unop) - } - panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) -} - -// builtin emits to fn SSA instructions to implement a call to the -// built-in function obj with the specified arguments -// and return type. It returns the value defined by the result. -// -// The result is nil if no special handling was required; in this case -// the caller should treat this like an ordinary library function -// call. -// -func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value { - switch obj.Name() { - case "make": - switch typ.Underlying().(type) { - case *types.Slice: - n := b.expr(fn, args[1]) - m := n - if len(args) == 3 { - m = b.expr(fn, args[2]) - } - if m, ok := m.(*Const); ok { - // treat make([]T, n, m) as new([m]T)[:n] - cap := m.Int64() - at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap) - alloc := emitNew(fn, at, pos) - alloc.Comment = "makeslice" - v := &Slice{ - X: alloc, - High: n, - } - v.setPos(pos) - v.setType(typ) - return fn.emit(v) - } - v := &MakeSlice{ - Len: n, - Cap: m, - } - v.setPos(pos) - v.setType(typ) - return fn.emit(v) - - case *types.Map: - var res Value - if len(args) == 2 { - res = b.expr(fn, args[1]) - } - v := &MakeMap{Reserve: res} - v.setPos(pos) - v.setType(typ) - return fn.emit(v) - - case *types.Chan: - var sz Value = vZero - if len(args) == 2 { - sz = b.expr(fn, args[1]) - } - v := &MakeChan{Size: sz} - v.setPos(pos) - v.setType(typ) - return fn.emit(v) - } - - case "new": - alloc := emitNew(fn, deref(typ), pos) - alloc.Comment = "new" - return alloc - - case "len", "cap": - // Special case: len or cap of an array or *array is - // based on the type, not the value which may be nil. - // We must still evaluate the value, though. (If it - // was side-effect free, the whole call would have - // been constant-folded.) - t := deref(fn.Pkg.typeOf(args[0])).Underlying() - if at, ok := t.(*types.Array); ok { - b.expr(fn, args[0]) // for effects only - return intConst(at.Len()) - } - // Otherwise treat as normal. 
- - case "panic": - fn.emit(&Panic{ - X: emitConv(fn, b.expr(fn, args[0]), tEface), - pos: pos, - }) - fn.currentBlock = fn.newBasicBlock("unreachable") - return vTrue // any non-nil Value will do - } - return nil // treat all others as a regular function call -} - -// addr lowers a single-result addressable expression e to SSA form, -// emitting code to fn and returning the location (an lvalue) defined -// by the expression. -// -// If escaping is true, addr marks the base variable of the -// addressable expression e as being a potentially escaping pointer -// value. For example, in this code: -// -// a := A{ -// b: [1]B{B{c: 1}} -// } -// return &a.b[0].c -// -// the application of & causes a.b[0].c to have its address taken, -// which means that ultimately the local variable a must be -// heap-allocated. This is a simple but very conservative escape -// analysis. -// -// Operations forming potentially escaping pointers include: -// - &x, including when implicit in method call or composite literals. -// - a[:] iff a is an array (not *array) -// - references to variables in lexically enclosing functions. -// -func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { - switch e := e.(type) { - case *ast.Ident: - if isBlankIdent(e) { - return blank{} - } - obj := fn.Pkg.objectOf(e) - v := fn.Prog.packageLevelValue(obj) // var (address) - if v == nil { - v = fn.lookup(obj, escaping) - } - return &address{addr: v, pos: e.Pos(), expr: e} - - case *ast.CompositeLit: - t := deref(fn.Pkg.typeOf(e)) - var v *Alloc - if escaping { - v = emitNew(fn, t, e.Lbrace) - } else { - v = fn.addLocal(t, e.Lbrace) - } - v.Comment = "complit" - var sb storebuf - b.compLit(fn, v, e, true, &sb) - sb.emit(fn) - return &address{addr: v, pos: e.Lbrace, expr: e} - - case *ast.ParenExpr: - return b.addr(fn, e.X, escaping) - - case *ast.SelectorExpr: - sel, ok := fn.Pkg.info.Selections[e] - if !ok { - // qualified identifier - return b.addr(fn, e.Sel, escaping) - } - if sel.Kind() != types.FieldVal { - panic(sel) - } - wantAddr := true - v := b.receiver(fn, e.X, wantAddr, escaping, sel) - last := len(sel.Index()) - 1 - return &address{ - addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel), - pos: e.Sel.Pos(), - expr: e.Sel, - } - - case *ast.IndexExpr: - var x Value - var et types.Type - switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { - case *types.Array: - x = b.addr(fn, e.X, escaping).address(fn) - et = types.NewPointer(t.Elem()) - case *types.Pointer: // *array - x = b.expr(fn, e.X) - et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()) - case *types.Slice: - x = b.expr(fn, e.X) - et = types.NewPointer(t.Elem()) - case *types.Map: - return &element{ - m: b.expr(fn, e.X), - k: emitConv(fn, b.expr(fn, e.Index), t.Key()), - t: t.Elem(), - pos: e.Lbrack, - } - default: - panic("unexpected container type in IndexExpr: " + t.String()) - } - v := &IndexAddr{ - X: x, - Index: emitConv(fn, b.expr(fn, e.Index), tInt), - } - v.setPos(e.Lbrack) - v.setType(et) - return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e} - - case *ast.StarExpr: - return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} - } - - panic(fmt.Sprintf("unexpected address expression: %T", e)) -} - -type store struct { - lhs lvalue - rhs Value -} - -type storebuf struct{ stores []store } - -func (sb *storebuf) store(lhs lvalue, rhs Value) { - sb.stores = append(sb.stores, store{lhs, rhs}) -} - -func (sb *storebuf) emit(fn *Function) { - for _, s := range sb.stores { - s.lhs.store(fn, s.rhs) - } -} - -// 
assign emits to fn code to initialize the lvalue loc with the value -// of expression e. If isZero is true, assign assumes that loc holds -// the zero value for its type. -// -// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate -// better code in some cases, e.g., for composite literals in an -// addressable location. -// -// If sb is not nil, assign generates code to evaluate expression e, but -// not to update loc. Instead, the necessary stores are appended to the -// storebuf sb so that they can be executed later. This allows correct -// in-place update of existing variables when the RHS is a composite -// literal that may reference parts of the LHS. -// -func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) { - // Can we initialize it in place? - if e, ok := unparen(e).(*ast.CompositeLit); ok { - // A CompositeLit never evaluates to a pointer, - // so if the type of the location is a pointer, - // an &-operation is implied. - if _, ok := loc.(blank); !ok { // avoid calling blank.typ() - if isPointer(loc.typ()) { - ptr := b.addr(fn, e, true).address(fn) - // copy address - if sb != nil { - sb.store(loc, ptr) - } else { - loc.store(fn, ptr) - } - return - } - } - - if _, ok := loc.(*address); ok { - if isInterface(loc.typ()) { - // e.g. var x interface{} = T{...} - // Can't in-place initialize an interface value. - // Fall back to copying. - } else { - // x = T{...} or x := T{...} - addr := loc.address(fn) - if sb != nil { - b.compLit(fn, addr, e, isZero, sb) - } else { - var sb storebuf - b.compLit(fn, addr, e, isZero, &sb) - sb.emit(fn) - } - - // Subtle: emit debug ref for aggregate types only; - // slice and map are handled by store ops in compLit. - switch loc.typ().Underlying().(type) { - case *types.Struct, *types.Array: - emitDebugRef(fn, e, addr, true) - } - - return - } - } - } - - // simple case: just copy - rhs := b.expr(fn, e) - if sb != nil { - sb.store(loc, rhs) - } else { - loc.store(fn, rhs) - } -} - -// expr lowers a single-result expression e to SSA form, emitting code -// to fn and returning the Value defined by the expression. -// -func (b *builder) expr(fn *Function, e ast.Expr) Value { - e = unparen(e) - - tv := fn.Pkg.info.Types[e] - - // Is expression a constant? - if tv.Value != nil { - return NewConst(tv.Value, tv.Type) - } - - var v Value - if tv.Addressable() { - // Prefer pointer arithmetic ({Index,Field}Addr) followed - // by Load over subelement extraction (e.g. Index, Field), - // to avoid large copies. 
- v = b.addr(fn, e, false).load(fn) - } else { - v = b.expr0(fn, e, tv) - } - if fn.debugInfo() { - emitDebugRef(fn, e, v, false) - } - return v -} - -func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { - switch e := e.(type) { - case *ast.BasicLit: - panic("non-constant BasicLit") // unreachable - - case *ast.FuncLit: - fn2 := &Function{ - name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), - Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature), - pos: e.Type.Func, - parent: fn, - Pkg: fn.Pkg, - Prog: fn.Prog, - syntax: e, - } - fn.AnonFuncs = append(fn.AnonFuncs, fn2) - b.buildFunction(fn2) - if fn2.FreeVars == nil { - return fn2 - } - v := &MakeClosure{Fn: fn2} - v.setType(tv.Type) - for _, fv := range fn2.FreeVars { - v.Bindings = append(v.Bindings, fv.outer) - fv.outer = nil - } - return fn.emit(v) - - case *ast.TypeAssertExpr: // single-result form only - return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen) - - case *ast.CallExpr: - if fn.Pkg.info.Types[e.Fun].IsType() { - // Explicit type conversion, e.g. string(x) or big.Int(x) - x := b.expr(fn, e.Args[0]) - y := emitConv(fn, x, tv.Type) - if y != x { - switch y := y.(type) { - case *Convert: - y.pos = e.Lparen - case *ChangeType: - y.pos = e.Lparen - case *MakeInterface: - y.pos = e.Lparen - } - } - return y - } - // Call to "intrinsic" built-ins, e.g. new, make, panic. - if id, ok := unparen(e.Fun).(*ast.Ident); ok { - if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok { - if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil { - return v - } - } - } - // Regular function call. - var v Call - b.setCall(fn, e, &v.Call) - v.setType(tv.Type) - return fn.emit(&v) - - case *ast.UnaryExpr: - switch e.Op { - case token.AND: // &X --- potentially escaping. - addr := b.addr(fn, e.X, true) - if _, ok := unparen(e.X).(*ast.StarExpr); ok { - // &*p must panic if p is nil (http://golang.org/s/go12nil). - // For simplicity, we'll just (suboptimally) rely - // on the side effects of a load. - // TODO(adonovan): emit dedicated nilcheck. - addr.load(fn) - } - return addr.address(fn) - case token.ADD: - return b.expr(fn, e.X) - case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^ - v := &UnOp{ - Op: e.Op, - X: b.expr(fn, e.X), - } - v.setPos(e.OpPos) - v.setType(tv.Type) - return fn.emit(v) - default: - panic(e.Op) - } - - case *ast.BinaryExpr: - switch e.Op { - case token.LAND, token.LOR: - return b.logicalBinop(fn, e) - case token.SHL, token.SHR: - fallthrough - case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: - return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos) - - case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: - cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos) - // The type of x==y may be UntypedBool. - return emitConv(fn, cmp, DefaultType(tv.Type)) - default: - panic("illegal op in BinaryExpr: " + e.Op.String()) - } - - case *ast.SliceExpr: - var low, high, max Value - var x Value - switch fn.Pkg.typeOf(e.X).Underlying().(type) { - case *types.Array: - // Potentially escaping. 
- x = b.addr(fn, e.X, true).address(fn) - case *types.Basic, *types.Slice, *types.Pointer: // *array - x = b.expr(fn, e.X) - default: - panic("unreachable") - } - if e.High != nil { - high = b.expr(fn, e.High) - } - if e.Low != nil { - low = b.expr(fn, e.Low) - } - if e.Slice3 { - max = b.expr(fn, e.Max) - } - v := &Slice{ - X: x, - Low: low, - High: high, - Max: max, - } - v.setPos(e.Lbrack) - v.setType(tv.Type) - return fn.emit(v) - - case *ast.Ident: - obj := fn.Pkg.info.Uses[e] - // Universal built-in or nil? - switch obj := obj.(type) { - case *types.Builtin: - return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)} - case *types.Nil: - return nilConst(tv.Type) - } - // Package-level func or var? - if v := fn.Prog.packageLevelValue(obj); v != nil { - if _, ok := obj.(*types.Var); ok { - return emitLoad(fn, v) // var (address) - } - return v // (func) - } - // Local var. - return emitLoad(fn, fn.lookup(obj, false)) // var (address) - - case *ast.SelectorExpr: - sel, ok := fn.Pkg.info.Selections[e] - if !ok { - // qualified identifier - return b.expr(fn, e.Sel) - } - switch sel.Kind() { - case types.MethodExpr: - // (*T).f or T.f, the method f from the method-set of type T. - // The result is a "thunk". - return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type) - - case types.MethodVal: - // e.f where e is an expression and f is a method. - // The result is a "bound". - obj := sel.Obj().(*types.Func) - rt := recvType(obj) - wantAddr := isPointer(rt) - escaping := true - v := b.receiver(fn, e.X, wantAddr, escaping, sel) - if isInterface(rt) { - // If v has interface type I, - // we must emit a check that v is non-nil. - // We use: typeassert v.(I). - emitTypeAssert(fn, v, rt, token.NoPos) - } - c := &MakeClosure{ - Fn: makeBound(fn.Prog, obj), - Bindings: []Value{v}, - } - c.setPos(e.Sel.Pos()) - c.setType(tv.Type) - return fn.emit(c) - - case types.FieldVal: - indices := sel.Index() - last := len(indices) - 1 - v := b.expr(fn, e.X) - v = emitImplicitSelections(fn, v, indices[:last]) - v = emitFieldSelection(fn, v, indices[last], false, e.Sel) - return v - } - - panic("unexpected expression-relative selector") - - case *ast.IndexExpr: - switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { - case *types.Array: - // Non-addressable array (in a register). - v := &Index{ - X: b.expr(fn, e.X), - Index: emitConv(fn, b.expr(fn, e.Index), tInt), - } - v.setPos(e.Lbrack) - v.setType(t.Elem()) - return fn.emit(v) - - case *types.Map: - // Maps are not addressable. - mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) - v := &Lookup{ - X: b.expr(fn, e.X), - Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), - } - v.setPos(e.Lbrack) - v.setType(mapt.Elem()) - return fn.emit(v) - - case *types.Basic: // => string - // Strings are not addressable. - v := &Lookup{ - X: b.expr(fn, e.X), - Index: b.expr(fn, e.Index), - } - v.setPos(e.Lbrack) - v.setType(tByte) - return fn.emit(v) - - case *types.Slice, *types.Pointer: // *array - // Addressable slice/array; use IndexAddr and Load. - return b.addr(fn, e, false).load(fn) - - default: - panic("unexpected container type in IndexExpr: " + t.String()) - } - - case *ast.CompositeLit, *ast.StarExpr: - // Addressable types (lvalues) - return b.addr(fn, e, false).load(fn) - } - - panic(fmt.Sprintf("unexpected expr: %T", e)) -} - -// stmtList emits to fn code for all statements in list. 
-func (b *builder) stmtList(fn *Function, list []ast.Stmt) { - for _, s := range list { - b.stmt(fn, s) - } -} - -// receiver emits to fn code for expression e in the "receiver" -// position of selection e.f (where f may be a field or a method) and -// returns the effective receiver after applying the implicit field -// selections of sel. -// -// wantAddr requests that the result is an an address. If -// !sel.Indirect(), this may require that e be built in addr() mode; it -// must thus be addressable. -// -// escaping is defined as per builder.addr(). -// -func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value { - var v Value - if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) { - v = b.addr(fn, e, escaping).address(fn) - } else { - v = b.expr(fn, e) - } - - last := len(sel.Index()) - 1 - v = emitImplicitSelections(fn, v, sel.Index()[:last]) - if !wantAddr && isPointer(v.Type()) { - v = emitLoad(fn, v) - } - return v -} - -// setCallFunc populates the function parts of a CallCommon structure -// (Func, Method, Recv, Args[0]) based on the kind of invocation -// occurring in e. -// -func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { - c.pos = e.Lparen - - // Is this a method call? - if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { - sel, ok := fn.Pkg.info.Selections[selector] - if ok && sel.Kind() == types.MethodVal { - obj := sel.Obj().(*types.Func) - recv := recvType(obj) - wantAddr := isPointer(recv) - escaping := true - v := b.receiver(fn, selector.X, wantAddr, escaping, sel) - if isInterface(recv) { - // Invoke-mode call. - c.Value = v - c.Method = obj - } else { - // "Call"-mode call. - c.Value = fn.Prog.declaredFunc(obj) - c.Args = append(c.Args, v) - } - return - } - - // sel.Kind()==MethodExpr indicates T.f() or (*T).f(): - // a statically dispatched call to the method f in the - // method-set of T or *T. T may be an interface. - // - // e.Fun would evaluate to a concrete method, interface - // wrapper function, or promotion wrapper. - // - // For now, we evaluate it in the usual way. - // - // TODO(adonovan): opt: inline expr() here, to make the - // call static and to avoid generation of wrappers. - // It's somewhat tricky as it may consume the first - // actual parameter if the call is "invoke" mode. - // - // Examples: - // type T struct{}; func (T) f() {} // "call" mode - // type T interface { f() } // "invoke" mode - // - // type S struct{ T } - // - // var s S - // S.f(s) - // (*S).f(&s) - // - // Suggested approach: - // - consume the first actual parameter expression - // and build it with b.expr(). - // - apply implicit field selections. - // - use MethodVal logic to populate fields of c. - } - - // Evaluate the function operand in the usual way. - c.Value = b.expr(fn, e.Fun) -} - -// emitCallArgs emits to f code for the actual parameters of call e to -// a (possibly built-in) function of effective type sig. -// The argument values are appended to args, which is then returned. -// -func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { - // f(x, y, z...): pass slice z straight through. - if e.Ellipsis != 0 { - for i, arg := range e.Args { - v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type()) - args = append(args, v) - } - return args - } - - offset := len(args) // 1 if call has receiver, 0 otherwise - - // Evaluate actual parameter expressions. 
- // - // If this is a chained call of the form f(g()) where g has - // multiple return values (MRV), they are flattened out into - // args; a suffix of them may end up in a varargs slice. - for _, arg := range e.Args { - v := b.expr(fn, arg) - if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain - for i, n := 0, ttuple.Len(); i < n; i++ { - args = append(args, emitExtract(fn, v, i)) - } - } else { - args = append(args, v) - } - } - - // Actual->formal assignability conversions for normal parameters. - np := sig.Params().Len() // number of normal parameters - if sig.Variadic() { - np-- - } - for i := 0; i < np; i++ { - args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type()) - } - - // Actual->formal assignability conversions for variadic parameter, - // and construction of slice. - if sig.Variadic() { - varargs := args[offset+np:] - st := sig.Params().At(np).Type().(*types.Slice) - vt := st.Elem() - if len(varargs) == 0 { - args = append(args, nilConst(st)) - } else { - // Replace a suffix of args with a slice containing it. - at := types.NewArray(vt, int64(len(varargs))) - a := emitNew(fn, at, token.NoPos) - a.setPos(e.Rparen) - a.Comment = "varargs" - for i, arg := range varargs { - iaddr := &IndexAddr{ - X: a, - Index: intConst(int64(i)), - } - iaddr.setType(types.NewPointer(vt)) - fn.emit(iaddr) - emitStore(fn, iaddr, arg, arg.Pos()) - } - s := &Slice{X: a} - s.setType(st) - args[offset+np] = fn.emit(s) - args = args[:offset+np+1] - } - } - return args -} - -// setCall emits to fn code to evaluate all the parameters of a function -// call e, and populates *c with those values. -// -func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { - // First deal with the f(...) part and optional receiver. - b.setCallFunc(fn, e, c) - - // Then append the other actual parameters. - sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature) - if sig == nil { - panic(fmt.Sprintf("no signature for call of %s", e.Fun)) - } - c.Args = b.emitCallArgs(fn, sig, e, c.Args) -} - -// assignOp emits to fn code to perform loc += incr or loc -= incr. -func (b *builder) assignOp(fn *Function, loc lvalue, incr Value, op token.Token, pos token.Pos) { - oldv := loc.load(fn) - loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, incr, oldv.Type()), loc.typ(), pos)) -} - -// localValueSpec emits to fn code to define all of the vars in the -// function-local ValueSpec, spec. -// -func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { - switch { - case len(spec.Values) == len(spec.Names): - // e.g. var x, y = 0, 1 - // 1:1 assignment - for i, id := range spec.Names { - if !isBlankIdent(id) { - fn.addLocalForIdent(id) - } - lval := b.addr(fn, id, false) // non-escaping - b.assign(fn, lval, spec.Values[i], true, nil) - } - - case len(spec.Values) == 0: - // e.g. var x, y int - // Locals are implicitly zero-initialized. - for _, id := range spec.Names { - if !isBlankIdent(id) { - lhs := fn.addLocalForIdent(id) - if fn.debugInfo() { - emitDebugRef(fn, id, lhs, true) - } - } - } - - default: - // e.g. var x, y = pos() - tuple := b.exprN(fn, spec.Values[0]) - for i, id := range spec.Names { - if !isBlankIdent(id) { - fn.addLocalForIdent(id) - lhs := b.addr(fn, id, false) // non-escaping - lhs.store(fn, emitExtract(fn, tuple, i)) - } - } - } -} - -// assignStmt emits code to fn for a parallel assignment of rhss to lhss. -// isDef is true if this is a short variable declaration (:=). -// -// Note the similarity with localValueSpec. 
-// -func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) { - // Side effects of all LHSs and RHSs must occur in left-to-right order. - lvals := make([]lvalue, len(lhss)) - isZero := make([]bool, len(lhss)) - for i, lhs := range lhss { - var lval lvalue = blank{} - if !isBlankIdent(lhs) { - if isDef { - if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil { - fn.addNamedLocal(obj) - isZero[i] = true - } - } - lval = b.addr(fn, lhs, false) // non-escaping - } - lvals[i] = lval - } - if len(lhss) == len(rhss) { - // Simple assignment: x = f() (!isDef) - // Parallel assignment: x, y = f(), g() (!isDef) - // or short var decl: x, y := f(), g() (isDef) - // - // In all cases, the RHSs may refer to the LHSs, - // so we need a storebuf. - var sb storebuf - for i := range rhss { - b.assign(fn, lvals[i], rhss[i], isZero[i], &sb) - } - sb.emit(fn) - } else { - // e.g. x, y = pos() - tuple := b.exprN(fn, rhss[0]) - emitDebugRef(fn, rhss[0], tuple, false) - for i, lval := range lvals { - lval.store(fn, emitExtract(fn, tuple, i)) - } - } -} - -// arrayLen returns the length of the array whose composite literal elements are elts. -func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { - var max int64 = -1 - var i int64 = -1 - for _, e := range elts { - if kv, ok := e.(*ast.KeyValueExpr); ok { - i = b.expr(fn, kv.Key).(*Const).Int64() - } else { - i++ - } - if i > max { - max = i - } - } - return max + 1 -} - -// compLit emits to fn code to initialize a composite literal e at -// address addr with type typ. -// -// Nested composite literals are recursively initialized in place -// where possible. If isZero is true, compLit assumes that addr -// holds the zero value for typ. -// -// Because the elements of a composite literal may refer to the -// variables being updated, as in the second line below, -// x := T{a: 1} -// x = T{a: x.a} -// all the reads must occur before all the writes. Thus all stores to -// loc are emitted to the storebuf sb for later execution. -// -// A CompositeLit may have pointer type only in the recursive (nested) -// case when the type name is implicit. e.g. in []*T{{}}, the inner -// literal has type *T behaves like &T{}. -// In that case, addr must hold a T, not a *T. 
-// -func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { - typ := deref(fn.Pkg.typeOf(e)) - switch t := typ.Underlying().(type) { - case *types.Struct: - if !isZero && len(e.Elts) != t.NumFields() { - // memclear - sb.store(&address{addr, e.Lbrace, nil}, - zeroValue(fn, deref(addr.Type()))) - isZero = true - } - for i, e := range e.Elts { - fieldIndex := i - pos := e.Pos() - if kv, ok := e.(*ast.KeyValueExpr); ok { - fname := kv.Key.(*ast.Ident).Name - for i, n := 0, t.NumFields(); i < n; i++ { - sf := t.Field(i) - if sf.Name() == fname { - fieldIndex = i - pos = kv.Colon - e = kv.Value - break - } - } - } - sf := t.Field(fieldIndex) - faddr := &FieldAddr{ - X: addr, - Field: fieldIndex, - } - faddr.setType(types.NewPointer(sf.Type())) - fn.emit(faddr) - b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb) - } - - case *types.Array, *types.Slice: - var at *types.Array - var array Value - switch t := t.(type) { - case *types.Slice: - at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) - alloc := emitNew(fn, at, e.Lbrace) - alloc.Comment = "slicelit" - array = alloc - case *types.Array: - at = t - array = addr - - if !isZero && int64(len(e.Elts)) != at.Len() { - // memclear - sb.store(&address{array, e.Lbrace, nil}, - zeroValue(fn, deref(array.Type()))) - } - } - - var idx *Const - for _, e := range e.Elts { - pos := e.Pos() - if kv, ok := e.(*ast.KeyValueExpr); ok { - idx = b.expr(fn, kv.Key).(*Const) - pos = kv.Colon - e = kv.Value - } else { - var idxval int64 - if idx != nil { - idxval = idx.Int64() + 1 - } - idx = intConst(idxval) - } - iaddr := &IndexAddr{ - X: array, - Index: idx, - } - iaddr.setType(types.NewPointer(at.Elem())) - fn.emit(iaddr) - if t != at { // slice - // backing array is unaliased => storebuf not needed. - b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil) - } else { - b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb) - } - } - - if t != at { // slice - s := &Slice{X: array} - s.setPos(e.Lbrace) - s.setType(typ) - sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s)) - } - - case *types.Map: - m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))} - m.setPos(e.Lbrace) - m.setType(typ) - fn.emit(m) - for _, e := range e.Elts { - e := e.(*ast.KeyValueExpr) - - // If a key expression in a map literal is itself a - // composite literal, the type may be omitted. - // For example: - // map[*struct{}]bool{{}: true} - // An &-operation may be implied: - // map[*struct{}]bool{&struct{}{}: true} - var key Value - if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) { - // A CompositeLit never evaluates to a pointer, - // so if the type of the location is a pointer, - // an &-operation is implied. - key = b.addr(fn, e.Key, true).address(fn) - } else { - key = b.expr(fn, e.Key) - } - - loc := element{ - m: m, - k: emitConv(fn, key, t.Key()), - t: t.Elem(), - pos: e.Colon, - } - - // We call assign() only because it takes care - // of any &-operation required in the recursive - // case, e.g., - // map[int]*struct{}{0: {}} implies &struct{}{}. - // In-place update is of course impossible, - // and no storebuf is needed. - b.assign(fn, &loc, e.Value, true, nil) - } - sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m) - - default: - panic("unexpected CompositeLit type: " + t.String()) - } -} - -// switchStmt emits to fn code for the switch statement s, optionally -// labelled by label. 
-// -func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { - // We treat SwitchStmt like a sequential if-else chain. - // Multiway dispatch can be recovered later by ssautil.Switches() - // to those cases that are free of side effects. - if s.Init != nil { - b.stmt(fn, s.Init) - } - var tag Value = vTrue - if s.Tag != nil { - tag = b.expr(fn, s.Tag) - } - done := fn.newBasicBlock("switch.done") - if label != nil { - label._break = done - } - // We pull the default case (if present) down to the end. - // But each fallthrough label must point to the next - // body block in source order, so we preallocate a - // body block (fallthru) for the next case. - // Unfortunately this makes for a confusing block order. - var dfltBody *[]ast.Stmt - var dfltFallthrough *BasicBlock - var fallthru, dfltBlock *BasicBlock - ncases := len(s.Body.List) - for i, clause := range s.Body.List { - body := fallthru - if body == nil { - body = fn.newBasicBlock("switch.body") // first case only - } - - // Preallocate body block for the next case. - fallthru = done - if i+1 < ncases { - fallthru = fn.newBasicBlock("switch.body") - } - - cc := clause.(*ast.CaseClause) - if cc.List == nil { - // Default case. - dfltBody = &cc.Body - dfltFallthrough = fallthru - dfltBlock = body - continue - } - - var nextCond *BasicBlock - for _, cond := range cc.List { - nextCond = fn.newBasicBlock("switch.next") - // TODO(adonovan): opt: when tag==vTrue, we'd - // get better code if we use b.cond(cond) - // instead of BinOp(EQL, tag, b.expr(cond)) - // followed by If. Don't forget conversions - // though. - cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos) - emitIf(fn, cond, body, nextCond) - fn.currentBlock = nextCond - } - fn.currentBlock = body - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _fallthrough: fallthru, - } - b.stmtList(fn, cc.Body) - fn.targets = fn.targets.tail - emitJump(fn, done) - fn.currentBlock = nextCond - } - if dfltBlock != nil { - emitJump(fn, dfltBlock) - fn.currentBlock = dfltBlock - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _fallthrough: dfltFallthrough, - } - b.stmtList(fn, *dfltBody) - fn.targets = fn.targets.tail - } - emitJump(fn, done) - fn.currentBlock = done -} - -// typeSwitchStmt emits to fn code for the type switch statement s, optionally -// labelled by label. -// -func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { - // We treat TypeSwitchStmt like a sequential if-else chain. - // Multiway dispatch can be recovered later by ssautil.Switches(). - - // Typeswitch lowering: - // - // var x X - // switch y := x.(type) { - // case T1, T2: S1 // >1 (y := x) - // case nil: SN // nil (y := x) - // default: SD // 0 types (y := x) - // case T3: S3 // 1 type (y := x.(T3)) - // } - // - // ...s.Init... - // x := eval x - // .caseT1: - // t1, ok1 := typeswitch,ok x - // if ok1 then goto S1 else goto .caseT2 - // .caseT2: - // t2, ok2 := typeswitch,ok x - // if ok2 then goto S1 else goto .caseNil - // .S1: - // y := x - // ...S1... - // goto done - // .caseNil: - // if t2, ok2 := typeswitch,ok x - // if x == nil then goto SN else goto .caseT3 - // .SN: - // y := x - // ...SN... - // goto done - // .caseT3: - // t3, ok3 := typeswitch,ok x - // if ok3 then goto S3 else goto default - // .S3: - // y := t3 - // ...S3... - // goto done - // .default: - // y := x - // ...SD... 
- // goto done - // .done: - - if s.Init != nil { - b.stmt(fn, s.Init) - } - - var x Value - switch ass := s.Assign.(type) { - case *ast.ExprStmt: // x.(type) - x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X) - case *ast.AssignStmt: // y := x.(type) - x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) - } - - done := fn.newBasicBlock("typeswitch.done") - if label != nil { - label._break = done - } - var default_ *ast.CaseClause - for _, clause := range s.Body.List { - cc := clause.(*ast.CaseClause) - if cc.List == nil { - default_ = cc - continue - } - body := fn.newBasicBlock("typeswitch.body") - var next *BasicBlock - var casetype types.Type - var ti Value // ti, ok := typeassert,ok x - for _, cond := range cc.List { - next = fn.newBasicBlock("typeswitch.next") - casetype = fn.Pkg.typeOf(cond) - var condv Value - if casetype == tUntypedNil { - condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos) - ti = x - } else { - yok := emitTypeTest(fn, x, casetype, cc.Case) - ti = emitExtract(fn, yok, 0) - condv = emitExtract(fn, yok, 1) - } - emitIf(fn, condv, body, next) - fn.currentBlock = next - } - if len(cc.List) != 1 { - ti = x - } - fn.currentBlock = body - b.typeCaseBody(fn, cc, ti, done) - fn.currentBlock = next - } - if default_ != nil { - b.typeCaseBody(fn, default_, x, done) - } else { - emitJump(fn, done) - } - fn.currentBlock = done -} - -func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) { - if obj := fn.Pkg.info.Implicits[cc]; obj != nil { - // In a switch y := x.(type), each case clause - // implicitly declares a distinct object y. - // In a single-type case, y has that type. - // In multi-type cases, 'case nil' and default, - // y has the same type as the interface operand. - emitStore(fn, fn.addNamedLocal(obj), x, obj.Pos()) - } - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - b.stmtList(fn, cc.Body) - fn.targets = fn.targets.tail - emitJump(fn, done) -} - -// selectStmt emits to fn code for the select statement s, optionally -// labelled by label. -// -func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { - // A blocking select of a single case degenerates to a - // simple send or receive. - // TODO(adonovan): opt: is this optimization worth its weight? - if len(s.Body.List) == 1 { - clause := s.Body.List[0].(*ast.CommClause) - if clause.Comm != nil { - b.stmt(fn, clause.Comm) - done := fn.newBasicBlock("select.done") - if label != nil { - label._break = done - } - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - b.stmtList(fn, clause.Body) - fn.targets = fn.targets.tail - emitJump(fn, done) - fn.currentBlock = done - return - } - } - - // First evaluate all channels in all cases, and find - // the directions of each state. 
- var states []*SelectState - blocking := true - debugInfo := fn.debugInfo() - for _, clause := range s.Body.List { - var st *SelectState - switch comm := clause.(*ast.CommClause).Comm.(type) { - case nil: // default case - blocking = false - continue - - case *ast.SendStmt: // ch<- i - ch := b.expr(fn, comm.Chan) - st = &SelectState{ - Dir: types.SendOnly, - Chan: ch, - Send: emitConv(fn, b.expr(fn, comm.Value), - ch.Type().Underlying().(*types.Chan).Elem()), - Pos: comm.Arrow, - } - if debugInfo { - st.DebugNode = comm - } - - case *ast.AssignStmt: // x := <-ch - recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) - st = &SelectState{ - Dir: types.RecvOnly, - Chan: b.expr(fn, recv.X), - Pos: recv.OpPos, - } - if debugInfo { - st.DebugNode = recv - } - - case *ast.ExprStmt: // <-ch - recv := unparen(comm.X).(*ast.UnaryExpr) - st = &SelectState{ - Dir: types.RecvOnly, - Chan: b.expr(fn, recv.X), - Pos: recv.OpPos, - } - if debugInfo { - st.DebugNode = recv - } - } - states = append(states, st) - } - - // We dispatch on the (fair) result of Select using a - // sequential if-else chain, in effect: - // - // idx, recvOk, r0...r_n-1 := select(...) - // if idx == 0 { // receive on channel 0 (first receive => r0) - // x, ok := r0, recvOk - // ...state0... - // } else if v == 1 { // send on channel 1 - // ...state1... - // } else { - // ...default... - // } - sel := &Select{ - States: states, - Blocking: blocking, - } - sel.setPos(s.Select) - var vars []*types.Var - vars = append(vars, varIndex, varOk) - for _, st := range states { - if st.Dir == types.RecvOnly { - tElem := st.Chan.Type().Underlying().(*types.Chan).Elem() - vars = append(vars, anonVar(tElem)) - } - } - sel.setType(types.NewTuple(vars...)) - - fn.emit(sel) - idx := emitExtract(fn, sel, 0) - - done := fn.newBasicBlock("select.done") - if label != nil { - label._break = done - } - - var defaultBody *[]ast.Stmt - state := 0 - r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV - for _, cc := range s.Body.List { - clause := cc.(*ast.CommClause) - if clause.Comm == nil { - defaultBody = &clause.Body - continue - } - body := fn.newBasicBlock("select.body") - next := fn.newBasicBlock("select.next") - emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next) - fn.currentBlock = body - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - switch comm := clause.Comm.(type) { - case *ast.ExprStmt: // <-ch - if debugInfo { - v := emitExtract(fn, sel, r) - emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) - } - r++ - - case *ast.AssignStmt: // x := <-states[state].Chan - if comm.Tok == token.DEFINE { - fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident)) - } - x := b.addr(fn, comm.Lhs[0], false) // non-escaping - v := emitExtract(fn, sel, r) - if debugInfo { - emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) - } - x.store(fn, v) - - if len(comm.Lhs) == 2 { // x, ok := ... - if comm.Tok == token.DEFINE { - fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident)) - } - ok := b.addr(fn, comm.Lhs[1], false) // non-escaping - ok.store(fn, emitExtract(fn, sel, 1)) - } - r++ - } - b.stmtList(fn, clause.Body) - fn.targets = fn.targets.tail - emitJump(fn, done) - fn.currentBlock = next - state++ - } - if defaultBody != nil { - fn.targets = &targets{ - tail: fn.targets, - _break: done, - } - b.stmtList(fn, *defaultBody) - fn.targets = fn.targets.tail - } else { - // A blocking select must match some case. - // (This should really be a runtime.errorString, not a string.) 
- fn.emit(&Panic{ - X: emitConv(fn, stringConst("blocking select matched no case"), tEface), - }) - fn.currentBlock = fn.newBasicBlock("unreachable") - } - emitJump(fn, done) - fn.currentBlock = done -} - -// forStmt emits to fn code for the for statement s, optionally -// labelled by label. -// -func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { - // ...init... - // jump loop - // loop: - // if cond goto body else done - // body: - // ...body... - // jump post - // post: (target of continue) - // ...post... - // jump loop - // done: (target of break) - if s.Init != nil { - b.stmt(fn, s.Init) - } - body := fn.newBasicBlock("for.body") - done := fn.newBasicBlock("for.done") // target of 'break' - loop := body // target of back-edge - if s.Cond != nil { - loop = fn.newBasicBlock("for.loop") - } - cont := loop // target of 'continue' - if s.Post != nil { - cont = fn.newBasicBlock("for.post") - } - if label != nil { - label._break = done - label._continue = cont - } - emitJump(fn, loop) - fn.currentBlock = loop - if loop != body { - b.cond(fn, s.Cond, body, done) - fn.currentBlock = body - } - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _continue: cont, - } - b.stmt(fn, s.Body) - fn.targets = fn.targets.tail - emitJump(fn, cont) - - if s.Post != nil { - fn.currentBlock = cont - b.stmt(fn, s.Post) - emitJump(fn, loop) // back-edge - } - fn.currentBlock = done -} - -// rangeIndexed emits to fn the header for an integer-indexed loop -// over array, *array or slice value x. -// The v result is defined only if tv is non-nil. -// forPos is the position of the "for" token. -// -func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { - // - // length = len(x) - // index = -1 - // loop: (target of continue) - // index++ - // if index < length goto body else done - // body: - // k = index - // v = x[index] - // ...body... - // jump loop - // done: (target of break) - - // Determine number of iterations. - var length Value - if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok { - // For array or *array, the number of iterations is - // known statically thanks to the type. We avoid a - // data dependence upon x, permitting later dead-code - // elimination if x is pure, static unrolling, etc. - // Ranging over a nil *array may have >0 iterations. - // We still generate code for x, in case it has effects. - length = intConst(arr.Len()) - } else { - // length = len(x). 
- var c Call - c.Call.Value = makeLen(x.Type()) - c.Call.Args = []Value{x} - c.setType(tInt) - length = fn.emit(&c) - } - - index := fn.addLocal(tInt, token.NoPos) - emitStore(fn, index, intConst(-1), pos) - - loop = fn.newBasicBlock("rangeindex.loop") - emitJump(fn, loop) - fn.currentBlock = loop - - incr := &BinOp{ - Op: token.ADD, - X: emitLoad(fn, index), - Y: vOne, - } - incr.setType(tInt) - emitStore(fn, index, fn.emit(incr), pos) - - body := fn.newBasicBlock("rangeindex.body") - done = fn.newBasicBlock("rangeindex.done") - emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done) - fn.currentBlock = body - - k = emitLoad(fn, index) - if tv != nil { - switch t := x.Type().Underlying().(type) { - case *types.Array: - instr := &Index{ - X: x, - Index: k, - } - instr.setType(t.Elem()) - v = fn.emit(instr) - - case *types.Pointer: // *array - instr := &IndexAddr{ - X: x, - Index: k, - } - instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) - v = emitLoad(fn, fn.emit(instr)) - - case *types.Slice: - instr := &IndexAddr{ - X: x, - Index: k, - } - instr.setType(types.NewPointer(t.Elem())) - v = emitLoad(fn, fn.emit(instr)) - - default: - panic("rangeIndexed x:" + t.String()) - } - } - return -} - -// rangeIter emits to fn the header for a loop using -// Range/Next/Extract to iterate over map or string value x. -// tk and tv are the types of the key/value results k and v, or nil -// if the respective component is not wanted. -// -func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { - // - // it = range x - // loop: (target of continue) - // okv = next it (ok, key, value) - // ok = extract okv #0 - // if ok goto body else done - // body: - // k = extract okv #1 - // v = extract okv #2 - // ...body... - // jump loop - // done: (target of break) - // - - if tk == nil { - tk = tInvalid - } - if tv == nil { - tv = tInvalid - } - - rng := &Range{X: x} - rng.setPos(pos) - rng.setType(tRangeIter) - it := fn.emit(rng) - - loop = fn.newBasicBlock("rangeiter.loop") - emitJump(fn, loop) - fn.currentBlock = loop - - _, isString := x.Type().Underlying().(*types.Basic) - - okv := &Next{ - Iter: it, - IsString: isString, - } - okv.setType(types.NewTuple( - varOk, - newVar("k", tk), - newVar("v", tv), - )) - fn.emit(okv) - - body := fn.newBasicBlock("rangeiter.body") - done = fn.newBasicBlock("rangeiter.done") - emitIf(fn, emitExtract(fn, okv, 0), body, done) - fn.currentBlock = body - - if tk != tInvalid { - k = emitExtract(fn, okv, 1) - } - if tv != tInvalid { - v = emitExtract(fn, okv, 2) - } - return -} - -// rangeChan emits to fn the header for a loop that receives from -// channel x until it fails. -// tk is the channel's element type, or nil if the k result is -// not wanted -// pos is the position of the '=' or ':=' token. -// -func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { - // - // loop: (target of continue) - // ko = <-x (key, ok) - // ok = extract ko #1 - // if ok goto body else done - // body: - // k = extract ko #0 - // ... 
- // goto loop - // done: (target of break) - - loop = fn.newBasicBlock("rangechan.loop") - emitJump(fn, loop) - fn.currentBlock = loop - recv := &UnOp{ - Op: token.ARROW, - X: x, - CommaOk: true, - } - recv.setPos(pos) - recv.setType(types.NewTuple( - newVar("k", x.Type().Underlying().(*types.Chan).Elem()), - varOk, - )) - ko := fn.emit(recv) - body := fn.newBasicBlock("rangechan.body") - done = fn.newBasicBlock("rangechan.done") - emitIf(fn, emitExtract(fn, ko, 1), body, done) - fn.currentBlock = body - if tk != nil { - k = emitExtract(fn, ko, 0) - } - return -} - -// rangeStmt emits to fn code for the range statement s, optionally -// labelled by label. -// -func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) { - var tk, tv types.Type - if s.Key != nil && !isBlankIdent(s.Key) { - tk = fn.Pkg.typeOf(s.Key) - } - if s.Value != nil && !isBlankIdent(s.Value) { - tv = fn.Pkg.typeOf(s.Value) - } - - // If iteration variables are defined (:=), this - // occurs once outside the loop. - // - // Unlike a short variable declaration, a RangeStmt - // using := never redeclares an existing variable; it - // always creates a new one. - if s.Tok == token.DEFINE { - if tk != nil { - fn.addLocalForIdent(s.Key.(*ast.Ident)) - } - if tv != nil { - fn.addLocalForIdent(s.Value.(*ast.Ident)) - } - } - - x := b.expr(fn, s.X) - - var k, v Value - var loop, done *BasicBlock - switch rt := x.Type().Underlying().(type) { - case *types.Slice, *types.Array, *types.Pointer: // *array - k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For) - - case *types.Chan: - k, loop, done = b.rangeChan(fn, x, tk, s.For) - - case *types.Map, *types.Basic: // string - k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) - - default: - panic("Cannot range over: " + rt.String()) - } - - // Evaluate both LHS expressions before we update either. - var kl, vl lvalue - if tk != nil { - kl = b.addr(fn, s.Key, false) // non-escaping - } - if tv != nil { - vl = b.addr(fn, s.Value, false) // non-escaping - } - if tk != nil { - kl.store(fn, k) - } - if tv != nil { - vl.store(fn, v) - } - - if label != nil { - label._break = done - label._continue = loop - } - - fn.targets = &targets{ - tail: fn.targets, - _break: done, - _continue: loop, - } - b.stmt(fn, s.Body) - fn.targets = fn.targets.tail - emitJump(fn, loop) // back-edge - fn.currentBlock = done -} - -// stmt lowers statement s to SSA form, emitting code to fn. -func (b *builder) stmt(fn *Function, _s ast.Stmt) { - // The label of the current statement. If non-nil, its _goto - // target is always set; its _break and _continue are set only - // within the body of switch/typeswitch/select/for/range. - // It is effectively an additional default-nil parameter of stmt(). - var label *lblock -start: - switch s := _s.(type) { - case *ast.EmptyStmt: - // ignore. (Usually removed by gofmt.) 
- - case *ast.DeclStmt: // Con, Var or Typ - d := s.Decl.(*ast.GenDecl) - if d.Tok == token.VAR { - for _, spec := range d.Specs { - if vs, ok := spec.(*ast.ValueSpec); ok { - b.localValueSpec(fn, vs) - } - } - } - - case *ast.LabeledStmt: - label = fn.labelledBlock(s.Label) - emitJump(fn, label._goto) - fn.currentBlock = label._goto - _s = s.Stmt - goto start // effectively: tailcall stmt(fn, s.Stmt, label) - - case *ast.ExprStmt: - b.expr(fn, s.X) - - case *ast.SendStmt: - fn.emit(&Send{ - Chan: b.expr(fn, s.Chan), - X: emitConv(fn, b.expr(fn, s.Value), - fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()), - pos: s.Arrow, - }) - - case *ast.IncDecStmt: - op := token.ADD - if s.Tok == token.DEC { - op = token.SUB - } - loc := b.addr(fn, s.X, false) - b.assignOp(fn, loc, NewConst(exact.MakeInt64(1), loc.typ()), op, s.Pos()) - - case *ast.AssignStmt: - switch s.Tok { - case token.ASSIGN, token.DEFINE: - b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE) - - default: // +=, etc. - op := s.Tok + token.ADD - token.ADD_ASSIGN - b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos()) - } - - case *ast.GoStmt: - // The "intrinsics" new/make/len/cap are forbidden here. - // panic is treated like an ordinary function call. - v := Go{pos: s.Go} - b.setCall(fn, s.Call, &v.Call) - fn.emit(&v) - - case *ast.DeferStmt: - // The "intrinsics" new/make/len/cap are forbidden here. - // panic is treated like an ordinary function call. - v := Defer{pos: s.Defer} - b.setCall(fn, s.Call, &v.Call) - fn.emit(&v) - - // A deferred call can cause recovery from panic, - // and control resumes at the Recover block. - createRecoverBlock(fn) - - case *ast.ReturnStmt: - var results []Value - if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 { - // Return of one expression in a multi-valued function. - tuple := b.exprN(fn, s.Results[0]) - ttuple := tuple.Type().(*types.Tuple) - for i, n := 0, ttuple.Len(); i < n; i++ { - results = append(results, - emitConv(fn, emitExtract(fn, tuple, i), - fn.Signature.Results().At(i).Type())) - } - } else { - // 1:1 return, or no-arg return in non-void function. - for i, r := range s.Results { - v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type()) - results = append(results, v) - } - } - if fn.namedResults != nil { - // Function has named result parameters (NRPs). - // Perform parallel assignment of return operands to NRPs. - for i, r := range results { - emitStore(fn, fn.namedResults[i], r, s.Return) - } - } - // Run function calls deferred in this - // function when explicitly returning from it. - fn.emit(new(RunDefers)) - if fn.namedResults != nil { - // Reload NRPs to form the result tuple. 
- results = results[:0] - for _, r := range fn.namedResults { - results = append(results, emitLoad(fn, r)) - } - } - fn.emit(&Return{Results: results, pos: s.Return}) - fn.currentBlock = fn.newBasicBlock("unreachable") - - case *ast.BranchStmt: - var block *BasicBlock - switch s.Tok { - case token.BREAK: - if s.Label != nil { - block = fn.labelledBlock(s.Label)._break - } else { - for t := fn.targets; t != nil && block == nil; t = t.tail { - block = t._break - } - } - - case token.CONTINUE: - if s.Label != nil { - block = fn.labelledBlock(s.Label)._continue - } else { - for t := fn.targets; t != nil && block == nil; t = t.tail { - block = t._continue - } - } - - case token.FALLTHROUGH: - for t := fn.targets; t != nil && block == nil; t = t.tail { - block = t._fallthrough - } - - case token.GOTO: - block = fn.labelledBlock(s.Label)._goto - } - emitJump(fn, block) - fn.currentBlock = fn.newBasicBlock("unreachable") - - case *ast.BlockStmt: - b.stmtList(fn, s.List) - - case *ast.IfStmt: - if s.Init != nil { - b.stmt(fn, s.Init) - } - then := fn.newBasicBlock("if.then") - done := fn.newBasicBlock("if.done") - els := done - if s.Else != nil { - els = fn.newBasicBlock("if.else") - } - b.cond(fn, s.Cond, then, els) - fn.currentBlock = then - b.stmt(fn, s.Body) - emitJump(fn, done) - - if s.Else != nil { - fn.currentBlock = els - b.stmt(fn, s.Else) - emitJump(fn, done) - } - - fn.currentBlock = done - - case *ast.SwitchStmt: - b.switchStmt(fn, s, label) - - case *ast.TypeSwitchStmt: - b.typeSwitchStmt(fn, s, label) - - case *ast.SelectStmt: - b.selectStmt(fn, s, label) - - case *ast.ForStmt: - b.forStmt(fn, s, label) - - case *ast.RangeStmt: - b.rangeStmt(fn, s, label) - - default: - panic(fmt.Sprintf("unexpected statement kind: %T", s)) - } -} - -// buildFunction builds SSA code for the body of function fn. Idempotent. -func (b *builder) buildFunction(fn *Function) { - if fn.Blocks != nil { - return // building already started - } - - var recvField *ast.FieldList - var body *ast.BlockStmt - var functype *ast.FuncType - switch n := fn.syntax.(type) { - case nil: - return // not a Go source function. (Synthetic, or from object file.) - case *ast.FuncDecl: - functype = n.Type - recvField = n.Recv - body = n.Body - case *ast.FuncLit: - functype = n.Type - body = n.Body - default: - panic(n) - } - - if body == nil { - // External function. - if fn.Params == nil { - // This condition ensures we add a non-empty - // params list once only, but we may attempt - // the degenerate empty case repeatedly. - // TODO(adonovan): opt: don't do that. - - // We set Function.Params even though there is no body - // code to reference them. This simplifies clients. - if recv := fn.Signature.Recv(); recv != nil { - fn.addParamObj(recv) - } - params := fn.Signature.Params() - for i, n := 0, params.Len(); i < n; i++ { - fn.addParamObj(params.At(i)) - } - } - return - } - if fn.Prog.mode&LogSource != 0 { - defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))() - } - fn.startBody() - fn.createSyntacticParams(recvField, functype) - b.stmt(fn, body) - if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) { - // Control fell off the end of the function's body block. - // - // Block optimizations eliminate the current block, if - // unreachable. It is a builder invariant that - // if this no-arg return is ill-typed for - // fn.Signature.Results, this block must be - // unreachable. The sanity checker checks this. 
- fn.emit(new(RunDefers)) - fn.emit(new(Return)) - } - fn.finishBody() -} - -// buildFuncDecl builds SSA code for the function or method declared -// by decl in package pkg. -// -func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) { - id := decl.Name - if isBlankIdent(id) { - return // discard - } - fn := pkg.values[pkg.info.Defs[id]].(*Function) - if decl.Recv == nil && id.Name == "init" { - var v Call - v.Call.Value = fn - v.setType(types.NewTuple()) - pkg.init.emit(&v) - } - b.buildFunction(fn) -} - -// Build calls Package.Build for each package in prog. -// Building occurs in parallel unless the BuildSerially mode flag was set. -// -// Build is intended for whole-program analysis; a typical compiler -// need only build a single package. -// -// Build is idempotent and thread-safe. -// -func (prog *Program) Build() { - var wg sync.WaitGroup - for _, p := range prog.packages { - if prog.mode&BuildSerially != 0 { - p.Build() - } else { - wg.Add(1) - go func(p *Package) { - p.Build() - wg.Done() - }(p) - } - } - wg.Wait() -} - -// Build builds SSA code for all functions and vars in package p. -// -// Precondition: CreatePackage must have been called for all of p's -// direct imports (and hence its direct imports must have been -// error-free). -// -// Build is idempotent and thread-safe. -// -func (p *Package) Build() { p.buildOnce.Do(p.build) } - -func (p *Package) build() { - if p.info == nil { - return // synthetic package, e.g. "testmain" - } - if p.files == nil { - p.info = nil - return // package loaded from export data - } - - // Ensure we have runtime type info for all exported members. - // TODO(adonovan): ideally belongs in memberFromObject, but - // that would require package creation in topological order. - for name, mem := range p.Members { - if ast.IsExported(name) { - p.Prog.needMethodsOf(mem.Type()) - } - } - if p.Prog.mode&LogSource != 0 { - defer logStack("build %s", p)() - } - init := p.init - init.startBody() - - var done *BasicBlock - - if p.Prog.mode&BareInits == 0 { - // Make init() skip if package is already initialized. - initguard := p.Var("init$guard") - doinit := init.newBasicBlock("init.start") - done = init.newBasicBlock("init.done") - emitIf(init, emitLoad(init, initguard), done, doinit) - init.currentBlock = doinit - emitStore(init, initguard, vTrue, token.NoPos) - - // Call the init() function of each package we import. - for _, pkg := range p.Pkg.Imports() { - prereq := p.Prog.packages[pkg] - if prereq == nil { - panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path())) - } - var v Call - v.Call.Value = prereq.init - v.Call.pos = init.pos - v.setType(types.NewTuple()) - init.emit(&v) - } - } - - var b builder - - // Initialize package-level vars in correct order. 
- for _, varinit := range p.info.InitOrder { - if init.Prog.mode&LogSource != 0 { - fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n", - varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos())) - } - if len(varinit.Lhs) == 1 { - // 1:1 initialization: var x, y = a(), b() - var lval lvalue - if v := varinit.Lhs[0]; v.Name() != "_" { - lval = &address{addr: p.values[v].(*Global), pos: v.Pos()} - } else { - lval = blank{} - } - b.assign(init, lval, varinit.Rhs, true, nil) - } else { - // n:1 initialization: var x, y := f() - tuple := b.exprN(init, varinit.Rhs) - for i, v := range varinit.Lhs { - if v.Name() == "_" { - continue - } - emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos()) - } - } - } - - // Build all package-level functions, init functions - // and methods, including unreachable/blank ones. - // We build them in source order, but it's not significant. - for _, file := range p.files { - for _, decl := range file.Decls { - if decl, ok := decl.(*ast.FuncDecl); ok { - b.buildFuncDecl(p, decl) - } - } - } - - // Finish up init(). - if p.Prog.mode&BareInits == 0 { - emitJump(init, done) - init.currentBlock = done - } - init.emit(new(Return)) - init.finishBody() - - p.info = nil // We no longer need ASTs or go/types deductions. - - if p.Prog.mode&SanityCheckFunctions != 0 { - sanityCheckPackage(p) - } -} - -// Like ObjectOf, but panics instead of returning nil. -// Only valid during p's create and build phases. -func (p *Package) objectOf(id *ast.Ident) types.Object { - if o := p.info.ObjectOf(id); o != nil { - return o - } - panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s", - id.Name, p.Prog.Fset.Position(id.Pos()))) -} - -// Like TypeOf, but panics instead of returning nil. -// Only valid during p's create and build phases. -func (p *Package) typeOf(e ast.Expr) types.Type { - if T := p.info.TypeOf(e); T != nil { - return T - } - panic(fmt.Sprintf("no type for %T @ %s", - e, p.Prog.Fset.Position(e.Pos()))) -} diff --git a/vendor/honnef.co/go/tools/ssa/const.go b/vendor/honnef.co/go/tools/ssa/const.go deleted file mode 100644 index 3606e0f..0000000 --- a/vendor/honnef.co/go/tools/ssa/const.go +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.6 - -package ssa - -// This file defines the Const SSA value type. - -import ( - "fmt" - exact "go/constant" - "go/token" - "go/types" - "strconv" -) - -// NewConst returns a new constant of the specified value and type. -// val must be valid according to the specification of Const.Value. -// -func NewConst(val exact.Value, typ types.Type) *Const { - return &Const{typ, val} -} - -// intConst returns an 'int' constant that evaluates to i. -// (i is an int64 in case the host is narrower than the target.) -func intConst(i int64) *Const { - return NewConst(exact.MakeInt64(i), tInt) -} - -// nilConst returns a nil constant of the specified type, which may -// be any reference type, including interfaces. -// -func nilConst(typ types.Type) *Const { - return NewConst(nil, typ) -} - -// stringConst returns a 'string' constant that evaluates to s. -func stringConst(s string) *Const { - return NewConst(exact.MakeString(s), tString) -} - -// zeroConst returns a new "zero" constant of the specified type, -// which must not be an array or struct type: the zero values of -// aggregates are well-defined but cannot be represented by Const. 
-// -func zeroConst(t types.Type) *Const { - switch t := t.(type) { - case *types.Basic: - switch { - case t.Info()&types.IsBoolean != 0: - return NewConst(exact.MakeBool(false), t) - case t.Info()&types.IsNumeric != 0: - return NewConst(exact.MakeInt64(0), t) - case t.Info()&types.IsString != 0: - return NewConst(exact.MakeString(""), t) - case t.Kind() == types.UnsafePointer: - fallthrough - case t.Kind() == types.UntypedNil: - return nilConst(t) - default: - panic(fmt.Sprint("zeroConst for unexpected type:", t)) - } - case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: - return nilConst(t) - case *types.Named: - return NewConst(zeroConst(t.Underlying()).Value, t) - case *types.Array, *types.Struct, *types.Tuple: - panic(fmt.Sprint("zeroConst applied to aggregate:", t)) - } - panic(fmt.Sprint("zeroConst: unexpected ", t)) -} - -func (c *Const) RelString(from *types.Package) string { - var s string - if c.Value == nil { - s = "nil" - } else if c.Value.Kind() == exact.String { - s = exact.StringVal(c.Value) - const max = 20 - // TODO(adonovan): don't cut a rune in half. - if len(s) > max { - s = s[:max-3] + "..." // abbreviate - } - s = strconv.Quote(s) - } else { - s = c.Value.String() - } - return s + ":" + relType(c.Type(), from) -} - -func (c *Const) Name() string { - return c.RelString(nil) -} - -func (c *Const) String() string { - return c.Name() -} - -func (c *Const) Type() types.Type { - return c.typ -} - -func (c *Const) Referrers() *[]Instruction { - return nil -} - -func (c *Const) Parent() *Function { return nil } - -func (c *Const) Pos() token.Pos { - return token.NoPos -} - -// IsNil returns true if this constant represents a typed or untyped nil value. -func (c *Const) IsNil() bool { - return c.Value == nil -} - -// TODO(adonovan): move everything below into honnef.co/go/tools/ssa/interp. - -// Int64 returns the numeric value of this constant truncated to fit -// a signed 64-bit integer. -// -func (c *Const) Int64() int64 { - switch x := exact.ToInt(c.Value); x.Kind() { - case exact.Int: - if i, ok := exact.Int64Val(x); ok { - return i - } - return 0 - case exact.Float: - f, _ := exact.Float64Val(x) - return int64(f) - } - panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) -} - -// Uint64 returns the numeric value of this constant truncated to fit -// an unsigned 64-bit integer. -// -func (c *Const) Uint64() uint64 { - switch x := exact.ToInt(c.Value); x.Kind() { - case exact.Int: - if u, ok := exact.Uint64Val(x); ok { - return u - } - return 0 - case exact.Float: - f, _ := exact.Float64Val(x) - return uint64(f) - } - panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) -} - -// Float64 returns the numeric value of this constant truncated to fit -// a float64. -// -func (c *Const) Float64() float64 { - f, _ := exact.Float64Val(c.Value) - return f -} - -// Complex128 returns the complex value of this constant truncated to -// fit a complex128. -// -func (c *Const) Complex128() complex128 { - re, _ := exact.Float64Val(exact.Real(c.Value)) - im, _ := exact.Float64Val(exact.Imag(c.Value)) - return complex(re, im) -} diff --git a/vendor/honnef.co/go/tools/ssa/const15.go b/vendor/honnef.co/go/tools/ssa/const15.go deleted file mode 100644 index 49b5333..0000000 --- a/vendor/honnef.co/go/tools/ssa/const15.go +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// +build go1.5,!go1.6 - -package ssa - -// This file defines the Const SSA value type. - -import ( - "fmt" - exact "go/constant" - "go/token" - "go/types" - "strconv" -) - -// NewConst returns a new constant of the specified value and type. -// val must be valid according to the specification of Const.Value. -// -func NewConst(val exact.Value, typ types.Type) *Const { - return &Const{typ, val} -} - -// intConst returns an 'int' constant that evaluates to i. -// (i is an int64 in case the host is narrower than the target.) -func intConst(i int64) *Const { - return NewConst(exact.MakeInt64(i), tInt) -} - -// nilConst returns a nil constant of the specified type, which may -// be any reference type, including interfaces. -// -func nilConst(typ types.Type) *Const { - return NewConst(nil, typ) -} - -// stringConst returns a 'string' constant that evaluates to s. -func stringConst(s string) *Const { - return NewConst(exact.MakeString(s), tString) -} - -// zeroConst returns a new "zero" constant of the specified type, -// which must not be an array or struct type: the zero values of -// aggregates are well-defined but cannot be represented by Const. -// -func zeroConst(t types.Type) *Const { - switch t := t.(type) { - case *types.Basic: - switch { - case t.Info()&types.IsBoolean != 0: - return NewConst(exact.MakeBool(false), t) - case t.Info()&types.IsNumeric != 0: - return NewConst(exact.MakeInt64(0), t) - case t.Info()&types.IsString != 0: - return NewConst(exact.MakeString(""), t) - case t.Kind() == types.UnsafePointer: - fallthrough - case t.Kind() == types.UntypedNil: - return nilConst(t) - default: - panic(fmt.Sprint("zeroConst for unexpected type:", t)) - } - case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: - return nilConst(t) - case *types.Named: - return NewConst(zeroConst(t.Underlying()).Value, t) - case *types.Array, *types.Struct, *types.Tuple: - panic(fmt.Sprint("zeroConst applied to aggregate:", t)) - } - panic(fmt.Sprint("zeroConst: unexpected ", t)) -} - -func (c *Const) RelString(from *types.Package) string { - var s string - if c.Value == nil { - s = "nil" - } else if c.Value.Kind() == exact.String { - s = exact.StringVal(c.Value) - const max = 20 - // TODO(adonovan): don't cut a rune in half. - if len(s) > max { - s = s[:max-3] + "..." // abbreviate - } - s = strconv.Quote(s) - } else { - s = c.Value.String() - } - return s + ":" + relType(c.Type(), from) -} - -func (c *Const) Name() string { - return c.RelString(nil) -} - -func (c *Const) String() string { - return c.Name() -} - -func (c *Const) Type() types.Type { - return c.typ -} - -func (c *Const) Referrers() *[]Instruction { - return nil -} - -func (c *Const) Parent() *Function { return nil } - -func (c *Const) Pos() token.Pos { - return token.NoPos -} - -// IsNil returns true if this constant represents a typed or untyped nil value. -func (c *Const) IsNil() bool { - return c.Value == nil -} - -// TODO(adonovan): move everything below into honnef.co/go/tools/ssa/interp. - -// Int64 returns the numeric value of this constant truncated to fit -// a signed 64-bit integer. -// -func (c *Const) Int64() int64 { - switch x := c.Value; x.Kind() { - case exact.Int: - if i, ok := exact.Int64Val(x); ok { - return i - } - return 0 - case exact.Float: - f, _ := exact.Float64Val(x) - return int64(f) - } - panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) -} - -// Uint64 returns the numeric value of this constant truncated to fit -// an unsigned 64-bit integer. 
-// -func (c *Const) Uint64() uint64 { - switch x := c.Value; x.Kind() { - case exact.Int: - if u, ok := exact.Uint64Val(x); ok { - return u - } - return 0 - case exact.Float: - f, _ := exact.Float64Val(x) - return uint64(f) - } - panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) -} - -// Float64 returns the numeric value of this constant truncated to fit -// a float64. -// -func (c *Const) Float64() float64 { - f, _ := exact.Float64Val(c.Value) - return f -} - -// Complex128 returns the complex value of this constant truncated to -// fit a complex128. -// -func (c *Const) Complex128() complex128 { - re, _ := exact.Float64Val(exact.Real(c.Value)) - im, _ := exact.Float64Val(exact.Imag(c.Value)) - return complex(re, im) -} diff --git a/vendor/honnef.co/go/tools/ssa/create.go b/vendor/honnef.co/go/tools/ssa/create.go deleted file mode 100644 index 69ac12b..0000000 --- a/vendor/honnef.co/go/tools/ssa/create.go +++ /dev/null @@ -1,263 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file implements the CREATE phase of SSA construction. -// See builder.go for explanation. - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "os" - "sync" - - "golang.org/x/tools/go/types/typeutil" -) - -// NewProgram returns a new SSA Program. -// -// mode controls diagnostics and checking during SSA construction. -// -func NewProgram(fset *token.FileSet, mode BuilderMode) *Program { - prog := &Program{ - Fset: fset, - imported: make(map[string]*Package), - packages: make(map[*types.Package]*Package), - thunks: make(map[selectionKey]*Function), - bounds: make(map[*types.Func]*Function), - mode: mode, - } - - h := typeutil.MakeHasher() // protected by methodsMu, in effect - prog.methodSets.SetHasher(h) - prog.canon.SetHasher(h) - - return prog -} - -// memberFromObject populates package pkg with a member for the -// typechecker object obj. -// -// For objects from Go source code, syntax is the associated syntax -// tree (for funcs and vars only); it will be used during the build -// phase. -// -func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) { - name := obj.Name() - switch obj := obj.(type) { - case *types.Builtin: - if pkg.Pkg != types.Unsafe { - panic("unexpected builtin object: " + obj.String()) - } - - case *types.TypeName: - pkg.Members[name] = &Type{ - object: obj, - pkg: pkg, - } - - case *types.Const: - c := &NamedConst{ - object: obj, - Value: NewConst(obj.Val(), obj.Type()), - pkg: pkg, - } - pkg.values[obj] = c.Value - pkg.Members[name] = c - - case *types.Var: - g := &Global{ - Pkg: pkg, - name: name, - object: obj, - typ: types.NewPointer(obj.Type()), // address - pos: obj.Pos(), - } - pkg.values[obj] = g - pkg.Members[name] = g - - case *types.Func: - sig := obj.Type().(*types.Signature) - if sig.Recv() == nil && name == "init" { - pkg.ninit++ - name = fmt.Sprintf("init#%d", pkg.ninit) - } - fn := &Function{ - name: name, - object: obj, - Signature: sig, - syntax: syntax, - pos: obj.Pos(), - Pkg: pkg, - Prog: pkg.Prog, - } - if syntax == nil { - fn.Synthetic = "loaded from gc object file" - } - - pkg.values[obj] = fn - if sig.Recv() == nil { - pkg.Members[name] = fn // package-level function - } - - default: // (incl. 
*types.Package) - panic("unexpected Object type: " + obj.String()) - } -} - -// membersFromDecl populates package pkg with members for each -// typechecker object (var, func, const or type) associated with the -// specified decl. -// -func membersFromDecl(pkg *Package, decl ast.Decl) { - switch decl := decl.(type) { - case *ast.GenDecl: // import, const, type or var - switch decl.Tok { - case token.CONST: - for _, spec := range decl.Specs { - for _, id := range spec.(*ast.ValueSpec).Names { - if !isBlankIdent(id) { - memberFromObject(pkg, pkg.info.Defs[id], nil) - } - } - } - - case token.VAR: - for _, spec := range decl.Specs { - for _, id := range spec.(*ast.ValueSpec).Names { - if !isBlankIdent(id) { - memberFromObject(pkg, pkg.info.Defs[id], spec) - } - } - } - - case token.TYPE: - for _, spec := range decl.Specs { - id := spec.(*ast.TypeSpec).Name - if !isBlankIdent(id) { - memberFromObject(pkg, pkg.info.Defs[id], nil) - } - } - } - - case *ast.FuncDecl: - id := decl.Name - if !isBlankIdent(id) { - memberFromObject(pkg, pkg.info.Defs[id], decl) - } - } -} - -// CreatePackage constructs and returns an SSA Package from the -// specified type-checked, error-free file ASTs, and populates its -// Members mapping. -// -// importable determines whether this package should be returned by a -// subsequent call to ImportedPackage(pkg.Path()). -// -// The real work of building SSA form for each function is not done -// until a subsequent call to Package.Build(). -// -func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package { - p := &Package{ - Prog: prog, - Members: make(map[string]Member), - values: make(map[types.Object]Value), - Pkg: pkg, - info: info, // transient (CREATE and BUILD phases) - files: files, // transient (CREATE and BUILD phases) - } - - // Add init() function. - p.init = &Function{ - name: "init", - Signature: new(types.Signature), - Synthetic: "package initializer", - Pkg: p, - Prog: prog, - } - p.Members[p.init.name] = p.init - - // CREATE phase. - // Allocate all package members: vars, funcs, consts and types. - if len(files) > 0 { - // Go source package. - for _, file := range files { - for _, decl := range file.Decls { - membersFromDecl(p, decl) - } - } - } else { - // GC-compiled binary package (or "unsafe") - // No code. - // No position information. - scope := p.Pkg.Scope() - for _, name := range scope.Names() { - obj := scope.Lookup(name) - memberFromObject(p, obj, nil) - if obj, ok := obj.(*types.TypeName); ok { - if named, ok := obj.Type().(*types.Named); ok { - for i, n := 0, named.NumMethods(); i < n; i++ { - memberFromObject(p, named.Method(i), nil) - } - } - } - } - } - - if prog.mode&BareInits == 0 { - // Add initializer guard variable. - initguard := &Global{ - Pkg: p, - name: "init$guard", - typ: types.NewPointer(tBool), - } - p.Members[initguard.Name()] = initguard - } - - if prog.mode&GlobalDebug != 0 { - p.SetDebugMode(true) - } - - if prog.mode&PrintPackages != 0 { - printMu.Lock() - p.WriteTo(os.Stdout) - printMu.Unlock() - } - - if importable { - prog.imported[p.Pkg.Path()] = p - } - prog.packages[p.Pkg] = p - - return p -} - -// printMu serializes printing of Packages/Functions to stdout. -var printMu sync.Mutex - -// AllPackages returns a new slice containing all packages in the -// program prog in unspecified order. 
-// -func (prog *Program) AllPackages() []*Package { - pkgs := make([]*Package, 0, len(prog.packages)) - for _, pkg := range prog.packages { - pkgs = append(pkgs, pkg) - } - return pkgs -} - -// ImportedPackage returns the importable SSA Package whose import -// path is path, or nil if no such SSA package has been created. -// -// Not all packages are importable. For example, no import -// declaration can resolve to the x_test package created by 'go test' -// or the ad-hoc main package created 'go build foo.go'. -// -func (prog *Program) ImportedPackage(path string) *Package { - return prog.imported[path] -} diff --git a/vendor/honnef.co/go/tools/ssa/doc.go b/vendor/honnef.co/go/tools/ssa/doc.go deleted file mode 100644 index 57474dd..0000000 --- a/vendor/honnef.co/go/tools/ssa/doc.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package ssa defines a representation of the elements of Go programs -// (packages, types, functions, variables and constants) using a -// static single-assignment (SSA) form intermediate representation -// (IR) for the bodies of functions. -// -// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE. -// -// For an introduction to SSA form, see -// http://en.wikipedia.org/wiki/Static_single_assignment_form. -// This page provides a broader reading list: -// http://www.dcs.gla.ac.uk/~jsinger/ssa.html. -// -// The level of abstraction of the SSA form is intentionally close to -// the source language to facilitate construction of source analysis -// tools. It is not intended for machine code generation. -// -// All looping, branching and switching constructs are replaced with -// unstructured control flow. Higher-level control flow constructs -// such as multi-way branch can be reconstructed as needed; see -// ssautil.Switches() for an example. -// -// To construct an SSA-form program, call ssautil.CreateProgram on a -// loader.Program, a set of type-checked packages created from -// parsed Go source files. The resulting ssa.Program contains all the -// packages and their members, but SSA code is not created for -// function bodies until a subsequent call to (*Package).Build. -// -// The builder initially builds a naive SSA form in which all local -// variables are addresses of stack locations with explicit loads and -// stores. Registerisation of eligible locals and φ-node insertion -// using dominance and dataflow are then performed as a second pass -// called "lifting" to improve the accuracy and performance of -// subsequent analyses; this pass can be skipped by setting the -// NaiveForm builder flag. -// -// The primary interfaces of this package are: -// -// - Member: a named member of a Go package. -// - Value: an expression that yields a value. -// - Instruction: a statement that consumes values and performs computation. -// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph) -// -// A computation that yields a result implements both the Value and -// Instruction interfaces. The following table shows for each -// concrete type which of these interfaces it implements. -// -// Value? Instruction? Member? 
-// *Alloc ✔ ✔ -// *BinOp ✔ ✔ -// *Builtin ✔ -// *Call ✔ ✔ -// *ChangeInterface ✔ ✔ -// *ChangeType ✔ ✔ -// *Const ✔ -// *Convert ✔ ✔ -// *DebugRef ✔ -// *Defer ✔ -// *Extract ✔ ✔ -// *Field ✔ ✔ -// *FieldAddr ✔ ✔ -// *FreeVar ✔ -// *Function ✔ ✔ (func) -// *Global ✔ ✔ (var) -// *Go ✔ -// *If ✔ -// *Index ✔ ✔ -// *IndexAddr ✔ ✔ -// *Jump ✔ -// *Lookup ✔ ✔ -// *MakeChan ✔ ✔ -// *MakeClosure ✔ ✔ -// *MakeInterface ✔ ✔ -// *MakeMap ✔ ✔ -// *MakeSlice ✔ ✔ -// *MapUpdate ✔ -// *NamedConst ✔ (const) -// *Next ✔ ✔ -// *Panic ✔ -// *Parameter ✔ -// *Phi ✔ ✔ -// *Range ✔ ✔ -// *Return ✔ -// *RunDefers ✔ -// *Select ✔ ✔ -// *Send ✔ -// *Slice ✔ ✔ -// *Store ✔ -// *Type ✔ (type) -// *TypeAssert ✔ ✔ -// *UnOp ✔ ✔ -// -// Other key types in this package include: Program, Package, Function -// and BasicBlock. -// -// The program representation constructed by this package is fully -// resolved internally, i.e. it does not rely on the names of Values, -// Packages, Functions, Types or BasicBlocks for the correct -// interpretation of the program. Only the identities of objects and -// the topology of the SSA and type graphs are semantically -// significant. (There is one exception: Ids, used to identify field -// and method names, contain strings.) Avoidance of name-based -// operations simplifies the implementation of subsequent passes and -// can make them very efficient. Many objects are nonetheless named -// to aid in debugging, but it is not essential that the names be -// either accurate or unambiguous. The public API exposes a number of -// name-based maps for client convenience. -// -// The ssa/ssautil package provides various utilities that depend only -// on the public API of this package. -// -// TODO(adonovan): Consider the exceptional control-flow implications -// of defer and recover(). -// -// TODO(adonovan): write a how-to document for all the various cases -// of trying to determine corresponding elements across the four -// domains of source locations, ast.Nodes, types.Objects, -// ssa.Values/Instructions. -// -package ssa // import "honnef.co/go/tools/ssa" diff --git a/vendor/honnef.co/go/tools/ssa/dom.go b/vendor/honnef.co/go/tools/ssa/dom.go deleted file mode 100644 index 12ef430..0000000 --- a/vendor/honnef.co/go/tools/ssa/dom.go +++ /dev/null @@ -1,341 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file defines algorithms related to dominance. - -// Dominator tree construction ---------------------------------------- -// -// We use the algorithm described in Lengauer & Tarjan. 1979. A fast -// algorithm for finding dominators in a flowgraph. -// http://doi.acm.org/10.1145/357062.357071 -// -// We also apply the optimizations to SLT described in Georgiadis et -// al, Finding Dominators in Practice, JGAA 2006, -// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf -// to avoid the need for buckets of size > 1. - -import ( - "bytes" - "fmt" - "math/big" - "os" - "sort" -) - -// Idom returns the block that immediately dominates b: -// its parent in the dominator tree, if any. -// Neither the entry node (b.Index==0) nor recover node -// (b==b.Parent().Recover()) have a parent. -// -func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom } - -// Dominees returns the list of blocks that b immediately dominates: -// its children in the dominator tree. 
-// -func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children } - -// Dominates reports whether b dominates c. -func (b *BasicBlock) Dominates(c *BasicBlock) bool { - return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post -} - -type byDomPreorder []*BasicBlock - -func (a byDomPreorder) Len() int { return len(a) } -func (a byDomPreorder) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre } - -// DomPreorder returns a new slice containing the blocks of f in -// dominator tree preorder. -// -func (f *Function) DomPreorder() []*BasicBlock { - n := len(f.Blocks) - order := make(byDomPreorder, n, n) - copy(order, f.Blocks) - sort.Sort(order) - return order -} - -// domInfo contains a BasicBlock's dominance information. -type domInfo struct { - idom *BasicBlock // immediate dominator (parent in domtree) - children []*BasicBlock // nodes immediately dominated by this one - pre, post int32 // pre- and post-order numbering within domtree -} - -// ltState holds the working state for Lengauer-Tarjan algorithm -// (during which domInfo.pre is repurposed for CFG DFS preorder number). -type ltState struct { - // Each slice is indexed by b.Index. - sdom []*BasicBlock // b's semidominator - parent []*BasicBlock // b's parent in DFS traversal of CFG - ancestor []*BasicBlock // b's ancestor with least sdom -} - -// dfs implements the depth-first search part of the LT algorithm. -func (lt *ltState) dfs(v *BasicBlock, i int32, preorder []*BasicBlock) int32 { - preorder[i] = v - v.dom.pre = i // For now: DFS preorder of spanning tree of CFG - i++ - lt.sdom[v.Index] = v - lt.link(nil, v) - for _, w := range v.Succs { - if lt.sdom[w.Index] == nil { - lt.parent[w.Index] = v - i = lt.dfs(w, i, preorder) - } - } - return i -} - -// eval implements the EVAL part of the LT algorithm. -func (lt *ltState) eval(v *BasicBlock) *BasicBlock { - // TODO(adonovan): opt: do path compression per simple LT. - u := v - for ; lt.ancestor[v.Index] != nil; v = lt.ancestor[v.Index] { - if lt.sdom[v.Index].dom.pre < lt.sdom[u.Index].dom.pre { - u = v - } - } - return u -} - -// link implements the LINK part of the LT algorithm. -func (lt *ltState) link(v, w *BasicBlock) { - lt.ancestor[w.Index] = v -} - -// buildDomTree computes the dominator tree of f using the LT algorithm. -// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run). -// -func buildDomTree(f *Function) { - // The step numbers refer to the original LT paper; the - // reordering is due to Georgiadis. - - // Clear any previous domInfo. - for _, b := range f.Blocks { - b.dom = domInfo{} - } - - n := len(f.Blocks) - // Allocate space for 5 contiguous [n]*BasicBlock arrays: - // sdom, parent, ancestor, preorder, buckets. - space := make([]*BasicBlock, 5*n, 5*n) - lt := ltState{ - sdom: space[0:n], - parent: space[n : 2*n], - ancestor: space[2*n : 3*n], - } - - // Step 1. Number vertices by depth-first preorder. - preorder := space[3*n : 4*n] - root := f.Blocks[0] - prenum := lt.dfs(root, 0, preorder) - recover := f.Recover - if recover != nil { - lt.dfs(recover, prenum, preorder) - } - - buckets := space[4*n : 5*n] - copy(buckets, preorder) - - // In reverse preorder... - for i := int32(n) - 1; i > 0; i-- { - w := preorder[i] - - // Step 3. Implicitly define the immediate dominator of each node. 
- for v := buckets[i]; v != w; v = buckets[v.dom.pre] { - u := lt.eval(v) - if lt.sdom[u.Index].dom.pre < i { - v.dom.idom = u - } else { - v.dom.idom = w - } - } - - // Step 2. Compute the semidominators of all nodes. - lt.sdom[w.Index] = lt.parent[w.Index] - for _, v := range w.Preds { - u := lt.eval(v) - if lt.sdom[u.Index].dom.pre < lt.sdom[w.Index].dom.pre { - lt.sdom[w.Index] = lt.sdom[u.Index] - } - } - - lt.link(lt.parent[w.Index], w) - - if lt.parent[w.Index] == lt.sdom[w.Index] { - w.dom.idom = lt.parent[w.Index] - } else { - buckets[i] = buckets[lt.sdom[w.Index].dom.pre] - buckets[lt.sdom[w.Index].dom.pre] = w - } - } - - // The final 'Step 3' is now outside the loop. - for v := buckets[0]; v != root; v = buckets[v.dom.pre] { - v.dom.idom = root - } - - // Step 4. Explicitly define the immediate dominator of each - // node, in preorder. - for _, w := range preorder[1:] { - if w == root || w == recover { - w.dom.idom = nil - } else { - if w.dom.idom != lt.sdom[w.Index] { - w.dom.idom = w.dom.idom.dom.idom - } - // Calculate Children relation as inverse of Idom. - w.dom.idom.dom.children = append(w.dom.idom.dom.children, w) - } - } - - pre, post := numberDomTree(root, 0, 0) - if recover != nil { - numberDomTree(recover, pre, post) - } - - // printDomTreeDot(os.Stderr, f) // debugging - // printDomTreeText(os.Stderr, root, 0) // debugging - - if f.Prog.mode&SanityCheckFunctions != 0 { - sanityCheckDomTree(f) - } -} - -// numberDomTree sets the pre- and post-order numbers of a depth-first -// traversal of the dominator tree rooted at v. These are used to -// answer dominance queries in constant time. -// -func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) { - v.dom.pre = pre - pre++ - for _, child := range v.dom.children { - pre, post = numberDomTree(child, pre, post) - } - v.dom.post = post - post++ - return pre, post -} - -// Testing utilities ---------------------------------------- - -// sanityCheckDomTree checks the correctness of the dominator tree -// computed by the LT algorithm by comparing against the dominance -// relation computed by a naive Kildall-style forward dataflow -// analysis (Algorithm 10.16 from the "Dragon" book). -// -func sanityCheckDomTree(f *Function) { - n := len(f.Blocks) - - // D[i] is the set of blocks that dominate f.Blocks[i], - // represented as a bit-set of block indices. - D := make([]big.Int, n) - - one := big.NewInt(1) - - // all is the set of all blocks; constant. - var all big.Int - all.Set(one).Lsh(&all, uint(n)).Sub(&all, one) - - // Initialization. - for i, b := range f.Blocks { - if i == 0 || b == f.Recover { - // A root is dominated only by itself. - D[i].SetBit(&D[0], 0, 1) - } else { - // All other blocks are (initially) dominated - // by every block. - D[i].Set(&all) - } - } - - // Iteration until fixed point. - for changed := true; changed; { - changed = false - for i, b := range f.Blocks { - if i == 0 || b == f.Recover { - continue - } - // Compute intersection across predecessors. - var x big.Int - x.Set(&all) - for _, pred := range b.Preds { - x.And(&x, &D[pred.Index]) - } - x.SetBit(&x, i, 1) // a block always dominates itself. - if D[i].Cmp(&x) != 0 { - D[i].Set(&x) - changed = true - } - } - } - - // Check the entire relation. O(n^2). - // The Recover block (if any) must be treated specially so we skip it. 
- ok := true - for i := 0; i < n; i++ { - for j := 0; j < n; j++ { - b, c := f.Blocks[i], f.Blocks[j] - if c == f.Recover { - continue - } - actual := b.Dominates(c) - expected := D[j].Bit(i) == 1 - if actual != expected { - fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected) - ok = false - } - } - } - - preorder := f.DomPreorder() - for _, b := range f.Blocks { - if got := preorder[b.dom.pre]; got != b { - fmt.Fprintf(os.Stderr, "preorder[%d]==%s, want %s\n", b.dom.pre, got, b) - ok = false - } - } - - if !ok { - panic("sanityCheckDomTree failed for " + f.String()) - } - -} - -// Printing functions ---------------------------------------- - -// printDomTree prints the dominator tree as text, using indentation. -func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { - fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) - for _, child := range v.dom.children { - printDomTreeText(buf, child, indent+1) - } -} - -// printDomTreeDot prints the dominator tree of f in AT&T GraphViz -// (.dot) format. -func printDomTreeDot(buf *bytes.Buffer, f *Function) { - fmt.Fprintln(buf, "//", f) - fmt.Fprintln(buf, "digraph domtree {") - for i, b := range f.Blocks { - v := b.dom - fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post) - // TODO(adonovan): improve appearance of edges - // belonging to both dominator tree and CFG. - - // Dominator tree edge. - if i != 0 { - fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.dom.pre, v.pre) - } - // CFG edges. - for _, pred := range b.Preds { - fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.pre, v.pre) - } - } - fmt.Fprintln(buf, "}") -} diff --git a/vendor/honnef.co/go/tools/ssa/emit.go b/vendor/honnef.co/go/tools/ssa/emit.go deleted file mode 100644 index 400da21..0000000 --- a/vendor/honnef.co/go/tools/ssa/emit.go +++ /dev/null @@ -1,475 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// Helpers for emitting SSA instructions. - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" -) - -// emitNew emits to f a new (heap Alloc) instruction allocating an -// object of type typ. pos is the optional source location. -// -func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc { - v := &Alloc{Heap: true} - v.setType(types.NewPointer(typ)) - v.setPos(pos) - f.emit(v) - return v -} - -// emitLoad emits to f an instruction to load the address addr into a -// new temporary, and returns the value so defined. -// -func emitLoad(f *Function, addr Value) *UnOp { - v := &UnOp{Op: token.MUL, X: addr} - v.setType(deref(addr.Type())) - f.emit(v) - return v -} - -// emitDebugRef emits to f a DebugRef pseudo-instruction associating -// expression e with value v. -// -func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { - if !f.debugInfo() { - return // debugging not enabled - } - if v == nil || e == nil { - panic("nil") - } - var obj types.Object - e = unparen(e) - if id, ok := e.(*ast.Ident); ok { - if isBlankIdent(id) { - return - } - obj = f.Pkg.objectOf(id) - switch obj.(type) { - case *types.Nil, *types.Const, *types.Builtin: - return - } - } - f.emit(&DebugRef{ - X: v, - Expr: e, - IsAddr: isAddr, - object: obj, - }) -} - -// emitArith emits to f code to compute the binary operation op(x, y) -// where op is an eager shift, logical or arithmetic operation. 
-// (Use emitCompare() for comparisons and Builder.logicalBinop() for -// non-eager operations.) -// -func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value { - switch op { - case token.SHL, token.SHR: - x = emitConv(f, x, t) - // y may be signed or an 'untyped' constant. - // TODO(adonovan): whence signed values? - if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 { - y = emitConv(f, y, types.Typ[types.Uint64]) - } - - case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: - x = emitConv(f, x, t) - y = emitConv(f, y, t) - - default: - panic("illegal op in emitArith: " + op.String()) - - } - v := &BinOp{ - Op: op, - X: x, - Y: y, - } - v.setPos(pos) - v.setType(t) - return f.emit(v) -} - -// emitCompare emits to f code compute the boolean result of -// comparison comparison 'x op y'. -// -func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value { - xt := x.Type().Underlying() - yt := y.Type().Underlying() - - // Special case to optimise a tagless SwitchStmt so that - // these are equivalent - // switch { case e: ...} - // switch true { case e: ... } - // if e==true { ... } - // even in the case when e's type is an interface. - // TODO(adonovan): opt: generalise to x==true, false!=y, etc. - if x == vTrue && op == token.EQL { - if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 { - return y - } - } - - if types.Identical(xt, yt) { - // no conversion necessary - } else if _, ok := xt.(*types.Interface); ok { - y = emitConv(f, y, x.Type()) - } else if _, ok := yt.(*types.Interface); ok { - x = emitConv(f, x, y.Type()) - } else if _, ok := x.(*Const); ok { - x = emitConv(f, x, y.Type()) - } else if _, ok := y.(*Const); ok { - y = emitConv(f, y, x.Type()) - } else { - // other cases, e.g. channels. No-op. - } - - v := &BinOp{ - Op: op, - X: x, - Y: y, - } - v.setPos(pos) - v.setType(tBool) - return f.emit(v) -} - -// isValuePreserving returns true if a conversion from ut_src to -// ut_dst is value-preserving, i.e. just a change of type. -// Precondition: neither argument is a named type. -// -func isValuePreserving(ut_src, ut_dst types.Type) bool { - // Identical underlying types? - if structTypesIdentical(ut_dst, ut_src) { - return true - } - - switch ut_dst.(type) { - case *types.Chan: - // Conversion between channel types? - _, ok := ut_src.(*types.Chan) - return ok - - case *types.Pointer: - // Conversion between pointers with identical base types? - _, ok := ut_src.(*types.Pointer) - return ok - } - return false -} - -// emitConv emits to f code to convert Value val to exactly type typ, -// and returns the converted value. Implicit conversions are required -// by language assignability rules in assignments, parameter passing, -// etc. Conversions cannot fail dynamically. -// -func emitConv(f *Function, val Value, typ types.Type) Value { - t_src := val.Type() - - // Identical types? Conversion is a no-op. - if types.Identical(t_src, typ) { - return val - } - - ut_dst := typ.Underlying() - ut_src := t_src.Underlying() - - // Just a change of type, but not value or representation? - if isValuePreserving(ut_src, ut_dst) { - c := &ChangeType{X: val} - c.setType(typ) - return f.emit(c) - } - - // Conversion to, or construction of a value of, an interface type? - if _, ok := ut_dst.(*types.Interface); ok { - // Assignment from one interface type to another? 
- if _, ok := ut_src.(*types.Interface); ok { - c := &ChangeInterface{X: val} - c.setType(typ) - return f.emit(c) - } - - // Untyped nil constant? Return interface-typed nil constant. - if ut_src == tUntypedNil { - return nilConst(typ) - } - - // Convert (non-nil) "untyped" literals to their default type. - if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 { - val = emitConv(f, val, DefaultType(ut_src)) - } - - f.Pkg.Prog.needMethodsOf(val.Type()) - mi := &MakeInterface{X: val} - mi.setType(typ) - return f.emit(mi) - } - - // Conversion of a compile-time constant value? - if c, ok := val.(*Const); ok { - if _, ok := ut_dst.(*types.Basic); ok || c.IsNil() { - // Conversion of a compile-time constant to - // another constant type results in a new - // constant of the destination type and - // (initially) the same abstract value. - // We don't truncate the value yet. - return NewConst(c.Value, typ) - } - - // We're converting from constant to non-constant type, - // e.g. string -> []byte/[]rune. - } - - // A representation-changing conversion? - // At least one of {ut_src,ut_dst} must be *Basic. - // (The other may be []byte or []rune.) - _, ok1 := ut_src.(*types.Basic) - _, ok2 := ut_dst.(*types.Basic) - if ok1 || ok2 { - c := &Convert{X: val} - c.setType(typ) - return f.emit(c) - } - - panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ)) -} - -// emitStore emits to f an instruction to store value val at location -// addr, applying implicit conversions as required by assignability rules. -// -func emitStore(f *Function, addr, val Value, pos token.Pos) *Store { - s := &Store{ - Addr: addr, - Val: emitConv(f, val, deref(addr.Type())), - pos: pos, - } - f.emit(s) - return s -} - -// emitJump emits to f a jump to target, and updates the control-flow graph. -// Postcondition: f.currentBlock is nil. -// -func emitJump(f *Function, target *BasicBlock) { - b := f.currentBlock - b.emit(new(Jump)) - addEdge(b, target) - f.currentBlock = nil -} - -func (b *BasicBlock) emitJump(target *BasicBlock) { - b.emit(new(Jump)) - addEdge(b, target) -} - -// emitIf emits to f a conditional jump to tblock or fblock based on -// cond, and updates the control-flow graph. -// Postcondition: f.currentBlock is nil. -// -func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) { - b := f.currentBlock - b.emit(&If{Cond: cond}) - addEdge(b, tblock) - addEdge(b, fblock) - f.currentBlock = nil -} - -// emitExtract emits to f an instruction to extract the index'th -// component of tuple. It returns the extracted value. -// -func emitExtract(f *Function, tuple Value, index int) Value { - e := &Extract{Tuple: tuple, Index: index} - e.setType(tuple.Type().(*types.Tuple).At(index).Type()) - return f.emit(e) -} - -// emitTypeAssert emits to f a type assertion value := x.(t) and -// returns the value. x.Type() must be an interface. -// -func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value { - a := &TypeAssert{X: x, AssertedType: t} - a.setPos(pos) - a.setType(t) - return f.emit(a) -} - -// emitTypeTest emits to f a type test value,ok := x.(t) and returns -// a (value, ok) tuple. x.Type() must be an interface. -// -func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value { - a := &TypeAssert{ - X: x, - AssertedType: t, - CommaOk: true, - } - a.setPos(pos) - a.setType(types.NewTuple( - newVar("value", t), - varOk, - )) - return f.emit(a) -} - -// emitTailCall emits to f a function call in tail position. 
The -// caller is responsible for all fields of 'call' except its type. -// Intended for wrapper methods. -// Precondition: f does/will not use deferred procedure calls. -// Postcondition: f.currentBlock is nil. -// -func emitTailCall(f *Function, call *Call) { - tresults := f.Signature.Results() - nr := tresults.Len() - if nr == 1 { - call.typ = tresults.At(0).Type() - } else { - call.typ = tresults - } - tuple := f.emit(call) - var ret Return - switch nr { - case 0: - // no-op - case 1: - ret.Results = []Value{tuple} - default: - for i := 0; i < nr; i++ { - v := emitExtract(f, tuple, i) - // TODO(adonovan): in principle, this is required: - // v = emitConv(f, o.Type, f.Signature.Results[i].Type) - // but in practice emitTailCall is only used when - // the types exactly match. - ret.Results = append(ret.Results, v) - } - } - f.emit(&ret) - f.currentBlock = nil -} - -// emitImplicitSelections emits to f code to apply the sequence of -// implicit field selections specified by indices to base value v, and -// returns the selected value. -// -// If v is the address of a struct, the result will be the address of -// a field; if it is the value of a struct, the result will be the -// value of a field. -// -func emitImplicitSelections(f *Function, v Value, indices []int) Value { - for _, index := range indices { - fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) - - if isPointer(v.Type()) { - instr := &FieldAddr{ - X: v, - Field: index, - } - instr.setType(types.NewPointer(fld.Type())) - v = f.emit(instr) - // Load the field's value iff indirectly embedded. - if isPointer(fld.Type()) { - v = emitLoad(f, v) - } - } else { - instr := &Field{ - X: v, - Field: index, - } - instr.setType(fld.Type()) - v = f.emit(instr) - } - } - return v -} - -// emitFieldSelection emits to f code to select the index'th field of v. -// -// If wantAddr, the input must be a pointer-to-struct and the result -// will be the field's address; otherwise the result will be the -// field's value. -// Ident id is used for position and debug info. -// -func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { - fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) - if isPointer(v.Type()) { - instr := &FieldAddr{ - X: v, - Field: index, - } - instr.setPos(id.Pos()) - instr.setType(types.NewPointer(fld.Type())) - v = f.emit(instr) - // Load the field's value iff we don't want its address. - if !wantAddr { - v = emitLoad(f, v) - } - } else { - instr := &Field{ - X: v, - Field: index, - } - instr.setPos(id.Pos()) - instr.setType(fld.Type()) - v = f.emit(instr) - } - emitDebugRef(f, id, v, wantAddr) - return v -} - -// zeroValue emits to f code to produce a zero value of type t, -// and returns it. -// -func zeroValue(f *Function, t types.Type) Value { - switch t.Underlying().(type) { - case *types.Struct, *types.Array: - return emitLoad(f, f.addLocal(t, token.NoPos)) - default: - return zeroConst(t) - } -} - -// createRecoverBlock emits to f a block of code to return after a -// recovered panic, and sets f.Recover to it. -// -// If f's result parameters are named, the code loads and returns -// their current values, otherwise it returns the zero values of their -// type. -// -// Idempotent. 
-// -func createRecoverBlock(f *Function) { - if f.Recover != nil { - return // already created - } - saved := f.currentBlock - - f.Recover = f.newBasicBlock("recover") - f.currentBlock = f.Recover - - var results []Value - if f.namedResults != nil { - // Reload NRPs to form value tuple. - for _, r := range f.namedResults { - results = append(results, emitLoad(f, r)) - } - } else { - R := f.Signature.Results() - for i, n := 0, R.Len(); i < n; i++ { - T := R.At(i).Type() - - // Return zero value of each result type. - results = append(results, zeroValue(f, T)) - } - } - f.emit(&Return{Results: results}) - - f.currentBlock = saved -} diff --git a/vendor/honnef.co/go/tools/ssa/func.go b/vendor/honnef.co/go/tools/ssa/func.go deleted file mode 100644 index 86a3da7..0000000 --- a/vendor/honnef.co/go/tools/ssa/func.go +++ /dev/null @@ -1,703 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// This file implements the Function and BasicBlock types. - -import ( - "bytes" - "fmt" - "go/ast" - "go/token" - "go/types" - "io" - "os" - "strings" -) - -// addEdge adds a control-flow graph edge from from to to. -func addEdge(from, to *BasicBlock) { - from.Succs = append(from.Succs, to) - to.Preds = append(to.Preds, from) -} - -// Parent returns the function that contains block b. -func (b *BasicBlock) Parent() *Function { return b.parent } - -// String returns a human-readable label of this block. -// It is not guaranteed unique within the function. -// -func (b *BasicBlock) String() string { - return fmt.Sprintf("%d", b.Index) -} - -// emit appends an instruction to the current basic block. -// If the instruction defines a Value, it is returned. -// -func (b *BasicBlock) emit(i Instruction) Value { - i.setBlock(b) - b.Instrs = append(b.Instrs, i) - v, _ := i.(Value) - return v -} - -// predIndex returns the i such that b.Preds[i] == c or panics if -// there is none. -func (b *BasicBlock) predIndex(c *BasicBlock) int { - for i, pred := range b.Preds { - if pred == c { - return i - } - } - panic(fmt.Sprintf("no edge %s -> %s", c, b)) -} - -// hasPhi returns true if b.Instrs contains φ-nodes. -func (b *BasicBlock) hasPhi() bool { - _, ok := b.Instrs[0].(*Phi) - return ok -} - -func (b *BasicBlock) Phis() []Instruction { - return b.phis() -} - -// phis returns the prefix of b.Instrs containing all the block's φ-nodes. -func (b *BasicBlock) phis() []Instruction { - for i, instr := range b.Instrs { - if _, ok := instr.(*Phi); !ok { - return b.Instrs[:i] - } - } - return nil // unreachable in well-formed blocks -} - -// replacePred replaces all occurrences of p in b's predecessor list with q. -// Ordinarily there should be at most one. -// -func (b *BasicBlock) replacePred(p, q *BasicBlock) { - for i, pred := range b.Preds { - if pred == p { - b.Preds[i] = q - } - } -} - -// replaceSucc replaces all occurrences of p in b's successor list with q. -// Ordinarily there should be at most one. -// -func (b *BasicBlock) replaceSucc(p, q *BasicBlock) { - for i, succ := range b.Succs { - if succ == p { - b.Succs[i] = q - } - } -} - -func (b *BasicBlock) RemovePred(p *BasicBlock) { - b.removePred(p) -} - -// removePred removes all occurrences of p in b's -// predecessor list and φ-nodes. -// Ordinarily there should be at most one. -// -func (b *BasicBlock) removePred(p *BasicBlock) { - phis := b.phis() - - // We must preserve edge order for φ-nodes. 
- j := 0 - for i, pred := range b.Preds { - if pred != p { - b.Preds[j] = b.Preds[i] - // Strike out φ-edge too. - for _, instr := range phis { - phi := instr.(*Phi) - phi.Edges[j] = phi.Edges[i] - } - j++ - } - } - // Nil out b.Preds[j:] and φ-edges[j:] to aid GC. - for i := j; i < len(b.Preds); i++ { - b.Preds[i] = nil - for _, instr := range phis { - instr.(*Phi).Edges[i] = nil - } - } - b.Preds = b.Preds[:j] - for _, instr := range phis { - phi := instr.(*Phi) - phi.Edges = phi.Edges[:j] - } -} - -// Destinations associated with unlabelled for/switch/select stmts. -// We push/pop one of these as we enter/leave each construct and for -// each BranchStmt we scan for the innermost target of the right type. -// -type targets struct { - tail *targets // rest of stack - _break *BasicBlock - _continue *BasicBlock - _fallthrough *BasicBlock -} - -// Destinations associated with a labelled block. -// We populate these as labels are encountered in forward gotos or -// labelled statements. -// -type lblock struct { - _goto *BasicBlock - _break *BasicBlock - _continue *BasicBlock -} - -// labelledBlock returns the branch target associated with the -// specified label, creating it if needed. -// -func (f *Function) labelledBlock(label *ast.Ident) *lblock { - lb := f.lblocks[label.Obj] - if lb == nil { - lb = &lblock{_goto: f.newBasicBlock(label.Name)} - if f.lblocks == nil { - f.lblocks = make(map[*ast.Object]*lblock) - } - f.lblocks[label.Obj] = lb - } - return lb -} - -// addParam adds a (non-escaping) parameter to f.Params of the -// specified name, type and source position. -// -func (f *Function) addParam(name string, typ types.Type, pos token.Pos) *Parameter { - v := &Parameter{ - name: name, - typ: typ, - pos: pos, - parent: f, - } - f.Params = append(f.Params, v) - return v -} - -func (f *Function) addParamObj(obj types.Object) *Parameter { - name := obj.Name() - if name == "" { - name = fmt.Sprintf("arg%d", len(f.Params)) - } - param := f.addParam(name, obj.Type(), obj.Pos()) - param.object = obj - return param -} - -// addSpilledParam declares a parameter that is pre-spilled to the -// stack; the function body will load/store the spilled location. -// Subsequent lifting will eliminate spills where possible. -// -func (f *Function) addSpilledParam(obj types.Object) { - param := f.addParamObj(obj) - spill := &Alloc{Comment: obj.Name()} - spill.setType(types.NewPointer(obj.Type())) - spill.setPos(obj.Pos()) - f.objects[obj] = spill - f.Locals = append(f.Locals, spill) - f.emit(spill) - f.emit(&Store{Addr: spill, Val: param}) -} - -// startBody initializes the function prior to generating SSA code for its body. -// Precondition: f.Type() already set. -// -func (f *Function) startBody() { - f.currentBlock = f.newBasicBlock("entry") - f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init -} - -// createSyntacticParams populates f.Params and generates code (spills -// and named result locals) for all the parameters declared in the -// syntax. In addition it populates the f.objects mapping. -// -// Preconditions: -// f.startBody() was called. -// Postcondition: -// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0) -// -func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) { - // Receiver (at most one inner iteration). - if recv != nil { - for _, field := range recv.List { - for _, n := range field.Names { - f.addSpilledParam(f.Pkg.info.Defs[n]) - } - // Anonymous receiver? No need to spill. 
- if field.Names == nil { - f.addParamObj(f.Signature.Recv()) - } - } - } - - // Parameters. - if functype.Params != nil { - n := len(f.Params) // 1 if has recv, 0 otherwise - for _, field := range functype.Params.List { - for _, n := range field.Names { - f.addSpilledParam(f.Pkg.info.Defs[n]) - } - // Anonymous parameter? No need to spill. - if field.Names == nil { - f.addParamObj(f.Signature.Params().At(len(f.Params) - n)) - } - } - } - - // Named results. - if functype.Results != nil { - for _, field := range functype.Results.List { - // Implicit "var" decl of locals for named results. - for _, n := range field.Names { - f.namedResults = append(f.namedResults, f.addLocalForIdent(n)) - } - } - } -} - -// numberRegisters assigns numbers to all SSA registers -// (value-defining Instructions) in f, to aid debugging. -// (Non-Instruction Values are named at construction.) -// -func numberRegisters(f *Function) { - v := 0 - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - switch instr.(type) { - case Value: - instr.(interface { - setNum(int) - }).setNum(v) - v++ - } - } - } -} - -// buildReferrers populates the def/use information in all non-nil -// Value.Referrers slice. -// Precondition: all such slices are initially empty. -func buildReferrers(f *Function) { - var rands []*Value - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - rands = instr.Operands(rands[:0]) // recycle storage - for _, rand := range rands { - if r := *rand; r != nil { - if ref := r.Referrers(); ref != nil { - *ref = append(*ref, instr) - } - } - } - } - } -} - -// finishBody() finalizes the function after SSA code generation of its body. -func (f *Function) finishBody() { - f.objects = nil - f.currentBlock = nil - f.lblocks = nil - - // Don't pin the AST in memory (except in debug mode). - if n := f.syntax; n != nil && !f.debugInfo() { - f.syntax = extentNode{n.Pos(), n.End()} - } - - // Remove from f.Locals any Allocs that escape to the heap. - j := 0 - for _, l := range f.Locals { - if !l.Heap { - f.Locals[j] = l - j++ - } - } - // Nil out f.Locals[j:] to aid GC. - for i := j; i < len(f.Locals); i++ { - f.Locals[i] = nil - } - f.Locals = f.Locals[:j] - - optimizeBlocks(f) - - buildReferrers(f) - - buildDomTree(f) - - if f.Prog.mode&NaiveForm == 0 { - // For debugging pre-state of lifting pass: - // numberRegisters(f) - // f.WriteTo(os.Stderr) - lift(f) - } - - f.namedResults = nil // (used by lifting) - - numberRegisters(f) - - if f.Prog.mode&PrintFunctions != 0 { - printMu.Lock() - f.WriteTo(os.Stdout) - printMu.Unlock() - } - - if f.Prog.mode&SanityCheckFunctions != 0 { - mustSanityCheck(f, nil) - } -} - -func (f *Function) RemoveNilBlocks() { - f.removeNilBlocks() -} - -// removeNilBlocks eliminates nils from f.Blocks and updates each -// BasicBlock.Index. Use this after any pass that may delete blocks. -// -func (f *Function) removeNilBlocks() { - j := 0 - for _, b := range f.Blocks { - if b != nil { - b.Index = j - f.Blocks[j] = b - j++ - } - } - // Nil out f.Blocks[j:] to aid GC. - for i := j; i < len(f.Blocks); i++ { - f.Blocks[i] = nil - } - f.Blocks = f.Blocks[:j] -} - -// SetDebugMode sets the debug mode for package pkg. If true, all its -// functions will include full debug info. This greatly increases the -// size of the instruction stream, and causes Functions to depend upon -// the ASTs, potentially keeping them live in memory for longer. -// -func (pkg *Package) SetDebugMode(debug bool) { - // TODO(adonovan): do we want ast.File granularity? 
- pkg.debug = debug -} - -// debugInfo reports whether debug info is wanted for this function. -func (f *Function) debugInfo() bool { - return f.Pkg != nil && f.Pkg.debug -} - -// addNamedLocal creates a local variable, adds it to function f and -// returns it. Its name and type are taken from obj. Subsequent -// calls to f.lookup(obj) will return the same local. -// -func (f *Function) addNamedLocal(obj types.Object) *Alloc { - l := f.addLocal(obj.Type(), obj.Pos()) - l.Comment = obj.Name() - f.objects[obj] = l - return l -} - -func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc { - return f.addNamedLocal(f.Pkg.info.Defs[id]) -} - -// addLocal creates an anonymous local variable of type typ, adds it -// to function f and returns it. pos is the optional source location. -// -func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc { - v := &Alloc{} - v.setType(types.NewPointer(typ)) - v.setPos(pos) - f.Locals = append(f.Locals, v) - f.emit(v) - return v -} - -// lookup returns the address of the named variable identified by obj -// that is local to function f or one of its enclosing functions. -// If escaping, the reference comes from a potentially escaping pointer -// expression and the referent must be heap-allocated. -// -func (f *Function) lookup(obj types.Object, escaping bool) Value { - if v, ok := f.objects[obj]; ok { - if alloc, ok := v.(*Alloc); ok && escaping { - alloc.Heap = true - } - return v // function-local var (address) - } - - // Definition must be in an enclosing function; - // plumb it through intervening closures. - if f.parent == nil { - panic("no ssa.Value for " + obj.String()) - } - outer := f.parent.lookup(obj, true) // escaping - v := &FreeVar{ - name: obj.Name(), - typ: outer.Type(), - pos: outer.Pos(), - outer: outer, - parent: f, - } - f.objects[obj] = v - f.FreeVars = append(f.FreeVars, v) - return v -} - -// emit emits the specified instruction to function f. -func (f *Function) emit(instr Instruction) Value { - return f.currentBlock.emit(instr) -} - -// RelString returns the full name of this function, qualified by -// package name, receiver type, etc. -// -// The specific formatting rules are not guaranteed and may change. -// -// Examples: -// "math.IsNaN" // a package-level function -// "(*bytes.Buffer).Bytes" // a declared method or a wrapper -// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0) -// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure) -// "main.main$1" // an anonymous function in main -// "main.init#1" // a declared init function -// "main.init" // the synthesized package initializer -// -// When these functions are referred to from within the same package -// (i.e. from == f.Pkg.Object), they are rendered without the package path. -// For example: "IsNaN", "(*Buffer).Bytes", etc. -// -// All non-synthetic functions have distinct package-qualified names. -// (But two methods may have the same name "(T).f" if one is a synthetic -// wrapper promoting a non-exported method "f" from another package; in -// that case, the strings are equal but the identifiers "f" are distinct.) -// -func (f *Function) RelString(from *types.Package) string { - // Anonymous? - if f.parent != nil { - // An anonymous function's Name() looks like "parentName$1", - // but its String() should include the type/package/etc. 
- parent := f.parent.RelString(from) - for i, anon := range f.parent.AnonFuncs { - if anon == f { - return fmt.Sprintf("%s$%d", parent, 1+i) - } - } - - return f.name // should never happen - } - - // Method (declared or wrapper)? - if recv := f.Signature.Recv(); recv != nil { - return f.relMethod(from, recv.Type()) - } - - // Thunk? - if f.method != nil { - return f.relMethod(from, f.method.Recv()) - } - - // Bound? - if len(f.FreeVars) == 1 && strings.HasSuffix(f.name, "$bound") { - return f.relMethod(from, f.FreeVars[0].Type()) - } - - // Package-level function? - // Prefix with package name for cross-package references only. - if p := f.pkg(); p != nil && p != from { - return fmt.Sprintf("%s.%s", p.Path(), f.name) - } - - // Unknown. - return f.name -} - -func (f *Function) relMethod(from *types.Package, recv types.Type) string { - return fmt.Sprintf("(%s).%s", relType(recv, from), f.name) -} - -// writeSignature writes to buf the signature sig in declaration syntax. -func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature, params []*Parameter) { - buf.WriteString("func ") - if recv := sig.Recv(); recv != nil { - buf.WriteString("(") - if n := params[0].Name(); n != "" { - buf.WriteString(n) - buf.WriteString(" ") - } - types.WriteType(buf, params[0].Type(), types.RelativeTo(from)) - buf.WriteString(") ") - } - buf.WriteString(name) - types.WriteSignature(buf, sig, types.RelativeTo(from)) -} - -func (f *Function) pkg() *types.Package { - if f.Pkg != nil { - return f.Pkg.Pkg - } - return nil -} - -var _ io.WriterTo = (*Function)(nil) // *Function implements io.Writer - -func (f *Function) WriteTo(w io.Writer) (int64, error) { - var buf bytes.Buffer - WriteFunction(&buf, f) - n, err := w.Write(buf.Bytes()) - return int64(n), err -} - -// WriteFunction writes to buf a human-readable "disassembly" of f. -func WriteFunction(buf *bytes.Buffer, f *Function) { - fmt.Fprintf(buf, "# Name: %s\n", f.String()) - if f.Pkg != nil { - fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path()) - } - if syn := f.Synthetic; syn != "" { - fmt.Fprintln(buf, "# Synthetic:", syn) - } - if pos := f.Pos(); pos.IsValid() { - fmt.Fprintf(buf, "# Location: %s\n", f.Prog.Fset.Position(pos)) - } - - if f.parent != nil { - fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name()) - } - - if f.Recover != nil { - fmt.Fprintf(buf, "# Recover: %s\n", f.Recover) - } - - from := f.pkg() - - if f.FreeVars != nil { - buf.WriteString("# Free variables:\n") - for i, fv := range f.FreeVars { - fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, fv.Name(), relType(fv.Type(), from)) - } - } - - if len(f.Locals) > 0 { - buf.WriteString("# Locals:\n") - for i, l := range f.Locals { - fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(deref(l.Type()), from)) - } - } - writeSignature(buf, from, f.Name(), f.Signature, f.Params) - buf.WriteString(":\n") - - if f.Blocks == nil { - buf.WriteString("\t(external)\n") - } - - // NB. column calculations are confused by non-ASCII - // characters and assume 8-space tabs. - const punchcard = 80 // for old time's sake. - const tabwidth = 8 - for _, b := range f.Blocks { - if b == nil { - // Corrupt CFG. 
- fmt.Fprintf(buf, ".nil:\n") - continue - } - n, _ := fmt.Fprintf(buf, "%d:", b.Index) - bmsg := fmt.Sprintf("%s P:%d S:%d", b.Comment, len(b.Preds), len(b.Succs)) - fmt.Fprintf(buf, "%*s%s\n", punchcard-1-n-len(bmsg), "", bmsg) - - if false { // CFG debugging - fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs) - } - for _, instr := range b.Instrs { - buf.WriteString("\t") - switch v := instr.(type) { - case Value: - l := punchcard - tabwidth - // Left-align the instruction. - if name := v.Name(); name != "" { - n, _ := fmt.Fprintf(buf, "%s = ", name) - l -= n - } - n, _ := buf.WriteString(instr.String()) - l -= n - // Right-align the type if there's space. - if t := v.Type(); t != nil { - buf.WriteByte(' ') - ts := relType(t, from) - l -= len(ts) + len(" ") // (spaces before and after type) - if l > 0 { - fmt.Fprintf(buf, "%*s", l, "") - } - buf.WriteString(ts) - } - case nil: - // Be robust against bad transforms. - buf.WriteString("") - default: - buf.WriteString(instr.String()) - } - buf.WriteString("\n") - } - } - fmt.Fprintf(buf, "\n") -} - -// newBasicBlock adds to f a new basic block and returns it. It does -// not automatically become the current block for subsequent calls to emit. -// comment is an optional string for more readable debugging output. -// -func (f *Function) newBasicBlock(comment string) *BasicBlock { - b := &BasicBlock{ - Index: len(f.Blocks), - Comment: comment, - parent: f, - } - b.Succs = b.succs2[:0] - f.Blocks = append(f.Blocks, b) - return b -} - -// NewFunction returns a new synthetic Function instance belonging to -// prog, with its name and signature fields set as specified. -// -// The caller is responsible for initializing the remaining fields of -// the function object, e.g. Pkg, Params, Blocks. -// -// It is practically impossible for clients to construct well-formed -// SSA functions/packages/programs directly, so we assume this is the -// job of the Builder alone. NewFunction exists to provide clients a -// little flexibility. For example, analysis tools may wish to -// construct fake Functions for the root of the callgraph, a fake -// "reflect" package, etc. -// -// TODO(adonovan): think harder about the API here. -// -func (prog *Program) NewFunction(name string, sig *types.Signature, provenance string) *Function { - return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance} -} - -type extentNode [2]token.Pos - -func (n extentNode) Pos() token.Pos { return n[0] } -func (n extentNode) End() token.Pos { return n[1] } - -// Syntax returns an ast.Node whose Pos/End methods provide the -// lexical extent of the function if it was defined by Go source code -// (f.Synthetic==""), or nil otherwise. -// -// If f was built with debug information (see Package.SetDebugRef), -// the result is the *ast.FuncDecl or *ast.FuncLit that declared the -// function. Otherwise, it is an opaque Node providing only position -// information; this avoids pinning the AST in memory. 
-// -func (f *Function) Syntax() ast.Node { return f.syntax } diff --git a/vendor/honnef.co/go/tools/ssa/identical.go b/vendor/honnef.co/go/tools/ssa/identical.go deleted file mode 100644 index 53cbee1..0000000 --- a/vendor/honnef.co/go/tools/ssa/identical.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build go1.8 - -package ssa - -import "go/types" - -var structTypesIdentical = types.IdenticalIgnoreTags diff --git a/vendor/honnef.co/go/tools/ssa/identical_17.go b/vendor/honnef.co/go/tools/ssa/identical_17.go deleted file mode 100644 index da89d33..0000000 --- a/vendor/honnef.co/go/tools/ssa/identical_17.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build !go1.8 - -package ssa - -import "go/types" - -var structTypesIdentical = types.Identical diff --git a/vendor/honnef.co/go/tools/ssa/lift.go b/vendor/honnef.co/go/tools/ssa/lift.go deleted file mode 100644 index 0270797..0000000 --- a/vendor/honnef.co/go/tools/ssa/lift.go +++ /dev/null @@ -1,608 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// This file defines the lifting pass which tries to "lift" Alloc -// cells (new/local variables) into SSA registers, replacing loads -// with the dominating stored value, eliminating loads and stores, and -// inserting φ-nodes as needed. - -// Cited papers and resources: -// -// Ron Cytron et al. 1991. Efficiently computing SSA form... -// http://doi.acm.org/10.1145/115372.115320 -// -// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm. -// Software Practice and Experience 2001, 4:1-10. -// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf -// -// Daniel Berlin, llvmdev mailing list, 2012. -// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html -// (Be sure to expand the whole thread.) - -// TODO(adonovan): opt: there are many optimizations worth evaluating, and -// the conventional wisdom for SSA construction is that a simple -// algorithm well engineered often beats those of better asymptotic -// complexity on all but the most egregious inputs. -// -// Danny Berlin suggests that the Cooper et al. algorithm for -// computing the dominance frontier is superior to Cytron et al. -// Furthermore he recommends that rather than computing the DF for the -// whole function then renaming all alloc cells, it may be cheaper to -// compute the DF for each alloc cell separately and throw it away. -// -// Consider exploiting liveness information to avoid creating dead -// φ-nodes which we then immediately remove. -// -// Integrate lifting with scalar replacement of aggregates (SRA) since -// the two are synergistic. -// -// Also see many other "TODO: opt" suggestions in the code. - -import ( - "fmt" - "go/token" - "go/types" - "math/big" - "os" -) - -// If true, perform sanity checking and show diagnostic information at -// each step of lifting. Very verbose. -const debugLifting = false - -// domFrontier maps each block to the set of blocks in its dominance -// frontier. The outer slice is conceptually a map keyed by -// Block.Index. The inner slice is conceptually a set, possibly -// containing duplicates. -// -// TODO(adonovan): opt: measure impact of dups; consider a packed bit -// representation, e.g. big.Int, and bitwise parallel operations for -// the union step in the Children loop. -// -// domFrontier's methods mutate the slice's elements but not its -// length, so their receivers needn't be pointers. 
-// -type domFrontier [][]*BasicBlock - -func (df domFrontier) add(u, v *BasicBlock) { - p := &df[u.Index] - *p = append(*p, v) -} - -// build builds the dominance frontier df for the dominator (sub)tree -// rooted at u, using the Cytron et al. algorithm. -// -// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA -// by pruning the entire IDF computation, rather than merely pruning -// the DF -> IDF step. -func (df domFrontier) build(u *BasicBlock) { - // Encounter each node u in postorder of dom tree. - for _, child := range u.dom.children { - df.build(child) - } - for _, vb := range u.Succs { - if v := vb.dom; v.idom != u { - df.add(u, vb) - } - } - for _, w := range u.dom.children { - for _, vb := range df[w.Index] { - // TODO(adonovan): opt: use word-parallel bitwise union. - if v := vb.dom; v.idom != u { - df.add(u, vb) - } - } - } -} - -func buildDomFrontier(fn *Function) domFrontier { - df := make(domFrontier, len(fn.Blocks)) - df.build(fn.Blocks[0]) - if fn.Recover != nil { - df.build(fn.Recover) - } - return df -} - -func RemoveInstr(refs []Instruction, instr Instruction) []Instruction { - return removeInstr(refs, instr) -} - -func removeInstr(refs []Instruction, instr Instruction) []Instruction { - i := 0 - for _, ref := range refs { - if ref == instr { - continue - } - refs[i] = ref - i++ - } - for j := i; j != len(refs); j++ { - refs[j] = nil // aid GC - } - return refs[:i] -} - -// lift attempts to replace local and new Allocs accessed only with -// load/store by SSA registers, inserting φ-nodes where necessary. -// The result is a program in classical pruned SSA form. -// -// Preconditions: -// - fn has no dead blocks (blockopt has run). -// - Def/use info (Operands and Referrers) is up-to-date. -// - The dominator tree is up-to-date. -// -func lift(fn *Function) { - // TODO(adonovan): opt: lots of little optimizations may be - // worthwhile here, especially if they cause us to avoid - // buildDomFrontier. For example: - // - // - Alloc never loaded? Eliminate. - // - Alloc never stored? Replace all loads with a zero constant. - // - Alloc stored once? Replace loads with dominating store; - // don't forget that an Alloc is itself an effective store - // of zero. - // - Alloc used only within a single block? - // Use degenerate algorithm avoiding φ-nodes. - // - Consider synergy with scalar replacement of aggregates (SRA). - // e.g. *(&x.f) where x is an Alloc. - // Perhaps we'd get better results if we generated this as x.f - // i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)). - // Unclear. - // - // But we will start with the simplest correct code. - df := buildDomFrontier(fn) - - if debugLifting { - title := false - for i, blocks := range df { - if blocks != nil { - if !title { - fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn) - title = true - } - fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks) - } - } - } - - newPhis := make(newPhiMap) - - // During this pass we will replace some BasicBlock.Instrs - // (allocs, loads and stores) with nil, keeping a count in - // BasicBlock.gaps. At the end we will reset Instrs to the - // concatenation of all non-dead newPhis and non-nil Instrs - // for the block, reusing the original array if space permits. - - // While we're here, we also eliminate 'rundefers' - // instructions in functions that contain no 'defer' - // instructions. - usesDefer := false - - // Determine which allocs we can lift and number them densely. - // The renaming phase uses this numbering for compact maps. 
- numAllocs := 0 - for _, b := range fn.Blocks { - b.gaps = 0 - b.rundefers = 0 - for _, instr := range b.Instrs { - switch instr := instr.(type) { - case *Alloc: - index := -1 - if liftAlloc(df, instr, newPhis) { - index = numAllocs - numAllocs++ - } - instr.index = index - case *Defer: - usesDefer = true - case *RunDefers: - b.rundefers++ - } - } - } - - // renaming maps an alloc (keyed by index) to its replacement - // value. Initially the renaming contains nil, signifying the - // zero constant of the appropriate type; we construct the - // Const lazily at most once on each path through the domtree. - // TODO(adonovan): opt: cache per-function not per subtree. - renaming := make([]Value, numAllocs) - - // Renaming. - rename(fn.Blocks[0], renaming, newPhis) - - // Eliminate dead new phis, then prepend the live ones to each block. - for _, b := range fn.Blocks { - - // Compress the newPhis slice to eliminate unused phis. - // TODO(adonovan): opt: compute liveness to avoid - // placing phis in blocks for which the alloc cell is - // not live. - nps := newPhis[b] - j := 0 - for _, np := range nps { - if !phiIsLive(np.phi) { - // discard it, first removing it from referrers - for _, newval := range np.phi.Edges { - if refs := newval.Referrers(); refs != nil { - *refs = removeInstr(*refs, np.phi) - } - } - continue - } - nps[j] = np - j++ - } - nps = nps[:j] - - rundefersToKill := b.rundefers - if usesDefer { - rundefersToKill = 0 - } - - if j+b.gaps+rundefersToKill == 0 { - continue // fast path: no new phis or gaps - } - - // Compact nps + non-nil Instrs into a new slice. - // TODO(adonovan): opt: compact in situ if there is - // sufficient space or slack in the slice. - dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill) - for i, np := range nps { - dst[i] = np.phi - } - for _, instr := range b.Instrs { - if instr == nil { - continue - } - if !usesDefer { - if _, ok := instr.(*RunDefers); ok { - continue - } - } - dst[j] = instr - j++ - } - for i, np := range nps { - dst[i] = np.phi - } - b.Instrs = dst - } - - // Remove any fn.Locals that were lifted. - j := 0 - for _, l := range fn.Locals { - if l.index < 0 { - fn.Locals[j] = l - j++ - } - } - // Nil out fn.Locals[j:] to aid GC. - for i := j; i < len(fn.Locals); i++ { - fn.Locals[i] = nil - } - fn.Locals = fn.Locals[:j] -} - -func phiIsLive(phi *Phi) bool { - for _, instr := range *phi.Referrers() { - if instr == phi { - continue // self-refs don't count - } - if _, ok := instr.(*DebugRef); ok { - continue // debug refs don't count - } - return true - } - return false -} - -type blockSet struct{ big.Int } // (inherit methods from Int) - -// add adds b to the set and returns true if the set changed. -func (s *blockSet) add(b *BasicBlock) bool { - i := b.Index - if s.Bit(i) != 0 { - return false - } - s.SetBit(&s.Int, i, 1) - return true -} - -// take removes an arbitrary element from a set s and -// returns its index, or returns -1 if empty. -func (s *blockSet) take() int { - l := s.BitLen() - for i := 0; i < l; i++ { - if s.Bit(i) == 1 { - s.SetBit(&s.Int, i, 0) - return i - } - } - return -1 -} - -// newPhi is a pair of a newly introduced φ-node and the lifted Alloc -// it replaces. -type newPhi struct { - phi *Phi - alloc *Alloc -} - -// newPhiMap records for each basic block, the set of newPhis that -// must be prepended to the block. 
-type newPhiMap map[*BasicBlock][]newPhi - -// liftAlloc determines whether alloc can be lifted into registers, -// and if so, it populates newPhis with all the φ-nodes it may require -// and returns true. -// -func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap) bool { - // Don't lift aggregates into registers, because we don't have - // a way to express their zero-constants. - switch deref(alloc.Type()).Underlying().(type) { - case *types.Array, *types.Struct: - return false - } - - // Don't lift named return values in functions that defer - // calls that may recover from panic. - if fn := alloc.Parent(); fn.Recover != nil { - for _, nr := range fn.namedResults { - if nr == alloc { - return false - } - } - } - - // Compute defblocks, the set of blocks containing a - // definition of the alloc cell. - var defblocks blockSet - for _, instr := range *alloc.Referrers() { - // Bail out if we discover the alloc is not liftable; - // the only operations permitted to use the alloc are - // loads/stores into the cell, and DebugRef. - switch instr := instr.(type) { - case *Store: - if instr.Val == alloc { - return false // address used as value - } - if instr.Addr != alloc { - panic("Alloc.Referrers is inconsistent") - } - defblocks.add(instr.Block()) - case *UnOp: - if instr.Op != token.MUL { - return false // not a load - } - if instr.X != alloc { - panic("Alloc.Referrers is inconsistent") - } - case *DebugRef: - // ok - default: - return false // some other instruction - } - } - // The Alloc itself counts as a (zero) definition of the cell. - defblocks.add(alloc.Block()) - - if debugLifting { - fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name()) - } - - fn := alloc.Parent() - - // Φ-insertion. - // - // What follows is the body of the main loop of the insert-φ - // function described by Cytron et al, but instead of using - // counter tricks, we just reset the 'hasAlready' and 'work' - // sets each iteration. These are bitmaps so it's pretty cheap. - // - // TODO(adonovan): opt: recycle slice storage for W, - // hasAlready, defBlocks across liftAlloc calls. - var hasAlready blockSet - - // Initialize W and work to defblocks. - var work blockSet = defblocks // blocks seen - var W blockSet // blocks to do - W.Set(&defblocks.Int) - - // Traverse iterated dominance frontier, inserting φ-nodes. - for i := W.take(); i != -1; i = W.take() { - u := fn.Blocks[i] - for _, v := range df[u.Index] { - if hasAlready.add(v) { - // Create φ-node. - // It will be prepended to v.Instrs later, if needed. - phi := &Phi{ - Edges: make([]Value, len(v.Preds)), - Comment: alloc.Comment, - } - phi.pos = alloc.Pos() - phi.setType(deref(alloc.Type())) - phi.block = v - if debugLifting { - fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v) - } - newPhis[v] = append(newPhis[v], newPhi{phi, alloc}) - - if work.add(v) { - W.add(v) - } - } - } - } - - return true -} - -func ReplaceAll(x, y Value) { - replaceAll(x, y) -} - -// replaceAll replaces all intraprocedural uses of x with y, -// updating x.Referrers and y.Referrers. -// Precondition: x.Referrers() != nil, i.e. x must be local to some function. 
-// -func replaceAll(x, y Value) { - var rands []*Value - pxrefs := x.Referrers() - pyrefs := y.Referrers() - for _, instr := range *pxrefs { - rands = instr.Operands(rands[:0]) // recycle storage - for _, rand := range rands { - if *rand != nil { - if *rand == x { - *rand = y - } - } - } - if pyrefs != nil { - *pyrefs = append(*pyrefs, instr) // dups ok - } - } - *pxrefs = nil // x is now unreferenced -} - -// renamed returns the value to which alloc is being renamed, -// constructing it lazily if it's the implicit zero initialization. -// -func renamed(renaming []Value, alloc *Alloc) Value { - v := renaming[alloc.index] - if v == nil { - v = zeroConst(deref(alloc.Type())) - renaming[alloc.index] = v - } - return v -} - -// rename implements the (Cytron et al) SSA renaming algorithm, a -// preorder traversal of the dominator tree replacing all loads of -// Alloc cells with the value stored to that cell by the dominating -// store instruction. For lifting, we need only consider loads, -// stores and φ-nodes. -// -// renaming is a map from *Alloc (keyed by index number) to its -// dominating stored value; newPhis[x] is the set of new φ-nodes to be -// prepended to block x. -// -func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) { - // Each φ-node becomes the new name for its associated Alloc. - for _, np := range newPhis[u] { - phi := np.phi - alloc := np.alloc - renaming[alloc.index] = phi - } - - // Rename loads and stores of allocs. - for i, instr := range u.Instrs { - switch instr := instr.(type) { - case *Alloc: - if instr.index >= 0 { // store of zero to Alloc cell - // Replace dominated loads by the zero value. - renaming[instr.index] = nil - if debugLifting { - fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr) - } - // Delete the Alloc. - u.Instrs[i] = nil - u.gaps++ - } - - case *Store: - if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell - // Replace dominated loads by the stored value. - renaming[alloc.index] = instr.Val - if debugLifting { - fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n", - instr, instr.Val.Name()) - } - // Remove the store from the referrer list of the stored value. - if refs := instr.Val.Referrers(); refs != nil { - *refs = removeInstr(*refs, instr) - } - // Delete the Store. - u.Instrs[i] = nil - u.gaps++ - } - - case *UnOp: - if instr.Op == token.MUL { - if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell - newval := renamed(renaming, alloc) - if debugLifting { - fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n", - instr.Name(), instr, newval.Name()) - } - // Replace all references to - // the loaded value by the - // dominating stored value. - replaceAll(instr, newval) - // Delete the Load. - u.Instrs[i] = nil - u.gaps++ - } - } - - case *DebugRef: - if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // ref of Alloc cell - if instr.IsAddr { - instr.X = renamed(renaming, alloc) - instr.IsAddr = false - - // Add DebugRef to instr.X's referrers. - if refs := instr.X.Referrers(); refs != nil { - *refs = append(*refs, instr) - } - } else { - // A source expression denotes the address - // of an Alloc that was optimized away. - instr.X = nil - - // Delete the DebugRef. - u.Instrs[i] = nil - u.gaps++ - } - } - } - } - - // For each φ-node in a CFG successor, rename the edge. 
- for _, v := range u.Succs { - phis := newPhis[v] - if len(phis) == 0 { - continue - } - i := v.predIndex(u) - for _, np := range phis { - phi := np.phi - alloc := np.alloc - newval := renamed(renaming, alloc) - if debugLifting { - fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n", - phi.Name(), u, v, i, alloc.Name(), newval.Name()) - } - phi.Edges[i] = newval - if prefs := newval.Referrers(); prefs != nil { - *prefs = append(*prefs, phi) - } - } - } - - // Continue depth-first recursion over domtree, pushing a - // fresh copy of the renaming map for each subtree. - for _, v := range u.dom.children { - // TODO(adonovan): opt: avoid copy on final iteration; use destructive update. - r := make([]Value, len(renaming)) - copy(r, renaming) - rename(v, r, newPhis) - } -} diff --git a/vendor/honnef.co/go/tools/ssa/lvalue.go b/vendor/honnef.co/go/tools/ssa/lvalue.go deleted file mode 100644 index d2226a9..0000000 --- a/vendor/honnef.co/go/tools/ssa/lvalue.go +++ /dev/null @@ -1,125 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// lvalues are the union of addressable expressions and map-index -// expressions. - -import ( - "go/ast" - "go/token" - "go/types" -) - -// An lvalue represents an assignable location that may appear on the -// left-hand side of an assignment. This is a generalization of a -// pointer to permit updates to elements of maps. -// -type lvalue interface { - store(fn *Function, v Value) // stores v into the location - load(fn *Function) Value // loads the contents of the location - address(fn *Function) Value // address of the location - typ() types.Type // returns the type of the location -} - -// An address is an lvalue represented by a true pointer. -type address struct { - addr Value - pos token.Pos // source position - expr ast.Expr // source syntax of the value (not address) [debug mode] -} - -func (a *address) load(fn *Function) Value { - load := emitLoad(fn, a.addr) - load.pos = a.pos - return load -} - -func (a *address) store(fn *Function, v Value) { - store := emitStore(fn, a.addr, v, a.pos) - if a.expr != nil { - // store.Val is v, converted for assignability. - emitDebugRef(fn, a.expr, store.Val, false) - } -} - -func (a *address) address(fn *Function) Value { - if a.expr != nil { - emitDebugRef(fn, a.expr, a.addr, true) - } - return a.addr -} - -func (a *address) typ() types.Type { - return deref(a.addr.Type()) -} - -// An element is an lvalue represented by m[k], the location of an -// element of a map or string. These locations are not addressable -// since pointers cannot be formed from them, but they do support -// load(), and in the case of maps, store(). -// -type element struct { - m, k Value // map or string - t types.Type // map element type or string byte type - pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v) -} - -func (e *element) load(fn *Function) Value { - l := &Lookup{ - X: e.m, - Index: e.k, - } - l.setPos(e.pos) - l.setType(e.t) - return fn.emit(l) -} - -func (e *element) store(fn *Function, v Value) { - up := &MapUpdate{ - Map: e.m, - Key: e.k, - Value: emitConv(fn, v, e.t), - } - up.pos = e.pos - fn.emit(up) -} - -func (e *element) address(fn *Function) Value { - panic("map/string elements are not addressable") -} - -func (e *element) typ() types.Type { - return e.t -} - -// A blank is a dummy variable whose name is "_". 
-// It is not reified: loads are illegal and stores are ignored. -// -type blank struct{} - -func (bl blank) load(fn *Function) Value { - panic("blank.load is illegal") -} - -func (bl blank) store(fn *Function, v Value) { - s := &BlankStore{ - Val: v, - } - fn.emit(s) -} - -func (bl blank) address(fn *Function) Value { - panic("blank var is not addressable") -} - -func (bl blank) typ() types.Type { - // This should be the type of the blank Ident; the typechecker - // doesn't provide this yet, but fortunately, we don't need it - // yet either. - panic("blank.typ is unimplemented") -} diff --git a/vendor/honnef.co/go/tools/ssa/methods.go b/vendor/honnef.co/go/tools/ssa/methods.go deleted file mode 100644 index 7d1fb42..0000000 --- a/vendor/honnef.co/go/tools/ssa/methods.go +++ /dev/null @@ -1,241 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// This file defines utilities for population of method sets. - -import ( - "fmt" - "go/types" -) - -// MethodValue returns the Function implementing method sel, building -// wrapper methods on demand. It returns nil if sel denotes an -// abstract (interface) method. -// -// Precondition: sel.Kind() == MethodVal. -// -// Thread-safe. -// -// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) -// -func (prog *Program) MethodValue(sel *types.Selection) *Function { - if sel.Kind() != types.MethodVal { - panic(fmt.Sprintf("Method(%s) kind != MethodVal", sel)) - } - T := sel.Recv() - if isInterface(T) { - return nil // abstract method - } - if prog.mode&LogSource != 0 { - defer logStack("Method %s %v", T, sel)() - } - - prog.methodsMu.Lock() - defer prog.methodsMu.Unlock() - - return prog.addMethod(prog.createMethodSet(T), sel) -} - -// LookupMethod returns the implementation of the method of type T -// identified by (pkg, name). It returns nil if the method exists but -// is abstract, and panics if T has no such method. -// -func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function { - sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name) - if sel == nil { - panic(fmt.Sprintf("%s has no method %s", T, types.Id(pkg, name))) - } - return prog.MethodValue(sel) -} - -// methodSet contains the (concrete) methods of a non-interface type. -type methodSet struct { - mapping map[string]*Function // populated lazily - complete bool // mapping contains all methods -} - -// Precondition: !isInterface(T). 
-// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) -func (prog *Program) createMethodSet(T types.Type) *methodSet { - mset, ok := prog.methodSets.At(T).(*methodSet) - if !ok { - mset = &methodSet{mapping: make(map[string]*Function)} - prog.methodSets.Set(T, mset) - } - return mset -} - -// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) -func (prog *Program) addMethod(mset *methodSet, sel *types.Selection) *Function { - if sel.Kind() == types.MethodExpr { - panic(sel) - } - id := sel.Obj().Id() - fn := mset.mapping[id] - if fn == nil { - obj := sel.Obj().(*types.Func) - - needsPromotion := len(sel.Index()) > 1 - needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.Recv()) - if needsPromotion || needsIndirection { - fn = makeWrapper(prog, sel) - } else { - fn = prog.declaredFunc(obj) - } - if fn.Signature.Recv() == nil { - panic(fn) // missing receiver - } - mset.mapping[id] = fn - } - return fn -} - -// RuntimeTypes returns a new unordered slice containing all -// concrete types in the program for which a complete (non-empty) -// method set is required at run-time. -// -// Thread-safe. -// -// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) -// -func (prog *Program) RuntimeTypes() []types.Type { - prog.methodsMu.Lock() - defer prog.methodsMu.Unlock() - - var res []types.Type - prog.methodSets.Iterate(func(T types.Type, v interface{}) { - if v.(*methodSet).complete { - res = append(res, T) - } - }) - return res -} - -// declaredFunc returns the concrete function/method denoted by obj. -// Panic ensues if there is none. -// -func (prog *Program) declaredFunc(obj *types.Func) *Function { - if v := prog.packageLevelValue(obj); v != nil { - return v.(*Function) - } - panic("no concrete method: " + obj.String()) -} - -// needMethodsOf ensures that runtime type information (including the -// complete method set) is available for the specified type T and all -// its subcomponents. -// -// needMethodsOf must be called for at least every type that is an -// operand of some MakeInterface instruction, and for the type of -// every exported package member. -// -// Precondition: T is not a method signature (*Signature with Recv()!=nil). -// -// Thread-safe. (Called via emitConv from multiple builder goroutines.) -// -// TODO(adonovan): make this faster. It accounts for 20% of SSA build time. -// -// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) -// -func (prog *Program) needMethodsOf(T types.Type) { - prog.methodsMu.Lock() - prog.needMethods(T, false) - prog.methodsMu.Unlock() -} - -// Precondition: T is not a method signature (*Signature with Recv()!=nil). -// Recursive case: skip => don't create methods for T. -// -// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) -// -func (prog *Program) needMethods(T types.Type, skip bool) { - // Each package maintains its own set of types it has visited. - if prevSkip, ok := prog.runtimeTypes.At(T).(bool); ok { - // needMethods(T) was previously called - if !prevSkip || skip { - return // already seen, with same or false 'skip' value - } - } - prog.runtimeTypes.Set(T, skip) - - tmset := prog.MethodSets.MethodSet(T) - - if !skip && !isInterface(T) && tmset.Len() > 0 { - // Create methods of T. - mset := prog.createMethodSet(T) - if !mset.complete { - mset.complete = true - n := tmset.Len() - for i := 0; i < n; i++ { - prog.addMethod(mset, tmset.At(i)) - } - } - } - - // Recursion over signatures of each method. 
- for i := 0; i < tmset.Len(); i++ { - sig := tmset.At(i).Type().(*types.Signature) - prog.needMethods(sig.Params(), false) - prog.needMethods(sig.Results(), false) - } - - switch t := T.(type) { - case *types.Basic: - // nop - - case *types.Interface: - // nop---handled by recursion over method set. - - case *types.Pointer: - prog.needMethods(t.Elem(), false) - - case *types.Slice: - prog.needMethods(t.Elem(), false) - - case *types.Chan: - prog.needMethods(t.Elem(), false) - - case *types.Map: - prog.needMethods(t.Key(), false) - prog.needMethods(t.Elem(), false) - - case *types.Signature: - if t.Recv() != nil { - panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv())) - } - prog.needMethods(t.Params(), false) - prog.needMethods(t.Results(), false) - - case *types.Named: - // A pointer-to-named type can be derived from a named - // type via reflection. It may have methods too. - prog.needMethods(types.NewPointer(T), false) - - // Consider 'type T struct{S}' where S has methods. - // Reflection provides no way to get from T to struct{S}, - // only to S, so the method set of struct{S} is unwanted, - // so set 'skip' flag during recursion. - prog.needMethods(t.Underlying(), true) - - case *types.Array: - prog.needMethods(t.Elem(), false) - - case *types.Struct: - for i, n := 0, t.NumFields(); i < n; i++ { - prog.needMethods(t.Field(i).Type(), false) - } - - case *types.Tuple: - for i, n := 0, t.Len(); i < n; i++ { - prog.needMethods(t.At(i).Type(), false) - } - - default: - panic(T) - } -} diff --git a/vendor/honnef.co/go/tools/ssa/mode.go b/vendor/honnef.co/go/tools/ssa/mode.go deleted file mode 100644 index d2a2698..0000000 --- a/vendor/honnef.co/go/tools/ssa/mode.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssa - -// This file defines the BuilderMode type and its command-line flag. - -import ( - "bytes" - "fmt" -) - -// BuilderMode is a bitmask of options for diagnostics and checking. -// -// *BuilderMode satisfies the flag.Value interface. Example: -// -// var mode = ssa.BuilderMode(0) -// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) } -// -type BuilderMode uint - -const ( - PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout - PrintFunctions // Print function SSA code to stdout - LogSource // Log source locations as SSA builder progresses - SanityCheckFunctions // Perform sanity checking of function bodies - NaiveForm // Build naïve SSA form: don't replace local loads/stores with registers - BuildSerially // Build packages serially, not in parallel. - GlobalDebug // Enable debug info for all packages - BareInits // Build init functions without guards or calls to dependent inits -) - -const BuilderModeDoc = `Options controlling the SSA builder. -The value is a sequence of zero or more of these letters: -C perform sanity [C]hecking of the SSA form. -D include [D]ebug info for every function. -P print [P]ackage inventory. -F print [F]unction SSA code. -S log [S]ource locations as SSA builder progresses. -L build distinct packages seria[L]ly instead of in parallel. -N build [N]aive SSA form: don't replace local loads/stores with registers. -I build bare [I]nit functions: no init guards or calls to dependent inits. 
-` - -func (m BuilderMode) String() string { - var buf bytes.Buffer - if m&GlobalDebug != 0 { - buf.WriteByte('D') - } - if m&PrintPackages != 0 { - buf.WriteByte('P') - } - if m&PrintFunctions != 0 { - buf.WriteByte('F') - } - if m&LogSource != 0 { - buf.WriteByte('S') - } - if m&SanityCheckFunctions != 0 { - buf.WriteByte('C') - } - if m&NaiveForm != 0 { - buf.WriteByte('N') - } - if m&BuildSerially != 0 { - buf.WriteByte('L') - } - return buf.String() -} - -// Set parses the flag characters in s and updates *m. -func (m *BuilderMode) Set(s string) error { - var mode BuilderMode - for _, c := range s { - switch c { - case 'D': - mode |= GlobalDebug - case 'P': - mode |= PrintPackages - case 'F': - mode |= PrintFunctions - case 'S': - mode |= LogSource | BuildSerially - case 'C': - mode |= SanityCheckFunctions - case 'N': - mode |= NaiveForm - case 'L': - mode |= BuildSerially - default: - return fmt.Errorf("unknown BuilderMode option: %q", c) - } - } - *m = mode - return nil -} - -// Get returns m. -func (m BuilderMode) Get() interface{} { return m } diff --git a/vendor/honnef.co/go/tools/ssa/print.go b/vendor/honnef.co/go/tools/ssa/print.go deleted file mode 100644 index a7deb88..0000000 --- a/vendor/honnef.co/go/tools/ssa/print.go +++ /dev/null @@ -1,433 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// This file implements the String() methods for all Value and -// Instruction types. - -import ( - "bytes" - "fmt" - "go/types" - "io" - "reflect" - "sort" - - "golang.org/x/tools/go/types/typeutil" -) - -// relName returns the name of v relative to i. -// In most cases, this is identical to v.Name(), but references to -// Functions (including methods) and Globals use RelString and -// all types are displayed with relType, so that only cross-package -// references are package-qualified. -// -func relName(v Value, i Instruction) string { - var from *types.Package - if i != nil { - from = i.Parent().pkg() - } - switch v := v.(type) { - case Member: // *Function or *Global - return v.RelString(from) - case *Const: - return v.RelString(from) - } - return v.Name() -} - -func relType(t types.Type, from *types.Package) string { - return types.TypeString(t, types.RelativeTo(from)) -} - -func relString(m Member, from *types.Package) string { - // NB: not all globals have an Object (e.g. init$guard), - // so use Package().Object not Object.Package(). - if pkg := m.Package().Pkg; pkg != nil && pkg != from { - return fmt.Sprintf("%s.%s", pkg.Path(), m.Name()) - } - return m.Name() -} - -// Value.String() -// -// This method is provided only for debugging. -// It never appears in disassembly, which uses Value.Name(). 
- -func (v *Parameter) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from)) -} - -func (v *FreeVar) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from)) -} - -func (v *Builtin) String() string { - return fmt.Sprintf("builtin %s", v.Name()) -} - -// Instruction.String() - -func (v *Alloc) String() string { - op := "local" - if v.Heap { - op = "new" - } - from := v.Parent().pkg() - return fmt.Sprintf("%s %s (%s)", op, relType(deref(v.Type()), from), v.Comment) -} - -func (v *Phi) String() string { - var b bytes.Buffer - b.WriteString("φ [") - for i, edge := range v.Edges { - if i > 0 { - b.WriteString(", ") - } - // Be robust against malformed CFG. - block := -1 - if v.block != nil && i < len(v.block.Preds) { - block = v.block.Preds[i].Index - } - fmt.Fprintf(&b, "%d: ", block) - edgeVal := "" // be robust - if edge != nil { - edgeVal = relName(edge, v) - } - b.WriteString(edgeVal) - } - b.WriteString("]") - if v.Comment != "" { - b.WriteString(" #") - b.WriteString(v.Comment) - } - return b.String() -} - -func printCall(v *CallCommon, prefix string, instr Instruction) string { - var b bytes.Buffer - b.WriteString(prefix) - if !v.IsInvoke() { - b.WriteString(relName(v.Value, instr)) - } else { - fmt.Fprintf(&b, "invoke %s.%s", relName(v.Value, instr), v.Method.Name()) - } - b.WriteString("(") - for i, arg := range v.Args { - if i > 0 { - b.WriteString(", ") - } - b.WriteString(relName(arg, instr)) - } - if v.Signature().Variadic() { - b.WriteString("...") - } - b.WriteString(")") - return b.String() -} - -func (c *CallCommon) String() string { - return printCall(c, "", nil) -} - -func (v *Call) String() string { - return printCall(&v.Call, "", v) -} - -func (v *BinOp) String() string { - return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v)) -} - -func (v *UnOp) String() string { - return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk)) -} - -func printConv(prefix string, v, x Value) string { - from := v.Parent().pkg() - return fmt.Sprintf("%s %s <- %s (%s)", - prefix, - relType(v.Type(), from), - relType(x.Type(), from), - relName(x, v.(Instruction))) -} - -func (v *ChangeType) String() string { return printConv("changetype", v, v.X) } -func (v *Convert) String() string { return printConv("convert", v, v.X) } -func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) } -func (v *MakeInterface) String() string { return printConv("make", v, v.X) } - -func (v *MakeClosure) String() string { - var b bytes.Buffer - fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v)) - if v.Bindings != nil { - b.WriteString(" [") - for i, c := range v.Bindings { - if i > 0 { - b.WriteString(", ") - } - b.WriteString(relName(c, v)) - } - b.WriteString("]") - } - return b.String() -} - -func (v *MakeSlice) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("make %s %s %s", - relType(v.Type(), from), - relName(v.Len, v), - relName(v.Cap, v)) -} - -func (v *Slice) String() string { - var b bytes.Buffer - b.WriteString("slice ") - b.WriteString(relName(v.X, v)) - b.WriteString("[") - if v.Low != nil { - b.WriteString(relName(v.Low, v)) - } - b.WriteString(":") - if v.High != nil { - b.WriteString(relName(v.High, v)) - } - if v.Max != nil { - b.WriteString(":") - b.WriteString(relName(v.Max, v)) - } - b.WriteString("]") - return b.String() -} - -func (v *MakeMap) String() string { - res := "" - if 
v.Reserve != nil { - res = relName(v.Reserve, v) - } - from := v.Parent().pkg() - return fmt.Sprintf("make %s %s", relType(v.Type(), from), res) -} - -func (v *MakeChan) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v)) -} - -func (v *FieldAddr) String() string { - st := deref(v.X.Type()).Underlying().(*types.Struct) - // Be robust against a bad index. - name := "?" - if 0 <= v.Field && v.Field < st.NumFields() { - name = st.Field(v.Field).Name() - } - return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field) -} - -func (v *Field) String() string { - st := v.X.Type().Underlying().(*types.Struct) - // Be robust against a bad index. - name := "?" - if 0 <= v.Field && v.Field < st.NumFields() { - name = st.Field(v.Field).Name() - } - return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field) -} - -func (v *IndexAddr) String() string { - return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v)) -} - -func (v *Index) String() string { - return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v)) -} - -func (v *Lookup) String() string { - return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk)) -} - -func (v *Range) String() string { - return "range " + relName(v.X, v) -} - -func (v *Next) String() string { - return "next " + relName(v.Iter, v) -} - -func (v *TypeAssert) String() string { - from := v.Parent().pkg() - return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from)) -} - -func (v *Extract) String() string { - return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index) -} - -func (s *Jump) String() string { - // Be robust against malformed CFG. - block := -1 - if s.block != nil && len(s.block.Succs) == 1 { - block = s.block.Succs[0].Index - } - return fmt.Sprintf("jump %d", block) -} - -func (s *If) String() string { - // Be robust against malformed CFG. 
- tblock, fblock := -1, -1 - if s.block != nil && len(s.block.Succs) == 2 { - tblock = s.block.Succs[0].Index - fblock = s.block.Succs[1].Index - } - return fmt.Sprintf("if %s goto %d else %d", relName(s.Cond, s), tblock, fblock) -} - -func (s *Go) String() string { - return printCall(&s.Call, "go ", s) -} - -func (s *Panic) String() string { - return "panic " + relName(s.X, s) -} - -func (s *Return) String() string { - var b bytes.Buffer - b.WriteString("return") - for i, r := range s.Results { - if i == 0 { - b.WriteString(" ") - } else { - b.WriteString(", ") - } - b.WriteString(relName(r, s)) - } - return b.String() -} - -func (*RunDefers) String() string { - return "rundefers" -} - -func (s *Send) String() string { - return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s)) -} - -func (s *Defer) String() string { - return printCall(&s.Call, "defer ", s) -} - -func (s *Select) String() string { - var b bytes.Buffer - for i, st := range s.States { - if i > 0 { - b.WriteString(", ") - } - if st.Dir == types.RecvOnly { - b.WriteString("<-") - b.WriteString(relName(st.Chan, s)) - } else { - b.WriteString(relName(st.Chan, s)) - b.WriteString("<-") - b.WriteString(relName(st.Send, s)) - } - } - non := "" - if !s.Blocking { - non = "non" - } - return fmt.Sprintf("select %sblocking [%s]", non, b.String()) -} - -func (s *Store) String() string { - return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s)) -} - -func (s *BlankStore) String() string { - return fmt.Sprintf("_ = %s", relName(s.Val, s)) -} - -func (s *MapUpdate) String() string { - return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s)) -} - -func (s *DebugRef) String() string { - p := s.Parent().Prog.Fset.Position(s.Pos()) - var descr interface{} - if s.object != nil { - descr = s.object // e.g. "var x int" - } else { - descr = reflect.TypeOf(s.Expr) // e.g. "*ast.CallExpr" - } - var addr string - if s.IsAddr { - addr = "address of " - } - return fmt.Sprintf("; %s%s @ %d:%d is %s", addr, descr, p.Line, p.Column, s.X.Name()) -} - -func (p *Package) String() string { - return "package " + p.Pkg.Path() -} - -var _ io.WriterTo = (*Package)(nil) // *Package implements io.Writer - -func (p *Package) WriteTo(w io.Writer) (int64, error) { - var buf bytes.Buffer - WritePackage(&buf, p) - n, err := w.Write(buf.Bytes()) - return int64(n), err -} - -// WritePackage writes to buf a human-readable summary of p. 
-func WritePackage(buf *bytes.Buffer, p *Package) { - fmt.Fprintf(buf, "%s:\n", p) - - var names []string - maxname := 0 - for name := range p.Members { - if l := len(name); l > maxname { - maxname = l - } - names = append(names, name) - } - - from := p.Pkg - sort.Strings(names) - for _, name := range names { - switch mem := p.Members[name].(type) { - case *NamedConst: - fmt.Fprintf(buf, " const %-*s %s = %s\n", - maxname, name, mem.Name(), mem.Value.RelString(from)) - - case *Function: - fmt.Fprintf(buf, " func %-*s %s\n", - maxname, name, relType(mem.Type(), from)) - - case *Type: - fmt.Fprintf(buf, " type %-*s %s\n", - maxname, name, relType(mem.Type().Underlying(), from)) - for _, meth := range typeutil.IntuitiveMethodSet(mem.Type(), &p.Prog.MethodSets) { - fmt.Fprintf(buf, " %s\n", types.SelectionString(meth, types.RelativeTo(from))) - } - - case *Global: - fmt.Fprintf(buf, " var %-*s %s\n", - maxname, name, relType(mem.Type().(*types.Pointer).Elem(), from)) - } - } - - fmt.Fprintf(buf, "\n") -} - -func commaOk(x bool) string { - if x { - return ",ok" - } - return "" -} diff --git a/vendor/honnef.co/go/tools/ssa/sanity.go b/vendor/honnef.co/go/tools/ssa/sanity.go deleted file mode 100644 index bd7377c..0000000 --- a/vendor/honnef.co/go/tools/ssa/sanity.go +++ /dev/null @@ -1,523 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// An optional pass for sanity-checking invariants of the SSA representation. -// Currently it checks CFG invariants but little at the instruction level. - -import ( - "fmt" - "go/types" - "io" - "os" - "strings" -) - -type sanity struct { - reporter io.Writer - fn *Function - block *BasicBlock - instrs map[Instruction]struct{} - insane bool -} - -// sanityCheck performs integrity checking of the SSA representation -// of the function fn and returns true if it was valid. Diagnostics -// are written to reporter if non-nil, os.Stderr otherwise. Some -// diagnostics are only warnings and do not imply a negative result. -// -// Sanity-checking is intended to facilitate the debugging of code -// transformation passes. -// -func sanityCheck(fn *Function, reporter io.Writer) bool { - if reporter == nil { - reporter = os.Stderr - } - return (&sanity{reporter: reporter}).checkFunction(fn) -} - -// mustSanityCheck is like sanityCheck but panics instead of returning -// a negative result. -// -func mustSanityCheck(fn *Function, reporter io.Writer) { - if !sanityCheck(fn, reporter) { - fn.WriteTo(os.Stderr) - panic("SanityCheck failed") - } -} - -func (s *sanity) diagnostic(prefix, format string, args ...interface{}) { - fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn) - if s.block != nil { - fmt.Fprintf(s.reporter, ", block %s", s.block) - } - io.WriteString(s.reporter, ": ") - fmt.Fprintf(s.reporter, format, args...) - io.WriteString(s.reporter, "\n") -} - -func (s *sanity) errorf(format string, args ...interface{}) { - s.insane = true - s.diagnostic("Error", format, args...) -} - -func (s *sanity) warnf(format string, args ...interface{}) { - s.diagnostic("Warning", format, args...) -} - -// findDuplicate returns an arbitrary basic block that appeared more -// than once in blocks, or nil if all were unique. 
-func findDuplicate(blocks []*BasicBlock) *BasicBlock { - if len(blocks) < 2 { - return nil - } - if blocks[0] == blocks[1] { - return blocks[0] - } - // Slow path: - m := make(map[*BasicBlock]bool) - for _, b := range blocks { - if m[b] { - return b - } - m[b] = true - } - return nil -} - -func (s *sanity) checkInstr(idx int, instr Instruction) { - switch instr := instr.(type) { - case *If, *Jump, *Return, *Panic: - s.errorf("control flow instruction not at end of block") - case *Phi: - if idx == 0 { - // It suffices to apply this check to just the first phi node. - if dup := findDuplicate(s.block.Preds); dup != nil { - s.errorf("phi node in block with duplicate predecessor %s", dup) - } - } else { - prev := s.block.Instrs[idx-1] - if _, ok := prev.(*Phi); !ok { - s.errorf("Phi instruction follows a non-Phi: %T", prev) - } - } - if ne, np := len(instr.Edges), len(s.block.Preds); ne != np { - s.errorf("phi node has %d edges but %d predecessors", ne, np) - - } else { - for i, e := range instr.Edges { - if e == nil { - s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i]) - } - } - } - - case *Alloc: - if !instr.Heap { - found := false - for _, l := range s.fn.Locals { - if l == instr { - found = true - break - } - } - if !found { - s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr) - } - } - - case *BinOp: - case *Call: - case *ChangeInterface: - case *ChangeType: - case *Convert: - if _, ok := instr.X.Type().Underlying().(*types.Basic); !ok { - if _, ok := instr.Type().Underlying().(*types.Basic); !ok { - s.errorf("convert %s -> %s: at least one type must be basic", instr.X.Type(), instr.Type()) - } - } - - case *Defer: - case *Extract: - case *Field: - case *FieldAddr: - case *Go: - case *Index: - case *IndexAddr: - case *Lookup: - case *MakeChan: - case *MakeClosure: - numFree := len(instr.Fn.(*Function).FreeVars) - numBind := len(instr.Bindings) - if numFree != numBind { - s.errorf("MakeClosure has %d Bindings for function %s with %d free vars", - numBind, instr.Fn, numFree) - - } - if recv := instr.Type().(*types.Signature).Recv(); recv != nil { - s.errorf("MakeClosure's type includes receiver %s", recv.Type()) - } - - case *MakeInterface: - case *MakeMap: - case *MakeSlice: - case *MapUpdate: - case *Next: - case *Range: - case *RunDefers: - case *Select: - case *Send: - case *Slice: - case *Store: - case *TypeAssert: - case *UnOp: - case *DebugRef: - case *BlankStore: - case *Sigma: - // TODO(adonovan): implement checks. - default: - panic(fmt.Sprintf("Unknown instruction type: %T", instr)) - } - - if call, ok := instr.(CallInstruction); ok { - if call.Common().Signature() == nil { - s.errorf("nil signature: %s", call) - } - } - - // Check that value-defining instructions have valid types - // and a valid referrer list. - if v, ok := instr.(Value); ok { - t := v.Type() - if t == nil { - s.errorf("no type: %s = %s", v.Name(), v) - } else if t == tRangeIter { - // not a proper type; ignore. - } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { - s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t) - } - s.checkReferrerList(v) - } - - // Untyped constants are legal as instruction Operands(), - // for example: - // _ = "foo"[0] - // or: - // if wordsize==64 {...} - - // All other non-Instruction Values can be found via their - // enclosing Function or Package. 
-} - -func (s *sanity) checkFinalInstr(idx int, instr Instruction) { - switch instr := instr.(type) { - case *If: - if nsuccs := len(s.block.Succs); nsuccs != 2 { - s.errorf("If-terminated block has %d successors; expected 2", nsuccs) - return - } - if s.block.Succs[0] == s.block.Succs[1] { - s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0]) - return - } - - case *Jump: - if nsuccs := len(s.block.Succs); nsuccs != 1 { - s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs) - return - } - - case *Return: - if nsuccs := len(s.block.Succs); nsuccs != 0 { - s.errorf("Return-terminated block has %d successors; expected none", nsuccs) - return - } - if na, nf := len(instr.Results), s.fn.Signature.Results().Len(); nf != na { - s.errorf("%d-ary return in %d-ary function", na, nf) - } - - case *Panic: - if nsuccs := len(s.block.Succs); nsuccs != 0 { - s.errorf("Panic-terminated block has %d successors; expected none", nsuccs) - return - } - - default: - s.errorf("non-control flow instruction at end of block") - } -} - -func (s *sanity) checkBlock(b *BasicBlock, index int) { - s.block = b - - if b.Index != index { - s.errorf("block has incorrect Index %d", b.Index) - } - if b.parent != s.fn { - s.errorf("block has incorrect parent %s", b.parent) - } - - // Check all blocks are reachable. - // (The entry block is always implicitly reachable, - // as is the Recover block, if any.) - if (index > 0 && b != b.parent.Recover) && len(b.Preds) == 0 { - s.warnf("unreachable block") - if b.Instrs == nil { - // Since this block is about to be pruned, - // tolerating transient problems in it - // simplifies other optimizations. - return - } - } - - // Check predecessor and successor relations are dual, - // and that all blocks in CFG belong to same function. - for _, a := range b.Preds { - found := false - for _, bb := range a.Succs { - if bb == b { - found = true - break - } - } - if !found { - s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs) - } - if a.parent != s.fn { - s.errorf("predecessor %s belongs to different function %s", a, a.parent) - } - } - for _, c := range b.Succs { - found := false - for _, bb := range c.Preds { - if bb == b { - found = true - break - } - } - if !found { - s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds) - } - if c.parent != s.fn { - s.errorf("successor %s belongs to different function %s", c, c.parent) - } - } - - // Check each instruction is sane. - n := len(b.Instrs) - if n == 0 { - s.errorf("basic block contains no instructions") - } - var rands [10]*Value // reuse storage - for j, instr := range b.Instrs { - if instr == nil { - s.errorf("nil instruction at index %d", j) - continue - } - if b2 := instr.Block(); b2 == nil { - s.errorf("nil Block() for instruction at index %d", j) - continue - } else if b2 != b { - s.errorf("wrong Block() (%s) for instruction at index %d ", b2, j) - continue - } - if j < n-1 { - s.checkInstr(j, instr) - } else { - s.checkFinalInstr(j, instr) - } - - // Check Instruction.Operands. - operands: - for i, op := range instr.Operands(rands[:0]) { - if op == nil { - s.errorf("nil operand pointer %d of %s", i, instr) - continue - } - val := *op - if val == nil { - continue // a nil operand is ok - } - - // Check that "untyped" types only appear on constant operands. 
- if _, ok := (*op).(*Const); !ok { - if basic, ok := (*op).Type().(*types.Basic); ok { - if basic.Info()&types.IsUntyped != 0 { - s.errorf("operand #%d of %s is untyped: %s", i, instr, basic) - } - } - } - - // Check that Operands that are also Instructions belong to same function. - // TODO(adonovan): also check their block dominates block b. - if val, ok := val.(Instruction); ok { - if val.Parent() != s.fn { - s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent()) - } - } - - // Check that each function-local operand of - // instr refers back to instr. (NB: quadratic) - switch val := val.(type) { - case *Const, *Global, *Builtin: - continue // not local - case *Function: - if val.parent == nil { - continue // only anon functions are local - } - } - - // TODO(adonovan): check val.Parent() != nil <=> val.Referrers() is defined. - - if refs := val.Referrers(); refs != nil { - for _, ref := range *refs { - if ref == instr { - continue operands - } - } - s.errorf("operand %d of %s (%s) does not refer to us", i, instr, val) - } else { - s.errorf("operand %d of %s (%s) has no referrers", i, instr, val) - } - } - } -} - -func (s *sanity) checkReferrerList(v Value) { - refs := v.Referrers() - if refs == nil { - s.errorf("%s has missing referrer list", v.Name()) - return - } - for i, ref := range *refs { - if _, ok := s.instrs[ref]; !ok { - s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref) - } - } -} - -func (s *sanity) checkFunction(fn *Function) bool { - // TODO(adonovan): check Function invariants: - // - check params match signature - // - check transient fields are nil - // - warn if any fn.Locals do not appear among block instructions. - s.fn = fn - if fn.Prog == nil { - s.errorf("nil Prog") - } - - fn.String() // must not crash - fn.RelString(fn.pkg()) // must not crash - - // All functions have a package, except delegates (which are - // shared across packages, or duplicated as weak symbols in a - // separate-compilation model), and error.Error. - if fn.Pkg == nil { - if strings.HasPrefix(fn.Synthetic, "wrapper ") || - strings.HasPrefix(fn.Synthetic, "bound ") || - strings.HasPrefix(fn.Synthetic, "thunk ") || - strings.HasSuffix(fn.name, "Error") { - // ok - } else { - s.errorf("nil Pkg") - } - } - if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn { - s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn) - } - for i, l := range fn.Locals { - if l.Parent() != fn { - s.errorf("Local %s at index %d has wrong parent", l.Name(), i) - } - if l.Heap { - s.errorf("Local %s at index %d has Heap flag set", l.Name(), i) - } - } - // Build the set of valid referrers. - s.instrs = make(map[Instruction]struct{}) - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - s.instrs[instr] = struct{}{} - } - } - for i, p := range fn.Params { - if p.Parent() != fn { - s.errorf("Param %s at index %d has wrong parent", p.Name(), i) - } - s.checkReferrerList(p) - } - for i, fv := range fn.FreeVars { - if fv.Parent() != fn { - s.errorf("FreeVar %s at index %d has wrong parent", fv.Name(), i) - } - s.checkReferrerList(fv) - } - - if fn.Blocks != nil && len(fn.Blocks) == 0 { - // Function _had_ blocks (so it's not external) but - // they were "optimized" away, even the entry block. 
- s.errorf("Blocks slice is non-nil but empty") - } - for i, b := range fn.Blocks { - if b == nil { - s.warnf("nil *BasicBlock at f.Blocks[%d]", i) - continue - } - s.checkBlock(b, i) - } - if fn.Recover != nil && fn.Blocks[fn.Recover.Index] != fn.Recover { - s.errorf("Recover block is not in Blocks slice") - } - - s.block = nil - for i, anon := range fn.AnonFuncs { - if anon.Parent() != fn { - s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent()) - } - } - s.fn = nil - return !s.insane -} - -// sanityCheckPackage checks invariants of packages upon creation. -// It does not require that the package is built. -// Unlike sanityCheck (for functions), it just panics at the first error. -func sanityCheckPackage(pkg *Package) { - if pkg.Pkg == nil { - panic(fmt.Sprintf("Package %s has no Object", pkg)) - } - pkg.String() // must not crash - - for name, mem := range pkg.Members { - if name != mem.Name() { - panic(fmt.Sprintf("%s: %T.Name() = %s, want %s", - pkg.Pkg.Path(), mem, mem.Name(), name)) - } - obj := mem.Object() - if obj == nil { - // This check is sound because fields - // {Global,Function}.object have type - // types.Object. (If they were declared as - // *types.{Var,Func}, we'd have a non-empty - // interface containing a nil pointer.) - - continue // not all members have typechecker objects - } - if obj.Name() != name { - if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") { - // Ok. The name of a declared init function varies between - // its types.Func ("init") and its ssa.Function ("init#%d"). - } else { - panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s", - pkg.Pkg.Path(), mem, obj.Name(), name)) - } - } - if obj.Pos() != mem.Pos() { - panic(fmt.Sprintf("%s Pos=%d obj.Pos=%d", mem, mem.Pos(), obj.Pos())) - } - } -} diff --git a/vendor/honnef.co/go/tools/ssa/source.go b/vendor/honnef.co/go/tools/ssa/source.go deleted file mode 100644 index e17e023..0000000 --- a/vendor/honnef.co/go/tools/ssa/source.go +++ /dev/null @@ -1,299 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// This file defines utilities for working with source positions -// or source-level named entities ("objects"). - -// TODO(adonovan): test that {Value,Instruction}.Pos() positions match -// the originating syntax, as specified. - -import ( - "go/ast" - "go/token" - "go/types" - "log" -) - -// EnclosingFunction returns the function that contains the syntax -// node denoted by path. -// -// Syntax associated with package-level variable specifications is -// enclosed by the package's init() function. -// -// Returns nil if not found; reasons might include: -// - the node is not enclosed by any function. -// - the node is within an anonymous function (FuncLit) and -// its SSA function has not been created yet -// (pkg.Build() has not yet been called). -// -func EnclosingFunction(pkg *Package, path []ast.Node) *Function { - // Start with package-level function... - fn := findEnclosingPackageLevelFunction(pkg, path) - if fn == nil { - return nil // not in any function - } - - // ...then walk down the nested anonymous functions. 
- n := len(path) -outer: - for i := range path { - if lit, ok := path[n-1-i].(*ast.FuncLit); ok { - for _, anon := range fn.AnonFuncs { - if anon.Pos() == lit.Type.Func { - fn = anon - continue outer - } - } - // SSA function not found: - // - package not yet built, or maybe - // - builder skipped FuncLit in dead block - // (in principle; but currently the Builder - // generates even dead FuncLits). - return nil - } - } - return fn -} - -// HasEnclosingFunction returns true if the AST node denoted by path -// is contained within the declaration of some function or -// package-level variable. -// -// Unlike EnclosingFunction, the behaviour of this function does not -// depend on whether SSA code for pkg has been built, so it can be -// used to quickly reject check inputs that will cause -// EnclosingFunction to fail, prior to SSA building. -// -func HasEnclosingFunction(pkg *Package, path []ast.Node) bool { - return findEnclosingPackageLevelFunction(pkg, path) != nil -} - -// findEnclosingPackageLevelFunction returns the Function -// corresponding to the package-level function enclosing path. -// -func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function { - if n := len(path); n >= 2 { // [... {Gen,Func}Decl File] - switch decl := path[n-2].(type) { - case *ast.GenDecl: - if decl.Tok == token.VAR && n >= 3 { - // Package-level 'var' initializer. - return pkg.init - } - - case *ast.FuncDecl: - if decl.Recv == nil && decl.Name.Name == "init" { - // Explicit init() function. - for _, b := range pkg.init.Blocks { - for _, instr := range b.Instrs { - if instr, ok := instr.(*Call); ok { - if callee, ok := instr.Call.Value.(*Function); ok && callee.Pkg == pkg && callee.Pos() == decl.Name.NamePos { - return callee - } - } - } - } - // Hack: return non-nil when SSA is not yet - // built so that HasEnclosingFunction works. - return pkg.init - } - // Declared function/method. - return findNamedFunc(pkg, decl.Name.NamePos) - } - } - return nil // not in any function -} - -// findNamedFunc returns the named function whose FuncDecl.Ident is at -// position pos. -// -func findNamedFunc(pkg *Package, pos token.Pos) *Function { - // Look at all package members and method sets of named types. - // Not very efficient. - for _, mem := range pkg.Members { - switch mem := mem.(type) { - case *Function: - if mem.Pos() == pos { - return mem - } - case *Type: - mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type())) - for i, n := 0, mset.Len(); i < n; i++ { - // Don't call Program.Method: avoid creating wrappers. - obj := mset.At(i).Obj().(*types.Func) - if obj.Pos() == pos { - if pkg.values[obj] == nil { - log.Println(obj) - } - return pkg.values[obj].(*Function) - } - } - } - } - return nil -} - -// ValueForExpr returns the SSA Value that corresponds to non-constant -// expression e. -// -// It returns nil if no value was found, e.g. -// - the expression is not lexically contained within f; -// - f was not built with debug information; or -// - e is a constant expression. (For efficiency, no debug -// information is stored for constants. Use -// go/types.Info.Types[e].Value instead.) -// - e is a reference to nil or a built-in function. -// - the value was optimised away. -// -// If e is an addressable expression used in an lvalue context, -// value is the address denoted by e, and isAddr is true. -// -// The types of e (or &e, if isAddr) and the result are equal -// (modulo "untyped" bools resulting from comparisons). 
-// -// (Tip: to find the ssa.Value given a source position, use -// importer.PathEnclosingInterval to locate the ast.Node, then -// EnclosingFunction to locate the Function, then ValueForExpr to find -// the ssa.Value.) -// -func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) { - if f.debugInfo() { // (opt) - e = unparen(e) - for _, b := range f.Blocks { - for _, instr := range b.Instrs { - if ref, ok := instr.(*DebugRef); ok { - if ref.Expr == e { - return ref.X, ref.IsAddr - } - } - } - } - } - return -} - -// --- Lookup functions for source-level named entities (types.Objects) --- - -// Package returns the SSA Package corresponding to the specified -// type-checker package object. -// It returns nil if no such SSA package has been created. -// -func (prog *Program) Package(obj *types.Package) *Package { - return prog.packages[obj] -} - -// packageLevelValue returns the package-level value corresponding to -// the specified named object, which may be a package-level const -// (*Const), var (*Global) or func (*Function) of some package in -// prog. It returns nil if the object is not found. -// -func (prog *Program) packageLevelValue(obj types.Object) Value { - if pkg, ok := prog.packages[obj.Pkg()]; ok { - return pkg.values[obj] - } - return nil -} - -// FuncValue returns the concrete Function denoted by the source-level -// named function obj, or nil if obj denotes an interface method. -// -// TODO(adonovan): check the invariant that obj.Type() matches the -// result's Signature, both in the params/results and in the receiver. -// -func (prog *Program) FuncValue(obj *types.Func) *Function { - fn, _ := prog.packageLevelValue(obj).(*Function) - return fn -} - -// ConstValue returns the SSA Value denoted by the source-level named -// constant obj. -// -func (prog *Program) ConstValue(obj *types.Const) *Const { - // TODO(adonovan): opt: share (don't reallocate) - // Consts for const objects and constant ast.Exprs. - - // Universal constant? {true,false,nil} - if obj.Parent() == types.Universe { - return NewConst(obj.Val(), obj.Type()) - } - // Package-level named constant? - if v := prog.packageLevelValue(obj); v != nil { - return v.(*Const) - } - return NewConst(obj.Val(), obj.Type()) -} - -// VarValue returns the SSA Value that corresponds to a specific -// identifier denoting the source-level named variable obj. -// -// VarValue returns nil if a local variable was not found, perhaps -// because its package was not built, the debug information was not -// requested during SSA construction, or the value was optimized away. -// -// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval), -// and that ident must resolve to obj. -// -// pkg is the package enclosing the reference. (A reference to a var -// always occurs within a function, so we need to know where to find it.) -// -// If the identifier is a field selector and its base expression is -// non-addressable, then VarValue returns the value of that field. -// For example: -// func f() struct {x int} -// f().x // VarValue(x) returns a *Field instruction of type int -// -// All other identifiers denote addressable locations (variables). -// For them, VarValue may return either the variable's address or its -// value, even when the expression is evaluated only for its value; the -// situation is reported by isAddr, the second component of the result. -// -// If !isAddr, the returned value is the one associated with the -// specific identifier. 
For example, -// var x int // VarValue(x) returns Const 0 here -// x = 1 // VarValue(x) returns Const 1 here -// -// It is not specified whether the value or the address is returned in -// any particular case, as it may depend upon optimizations performed -// during SSA code generation, such as registerization, constant -// folding, avoidance of materialization of subexpressions, etc. -// -func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) { - // All references to a var are local to some function, possibly init. - fn := EnclosingFunction(pkg, ref) - if fn == nil { - return // e.g. def of struct field; SSA not built? - } - - id := ref[0].(*ast.Ident) - - // Defining ident of a parameter? - if id.Pos() == obj.Pos() { - for _, param := range fn.Params { - if param.Object() == obj { - return param, false - } - } - } - - // Other ident? - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - if dr, ok := instr.(*DebugRef); ok { - if dr.Pos() == id.Pos() { - return dr.X, dr.IsAddr - } - } - } - } - - // Defining ident of package-level var? - if v := prog.packageLevelValue(obj); v != nil { - return v.(*Global), true - } - - return // e.g. debug info not requested, or var optimized away -} diff --git a/vendor/honnef.co/go/tools/ssa/ssa.go b/vendor/honnef.co/go/tools/ssa/ssa.go deleted file mode 100644 index ed280c7..0000000 --- a/vendor/honnef.co/go/tools/ssa/ssa.go +++ /dev/null @@ -1,1751 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// This package defines a high-level intermediate representation for -// Go programs using static single-assignment (SSA) form. - -import ( - "fmt" - "go/ast" - exact "go/constant" - "go/token" - "go/types" - "sync" - - "golang.org/x/tools/go/types/typeutil" -) - -// A Program is a partial or complete Go program converted to SSA form. -type Program struct { - Fset *token.FileSet // position information for the files of this Program - imported map[string]*Package // all importable Packages, keyed by import path - packages map[*types.Package]*Package // all loaded Packages, keyed by object - mode BuilderMode // set of mode bits for SSA construction - MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets - - methodsMu sync.Mutex // guards the following maps: - methodSets typeutil.Map // maps type to its concrete methodSet - runtimeTypes typeutil.Map // types for which rtypes are needed - canon typeutil.Map // type canonicalization map - bounds map[*types.Func]*Function // bounds for curried x.Method closures - thunks map[selectionKey]*Function // thunks for T.Method expressions -} - -// A Package is a single analyzed Go package containing Members for -// all package-level functions, variables, constants and types it -// declares. These may be accessed directly via Members, or via the -// type-specific accessor methods Func, Type, Var and Const. -// -// Members also contains entries for "init" (the synthetic package -// initializer) and "init#%d", the nth declared init function, -// and unspecified other things too. -// -type Package struct { - Prog *Program // the owning program - Pkg *types.Package // the corresponding go/types.Package - Members map[string]Member // all package members keyed by name (incl. init and init#%d) - values map[types.Object]Value // package members (incl. 
types and methods), keyed by object - init *Function // Func("init"); the package's init function - debug bool // include full debug info in this package - - // The following fields are set transiently, then cleared - // after building. - buildOnce sync.Once // ensures package building occurs once - ninit int32 // number of init functions - info *types.Info // package type information - files []*ast.File // package ASTs -} - -// A Member is a member of a Go package, implemented by *NamedConst, -// *Global, *Function, or *Type; they are created by package-level -// const, var, func and type declarations respectively. -// -type Member interface { - Name() string // declared name of the package member - String() string // package-qualified name of the package member - RelString(*types.Package) string // like String, but relative refs are unqualified - Object() types.Object // typechecker's object for this member, if any - Pos() token.Pos // position of member's declaration, if known - Type() types.Type // type of the package member - Token() token.Token // token.{VAR,FUNC,CONST,TYPE} - Package() *Package // the containing package -} - -// A Type is a Member of a Package representing a package-level named type. -// -// Type() returns a *types.Named. -// -type Type struct { - object *types.TypeName - pkg *Package -} - -// A NamedConst is a Member of a Package representing a package-level -// named constant. -// -// Pos() returns the position of the declaring ast.ValueSpec.Names[*] -// identifier. -// -// NB: a NamedConst is not a Value; it contains a constant Value, which -// it augments with the name and position of its 'const' declaration. -// -type NamedConst struct { - object *types.Const - Value *Const - pos token.Pos - pkg *Package -} - -// A Value is an SSA value that can be referenced by an instruction. -type Value interface { - // Name returns the name of this value, and determines how - // this Value appears when used as an operand of an - // Instruction. - // - // This is the same as the source name for Parameters, - // Builtins, Functions, FreeVars, Globals. - // For constants, it is a representation of the constant's value - // and type. For all other Values this is the name of the - // virtual register defined by the instruction. - // - // The name of an SSA Value is not semantically significant, - // and may not even be unique within a function. - Name() string - - // If this value is an Instruction, String returns its - // disassembled form; otherwise it returns unspecified - // human-readable information about the Value, such as its - // kind, name and type. - String() string - - // Type returns the type of this value. Many instructions - // (e.g. IndexAddr) change their behaviour depending on the - // types of their operands. - Type() types.Type - - // Parent returns the function to which this Value belongs. - // It returns nil for named Functions, Builtin, Const and Global. - Parent() *Function - - // Referrers returns the list of instructions that have this - // value as one of their operands; it may contain duplicates - // if an instruction has a repeated operand. - // - // Referrers actually returns a pointer through which the - // caller may perform mutations to the object's state. - // - // Referrers is currently only defined if Parent()!=nil, - // i.e. for the function-local values FreeVar, Parameter, - // Functions (iff anonymous) and all value-defining instructions. - // It returns nil for named Functions, Builtin, Const and Global. 
- // - // Instruction.Operands contains the inverse of this relation. - Referrers() *[]Instruction - - // Pos returns the location of the AST token most closely - // associated with the operation that gave rise to this value, - // or token.NoPos if it was not explicit in the source. - // - // For each ast.Node type, a particular token is designated as - // the closest location for the expression, e.g. the Lparen - // for an *ast.CallExpr. This permits a compact but - // approximate mapping from Values to source positions for use - // in diagnostic messages, for example. - // - // (Do not use this position to determine which Value - // corresponds to an ast.Expr; use Function.ValueForExpr - // instead. NB: it requires that the function was built with - // debug information.) - Pos() token.Pos -} - -// An Instruction is an SSA instruction that computes a new Value or -// has some effect. -// -// An Instruction that defines a value (e.g. BinOp) also implements -// the Value interface; an Instruction that only has an effect (e.g. Store) -// does not. -// -type Instruction interface { - // String returns the disassembled form of this value. - // - // Examples of Instructions that are Values: - // "x + y" (BinOp) - // "len([])" (Call) - // Note that the name of the Value is not printed. - // - // Examples of Instructions that are not Values: - // "return x" (Return) - // "*y = x" (Store) - // - // (The separation Value.Name() from Value.String() is useful - // for some analyses which distinguish the operation from the - // value it defines, e.g., 'y = local int' is both an allocation - // of memory 'local int' and a definition of a pointer y.) - String() string - - // Parent returns the function to which this instruction - // belongs. - Parent() *Function - - // Block returns the basic block to which this instruction - // belongs. - Block() *BasicBlock - - // setBlock sets the basic block to which this instruction belongs. - setBlock(*BasicBlock) - - // Operands returns the operands of this instruction: the - // set of Values it references. - // - // Specifically, it appends their addresses to rands, a - // user-provided slice, and returns the resulting slice, - // permitting avoidance of memory allocation. - // - // The operands are appended in undefined order, but the order - // is consistent for a given Instruction; the addresses are - // always non-nil but may point to a nil Value. Clients may - // store through the pointers, e.g. to effect a value - // renaming. - // - // Value.Referrers is a subset of the inverse of this - // relation. (Referrers are not tracked for all types of - // Values.) - Operands(rands []*Value) []*Value - - // Pos returns the location of the AST token most closely - // associated with the operation that gave rise to this - // instruction, or token.NoPos if it was not explicit in the - // source. - // - // For each ast.Node type, a particular token is designated as - // the closest location for the expression, e.g. the Go token - // for an *ast.GoStmt. This permits a compact but approximate - // mapping from Instructions to source positions for use in - // diagnostic messages, for example. - // - // (Do not use this position to determine which Instruction - // corresponds to an ast.Expr; see the notes for Value.Pos. - // This position may be used to determine which non-Value - // Instruction corresponds to some ast.Stmts, but not all: If - // and Jump instructions have no Pos(), for example.) - Pos() token.Pos -} - -// A Node is a node in the SSA value graph. 
Every concrete type that -// implements Node is also either a Value, an Instruction, or both. -// -// Node contains the methods common to Value and Instruction, plus the -// Operands and Referrers methods generalized to return nil for -// non-Instructions and non-Values, respectively. -// -// Node is provided to simplify SSA graph algorithms. Clients should -// use the more specific and informative Value or Instruction -// interfaces where appropriate. -// -type Node interface { - // Common methods: - String() string - Pos() token.Pos - Parent() *Function - - // Partial methods: - Operands(rands []*Value) []*Value // nil for non-Instructions - Referrers() *[]Instruction // nil for non-Values -} - -// Function represents the parameters, results, and code of a function -// or method. -// -// If Blocks is nil, this indicates an external function for which no -// Go source code is available. In this case, FreeVars and Locals -// are nil too. Clients performing whole-program analysis must -// handle external functions specially. -// -// Blocks contains the function's control-flow graph (CFG). -// Blocks[0] is the function entry point; block order is not otherwise -// semantically significant, though it may affect the readability of -// the disassembly. -// To iterate over the blocks in dominance order, use DomPreorder(). -// -// Recover is an optional second entry point to which control resumes -// after a recovered panic. The Recover block may contain only a return -// statement, preceded by a load of the function's named return -// parameters, if any. -// -// A nested function (Parent()!=nil) that refers to one or more -// lexically enclosing local variables ("free variables") has FreeVars. -// Such functions cannot be called directly but require a -// value created by MakeClosure which, via its Bindings, supplies -// values for these parameters. -// -// If the function is a method (Signature.Recv() != nil) then the first -// element of Params is the receiver parameter. -// -// A Go package may declare many functions called "init". -// For each one, Object().Name() returns "init" but Name() returns -// "init#1", etc, in declaration order. -// -// Pos() returns the declaring ast.FuncLit.Type.Func or the position -// of the ast.FuncDecl.Name, if the function was explicit in the -// source. Synthetic wrappers, for which Synthetic != "", may share -// the same position as the function they wrap. -// Syntax.Pos() always returns the position of the declaring "func" token. -// -// Type() returns the function's Signature. 
-// -type Function struct { - name string - object types.Object // a declared *types.Func or one of its wrappers - method *types.Selection // info about provenance of synthetic methods - Signature *types.Signature - pos token.Pos - - Synthetic string // provenance of synthetic function; "" for true source functions - syntax ast.Node // *ast.Func{Decl,Lit}; replaced with simple ast.Node after build, unless debug mode - parent *Function // enclosing function if anon; nil if global - Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error) - Prog *Program // enclosing program - Params []*Parameter // function parameters; for methods, includes receiver - FreeVars []*FreeVar // free variables whose values must be supplied by closure - Locals []*Alloc // local variables of this function - Blocks []*BasicBlock // basic blocks of the function; nil => external - Recover *BasicBlock // optional; control transfers here after recovered panic - AnonFuncs []*Function // anonymous functions directly beneath this one - referrers []Instruction // referring instructions (iff Parent() != nil) - - // The following fields are set transiently during building, - // then cleared. - currentBlock *BasicBlock // where to emit code - objects map[types.Object]Value // addresses of local variables - namedResults []*Alloc // tuple of named results - targets *targets // linked stack of branch targets - lblocks map[*ast.Object]*lblock // labelled blocks -} - -// BasicBlock represents an SSA basic block. -// -// The final element of Instrs is always an explicit transfer of -// control (If, Jump, Return, or Panic). -// -// A block may contain no Instructions only if it is unreachable, -// i.e., Preds is nil. Empty blocks are typically pruned. -// -// BasicBlocks and their Preds/Succs relation form a (possibly cyclic) -// graph independent of the SSA Value graph: the control-flow graph or -// CFG. It is illegal for multiple edges to exist between the same -// pair of blocks. -// -// Each BasicBlock is also a node in the dominator tree of the CFG. -// The tree may be navigated using Idom()/Dominees() and queried using -// Dominates(). -// -// The order of Preds and Succs is significant (to Phi and If -// instructions, respectively). -// -type BasicBlock struct { - Index int // index of this block within Parent().Blocks - Comment string // optional label; no semantic significance - parent *Function // parent function - Instrs []Instruction // instructions in order - Preds, Succs []*BasicBlock // predecessors and successors - succs2 [2]*BasicBlock // initial space for Succs - dom domInfo // dominator tree info - gaps int // number of nil Instrs (transient) - rundefers int // number of rundefers (transient) -} - -// Pure values ---------------------------------------- - -// A FreeVar represents a free variable of the function to which it -// belongs. -// -// FreeVars are used to implement anonymous functions, whose free -// variables are lexically captured in a closure formed by -// MakeClosure. The value of such a free var is an Alloc or another -// FreeVar and is considered a potentially escaping heap address, with -// pointer type. -// -// FreeVars are also used to implement bound method closures. Such a -// free var represents the receiver value and may be of any type that -// has concrete methods. -// -// Pos() returns the position of the value that was captured, which -// belongs to an enclosing function. 
-// -type FreeVar struct { - name string - typ types.Type - pos token.Pos - parent *Function - referrers []Instruction - - // Transiently needed during building. - outer Value // the Value captured from the enclosing context. -} - -// A Parameter represents an input parameter of a function. -// -type Parameter struct { - name string - object types.Object // a *types.Var; nil for non-source locals - typ types.Type - pos token.Pos - parent *Function - referrers []Instruction -} - -// A Const represents the value of a constant expression. -// -// The underlying type of a constant may be any boolean, numeric, or -// string type. In addition, a Const may represent the nil value of -// any reference type---interface, map, channel, pointer, slice, or -// function---but not "untyped nil". -// -// All source-level constant expressions are represented by a Const -// of the same type and value. -// -// Value holds the exact value of the constant, independent of its -// Type(), using the same representation as package go/exact uses for -// constants, or nil for a typed nil value. -// -// Pos() returns token.NoPos. -// -// Example printed form: -// 42:int -// "hello":untyped string -// 3+4i:MyComplex -// -type Const struct { - typ types.Type - Value exact.Value -} - -// A Global is a named Value holding the address of a package-level -// variable. -// -// Pos() returns the position of the ast.ValueSpec.Names[*] -// identifier. -// -type Global struct { - name string - object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard - typ types.Type - pos token.Pos - - Pkg *Package -} - -// A Builtin represents a specific use of a built-in function, e.g. len. -// -// Builtins are immutable values. Builtins do not have addresses. -// Builtins can only appear in CallCommon.Func. -// -// Name() indicates the function: one of the built-in functions from the -// Go spec (excluding "make" and "new") or one of these ssa-defined -// intrinsics: -// -// // wrapnilchk returns ptr if non-nil, panics otherwise. -// // (For use in indirection wrappers.) -// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T -// -// Object() returns a *types.Builtin for built-ins defined by the spec, -// nil for others. -// -// Type() returns a *types.Signature representing the effective -// signature of the built-in for this call. -// -type Builtin struct { - name string - sig *types.Signature -} - -// Value-defining instructions ---------------------------------------- - -// The Alloc instruction reserves space for a variable of the given type, -// zero-initializes it, and yields its address. -// -// Alloc values are always addresses, and have pointer types, so the -// type of the allocated variable is actually -// Type().Underlying().(*types.Pointer).Elem(). -// -// If Heap is false, Alloc allocates space in the function's -// activation record (frame); we refer to an Alloc(Heap=false) as a -// "local" alloc. Each local Alloc returns the same address each time -// it is executed within the same activation; the space is -// re-initialized to zero. -// -// If Heap is true, Alloc allocates space in the heap; we -// refer to an Alloc(Heap=true) as a "new" alloc. Each new Alloc -// returns a different address each time it is executed. -// -// When Alloc is applied to a channel, map or slice type, it returns -// the address of an uninitialized (nil) reference of that kind; store -// the result of MakeSlice, MakeMap or MakeChan in that location to -// instantiate these types. 
-// -// Pos() returns the ast.CompositeLit.Lbrace for a composite literal, -// or the ast.CallExpr.Rparen for a call to new() or for a call that -// allocates a varargs slice. -// -// Example printed form: -// t0 = local int -// t1 = new int -// -type Alloc struct { - register - Comment string - Heap bool - index int // dense numbering; for lifting -} - -var _ Instruction = (*Sigma)(nil) -var _ Value = (*Sigma)(nil) - -type Sigma struct { - register - X Value - Branch bool -} - -func (p *Sigma) Value() Value { - v := p.X - for { - sigma, ok := v.(*Sigma) - if !ok { - break - } - v = sigma - } - return v -} - -func (p *Sigma) String() string { - return fmt.Sprintf("σ [%s.%t]", relName(p.X, p), p.Branch) -} - -// The Phi instruction represents an SSA φ-node, which combines values -// that differ across incoming control-flow edges and yields a new -// value. Within a block, all φ-nodes must appear before all non-φ -// nodes. -// -// Pos() returns the position of the && or || for short-circuit -// control-flow joins, or that of the *Alloc for φ-nodes inserted -// during SSA renaming. -// -// Example printed form: -// t2 = phi [0: t0, 1: t1] -// -type Phi struct { - register - Comment string // a hint as to its purpose - Edges []Value // Edges[i] is value for Block().Preds[i] -} - -// The Call instruction represents a function or method call. -// -// The Call instruction yields the function result if there is exactly -// one. Otherwise it returns a tuple, the components of which are -// accessed via Extract. -// -// See CallCommon for generic function call documentation. -// -// Pos() returns the ast.CallExpr.Lparen, if explicit in the source. -// -// Example printed form: -// t2 = println(t0, t1) -// t4 = t3() -// t7 = invoke t5.Println(...t6) -// -type Call struct { - register - Call CallCommon -} - -// The BinOp instruction yields the result of binary operation X Op Y. -// -// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source. -// -// Example printed form: -// t1 = t0 + 1:int -// -type BinOp struct { - register - // One of: - // ADD SUB MUL QUO REM + - * / % - // AND OR XOR SHL SHR AND_NOT & | ^ << >> &~ - // EQL LSS GTR NEQ LEQ GEQ == != < <= < >= - Op token.Token - X, Y Value -} - -// The UnOp instruction yields the result of Op X. -// ARROW is channel receive. -// MUL is pointer indirection (load). -// XOR is bitwise complement. -// SUB is negation. -// NOT is logical negation. -// -// If CommaOk and Op=ARROW, the result is a 2-tuple of the value above -// and a boolean indicating the success of the receive. The -// components of the tuple are accessed using Extract. -// -// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source. -// For receive operations (ARROW) implicit in ranging over a channel, -// Pos() returns the ast.RangeStmt.For. -// For implicit memory loads (STAR), Pos() returns the position of the -// most closely associated source-level construct; the details are not -// specified. -// -// Example printed form: -// t0 = *x -// t2 = <-t1,ok -// -type UnOp struct { - register - Op token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^ - X Value - CommaOk bool -} - -// The ChangeType instruction applies to X a value-preserving type -// change to Type(). -// -// Type changes are permitted: -// - between a named type and its underlying type. -// - between two named types of the same underlying type. -// - between (possibly named) pointers to identical base types. 
-// - from a bidirectional channel to a read- or write-channel, -// optionally adding/removing a name. -// -// This operation cannot fail dynamically. -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// t1 = changetype *int <- IntPtr (t0) -// -type ChangeType struct { - register - X Value -} - -// The Convert instruction yields the conversion of value X to type -// Type(). One or both of those types is basic (but possibly named). -// -// A conversion may change the value and representation of its operand. -// Conversions are permitted: -// - between real numeric types. -// - between complex numeric types. -// - between string and []byte or []rune. -// - between pointers and unsafe.Pointer. -// - between unsafe.Pointer and uintptr. -// - from (Unicode) integer to (UTF-8) string. -// A conversion may imply a type name change also. -// -// This operation cannot fail dynamically. -// -// Conversions of untyped string/number/bool constants to a specific -// representation are eliminated during SSA construction. -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// t1 = convert []byte <- string (t0) -// -type Convert struct { - register - X Value -} - -// ChangeInterface constructs a value of one interface type from a -// value of another interface type known to be assignable to it. -// This operation cannot fail. -// -// Pos() returns the ast.CallExpr.Lparen if the instruction arose from -// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the -// instruction arose from an explicit e.(T) operation; or token.NoPos -// otherwise. -// -// Example printed form: -// t1 = change interface interface{} <- I (t0) -// -type ChangeInterface struct { - register - X Value -} - -// MakeInterface constructs an instance of an interface type from a -// value of a concrete type. -// -// Use Program.MethodSets.MethodSet(X.Type()) to find the method-set -// of X, and Program.Method(m) to find the implementation of a method. -// -// To construct the zero value of an interface type T, use: -// NewConst(exact.MakeNil(), T, pos) -// -// Pos() returns the ast.CallExpr.Lparen, if the instruction arose -// from an explicit conversion in the source. -// -// Example printed form: -// t1 = make interface{} <- int (42:int) -// t2 = make Stringer <- t0 -// -type MakeInterface struct { - register - X Value -} - -// The MakeClosure instruction yields a closure value whose code is -// Fn and whose free variables' values are supplied by Bindings. -// -// Type() returns a (possibly named) *types.Signature. -// -// Pos() returns the ast.FuncLit.Type.Func for a function literal -// closure or the ast.SelectorExpr.Sel for a bound method closure. -// -// Example printed form: -// t0 = make closure anon@1.2 [x y z] -// t1 = make closure bound$(main.I).add [i] -// -type MakeClosure struct { - register - Fn Value // always a *Function - Bindings []Value // values for each free variable in Fn.FreeVars -} - -// The MakeMap instruction creates a new hash-table-based map object -// and yields a value of kind map. -// -// Type() returns a (possibly named) *types.Map. -// -// Pos() returns the ast.CallExpr.Lparen, if created by make(map), or -// the ast.CompositeLit.Lbrack if created by a literal. 
-// -// Example printed form: -// t1 = make map[string]int t0 -// t1 = make StringIntMap t0 -// -type MakeMap struct { - register - Reserve Value // initial space reservation; nil => default -} - -// The MakeChan instruction creates a new channel object and yields a -// value of kind chan. -// -// Type() returns a (possibly named) *types.Chan. -// -// Pos() returns the ast.CallExpr.Lparen for the make(chan) that -// created it. -// -// Example printed form: -// t0 = make chan int 0 -// t0 = make IntChan 0 -// -type MakeChan struct { - register - Size Value // int; size of buffer; zero => synchronous. -} - -// The MakeSlice instruction yields a slice of length Len backed by a -// newly allocated array of length Cap. -// -// Both Len and Cap must be non-nil Values of integer type. -// -// (Alloc(types.Array) followed by Slice will not suffice because -// Alloc can only create arrays of constant length.) -// -// Type() returns a (possibly named) *types.Slice. -// -// Pos() returns the ast.CallExpr.Lparen for the make([]T) that -// created it. -// -// Example printed form: -// t1 = make []string 1:int t0 -// t1 = make StringSlice 1:int t0 -// -type MakeSlice struct { - register - Len Value - Cap Value -} - -// The Slice instruction yields a slice of an existing string, slice -// or *array X between optional integer bounds Low and High. -// -// Dynamically, this instruction panics if X evaluates to a nil *array -// pointer. -// -// Type() returns string if the type of X was string, otherwise a -// *types.Slice with the same element type as X. -// -// Pos() returns the ast.SliceExpr.Lbrack if created by a x[:] slice -// operation, the ast.CompositeLit.Lbrace if created by a literal, or -// NoPos if not explicit in the source (e.g. a variadic argument slice). -// -// Example printed form: -// t1 = slice t0[1:] -// -type Slice struct { - register - X Value // slice, string, or *array - Low, High, Max Value // each may be nil -} - -// The FieldAddr instruction yields the address of Field of *struct X. -// -// The field is identified by its index within the field list of the -// struct type of X. -// -// Dynamically, this instruction panics if X evaluates to a nil -// pointer. -// -// Type() returns a (possibly named) *types.Pointer. -// -// Pos() returns the position of the ast.SelectorExpr.Sel for the -// field, if explicit in the source. -// -// Example printed form: -// t1 = &t0.name [#1] -// -type FieldAddr struct { - register - X Value // *struct - Field int // index into X.Type().Deref().(*types.Struct).Fields -} - -// The Field instruction yields the Field of struct X. -// -// The field is identified by its index within the field list of the -// struct type of X; by using numeric indices we avoid ambiguity of -// package-local identifiers and permit compact representations. -// -// Pos() returns the position of the ast.SelectorExpr.Sel for the -// field, if explicit in the source. -// -// Example printed form: -// t1 = t0.name [#1] -// -type Field struct { - register - X Value // struct - Field int // index into X.Type().(*types.Struct).Fields -} - -// The IndexAddr instruction yields the address of the element at -// index Index of collection X. Index is an integer expression. -// -// The elements of maps and strings are not addressable; use Lookup or -// MapUpdate instead. -// -// Dynamically, this instruction panics if X evaluates to a nil *array -// pointer. -// -// Type() returns a (possibly named) *types.Pointer. 
-// -// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if -// explicit in the source. -// -// Example printed form: -// t2 = &t0[t1] -// -type IndexAddr struct { - register - X Value // slice or *array, - Index Value // numeric index -} - -// The Index instruction yields element Index of array X. -// -// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if -// explicit in the source. -// -// Example printed form: -// t2 = t0[t1] -// -type Index struct { - register - X Value // array - Index Value // integer index -} - -// The Lookup instruction yields element Index of collection X, a map -// or string. Index is an integer expression if X is a string or the -// appropriate key type if X is a map. -// -// If CommaOk, the result is a 2-tuple of the value above and a -// boolean indicating the result of a map membership test for the key. -// The components of the tuple are accessed using Extract. -// -// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source. -// -// Example printed form: -// t2 = t0[t1] -// t5 = t3[t4],ok -// -type Lookup struct { - register - X Value // string or map - Index Value // numeric or key-typed index - CommaOk bool // return a value,ok pair -} - -// SelectState is a helper for Select. -// It represents one goal state and its corresponding communication. -// -type SelectState struct { - Dir types.ChanDir // direction of case (SendOnly or RecvOnly) - Chan Value // channel to use (for send or receive) - Send Value // value to send (for send) - Pos token.Pos // position of token.ARROW - DebugNode ast.Node // ast.SendStmt or ast.UnaryExpr(<-) [debug mode] -} - -// The Select instruction tests whether (or blocks until) one -// of the specified sent or received states is entered. -// -// Let n be the number of States for which Dir==RECV and T_i (0<=i string iterator; false => map iterator. -} - -// The TypeAssert instruction tests whether interface value X has type -// AssertedType. -// -// If !CommaOk, on success it returns v, the result of the conversion -// (defined below); on failure it panics. -// -// If CommaOk: on success it returns a pair (v, true) where v is the -// result of the conversion; on failure it returns (z, false) where z -// is AssertedType's zero value. The components of the pair must be -// accessed using the Extract instruction. -// -// If AssertedType is a concrete type, TypeAssert checks whether the -// dynamic type in interface X is equal to it, and if so, the result -// of the conversion is a copy of the value in the interface. -// -// If AssertedType is an interface, TypeAssert checks whether the -// dynamic type of the interface is assignable to it, and if so, the -// result of the conversion is a copy of the interface value X. -// If AssertedType is a superinterface of X.Type(), the operation will -// fail iff the operand is nil. (Contrast with ChangeInterface, which -// performs no nil-check.) -// -// Type() reflects the actual type of the result, possibly a -// 2-types.Tuple; AssertedType is the asserted type. -// -// Pos() returns the ast.CallExpr.Lparen if the instruction arose from -// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the -// instruction arose from an explicit e.(T) operation; or the -// ast.CaseClause.Case if the instruction arose from a case of a -// type-switch statement. 
-// -// Example printed form: -// t1 = typeassert t0.(int) -// t3 = typeassert,ok t2.(T) -// -type TypeAssert struct { - register - X Value - AssertedType types.Type - CommaOk bool -} - -// The Extract instruction yields component Index of Tuple. -// -// This is used to access the results of instructions with multiple -// return values, such as Call, TypeAssert, Next, UnOp(ARROW) and -// IndexExpr(Map). -// -// Example printed form: -// t1 = extract t0 #1 -// -type Extract struct { - register - Tuple Value - Index int -} - -// Instructions executed for effect. They do not yield a value. -------------------- - -// The Jump instruction transfers control to the sole successor of its -// owning block. -// -// A Jump must be the last instruction of its containing BasicBlock. -// -// Pos() returns NoPos. -// -// Example printed form: -// jump done -// -type Jump struct { - anInstruction -} - -// The If instruction transfers control to one of the two successors -// of its owning block, depending on the boolean Cond: the first if -// true, the second if false. -// -// An If instruction must be the last instruction of its containing -// BasicBlock. -// -// Pos() returns NoPos. -// -// Example printed form: -// if t0 goto done else body -// -type If struct { - anInstruction - Cond Value -} - -// The Return instruction returns values and control back to the calling -// function. -// -// len(Results) is always equal to the number of results in the -// function's signature. -// -// If len(Results) > 1, Return returns a tuple value with the specified -// components which the caller must access using Extract instructions. -// -// There is no instruction to return a ready-made tuple like those -// returned by a "value,ok"-mode TypeAssert, Lookup or UnOp(ARROW) or -// a tail-call to a function with multiple result parameters. -// -// Return must be the last instruction of its containing BasicBlock. -// Such a block has no successors. -// -// Pos() returns the ast.ReturnStmt.Return, if explicit in the source. -// -// Example printed form: -// return -// return nil:I, 2:int -// -type Return struct { - anInstruction - Results []Value - pos token.Pos -} - -// The RunDefers instruction pops and invokes the entire stack of -// procedure calls pushed by Defer instructions in this function. -// -// It is legal to encounter multiple 'rundefers' instructions in a -// single control-flow path through a function; this is useful in -// the combined init() function, for example. -// -// Pos() returns NoPos. -// -// Example printed form: -// rundefers -// -type RunDefers struct { - anInstruction -} - -// The Panic instruction initiates a panic with value X. -// -// A Panic instruction must be the last instruction of its containing -// BasicBlock, which must have no successors. -// -// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction; -// they are treated as calls to a built-in function. -// -// Pos() returns the ast.CallExpr.Lparen if this panic was explicit -// in the source. -// -// Example printed form: -// panic t0 -// -type Panic struct { - anInstruction - X Value // an interface{} - pos token.Pos -} - -// The Go instruction creates a new goroutine and calls the specified -// function within it. -// -// See CallCommon for generic function call documentation. -// -// Pos() returns the ast.GoStmt.Go. 
-// -// Example printed form: -// go println(t0, t1) -// go t3() -// go invoke t5.Println(...t6) -// -type Go struct { - anInstruction - Call CallCommon - pos token.Pos -} - -// The Defer instruction pushes the specified call onto a stack of -// functions to be called by a RunDefers instruction or by a panic. -// -// See CallCommon for generic function call documentation. -// -// Pos() returns the ast.DeferStmt.Defer. -// -// Example printed form: -// defer println(t0, t1) -// defer t3() -// defer invoke t5.Println(...t6) -// -type Defer struct { - anInstruction - Call CallCommon - pos token.Pos -} - -// The Send instruction sends X on channel Chan. -// -// Pos() returns the ast.SendStmt.Arrow, if explicit in the source. -// -// Example printed form: -// send t0 <- t1 -// -type Send struct { - anInstruction - Chan, X Value - pos token.Pos -} - -// The Store instruction stores Val at address Addr. -// Stores can be of arbitrary types. -// -// Pos() returns the position of the source-level construct most closely -// associated with the memory store operation. -// Since implicit memory stores are numerous and varied and depend upon -// implementation choices, the details are not specified. -// -// Example printed form: -// *x = y -// -type Store struct { - anInstruction - Addr Value - Val Value - pos token.Pos -} - -// The BlankStore instruction is emitted for assignments to the blank -// identifier. -// -// BlankStore is a pseudo-instruction: it has no dynamic effect. -// -// Pos() returns NoPos. -// -// Example printed form: -// _ = t0 -// -type BlankStore struct { - anInstruction - Val Value -} - -// The MapUpdate instruction updates the association of Map[Key] to -// Value. -// -// Pos() returns the ast.KeyValueExpr.Colon or ast.IndexExpr.Lbrack, -// if explicit in the source. -// -// Example printed form: -// t0[t1] = t2 -// -type MapUpdate struct { - anInstruction - Map Value - Key Value - Value Value - pos token.Pos -} - -// A DebugRef instruction maps a source-level expression Expr to the -// SSA value X that represents the value (!IsAddr) or address (IsAddr) -// of that expression. -// -// DebugRef is a pseudo-instruction: it has no dynamic effect. -// -// Pos() returns Expr.Pos(), the start position of the source-level -// expression. This is not the same as the "designated" token as -// documented at Value.Pos(). e.g. CallExpr.Pos() does not return the -// position of the ("designated") Lparen token. -// -// If Expr is an *ast.Ident denoting a var or func, Object() returns -// the object; though this information can be obtained from the type -// checker, including it here greatly facilitates debugging. -// For non-Ident expressions, Object() returns nil. -// -// DebugRefs are generated only for functions built with debugging -// enabled; see Package.SetDebugMode() and the GlobalDebug builder -// mode flag. -// -// DebugRefs are not emitted for ast.Idents referring to constants or -// predeclared identifiers, since they are trivial and numerous. -// Nor are they emitted for ast.ParenExprs. -// -// (By representing these as instructions, rather than out-of-band, -// consistency is maintained during transformation passes by the -// ordinary SSA renaming machinery.) 
-// -// Example printed form: -// ; *ast.CallExpr @ 102:9 is t5 -// ; var x float64 @ 109:72 is x -// ; address of *ast.CompositeLit @ 216:10 is t0 -// -type DebugRef struct { - anInstruction - Expr ast.Expr // the referring expression (never *ast.ParenExpr) - object types.Object // the identity of the source var/func - IsAddr bool // Expr is addressable and X is the address it denotes - X Value // the value or address of Expr -} - -// Embeddable mix-ins and helpers for common parts of other structs. ----------- - -// register is a mix-in embedded by all SSA values that are also -// instructions, i.e. virtual registers, and provides a uniform -// implementation of most of the Value interface: Value.Name() is a -// numbered register (e.g. "t0"); the other methods are field accessors. -// -// Temporary names are automatically assigned to each register on -// completion of building a function in SSA form. -// -// Clients must not assume that the 'id' value (and the Name() derived -// from it) is unique within a function. As always in this API, -// semantics are determined only by identity; names exist only to -// facilitate debugging. -// -type register struct { - anInstruction - num int // "name" of virtual register, e.g. "t0". Not guaranteed unique. - typ types.Type // type of virtual register - pos token.Pos // position of source expression, or NoPos - referrers []Instruction -} - -// anInstruction is a mix-in embedded by all Instructions. -// It provides the implementations of the Block and setBlock methods. -type anInstruction struct { - block *BasicBlock // the basic block of this instruction -} - -// CallCommon is contained by Go, Defer and Call to hold the -// common parts of a function or method call. -// -// Each CallCommon exists in one of two modes, function call and -// interface method invocation, or "call" and "invoke" for short. -// -// 1. "call" mode: when Method is nil (!IsInvoke), a CallCommon -// represents an ordinary function call of the value in Value, -// which may be a *Builtin, a *Function or any other value of kind -// 'func'. -// -// Value may be one of: -// (a) a *Function, indicating a statically dispatched call -// to a package-level function, an anonymous function, or -// a method of a named type. -// (b) a *MakeClosure, indicating an immediately applied -// function literal with free variables. -// (c) a *Builtin, indicating a statically dispatched call -// to a built-in function. -// (d) any other value, indicating a dynamically dispatched -// function call. -// StaticCallee returns the identity of the callee in cases -// (a) and (b), nil otherwise. -// -// Args contains the arguments to the call. If Value is a method, -// Args[0] contains the receiver parameter. -// -// Example printed form: -// t2 = println(t0, t1) -// go t3() -// defer t5(...t6) -// -// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon -// represents a dynamically dispatched call to an interface method. -// In this mode, Value is the interface value and Method is the -// interface's abstract method. Note: an abstract method may be -// shared by multiple interfaces due to embedding; Value.Type() -// provides the specific interface used for this call. -// -// Value is implicitly supplied to the concrete method implementation -// as the receiver parameter; in other words, Args[0] holds not the -// receiver but the first true argument. 
-// -// Example printed form: -// t1 = invoke t0.String() -// go invoke t3.Run(t2) -// defer invoke t4.Handle(...t5) -// -// For all calls to variadic functions (Signature().Variadic()), -// the last element of Args is a slice. -// -type CallCommon struct { - Value Value // receiver (invoke mode) or func value (call mode) - Method *types.Func // abstract method (invoke mode) - Args []Value // actual parameters (in static method call, includes receiver) - pos token.Pos // position of CallExpr.Lparen, iff explicit in source -} - -// IsInvoke returns true if this call has "invoke" (not "call") mode. -func (c *CallCommon) IsInvoke() bool { - return c.Method != nil -} - -func (c *CallCommon) Pos() token.Pos { return c.pos } - -// Signature returns the signature of the called function. -// -// For an "invoke"-mode call, the signature of the interface method is -// returned. -// -// In either "call" or "invoke" mode, if the callee is a method, its -// receiver is represented by sig.Recv, not sig.Params().At(0). -// -func (c *CallCommon) Signature() *types.Signature { - if c.Method != nil { - return c.Method.Type().(*types.Signature) - } - return c.Value.Type().Underlying().(*types.Signature) -} - -// StaticCallee returns the callee if this is a trivially static -// "call"-mode call to a function. -func (c *CallCommon) StaticCallee() *Function { - switch fn := c.Value.(type) { - case *Function: - return fn - case *MakeClosure: - return fn.Fn.(*Function) - } - return nil -} - -// Description returns a description of the mode of this call suitable -// for a user interface, e.g., "static method call". -func (c *CallCommon) Description() string { - switch fn := c.Value.(type) { - case *Builtin: - return "built-in function call" - case *MakeClosure: - return "static function closure call" - case *Function: - if fn.Signature.Recv() != nil { - return "static method call" - } - return "static function call" - } - if c.IsInvoke() { - return "dynamic method call" // ("invoke" mode) - } - return "dynamic function call" -} - -// The CallInstruction interface, implemented by *Go, *Defer and *Call, -// exposes the common parts of function-calling instructions, -// yet provides a way back to the Value defined by *Call alone. 
-// -type CallInstruction interface { - Instruction - Common() *CallCommon // returns the common parts of the call - Value() *Call // returns the result value of the call (*Call) or nil (*Go, *Defer) -} - -func (s *Call) Common() *CallCommon { return &s.Call } -func (s *Defer) Common() *CallCommon { return &s.Call } -func (s *Go) Common() *CallCommon { return &s.Call } - -func (s *Call) Value() *Call { return s } -func (s *Defer) Value() *Call { return nil } -func (s *Go) Value() *Call { return nil } - -func (v *Builtin) Type() types.Type { return v.sig } -func (v *Builtin) Name() string { return v.name } -func (*Builtin) Referrers() *[]Instruction { return nil } -func (v *Builtin) Pos() token.Pos { return token.NoPos } -func (v *Builtin) Object() types.Object { return types.Universe.Lookup(v.name) } -func (v *Builtin) Parent() *Function { return nil } - -func (v *FreeVar) Type() types.Type { return v.typ } -func (v *FreeVar) Name() string { return v.name } -func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers } -func (v *FreeVar) Pos() token.Pos { return v.pos } -func (v *FreeVar) Parent() *Function { return v.parent } - -func (v *Global) Type() types.Type { return v.typ } -func (v *Global) Name() string { return v.name } -func (v *Global) Parent() *Function { return nil } -func (v *Global) Pos() token.Pos { return v.pos } -func (v *Global) Referrers() *[]Instruction { return nil } -func (v *Global) Token() token.Token { return token.VAR } -func (v *Global) Object() types.Object { return v.object } -func (v *Global) String() string { return v.RelString(nil) } -func (v *Global) Package() *Package { return v.Pkg } -func (v *Global) RelString(from *types.Package) string { return relString(v, from) } - -func (v *Function) Name() string { return v.name } -func (v *Function) Type() types.Type { return v.Signature } -func (v *Function) Pos() token.Pos { return v.pos } -func (v *Function) Token() token.Token { return token.FUNC } -func (v *Function) Object() types.Object { return v.object } -func (v *Function) String() string { return v.RelString(nil) } -func (v *Function) Package() *Package { return v.Pkg } -func (v *Function) Parent() *Function { return v.parent } -func (v *Function) Referrers() *[]Instruction { - if v.parent != nil { - return &v.referrers - } - return nil -} - -func (v *Parameter) Type() types.Type { return v.typ } -func (v *Parameter) Name() string { return v.name } -func (v *Parameter) Object() types.Object { return v.object } -func (v *Parameter) Referrers() *[]Instruction { return &v.referrers } -func (v *Parameter) Pos() token.Pos { return v.pos } -func (v *Parameter) Parent() *Function { return v.parent } - -func (v *Alloc) Type() types.Type { return v.typ } -func (v *Alloc) Referrers() *[]Instruction { return &v.referrers } -func (v *Alloc) Pos() token.Pos { return v.pos } - -func (v *register) Type() types.Type { return v.typ } -func (v *register) setType(typ types.Type) { v.typ = typ } -func (v *register) Name() string { return fmt.Sprintf("t%d", v.num) } -func (v *register) setNum(num int) { v.num = num } -func (v *register) Referrers() *[]Instruction { return &v.referrers } -func (v *register) Pos() token.Pos { return v.pos } -func (v *register) setPos(pos token.Pos) { v.pos = pos } - -func (v *anInstruction) Parent() *Function { return v.block.parent } -func (v *anInstruction) Block() *BasicBlock { return v.block } -func (v *anInstruction) setBlock(block *BasicBlock) { v.block = block } -func (v *anInstruction) Referrers() *[]Instruction { return nil 
} - -func (t *Type) Name() string { return t.object.Name() } -func (t *Type) Pos() token.Pos { return t.object.Pos() } -func (t *Type) Type() types.Type { return t.object.Type() } -func (t *Type) Token() token.Token { return token.TYPE } -func (t *Type) Object() types.Object { return t.object } -func (t *Type) String() string { return t.RelString(nil) } -func (t *Type) Package() *Package { return t.pkg } -func (t *Type) RelString(from *types.Package) string { return relString(t, from) } - -func (c *NamedConst) Name() string { return c.object.Name() } -func (c *NamedConst) Pos() token.Pos { return c.object.Pos() } -func (c *NamedConst) String() string { return c.RelString(nil) } -func (c *NamedConst) Type() types.Type { return c.object.Type() } -func (c *NamedConst) Token() token.Token { return token.CONST } -func (c *NamedConst) Object() types.Object { return c.object } -func (c *NamedConst) Package() *Package { return c.pkg } -func (c *NamedConst) RelString(from *types.Package) string { return relString(c, from) } - -// Func returns the package-level function of the specified name, -// or nil if not found. -// -func (p *Package) Func(name string) (f *Function) { - f, _ = p.Members[name].(*Function) - return -} - -// Var returns the package-level variable of the specified name, -// or nil if not found. -// -func (p *Package) Var(name string) (g *Global) { - g, _ = p.Members[name].(*Global) - return -} - -// Const returns the package-level constant of the specified name, -// or nil if not found. -// -func (p *Package) Const(name string) (c *NamedConst) { - c, _ = p.Members[name].(*NamedConst) - return -} - -// Type returns the package-level type of the specified name, -// or nil if not found. -// -func (p *Package) Type(name string) (t *Type) { - t, _ = p.Members[name].(*Type) - return -} - -func (v *Call) Pos() token.Pos { return v.Call.pos } -func (s *Defer) Pos() token.Pos { return s.pos } -func (s *Go) Pos() token.Pos { return s.pos } -func (s *MapUpdate) Pos() token.Pos { return s.pos } -func (s *Panic) Pos() token.Pos { return s.pos } -func (s *Return) Pos() token.Pos { return s.pos } -func (s *Send) Pos() token.Pos { return s.pos } -func (s *Store) Pos() token.Pos { return s.pos } -func (s *BlankStore) Pos() token.Pos { return token.NoPos } -func (s *If) Pos() token.Pos { return token.NoPos } -func (s *Jump) Pos() token.Pos { return token.NoPos } -func (s *RunDefers) Pos() token.Pos { return token.NoPos } -func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() } - -// Operands. 
- -func (v *Alloc) Operands(rands []*Value) []*Value { - return rands -} - -func (v *BinOp) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Y) -} - -func (c *CallCommon) Operands(rands []*Value) []*Value { - rands = append(rands, &c.Value) - for i := range c.Args { - rands = append(rands, &c.Args[i]) - } - return rands -} - -func (s *Go) Operands(rands []*Value) []*Value { - return s.Call.Operands(rands) -} - -func (s *Call) Operands(rands []*Value) []*Value { - return s.Call.Operands(rands) -} - -func (s *Defer) Operands(rands []*Value) []*Value { - return s.Call.Operands(rands) -} - -func (v *ChangeInterface) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *ChangeType) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *Convert) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (s *DebugRef) Operands(rands []*Value) []*Value { - return append(rands, &s.X) -} - -func (v *Extract) Operands(rands []*Value) []*Value { - return append(rands, &v.Tuple) -} - -func (v *Field) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *FieldAddr) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (s *If) Operands(rands []*Value) []*Value { - return append(rands, &s.Cond) -} - -func (v *Index) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Index) -} - -func (v *IndexAddr) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Index) -} - -func (*Jump) Operands(rands []*Value) []*Value { - return rands -} - -func (v *Lookup) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Index) -} - -func (v *MakeChan) Operands(rands []*Value) []*Value { - return append(rands, &v.Size) -} - -func (v *MakeClosure) Operands(rands []*Value) []*Value { - rands = append(rands, &v.Fn) - for i := range v.Bindings { - rands = append(rands, &v.Bindings[i]) - } - return rands -} - -func (v *MakeInterface) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *MakeMap) Operands(rands []*Value) []*Value { - return append(rands, &v.Reserve) -} - -func (v *MakeSlice) Operands(rands []*Value) []*Value { - return append(rands, &v.Len, &v.Cap) -} - -func (v *MapUpdate) Operands(rands []*Value) []*Value { - return append(rands, &v.Map, &v.Key, &v.Value) -} - -func (v *Next) Operands(rands []*Value) []*Value { - return append(rands, &v.Iter) -} - -func (s *Panic) Operands(rands []*Value) []*Value { - return append(rands, &s.X) -} - -func (v *Sigma) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *Phi) Operands(rands []*Value) []*Value { - for i := range v.Edges { - rands = append(rands, &v.Edges[i]) - } - return rands -} - -func (v *Range) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (s *Return) Operands(rands []*Value) []*Value { - for i := range s.Results { - rands = append(rands, &s.Results[i]) - } - return rands -} - -func (*RunDefers) Operands(rands []*Value) []*Value { - return rands -} - -func (v *Select) Operands(rands []*Value) []*Value { - for i := range v.States { - rands = append(rands, &v.States[i].Chan, &v.States[i].Send) - } - return rands -} - -func (s *Send) Operands(rands []*Value) []*Value { - return append(rands, &s.Chan, &s.X) -} - -func (v *Slice) Operands(rands []*Value) []*Value { - return append(rands, &v.X, &v.Low, &v.High, &v.Max) -} - -func (s *Store) Operands(rands []*Value) []*Value { - return append(rands, &s.Addr, 
&s.Val) -} - -func (s *BlankStore) Operands(rands []*Value) []*Value { - return append(rands, &s.Val) -} - -func (v *TypeAssert) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -func (v *UnOp) Operands(rands []*Value) []*Value { - return append(rands, &v.X) -} - -// Non-Instruction Values: -func (v *Builtin) Operands(rands []*Value) []*Value { return rands } -func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } -func (v *Const) Operands(rands []*Value) []*Value { return rands } -func (v *Function) Operands(rands []*Value) []*Value { return rands } -func (v *Global) Operands(rands []*Value) []*Value { return rands } -func (v *Parameter) Operands(rands []*Value) []*Value { return rands } diff --git a/vendor/honnef.co/go/tools/ssa/ssautil/load.go b/vendor/honnef.co/go/tools/ssa/ssautil/load.go deleted file mode 100644 index 2fc108f..0000000 --- a/vendor/honnef.co/go/tools/ssa/ssautil/load.go +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssautil - -// This file defines utility functions for constructing programs in SSA form. - -import ( - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/loader" - "honnef.co/go/tools/ssa" -) - -// CreateProgram returns a new program in SSA form, given a program -// loaded from source. An SSA package is created for each transitively -// error-free package of lprog. -// -// Code for bodies of functions is not built until Build is called -// on the result. -// -// mode controls diagnostics and checking during SSA construction. -// -func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { - prog := ssa.NewProgram(lprog.Fset, mode) - - for _, info := range lprog.AllPackages { - if info.TransitivelyErrorFree { - prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable) - } - } - - return prog -} - -// BuildPackage builds an SSA program with IR for a single package. -// -// It populates pkg by type-checking the specified file ASTs. All -// dependencies are loaded using the importer specified by tc, which -// typically loads compiler export data; SSA code cannot be built for -// those packages. BuildPackage then constructs an ssa.Program with all -// dependency packages created, and builds and returns the SSA package -// corresponding to pkg. -// -// The caller must have set pkg.Path() to the import path. -// -// The operation fails if there were any type-checking or import errors. -// -// See ../ssa/example_test.go for an example. -// -func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ssa.BuilderMode) (*ssa.Package, *types.Info, error) { - if fset == nil { - panic("no token.FileSet") - } - if pkg.Path() == "" { - panic("package has no import path") - } - - info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - } - if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil { - return nil, nil, err - } - - prog := ssa.NewProgram(fset, mode) - - // Create SSA packages for all imports. - // Order is not significant. 
- created := make(map[*types.Package]bool) - var createAll func(pkgs []*types.Package) - createAll = func(pkgs []*types.Package) { - for _, p := range pkgs { - if !created[p] { - created[p] = true - prog.CreatePackage(p, nil, nil, true) - createAll(p.Imports()) - } - } - } - createAll(pkg.Imports()) - - // Create and build the primary package. - ssapkg := prog.CreatePackage(pkg, files, info, false) - ssapkg.Build() - return ssapkg, info, nil -} diff --git a/vendor/honnef.co/go/tools/ssa/ssautil/switch.go b/vendor/honnef.co/go/tools/ssa/ssautil/switch.go deleted file mode 100644 index ef698b3..0000000 --- a/vendor/honnef.co/go/tools/ssa/ssautil/switch.go +++ /dev/null @@ -1,236 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssautil - -// This file implements discovery of switch and type-switch constructs -// from low-level control flow. -// -// Many techniques exist for compiling a high-level switch with -// constant cases to efficient machine code. The optimal choice will -// depend on the data type, the specific case values, the code in the -// body of each case, and the hardware. -// Some examples: -// - a lookup table (for a switch that maps constants to constants) -// - a computed goto -// - a binary tree -// - a perfect hash -// - a two-level switch (to partition constant strings by their first byte). - -import ( - "bytes" - "fmt" - "go/token" - "go/types" - - "honnef.co/go/tools/ssa" -) - -// A ConstCase represents a single constant comparison. -// It is part of a Switch. -type ConstCase struct { - Block *ssa.BasicBlock // block performing the comparison - Body *ssa.BasicBlock // body of the case - Value *ssa.Const // case comparand -} - -// A TypeCase represents a single type assertion. -// It is part of a Switch. -type TypeCase struct { - Block *ssa.BasicBlock // block performing the type assert - Body *ssa.BasicBlock // body of the case - Type types.Type // case type - Binding ssa.Value // value bound by this case -} - -// A Switch is a logical high-level control flow operation -// (a multiway branch) discovered by analysis of a CFG containing -// only if/else chains. It is not part of the ssa.Instruction set. -// -// One of ConstCases and TypeCases has length >= 2; -// the other is nil. -// -// In a value switch, the list of cases may contain duplicate constants. -// A type switch may contain duplicate types, or types assignable -// to an interface type also in the list. -// TODO(adonovan): eliminate such duplicates. -// -type Switch struct { - Start *ssa.BasicBlock // block containing start of if/else chain - X ssa.Value // the switch operand - ConstCases []ConstCase // ordered list of constant comparisons - TypeCases []TypeCase // ordered list of type assertions - Default *ssa.BasicBlock // successor if all comparisons fail -} - -func (sw *Switch) String() string { - // We represent each block by the String() of its - // first Instruction, e.g. "print(42:int)". 
- var buf bytes.Buffer - if sw.ConstCases != nil { - fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name()) - for _, c := range sw.ConstCases { - fmt.Fprintf(&buf, "case %s: %s\n", c.Value, c.Body.Instrs[0]) - } - } else { - fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name()) - for _, c := range sw.TypeCases { - fmt.Fprintf(&buf, "case %s %s: %s\n", - c.Binding.Name(), c.Type, c.Body.Instrs[0]) - } - } - if sw.Default != nil { - fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0]) - } - fmt.Fprintf(&buf, "}") - return buf.String() -} - -// Switches examines the control-flow graph of fn and returns the -// set of inferred value and type switches. A value switch tests an -// ssa.Value for equality against two or more compile-time constant -// values. Switches involving link-time constants (addresses) are -// ignored. A type switch type-asserts an ssa.Value against two or -// more types. -// -// The switches are returned in dominance order. -// -// The resulting switches do not necessarily correspond to uses of the -// 'switch' keyword in the source: for example, a single source-level -// switch statement with non-constant cases may result in zero, one or -// many Switches, one per plural sequence of constant cases. -// Switches may even be inferred from if/else- or goto-based control flow. -// (In general, the control flow constructs of the source program -// cannot be faithfully reproduced from the SSA representation.) -// -func Switches(fn *ssa.Function) []Switch { - // Traverse the CFG in dominance order, so we don't - // enter an if/else-chain in the middle. - var switches []Switch - seen := make(map[*ssa.BasicBlock]bool) // TODO(adonovan): opt: use ssa.blockSet - for _, b := range fn.DomPreorder() { - if x, k := isComparisonBlock(b); x != nil { - // Block b starts a switch. - sw := Switch{Start: b, X: x} - valueSwitch(&sw, k, seen) - if len(sw.ConstCases) > 1 { - switches = append(switches, sw) - } - } - - if y, x, T := isTypeAssertBlock(b); y != nil { - // Block b starts a type switch. - sw := Switch{Start: b, X: x} - typeSwitch(&sw, y, T, seen) - if len(sw.TypeCases) > 1 { - switches = append(switches, sw) - } - } - } - return switches -} - -func valueSwitch(sw *Switch, k *ssa.Const, seen map[*ssa.BasicBlock]bool) { - b := sw.Start - x := sw.X - for x == sw.X { - if seen[b] { - break - } - seen[b] = true - - sw.ConstCases = append(sw.ConstCases, ConstCase{ - Block: b, - Body: b.Succs[0], - Value: k, - }) - b = b.Succs[1] - if len(b.Instrs) > 2 { - // Block b contains not just 'if x == k', - // so it may have side effects that - // make it unsafe to elide. - break - } - if len(b.Preds) != 1 { - // Block b has multiple predecessors, - // so it cannot be treated as a case. - break - } - x, k = isComparisonBlock(b) - } - sw.Default = b -} - -func typeSwitch(sw *Switch, y ssa.Value, T types.Type, seen map[*ssa.BasicBlock]bool) { - b := sw.Start - x := sw.X - for x == sw.X { - if seen[b] { - break - } - seen[b] = true - - sw.TypeCases = append(sw.TypeCases, TypeCase{ - Block: b, - Body: b.Succs[0], - Type: T, - Binding: y, - }) - b = b.Succs[1] - if len(b.Instrs) > 4 { - // Block b contains not just - // {TypeAssert; Extract #0; Extract #1; If} - // so it may have side effects that - // make it unsafe to elide. - break - } - if len(b.Preds) != 1 { - // Block b has multiple predecessors, - // so it cannot be treated as a case. 
- break - } - y, x, T = isTypeAssertBlock(b) - } - sw.Default = b -} - -// isComparisonBlock returns the operands (v, k) if a block ends with -// a comparison v==k, where k is a compile-time constant. -// -func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) { - if n := len(b.Instrs); n >= 2 { - if i, ok := b.Instrs[n-1].(*ssa.If); ok { - if binop, ok := i.Cond.(*ssa.BinOp); ok && binop.Block() == b && binop.Op == token.EQL { - if k, ok := binop.Y.(*ssa.Const); ok { - return binop.X, k - } - if k, ok := binop.X.(*ssa.Const); ok { - return binop.Y, k - } - } - } - } - return -} - -// isTypeAssertBlock returns the operands (y, x, T) if a block ends with -// a type assertion "if y, ok := x.(T); ok {". -// -func isTypeAssertBlock(b *ssa.BasicBlock) (y, x ssa.Value, T types.Type) { - if n := len(b.Instrs); n >= 4 { - if i, ok := b.Instrs[n-1].(*ssa.If); ok { - if ext1, ok := i.Cond.(*ssa.Extract); ok && ext1.Block() == b && ext1.Index == 1 { - if ta, ok := ext1.Tuple.(*ssa.TypeAssert); ok && ta.Block() == b { - // hack: relies upon instruction ordering. - if ext0, ok := b.Instrs[n-3].(*ssa.Extract); ok { - return ext0, ta.X, ta.AssertedType - } - } - } - } - } - return -} diff --git a/vendor/honnef.co/go/tools/ssa/ssautil/visit.go b/vendor/honnef.co/go/tools/ssa/ssautil/visit.go deleted file mode 100644 index 5c14845..0000000 --- a/vendor/honnef.co/go/tools/ssa/ssautil/visit.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ssautil // import "honnef.co/go/tools/ssa/ssautil" - -import "honnef.co/go/tools/ssa" - -// This file defines utilities for visiting the SSA representation of -// a Program. -// -// TODO(adonovan): test coverage. - -// AllFunctions finds and returns the set of functions potentially -// needed by program prog, as determined by a simple linker-style -// reachability algorithm starting from the members and method-sets of -// each package. The result may include anonymous functions and -// synthetic wrappers. -// -// Precondition: all packages are built. -// -func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool { - visit := visitor{ - prog: prog, - seen: make(map[*ssa.Function]bool), - } - visit.program() - return visit.seen -} - -type visitor struct { - prog *ssa.Program - seen map[*ssa.Function]bool -} - -func (visit *visitor) program() { - for _, pkg := range visit.prog.AllPackages() { - for _, mem := range pkg.Members { - if fn, ok := mem.(*ssa.Function); ok { - visit.function(fn) - } - } - } - for _, T := range visit.prog.RuntimeTypes() { - mset := visit.prog.MethodSets.MethodSet(T) - for i, n := 0, mset.Len(); i < n; i++ { - visit.function(visit.prog.MethodValue(mset.At(i))) - } - } -} - -func (visit *visitor) function(fn *ssa.Function) { - if !visit.seen[fn] { - visit.seen[fn] = true - var buf [10]*ssa.Value // avoid alloc in common case - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - for _, op := range instr.Operands(buf[:0]) { - if fn, ok := (*op).(*ssa.Function); ok { - visit.function(fn) - } - } - } - } - } -} - -// MainPackages returns the subset of the specified packages -// named "main" that define a main function. -// The result may include synthetic "testmain" packages. 
-func MainPackages(pkgs []*ssa.Package) []*ssa.Package { - var mains []*ssa.Package - for _, pkg := range pkgs { - if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil { - mains = append(mains, pkg) - } - } - return mains -} diff --git a/vendor/honnef.co/go/tools/ssa/testmain.go b/vendor/honnef.co/go/tools/ssa/testmain.go deleted file mode 100644 index 2b89724..0000000 --- a/vendor/honnef.co/go/tools/ssa/testmain.go +++ /dev/null @@ -1,266 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// CreateTestMainPackage synthesizes a main package that runs all the -// tests of the supplied packages. -// It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing. -// -// TODO(adonovan): this file no longer needs to live in the ssa package. -// Move it to ssautil. - -import ( - "bytes" - "fmt" - "go/ast" - "go/parser" - "go/types" - "log" - "os" - "strings" - "text/template" -) - -// FindTests returns the Test, Benchmark, and Example functions -// (as defined by "go test") defined in the specified package, -// and its TestMain function, if any. -func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) { - prog := pkg.Prog - - // The first two of these may be nil: if the program doesn't import "testing", - // it can't contain any tests, but it may yet contain Examples. - var testSig *types.Signature // func(*testing.T) - var benchmarkSig *types.Signature // func(*testing.B) - var exampleSig = types.NewSignature(nil, nil, nil, false) // func() - - // Obtain the types from the parameters of testing.MainStart. - if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil { - mainStart := testingPkg.Func("MainStart") - params := mainStart.Signature.Params() - testSig = funcField(params.At(1).Type()) - benchmarkSig = funcField(params.At(2).Type()) - - // Does the package define this function? - // func TestMain(*testing.M) - if f := pkg.Func("TestMain"); f != nil { - sig := f.Type().(*types.Signature) - starM := mainStart.Signature.Results().At(0).Type() // *testing.M - if sig.Results().Len() == 0 && - sig.Params().Len() == 1 && - types.Identical(sig.Params().At(0).Type(), starM) { - main = f - } - } - } - - // TODO(adonovan): use a stable order, e.g. lexical. - for _, mem := range pkg.Members { - if f, ok := mem.(*Function); ok && - ast.IsExported(f.Name()) && - strings.HasSuffix(prog.Fset.Position(f.Pos()).Filename, "_test.go") { - - switch { - case testSig != nil && isTestSig(f, "Test", testSig): - tests = append(tests, f) - case benchmarkSig != nil && isTestSig(f, "Benchmark", benchmarkSig): - benchmarks = append(benchmarks, f) - case isTestSig(f, "Example", exampleSig): - examples = append(examples, f) - default: - continue - } - } - } - return -} - -// Like isTest, but checks the signature too. -func isTestSig(f *Function, prefix string, sig *types.Signature) bool { - return isTest(f.Name(), prefix) && types.Identical(f.Signature, sig) -} - -// Given the type of one of the three slice parameters of testing.Main, -// returns the function type. -func funcField(slice types.Type) *types.Signature { - return slice.(*types.Slice).Elem().Underlying().(*types.Struct).Field(1).Type().(*types.Signature) -} - -// isTest tells whether name looks like a test (or benchmark, according to prefix). -// It is a Test (say) if there is a character after Test that is not a lower-case letter. 
-// We don't want TesticularCancer. -// Plundered from $GOROOT/src/cmd/go/test.go -func isTest(name, prefix string) bool { - if !strings.HasPrefix(name, prefix) { - return false - } - if len(name) == len(prefix) { // "Test" is ok - return true - } - return ast.IsExported(name[len(prefix):]) -} - -// CreateTestMainPackage creates and returns a synthetic "testmain" -// package for the specified package if it defines tests, benchmarks or -// executable examples, or nil otherwise. The new package is named -// "main" and provides a function named "main" that runs the tests, -// similar to the one that would be created by the 'go test' tool. -// -// Subsequent calls to prog.AllPackages include the new package. -// The package pkg must belong to the program prog. -func (prog *Program) CreateTestMainPackage(pkg *Package) *Package { - if pkg.Prog != prog { - log.Fatal("Package does not belong to Program") - } - - // Template data - var data struct { - Pkg *Package - Tests, Benchmarks, Examples []*Function - Main *Function - Go18 bool - } - data.Pkg = pkg - - // Enumerate tests. - data.Tests, data.Benchmarks, data.Examples, data.Main = FindTests(pkg) - if data.Main == nil && - data.Tests == nil && data.Benchmarks == nil && data.Examples == nil { - return nil - } - - // Synthesize source for testmain package. - path := pkg.Pkg.Path() + "$testmain" - tmpl := testmainTmpl - if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil { - // In Go 1.8, testing.MainStart's first argument is an interface, not a func. - data.Go18 = types.IsInterface(testingPkg.Func("MainStart").Signature.Params().At(0).Type()) - } else { - // The program does not import "testing", but FindTests - // returned non-nil, which must mean there were Examples - // but no Test, Benchmark, or TestMain functions. - - // We'll simply call them from testmain.main; this will - // ensure they don't panic, but will not check any - // "Output:" comments. - // (We should not execute an Example that has no - // "Output:" comment, but it's impossible to tell here.) - tmpl = examplesOnlyTmpl - } - var buf bytes.Buffer - if err := tmpl.Execute(&buf, data); err != nil { - log.Fatalf("internal error expanding template for %s: %v", path, err) - } - if false { // debugging - fmt.Fprintln(os.Stderr, buf.String()) - } - - // Parse and type-check the testmain package. - f, err := parser.ParseFile(prog.Fset, path+".go", &buf, parser.Mode(0)) - if err != nil { - log.Fatalf("internal error parsing %s: %v", path, err) - } - conf := types.Config{ - DisableUnusedImportCheck: true, - Importer: importer{pkg}, - } - files := []*ast.File{f} - info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - } - testmainPkg, err := conf.Check(path, prog.Fset, files, info) - if err != nil { - log.Fatalf("internal error type-checking %s: %v", path, err) - } - - // Create and build SSA code. - testmain := prog.CreatePackage(testmainPkg, files, info, false) - testmain.SetDebugMode(false) - testmain.Build() - testmain.Func("main").Synthetic = "test main function" - testmain.Func("init").Synthetic = "package initializer" - return testmain -} - -// An implementation of types.Importer for an already loaded SSA program. 
-type importer struct { - pkg *Package // package under test; may be non-importable -} - -func (imp importer) Import(path string) (*types.Package, error) { - if p := imp.pkg.Prog.ImportedPackage(path); p != nil { - return p.Pkg, nil - } - if path == imp.pkg.Pkg.Path() { - return imp.pkg.Pkg, nil - } - return nil, fmt.Errorf("not found") // can't happen -} - -var testmainTmpl = template.Must(template.New("testmain").Parse(` -package main - -import "io" -import "os" -import "testing" -import p {{printf "%q" .Pkg.Pkg.Path}} - -{{if .Go18}} -type deps struct{} - -func (deps) MatchString(pat, str string) (bool, error) { return true, nil } -func (deps) StartCPUProfile(io.Writer) error { return nil } -func (deps) StopCPUProfile() {} -func (deps) WriteHeapProfile(io.Writer) error { return nil } -func (deps) WriteProfileTo(string, io.Writer, int) error { return nil } - -var match deps -{{else}} -func match(_, _ string) (bool, error) { return true, nil } -{{end}} - -func main() { - tests := []testing.InternalTest{ -{{range .Tests}} - { {{printf "%q" .Name}}, p.{{.Name}} }, -{{end}} - } - benchmarks := []testing.InternalBenchmark{ -{{range .Benchmarks}} - { {{printf "%q" .Name}}, p.{{.Name}} }, -{{end}} - } - examples := []testing.InternalExample{ -{{range .Examples}} - {Name: {{printf "%q" .Name}}, F: p.{{.Name}}}, -{{end}} - } - m := testing.MainStart(match, tests, benchmarks, examples) -{{with .Main}} - p.{{.Name}}(m) -{{else}} - os.Exit(m.Run()) -{{end}} -} - -`)) - -var examplesOnlyTmpl = template.Must(template.New("examples").Parse(` -package main - -import p {{printf "%q" .Pkg.Pkg.Path}} - -func main() { -{{range .Examples}} - p.{{.Name}}() -{{end}} -} -`)) diff --git a/vendor/honnef.co/go/tools/ssa/util.go b/vendor/honnef.co/go/tools/ssa/util.go deleted file mode 100644 index 317a013..0000000 --- a/vendor/honnef.co/go/tools/ssa/util.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// This file defines a number of miscellaneous utility functions. - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "io" - "os" - - "golang.org/x/tools/go/ast/astutil" -) - -//// AST utilities - -func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } - -// isBlankIdent returns true iff e is an Ident with name "_". -// They have no associated types.Object, and thus no type. -// -func isBlankIdent(e ast.Expr) bool { - id, ok := e.(*ast.Ident) - return ok && id.Name == "_" -} - -//// Type utilities. Some of these belong in go/types. - -// isPointer returns true for types whose underlying type is a pointer. -func isPointer(typ types.Type) bool { - _, ok := typ.Underlying().(*types.Pointer) - return ok -} - -func isInterface(T types.Type) bool { return types.IsInterface(T) } - -// deref returns a pointer's element type; otherwise it returns typ. -func deref(typ types.Type) types.Type { - if p, ok := typ.Underlying().(*types.Pointer); ok { - return p.Elem() - } - return typ -} - -// recvType returns the receiver type of method obj. -func recvType(obj *types.Func) types.Type { - return obj.Type().(*types.Signature).Recv().Type() -} - -// DefaultType returns the default "typed" type for an "untyped" type; -// it returns the incoming type for all other types. The default type -// for untyped nil is untyped nil. -// -// Exported to ssa/interp. -// -// TODO(gri): this is a copy of go/types.defaultType; export that function. 
-// -func DefaultType(typ types.Type) types.Type { - if t, ok := typ.(*types.Basic); ok { - k := t.Kind() - switch k { - case types.UntypedBool: - k = types.Bool - case types.UntypedInt: - k = types.Int - case types.UntypedRune: - k = types.Rune - case types.UntypedFloat: - k = types.Float64 - case types.UntypedComplex: - k = types.Complex128 - case types.UntypedString: - k = types.String - } - typ = types.Typ[k] - } - return typ -} - -// logStack prints the formatted "start" message to stderr and -// returns a closure that prints the corresponding "end" message. -// Call using 'defer logStack(...)()' to show builder stack on panic. -// Don't forget trailing parens! -// -func logStack(format string, args ...interface{}) func() { - msg := fmt.Sprintf(format, args...) - io.WriteString(os.Stderr, msg) - io.WriteString(os.Stderr, "\n") - return func() { - io.WriteString(os.Stderr, msg) - io.WriteString(os.Stderr, " end\n") - } -} - -// newVar creates a 'var' for use in a types.Tuple. -func newVar(name string, typ types.Type) *types.Var { - return types.NewParam(token.NoPos, nil, name, typ) -} - -// anonVar creates an anonymous 'var' for use in a types.Tuple. -func anonVar(typ types.Type) *types.Var { - return newVar("", typ) -} - -var lenResults = types.NewTuple(anonVar(tInt)) - -// makeLen returns the len builtin specialized to type func(T)int. -func makeLen(T types.Type) *Builtin { - lenParams := types.NewTuple(anonVar(T)) - return &Builtin{ - name: "len", - sig: types.NewSignature(nil, lenParams, lenResults, false), - } -} diff --git a/vendor/honnef.co/go/tools/ssa/wrappers.go b/vendor/honnef.co/go/tools/ssa/wrappers.go deleted file mode 100644 index 6ca01ab..0000000 --- a/vendor/honnef.co/go/tools/ssa/wrappers.go +++ /dev/null @@ -1,296 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.5 - -package ssa - -// This file defines synthesis of Functions that delegate to declared -// methods; they come in three kinds: -// -// (1) wrappers: methods that wrap declared methods, performing -// implicit pointer indirections and embedded field selections. -// -// (2) thunks: funcs that wrap declared methods. Like wrappers, -// thunks perform indirections and field selections. The thunk's -// first parameter is used as the receiver for the method call. -// -// (3) bounds: funcs that wrap declared methods. The bound's sole -// free variable, supplied by a closure, is used as the receiver -// for the method call. No indirections or field selections are -// performed since they can be done before the call. - -import ( - "fmt" - - "go/types" -) - -// -- wrappers ----------------------------------------------------------- - -// makeWrapper returns a synthetic method that delegates to the -// declared method denoted by meth.Obj(), first performing any -// necessary pointer indirections or field selections implied by meth. -// -// The resulting method's receiver type is meth.Recv(). -// -// This function is versatile but quite subtle! Consider the -// following axes of variation when making changes: -// - optional receiver indirection -// - optional implicit field selections -// - meth.Obj() may denote a concrete or an interface method -// - the result may be a thunk or a wrapper. 
-// -// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) -// -func makeWrapper(prog *Program, sel *types.Selection) *Function { - obj := sel.Obj().(*types.Func) // the declared function - sig := sel.Type().(*types.Signature) // type of this wrapper - - var recv *types.Var // wrapper's receiver or thunk's params[0] - name := obj.Name() - var description string - var start int // first regular param - if sel.Kind() == types.MethodExpr { - name += "$thunk" - description = "thunk" - recv = sig.Params().At(0) - start = 1 - } else { - description = "wrapper" - recv = sig.Recv() - } - - description = fmt.Sprintf("%s for %s", description, sel.Obj()) - if prog.mode&LogSource != 0 { - defer logStack("make %s to (%s)", description, recv.Type())() - } - fn := &Function{ - name: name, - method: sel, - object: obj, - Signature: sig, - Synthetic: description, - Prog: prog, - pos: obj.Pos(), - } - fn.startBody() - fn.addSpilledParam(recv) - createParams(fn, start) - - indices := sel.Index() - - var v Value = fn.Locals[0] // spilled receiver - if isPointer(sel.Recv()) { - v = emitLoad(fn, v) - - // For simple indirection wrappers, perform an informative nil-check: - // "value method (T).f called using nil *T pointer" - if len(indices) == 1 && !isPointer(recvType(obj)) { - var c Call - c.Call.Value = &Builtin{ - name: "ssa:wrapnilchk", - sig: types.NewSignature(nil, - types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)), - types.NewTuple(anonVar(sel.Recv())), false), - } - c.Call.Args = []Value{ - v, - stringConst(deref(sel.Recv()).String()), - stringConst(sel.Obj().Name()), - } - c.setType(v.Type()) - v = fn.emit(&c) - } - } - - // Invariant: v is a pointer, either - // value of *A receiver param, or - // address of A spilled receiver. - - // We use pointer arithmetic (FieldAddr possibly followed by - // Load) in preference to value extraction (Field possibly - // preceded by Load). - - v = emitImplicitSelections(fn, v, indices[:len(indices)-1]) - - // Invariant: v is a pointer, either - // value of implicit *C field, or - // address of implicit C field. - - var c Call - if r := recvType(obj); !isInterface(r) { // concrete method - if !isPointer(r) { - v = emitLoad(fn, v) - } - c.Call.Value = prog.declaredFunc(obj) - c.Call.Args = append(c.Call.Args, v) - } else { - c.Call.Method = obj - c.Call.Value = emitLoad(fn, v) - } - for _, arg := range fn.Params[1:] { - c.Call.Args = append(c.Call.Args, arg) - } - emitTailCall(fn, &c) - fn.finishBody() - return fn -} - -// createParams creates parameters for wrapper method fn based on its -// Signature.Params, which do not include the receiver. -// start is the index of the first regular parameter to use. -// -func createParams(fn *Function, start int) { - var last *Parameter - tparams := fn.Signature.Params() - for i, n := start, tparams.Len(); i < n; i++ { - last = fn.addParamObj(tparams.At(i)) - } - if fn.Signature.Variadic() { - last.typ = types.NewSlice(last.typ) - } -} - -// -- bounds ----------------------------------------------------------- - -// makeBound returns a bound method wrapper (or "bound"), a synthetic -// function that delegates to a concrete or interface method denoted -// by obj. The resulting function has no receiver, but has one free -// variable which will be used as the method's receiver in the -// tail-call. -// -// Use MakeClosure with such a wrapper to construct a bound method -// closure. 
e.g.: -// -// type T int or: type T interface { meth() } -// func (t T) meth() -// var t T -// f := t.meth -// f() // calls t.meth() -// -// f is a closure of a synthetic wrapper defined as if by: -// -// f := func() { return t.meth() } -// -// Unlike makeWrapper, makeBound need perform no indirection or field -// selections because that can be done before the closure is -// constructed. -// -// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) -// -func makeBound(prog *Program, obj *types.Func) *Function { - prog.methodsMu.Lock() - defer prog.methodsMu.Unlock() - fn, ok := prog.bounds[obj] - if !ok { - description := fmt.Sprintf("bound method wrapper for %s", obj) - if prog.mode&LogSource != 0 { - defer logStack("%s", description)() - } - fn = &Function{ - name: obj.Name() + "$bound", - object: obj, - Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver - Synthetic: description, - Prog: prog, - pos: obj.Pos(), - } - - fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn} - fn.FreeVars = []*FreeVar{fv} - fn.startBody() - createParams(fn, 0) - var c Call - - if !isInterface(recvType(obj)) { // concrete - c.Call.Value = prog.declaredFunc(obj) - c.Call.Args = []Value{fv} - } else { - c.Call.Value = fv - c.Call.Method = obj - } - for _, arg := range fn.Params { - c.Call.Args = append(c.Call.Args, arg) - } - emitTailCall(fn, &c) - fn.finishBody() - - prog.bounds[obj] = fn - } - return fn -} - -// -- thunks ----------------------------------------------------------- - -// makeThunk returns a thunk, a synthetic function that delegates to a -// concrete or interface method denoted by sel.Obj(). The resulting -// function has no receiver, but has an additional (first) regular -// parameter. -// -// Precondition: sel.Kind() == types.MethodExpr. -// -// type T int or: type T interface { meth() } -// func (t T) meth() -// f := T.meth -// var t T -// f(t) // calls t.meth() -// -// f is a synthetic wrapper defined as if by: -// -// f := func(t T) { return t.meth() } -// -// TODO(adonovan): opt: currently the stub is created even when used -// directly in a function call: C.f(i, 0). This is less efficient -// than inlining the stub. -// -// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) -// -func makeThunk(prog *Program, sel *types.Selection) *Function { - if sel.Kind() != types.MethodExpr { - panic(sel) - } - - key := selectionKey{ - kind: sel.Kind(), - recv: sel.Recv(), - obj: sel.Obj(), - index: fmt.Sprint(sel.Index()), - indirect: sel.Indirect(), - } - - prog.methodsMu.Lock() - defer prog.methodsMu.Unlock() - - // Canonicalize key.recv to avoid constructing duplicate thunks. - canonRecv, ok := prog.canon.At(key.recv).(types.Type) - if !ok { - canonRecv = key.recv - prog.canon.Set(key.recv, canonRecv) - } - key.recv = canonRecv - - fn, ok := prog.thunks[key] - if !ok { - fn = makeWrapper(prog, sel) - if fn.Signature.Recv() != nil { - panic(fn) // unexpected receiver - } - prog.thunks[key] = fn - } - return fn -} - -func changeRecv(s *types.Signature, recv *types.Var) *types.Signature { - return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic()) -} - -// selectionKey is like types.Selection but a usable map key. 
-type selectionKey struct { - kind types.SelectionKind - recv types.Type // canonicalized via Program.canon - obj types.Object - index string - indirect bool -} diff --git a/vendor/honnef.co/go/tools/ssa/write.go b/vendor/honnef.co/go/tools/ssa/write.go deleted file mode 100644 index 89761a1..0000000 --- a/vendor/honnef.co/go/tools/ssa/write.go +++ /dev/null @@ -1,5 +0,0 @@ -package ssa - -func NewJump(parent *BasicBlock) *Jump { - return &Jump{anInstruction{parent}} -} diff --git a/vendor/honnef.co/go/tools/staticcheck/buildtag.go b/vendor/honnef.co/go/tools/staticcheck/buildtag.go deleted file mode 100644 index 27c31c0..0000000 --- a/vendor/honnef.co/go/tools/staticcheck/buildtag.go +++ /dev/null @@ -1,21 +0,0 @@ -package staticcheck - -import ( - "go/ast" - "strings" - - "honnef.co/go/tools/lint" -) - -func buildTags(f *ast.File) [][]string { - var out [][]string - for _, line := range strings.Split(lint.Preamble(f), "\n") { - if !strings.HasPrefix(line, "+build ") { - continue - } - line = strings.TrimSpace(strings.TrimPrefix(line, "+build ")) - fields := strings.Fields(line) - out = append(out, fields) - } - return out -} diff --git a/vendor/honnef.co/go/tools/staticcheck/lint.go b/vendor/honnef.co/go/tools/staticcheck/lint.go deleted file mode 100644 index fdc395e..0000000 --- a/vendor/honnef.co/go/tools/staticcheck/lint.go +++ /dev/null @@ -1,2738 +0,0 @@ -// Package staticcheck contains a linter for Go source code. -package staticcheck // import "honnef.co/go/tools/staticcheck" - -import ( - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - htmltemplate "html/template" - "net/http" - "regexp" - "sort" - "strconv" - "strings" - "sync" - texttemplate "text/template" - - "honnef.co/go/tools/deprecated" - "honnef.co/go/tools/functions" - "honnef.co/go/tools/internal/sharedcheck" - "honnef.co/go/tools/lint" - "honnef.co/go/tools/ssa" - "honnef.co/go/tools/staticcheck/vrp" - - "golang.org/x/tools/go/ast/astutil" -) - -func validRegexp(call *Call) { - arg := call.Args[0] - err := ValidateRegexp(arg.Value) - if err != nil { - arg.Invalid(err.Error()) - } -} - -type runeSlice []rune - -func (rs runeSlice) Len() int { return len(rs) } -func (rs runeSlice) Less(i int, j int) bool { return rs[i] < rs[j] } -func (rs runeSlice) Swap(i int, j int) { rs[i], rs[j] = rs[j], rs[i] } - -func utf8Cutset(call *Call) { - arg := call.Args[1] - if InvalidUTF8(arg.Value) { - arg.Invalid(MsgInvalidUTF8) - } -} - -func uniqueCutset(call *Call) { - arg := call.Args[1] - if !UniqueStringCutset(arg.Value) { - arg.Invalid(MsgNonUniqueCutset) - } -} - -func unmarshalPointer(name string, arg int) CallCheck { - return func(call *Call) { - if !Pointer(call.Args[arg].Value) { - call.Args[arg].Invalid(fmt.Sprintf("%s expects to unmarshal into a pointer, but the provided value is not a pointer", name)) - } - } -} - -func pointlessIntMath(call *Call) { - if ConvertedFromInt(call.Args[0].Value) { - call.Invalid(fmt.Sprintf("calling %s on a converted integer is pointless", lint.CallName(call.Instr.Common()))) - } -} - -func checkValidHostPort(arg int) CallCheck { - return func(call *Call) { - if !ValidHostPort(call.Args[arg].Value) { - call.Args[arg].Invalid(MsgInvalidHostPort) - } - } -} - -var ( - checkRegexpRules = map[string]CallCheck{ - "regexp.MustCompile": validRegexp, - "regexp.Compile": validRegexp, - } - - checkTimeParseRules = map[string]CallCheck{ - "time.Parse": func(call *Call) { - arg := call.Args[0] - err := ValidateTimeLayout(arg.Value) - if err != nil { - arg.Invalid(err.Error()) - } - }, - } - 
- checkEncodingBinaryRules = map[string]CallCheck{ - "encoding/binary.Write": func(call *Call) { - arg := call.Args[2] - if !CanBinaryMarshal(call.Job, arg.Value) { - arg.Invalid(fmt.Sprintf("value of type %s cannot be used with binary.Write", arg.Value.Value.Type())) - } - }, - } - - checkURLsRules = map[string]CallCheck{ - "net/url.Parse": func(call *Call) { - arg := call.Args[0] - err := ValidateURL(arg.Value) - if err != nil { - arg.Invalid(err.Error()) - } - }, - } - - checkSyncPoolValueRules = map[string]CallCheck{ - "(*sync.Pool).Put": func(call *Call) { - arg := call.Args[0] - typ := arg.Value.Value.Type() - if !lint.IsPointerLike(typ) { - arg.Invalid("argument should be pointer-like to avoid allocations") - } - }, - } - - checkRegexpFindAllRules = map[string]CallCheck{ - "(*regexp.Regexp).FindAll": RepeatZeroTimes("a FindAll method", 1), - "(*regexp.Regexp).FindAllIndex": RepeatZeroTimes("a FindAll method", 1), - "(*regexp.Regexp).FindAllString": RepeatZeroTimes("a FindAll method", 1), - "(*regexp.Regexp).FindAllStringIndex": RepeatZeroTimes("a FindAll method", 1), - "(*regexp.Regexp).FindAllStringSubmatch": RepeatZeroTimes("a FindAll method", 1), - "(*regexp.Regexp).FindAllStringSubmatchIndex": RepeatZeroTimes("a FindAll method", 1), - "(*regexp.Regexp).FindAllSubmatch": RepeatZeroTimes("a FindAll method", 1), - "(*regexp.Regexp).FindAllSubmatchIndex": RepeatZeroTimes("a FindAll method", 1), - } - - checkUTF8CutsetRules = map[string]CallCheck{ - "strings.IndexAny": utf8Cutset, - "strings.LastIndexAny": utf8Cutset, - "strings.ContainsAny": utf8Cutset, - "strings.Trim": utf8Cutset, - "strings.TrimLeft": utf8Cutset, - "strings.TrimRight": utf8Cutset, - } - - checkUniqueCutsetRules = map[string]CallCheck{ - "strings.Trim": uniqueCutset, - "strings.TrimLeft": uniqueCutset, - "strings.TrimRight": uniqueCutset, - } - - checkUnmarshalPointerRules = map[string]CallCheck{ - "encoding/xml.Unmarshal": unmarshalPointer("xml.Unmarshal", 1), - "(*encoding/xml.Decoder).Decode": unmarshalPointer("Decode", 0), - "encoding/json.Unmarshal": unmarshalPointer("json.Unmarshal", 1), - "(*encoding/json.Decoder).Decode": unmarshalPointer("Decode", 0), - } - - checkUnbufferedSignalChanRules = map[string]CallCheck{ - "os/signal.Notify": func(call *Call) { - arg := call.Args[0] - if UnbufferedChannel(arg.Value) { - arg.Invalid("the channel used with signal.Notify should be buffered") - } - }, - } - - checkMathIntRules = map[string]CallCheck{ - "math.Ceil": pointlessIntMath, - "math.Floor": pointlessIntMath, - "math.IsNaN": pointlessIntMath, - "math.Trunc": pointlessIntMath, - "math.IsInf": pointlessIntMath, - } - - checkStringsReplaceZeroRules = map[string]CallCheck{ - "strings.Replace": RepeatZeroTimes("strings.Replace", 3), - "bytes.Replace": RepeatZeroTimes("bytes.Replace", 3), - } - - checkListenAddressRules = map[string]CallCheck{ - "net/http.ListenAndServe": checkValidHostPort(0), - "net/http.ListenAndServeTLS": checkValidHostPort(0), - } - - checkBytesEqualIPRules = map[string]CallCheck{ - "bytes.Equal": func(call *Call) { - if ConvertedFrom(call.Args[0].Value, "net.IP") && ConvertedFrom(call.Args[1].Value, "net.IP") { - call.Invalid("use net.IP.Equal to compare net.IPs, not bytes.Equal") - } - }, - } - - checkRegexpMatchLoopRules = map[string]CallCheck{ - "regexp.Match": loopedRegexp("regexp.Match"), - "regexp.MatchReader": loopedRegexp("regexp.MatchReader"), - "regexp.MatchString": loopedRegexp("regexp.MatchString"), - } -) - -type Checker struct { - CheckGenerated bool - funcDescs 
*functions.Descriptions - deprecatedObjs map[types.Object]string - nodeFns map[ast.Node]*ssa.Function -} - -func NewChecker() *Checker { - return &Checker{} -} - -func (*Checker) Name() string { return "staticcheck" } -func (*Checker) Prefix() string { return "SA" } - -func (c *Checker) Funcs() map[string]lint.Func { - return map[string]lint.Func{ - "SA1000": c.callChecker(checkRegexpRules), - "SA1001": c.CheckTemplate, - "SA1002": c.callChecker(checkTimeParseRules), - "SA1003": c.callChecker(checkEncodingBinaryRules), - "SA1004": c.CheckTimeSleepConstant, - "SA1005": c.CheckExec, - "SA1006": c.CheckUnsafePrintf, - "SA1007": c.callChecker(checkURLsRules), - "SA1008": c.CheckCanonicalHeaderKey, - "SA1009": nil, - "SA1010": c.callChecker(checkRegexpFindAllRules), - "SA1011": c.callChecker(checkUTF8CutsetRules), - "SA1012": c.CheckNilContext, - "SA1013": c.CheckSeeker, - "SA1014": c.callChecker(checkUnmarshalPointerRules), - "SA1015": c.CheckLeakyTimeTick, - "SA1016": c.CheckUntrappableSignal, - "SA1017": c.callChecker(checkUnbufferedSignalChanRules), - "SA1018": c.callChecker(checkStringsReplaceZeroRules), - "SA1019": c.CheckDeprecated, - "SA1020": c.callChecker(checkListenAddressRules), - "SA1021": c.callChecker(checkBytesEqualIPRules), - "SA1022": nil, - "SA1023": c.CheckWriterBufferModified, - "SA1024": c.callChecker(checkUniqueCutsetRules), - - "SA2000": c.CheckWaitgroupAdd, - "SA2001": c.CheckEmptyCriticalSection, - "SA2002": c.CheckConcurrentTesting, - "SA2003": c.CheckDeferLock, - - "SA3000": c.CheckTestMainExit, - "SA3001": c.CheckBenchmarkN, - - "SA4000": c.CheckLhsRhsIdentical, - "SA4001": c.CheckIneffectiveCopy, - "SA4002": c.CheckDiffSizeComparison, - "SA4003": c.CheckUnsignedComparison, - "SA4004": c.CheckIneffectiveLoop, - "SA4005": nil, - "SA4006": c.CheckUnreadVariableValues, - // "SA4007": c.CheckPredeterminedBooleanExprs, - "SA4007": nil, - "SA4008": c.CheckLoopCondition, - "SA4009": c.CheckArgOverwritten, - "SA4010": c.CheckIneffectiveAppend, - "SA4011": c.CheckScopedBreak, - "SA4012": c.CheckNaNComparison, - "SA4013": c.CheckDoubleNegation, - "SA4014": c.CheckRepeatedIfElse, - "SA4015": c.callChecker(checkMathIntRules), - "SA4016": c.CheckSillyBitwiseOps, - "SA4017": c.CheckPureFunctions, - "SA4018": c.CheckSelfAssignment, - "SA4019": c.CheckDuplicateBuildConstraints, - - "SA5000": c.CheckNilMaps, - "SA5001": c.CheckEarlyDefer, - "SA5002": c.CheckInfiniteEmptyLoop, - "SA5003": c.CheckDeferInInfiniteLoop, - "SA5004": c.CheckLoopEmptyDefault, - "SA5005": c.CheckCyclicFinalizer, - // "SA5006": c.CheckSliceOutOfBounds, - "SA5007": c.CheckInfiniteRecursion, - - "SA6000": c.callChecker(checkRegexpMatchLoopRules), - "SA6001": c.CheckMapBytesKey, - "SA6002": c.callChecker(checkSyncPoolValueRules), - "SA6003": c.CheckRangeStringRunes, - "SA6004": nil, - - "SA9000": nil, - "SA9001": c.CheckDubiousDeferInChannelRangeLoop, - "SA9002": c.CheckNonOctalFileMode, - "SA9003": c.CheckEmptyBranch, - } -} - -func (c *Checker) filterGenerated(files []*ast.File) []*ast.File { - if c.CheckGenerated { - return files - } - var out []*ast.File - for _, f := range files { - if !lint.IsGenerated(f) { - out = append(out, f) - } - } - return out -} - -func (c *Checker) deprecateObject(m map[types.Object]string, prog *lint.Program, obj types.Object) { - if obj.Pkg() == nil { - return - } - - f := prog.File(obj) - if f == nil { - return - } - msg := c.deprecationMessage(f, prog.Prog.Fset, obj) - if msg != "" { - m[obj] = msg - } -} - -func (c *Checker) Init(prog *lint.Program) { - wg := 
&sync.WaitGroup{} - wg.Add(3) - go func() { - c.funcDescs = functions.NewDescriptions(prog.SSA) - for _, fn := range prog.AllFunctions { - if fn.Blocks != nil { - applyStdlibKnowledge(fn) - ssa.OptimizeBlocks(fn) - } - } - wg.Done() - }() - - go func() { - c.nodeFns = lint.NodeFns(prog.Packages) - wg.Done() - }() - - go func() { - c.deprecatedObjs = map[types.Object]string{} - for _, ssapkg := range prog.SSA.AllPackages() { - ssapkg := ssapkg - for _, member := range ssapkg.Members { - obj := member.Object() - if obj == nil { - continue - } - c.deprecateObject(c.deprecatedObjs, prog, obj) - if typ, ok := obj.Type().(*types.Named); ok { - for i := 0; i < typ.NumMethods(); i++ { - meth := typ.Method(i) - c.deprecateObject(c.deprecatedObjs, prog, meth) - } - - if iface, ok := typ.Underlying().(*types.Interface); ok { - for i := 0; i < iface.NumExplicitMethods(); i++ { - meth := iface.ExplicitMethod(i) - c.deprecateObject(c.deprecatedObjs, prog, meth) - } - } - } - if typ, ok := obj.Type().Underlying().(*types.Struct); ok { - n := typ.NumFields() - for i := 0; i < n; i++ { - // FIXME(dh): This code will not find deprecated - // fields in anonymous structs. - field := typ.Field(i) - c.deprecateObject(c.deprecatedObjs, prog, field) - } - } - } - } - wg.Done() - }() - - wg.Wait() -} - -func (c *Checker) deprecationMessage(file *ast.File, fset *token.FileSet, obj types.Object) (message string) { - pos := obj.Pos() - path, _ := astutil.PathEnclosingInterval(file, pos, pos) - if len(path) <= 2 { - return "" - } - var docs []*ast.CommentGroup - switch n := path[1].(type) { - case *ast.FuncDecl: - docs = append(docs, n.Doc) - case *ast.Field: - docs = append(docs, n.Doc) - case *ast.ValueSpec: - docs = append(docs, n.Doc) - if len(path) >= 3 { - if n, ok := path[2].(*ast.GenDecl); ok { - docs = append(docs, n.Doc) - } - } - case *ast.TypeSpec: - docs = append(docs, n.Doc) - if len(path) >= 3 { - if n, ok := path[2].(*ast.GenDecl); ok { - docs = append(docs, n.Doc) - } - } - default: - return "" - } - - for _, doc := range docs { - if doc == nil { - continue - } - parts := strings.Split(doc.Text(), "\n\n") - last := parts[len(parts)-1] - if !strings.HasPrefix(last, "Deprecated: ") { - continue - } - alt := last[len("Deprecated: "):] - alt = strings.Replace(alt, "\n", " ", -1) - return alt - } - return "" -} - -func (c *Checker) isInLoop(b *ssa.BasicBlock) bool { - sets := c.funcDescs.Get(b.Parent()).Loops - for _, set := range sets { - if set[b] { - return true - } - } - return false -} - -func applyStdlibKnowledge(fn *ssa.Function) { - if len(fn.Blocks) == 0 { - return - } - - // comma-ok receiving from a time.Tick channel will never return - // ok == false, so any branching on the value of ok can be - // replaced with an unconditional jump. This will primarily match - // `for range time.Tick(x)` loops, but it can also match - // user-written code. 
- for _, block := range fn.Blocks { - if len(block.Instrs) < 3 { - continue - } - if len(block.Succs) != 2 { - continue - } - var instrs []*ssa.Instruction - for i, ins := range block.Instrs { - if _, ok := ins.(*ssa.DebugRef); ok { - continue - } - instrs = append(instrs, &block.Instrs[i]) - } - - for i, ins := range instrs { - unop, ok := (*ins).(*ssa.UnOp) - if !ok || unop.Op != token.ARROW { - continue - } - call, ok := unop.X.(*ssa.Call) - if !ok { - continue - } - if !lint.IsCallTo(call.Common(), "time.Tick") { - continue - } - ex, ok := (*instrs[i+1]).(*ssa.Extract) - if !ok || ex.Tuple != unop || ex.Index != 1 { - continue - } - - ifstmt, ok := (*instrs[i+2]).(*ssa.If) - if !ok || ifstmt.Cond != ex { - continue - } - - *instrs[i+2] = ssa.NewJump(block) - succ := block.Succs[1] - block.Succs = block.Succs[0:1] - succ.RemovePred(block) - } - } -} - -func hasType(j *lint.Job, expr ast.Expr, name string) bool { - return types.TypeString(j.Program.Info.TypeOf(expr), nil) == name -} - -func (c *Checker) CheckUntrappableSignal(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - if !j.IsCallToAnyAST(call, - "os/signal.Ignore", "os/signal.Notify", "os/signal.Reset") { - return true - } - for _, arg := range call.Args { - if conv, ok := arg.(*ast.CallExpr); ok && isName(j, conv.Fun, "os.Signal") { - arg = conv.Args[0] - } - - if isName(j, arg, "os.Kill") || isName(j, arg, "syscall.SIGKILL") { - j.Errorf(arg, "%s cannot be trapped (did you mean syscall.SIGTERM?)", j.Render(arg)) - } - if isName(j, arg, "syscall.SIGSTOP") { - j.Errorf(arg, "%s signal cannot be trapped", j.Render(arg)) - } - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckTemplate(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - var kind string - if j.IsCallToAST(call, "(*text/template.Template).Parse") { - kind = "text" - } else if j.IsCallToAST(call, "(*html/template.Template).Parse") { - kind = "html" - } else { - return true - } - sel := call.Fun.(*ast.SelectorExpr) - if !j.IsCallToAST(sel.X, "text/template.New") && - !j.IsCallToAST(sel.X, "html/template.New") { - // TODO(dh): this is a cheap workaround for templates with - // different delims. A better solution with less false - // negatives would use data flow analysis to see where the - // template comes from and where it has been - return true - } - s, ok := j.ExprToString(call.Args[0]) - if !ok { - return true - } - var err error - switch kind { - case "text": - _, err = texttemplate.New("").Parse(s) - case "html": - _, err = htmltemplate.New("").Parse(s) - } - if err != nil { - // TODO(dominikh): whitelist other parse errors, if any - if strings.Contains(err.Error(), "unexpected") { - j.Errorf(call.Args[0], "%s", err) - } - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - if !j.IsCallToAST(call, "time.Sleep") { - return true - } - lit, ok := call.Args[0].(*ast.BasicLit) - if !ok { - return true - } - n, err := strconv.Atoi(lit.Value) - if err != nil { - return true - } - if n == 0 || n > 120 { - // time.Sleep(0) is a seldomly used pattern in concurrency - // tests. >120 might be intentional. 120 was chosen - // because the user could've meant 2 minutes. 
- return true - } - recommendation := "time.Sleep(time.Nanosecond)" - if n != 1 { - recommendation = fmt.Sprintf("time.Sleep(%d * time.Nanosecond)", n) - } - j.Errorf(call.Args[0], "sleeping for %d nanoseconds is probably a bug. Be explicit if it isn't: %s", n, recommendation) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { - fn := func(node ast.Node) bool { - g, ok := node.(*ast.GoStmt) - if !ok { - return true - } - fun, ok := g.Call.Fun.(*ast.FuncLit) - if !ok { - return true - } - if len(fun.Body.List) == 0 { - return true - } - stmt, ok := fun.Body.List[0].(*ast.ExprStmt) - if !ok { - return true - } - call, ok := stmt.X.(*ast.CallExpr) - if !ok { - return true - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func) - if !ok { - return true - } - if fn.FullName() == "(*sync.WaitGroup).Add" { - j.Errorf(sel, "should call %s before starting the goroutine to avoid a race", - j.Render(stmt)) - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.ForStmt) - if !ok || len(loop.Body.List) != 0 || loop.Post != nil { - return true - } - - if loop.Init != nil { - // TODO(dh): this isn't strictly necessary, it just makes - // the check easier. - return true - } - // An empty loop is bad news in two cases: 1) The loop has no - // condition. In that case, it's just a loop that spins - // forever and as fast as it can, keeping a core busy. 2) The - // loop condition only consists of variable or field reads and - // operators on those. The only way those could change their - // value is with unsynchronised access, which constitutes a - // data race. - // - // If the condition contains any function calls, its behaviour - // is dynamic and the loop might terminate. Similarly for - // channel receives. - - if loop.Cond != nil && hasSideEffects(loop.Cond) { - return true - } - - j.Errorf(loop, "this loop will spin, using 100%% CPU") - if loop.Cond != nil { - j.Errorf(loop, "loop condition never changes or has a race condition") - } - - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { - fn := func(node ast.Node) bool { - mightExit := false - var defers []ast.Stmt - loop, ok := node.(*ast.ForStmt) - if !ok || loop.Cond != nil { - return true - } - fn2 := func(node ast.Node) bool { - switch stmt := node.(type) { - case *ast.ReturnStmt: - mightExit = true - case *ast.BranchStmt: - // TODO(dominikh): if this sees a break in a switch or - // select, it doesn't check if it breaks the loop or - // just the select/switch. This causes some false - // negatives. 
- if stmt.Tok == token.BREAK { - mightExit = true - } - case *ast.DeferStmt: - defers = append(defers, stmt) - case *ast.FuncLit: - // Don't look into function bodies - return false - } - return true - } - ast.Inspect(loop.Body, fn2) - if mightExit { - return true - } - for _, stmt := range defers { - j.Errorf(stmt, "defers in this infinite loop will never run") - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.RangeStmt) - if !ok { - return true - } - typ := j.Program.Info.TypeOf(loop.X) - _, ok = typ.Underlying().(*types.Chan) - if !ok { - return true - } - fn2 := func(node ast.Node) bool { - switch stmt := node.(type) { - case *ast.DeferStmt: - j.Errorf(stmt, "defers in this range loop won't run unless the channel gets closed") - case *ast.FuncLit: - // Don't look into function bodies - return false - } - return true - } - ast.Inspect(loop.Body, fn2) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckTestMainExit(j *lint.Job) { - fn := func(node ast.Node) bool { - if !isTestMain(j, node) { - return true - } - - arg := j.Program.Info.ObjectOf(node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) - callsRun := false - fn2 := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - ident, ok := sel.X.(*ast.Ident) - if !ok { - return true - } - if arg != j.Program.Info.ObjectOf(ident) { - return true - } - if sel.Sel.Name == "Run" { - callsRun = true - return false - } - return true - } - ast.Inspect(node.(*ast.FuncDecl).Body, fn2) - - callsExit := false - fn3 := func(node ast.Node) bool { - if j.IsCallToAST(node, "os.Exit") { - callsExit = true - return false - } - return true - } - ast.Inspect(node.(*ast.FuncDecl).Body, fn3) - if !callsExit && callsRun { - j.Errorf(node, "TestMain should call os.Exit to set exit code") - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func isTestMain(j *lint.Job, node ast.Node) bool { - decl, ok := node.(*ast.FuncDecl) - if !ok { - return false - } - if decl.Name.Name != "TestMain" { - return false - } - if len(decl.Type.Params.List) != 1 { - return false - } - arg := decl.Type.Params.List[0] - if len(arg.Names) != 1 { - return false - } - typ := j.Program.Info.TypeOf(arg.Type) - return typ != nil && typ.String() == "*testing.M" -} - -func (c *Checker) CheckExec(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - if !j.IsCallToAST(call, "os/exec.Command") { - return true - } - val, ok := j.ExprToString(call.Args[0]) - if !ok { - return true - } - if !strings.Contains(val, " ") || strings.Contains(val, `\`) || strings.Contains(val, "/") { - return true - } - j.Errorf(call.Args[0], "first argument to exec.Command looks like a shell command, but a program name or path are expected") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.ForStmt) - if !ok || len(loop.Body.List) != 1 || loop.Cond != nil || loop.Init != nil { - return true - } - sel, ok := loop.Body.List[0].(*ast.SelectStmt) - if !ok { - return true - } - for _, c := range sel.Body.List { - if comm, ok := 
c.(*ast.CommClause); ok && comm.Comm == nil && len(comm.Body) == 0 { - j.Errorf(comm, "should not have an empty default case in a for+select loop. The loop will spin.") - } - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { - fn := func(node ast.Node) bool { - op, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } - switch op.Op { - case token.EQL, token.NEQ: - if basic, ok := j.Program.Info.TypeOf(op.X).(*types.Basic); ok { - if kind := basic.Kind(); kind == types.Float32 || kind == types.Float64 { - // f == f and f != f might be used to check for NaN - return true - } - } - case token.SUB, token.QUO, token.AND, token.REM, token.OR, token.XOR, token.AND_NOT, - token.LAND, token.LOR, token.LSS, token.GTR, token.LEQ, token.GEQ: - default: - // For some ops, such as + and *, it can make sense to - // have identical operands - return true - } - - if j.Render(op.X) != j.Render(op.Y) { - return true - } - j.Errorf(op, "identical expressions on the left and right side of the '%s' operator", op.Op) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckScopedBreak(j *lint.Job) { - fn := func(node ast.Node) bool { - var body *ast.BlockStmt - switch node := node.(type) { - case *ast.ForStmt: - body = node.Body - case *ast.RangeStmt: - body = node.Body - default: - return true - } - for _, stmt := range body.List { - var blocks [][]ast.Stmt - switch stmt := stmt.(type) { - case *ast.SwitchStmt: - for _, c := range stmt.Body.List { - blocks = append(blocks, c.(*ast.CaseClause).Body) - } - case *ast.SelectStmt: - for _, c := range stmt.Body.List { - blocks = append(blocks, c.(*ast.CommClause).Body) - } - default: - continue - } - - for _, body := range blocks { - if len(body) == 0 { - continue - } - lasts := []ast.Stmt{body[len(body)-1]} - // TODO(dh): unfold all levels of nested block - // statements, not just a single level if statement - if ifs, ok := lasts[0].(*ast.IfStmt); ok { - if len(ifs.Body.List) == 0 { - continue - } - lasts[0] = ifs.Body.List[len(ifs.Body.List)-1] - - if block, ok := ifs.Else.(*ast.BlockStmt); ok { - if len(block.List) != 0 { - lasts = append(lasts, block.List[len(block.List)-1]) - } - } - } - for _, last := range lasts { - branch, ok := last.(*ast.BranchStmt) - if !ok || branch.Tok != token.BREAK || branch.Label != nil { - continue - } - j.Errorf(branch, "ineffective break statement. 
Did you mean to break out of the outer loop?") - } - } - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckUnsafePrintf(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - if !j.IsCallToAnyAST(call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { - return true - } - if len(call.Args) != 1 { - return true - } - switch call.Args[0].(type) { - case *ast.CallExpr, *ast.Ident: - default: - return true - } - j.Errorf(call.Args[0], "printf-style function with dynamic first argument and no further arguments should use print-style function instead") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckEarlyDefer(j *lint.Job) { - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } - if len(block.List) < 2 { - return true - } - for i, stmt := range block.List { - if i == len(block.List)-1 { - break - } - assign, ok := stmt.(*ast.AssignStmt) - if !ok { - continue - } - if len(assign.Rhs) != 1 { - continue - } - if len(assign.Lhs) < 2 { - continue - } - if lhs, ok := assign.Lhs[len(assign.Lhs)-1].(*ast.Ident); ok && lhs.Name == "_" { - continue - } - call, ok := assign.Rhs[0].(*ast.CallExpr) - if !ok { - continue - } - sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature) - if !ok { - continue - } - if sig.Results().Len() < 2 { - continue - } - last := sig.Results().At(sig.Results().Len() - 1) - // FIXME(dh): check that it's error from universe, not - // another type of the same name - if last.Type().String() != "error" { - continue - } - lhs, ok := assign.Lhs[0].(*ast.Ident) - if !ok { - continue - } - def, ok := block.List[i+1].(*ast.DeferStmt) - if !ok { - continue - } - sel, ok := def.Call.Fun.(*ast.SelectorExpr) - if !ok { - continue - } - ident, ok := selectorX(sel).(*ast.Ident) - if !ok { - continue - } - if ident.Obj != lhs.Obj { - continue - } - if sel.Sel.Name != "Close" { - continue - } - j.Errorf(def, "should check returned error before deferring %s", j.Render(def.Call)) - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func selectorX(sel *ast.SelectorExpr) ast.Node { - switch x := sel.X.(type) { - case *ast.SelectorExpr: - return selectorX(x) - default: - return x - } -} - -func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { - // Initially it might seem like this check would be easier to - // implement in SSA. After all, we're only checking for two - // consecutive method calls. In reality, however, there may be any - // number of other instructions between the lock and unlock, while - // still constituting an empty critical section. For example, - // given `m.x().Lock(); m.x().Unlock()`, there will be a call to - // x(). In the AST-based approach, this has a tiny potential for a - // false positive (the second call to x might be doing work that - // is protected by the mutex). In an SSA-based approach, however, - // it would miss a lot of real bugs. 
- - mutexParams := func(s ast.Stmt) (x ast.Expr, funcName string, ok bool) { - expr, ok := s.(*ast.ExprStmt) - if !ok { - return nil, "", false - } - call, ok := expr.X.(*ast.CallExpr) - if !ok { - return nil, "", false - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return nil, "", false - } - - fn, ok := j.Program.Info.ObjectOf(sel.Sel).(*types.Func) - if !ok { - return nil, "", false - } - sig := fn.Type().(*types.Signature) - if sig.Params().Len() != 0 || sig.Results().Len() != 0 { - return nil, "", false - } - - return sel.X, fn.Name(), true - } - - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } - if len(block.List) < 2 { - return true - } - for i := range block.List[:len(block.List)-1] { - sel1, method1, ok1 := mutexParams(block.List[i]) - sel2, method2, ok2 := mutexParams(block.List[i+1]) - - if !ok1 || !ok2 || j.Render(sel1) != j.Render(sel2) { - continue - } - if (method1 == "Lock" && method2 == "Unlock") || - (method1 == "RLock" && method2 == "RUnlock") { - j.Errorf(block.List[i+1], "empty critical section") - } - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -// cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't -// want to flag. -var cgoIdent = regexp.MustCompile(`^_C(func|var)_.+$`) - -func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { - fn := func(node ast.Node) bool { - if unary, ok := node.(*ast.UnaryExpr); ok { - if star, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND { - ident, ok := star.X.(*ast.Ident) - if !ok || !cgoIdent.MatchString(ident.Name) { - j.Errorf(unary, "&*x will be simplified to x. It will not copy x.") - } - } - } - - if star, ok := node.(*ast.StarExpr); ok { - if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND { - j.Errorf(star, "*&x will be simplified to x. It will not copy x.") - } - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { - for _, b := range ssafn.Blocks { - for _, ins := range b.Instrs { - binop, ok := ins.(*ssa.BinOp) - if !ok { - continue - } - if binop.Op != token.EQL && binop.Op != token.NEQ { - continue - } - _, ok1 := binop.X.(*ssa.Slice) - _, ok2 := binop.Y.(*ssa.Slice) - if !ok1 && !ok2 { - continue - } - r := c.funcDescs.Get(ssafn).Ranges - r1, ok1 := r.Get(binop.X).(vrp.StringInterval) - r2, ok2 := r.Get(binop.Y).(vrp.StringInterval) - if !ok1 || !ok2 { - continue - } - if r1.Length.Intersection(r2.Length).Empty() { - j.Errorf(binop, "comparing strings of different sizes for equality will always return false") - } - } - } - } -} - -func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { - fn := func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if ok { - // TODO(dh): This risks missing some Header reads, for - // example in `h1["foo"] = h2["foo"]` – these edge - // cases are probably rare enough to ignore for now. 
- for _, expr := range assign.Lhs { - op, ok := expr.(*ast.IndexExpr) - if !ok { - continue - } - if hasType(j, op.X, "net/http.Header") { - return false - } - } - return true - } - op, ok := node.(*ast.IndexExpr) - if !ok { - return true - } - if !hasType(j, op.X, "net/http.Header") { - return true - } - s, ok := j.ExprToString(op.Index) - if !ok { - return true - } - if s == http.CanonicalHeaderKey(s) { - return true - } - j.Errorf(op, "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckBenchmarkN(j *lint.Job) { - fn := func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return true - } - if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { - return true - } - sel, ok := assign.Lhs[0].(*ast.SelectorExpr) - if !ok { - return true - } - if sel.Sel.Name != "N" { - return true - } - if !hasType(j, sel.X, "*testing.B") { - return true - } - j.Errorf(assign, "should not assign to %s", j.Render(sel)) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { - fn := func(node ast.Node) bool { - switch node.(type) { - case *ast.FuncDecl, *ast.FuncLit: - default: - return true - } - - ssafn := c.nodeFns[node] - if ssafn == nil { - return true - } - if lint.IsExample(ssafn) { - return true - } - ast.Inspect(node, func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return true - } - if len(assign.Lhs) > 1 && len(assign.Rhs) == 1 { - // Either a function call with multiple return values, - // or a comma-ok assignment - - val, _ := ssafn.ValueForExpr(assign.Rhs[0]) - if val == nil { - return true - } - refs := val.Referrers() - if refs == nil { - return true - } - for _, ref := range *refs { - ex, ok := ref.(*ssa.Extract) - if !ok { - continue - } - exrefs := ex.Referrers() - if exrefs == nil { - continue - } - if len(lint.FilterDebug(*exrefs)) == 0 { - lhs := assign.Lhs[ex.Index] - if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" { - continue - } - j.Errorf(lhs, "this value of %s is never used", lhs) - } - } - return true - } - for i, lhs := range assign.Lhs { - rhs := assign.Rhs[i] - if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" { - continue - } - val, _ := ssafn.ValueForExpr(rhs) - if val == nil { - continue - } - - refs := val.Referrers() - if refs == nil { - // TODO investigate why refs can be nil - return true - } - if len(lint.FilterDebug(*refs)) == 0 { - j.Errorf(lhs, "this value of %s is never used", lhs) - } - } - return true - }) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckPredeterminedBooleanExprs(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { - for _, block := range ssafn.Blocks { - for _, ins := range block.Instrs { - ssabinop, ok := ins.(*ssa.BinOp) - if !ok { - continue - } - switch ssabinop.Op { - case token.GTR, token.LSS, token.EQL, token.NEQ, token.LEQ, token.GEQ: - default: - continue - } - - xs, ok1 := consts(ssabinop.X, nil, nil) - ys, ok2 := consts(ssabinop.Y, nil, nil) - if !ok1 || !ok2 || len(xs) == 0 || len(ys) == 0 { - continue - } - - trues := 0 - for _, x := range xs { - for _, y := range ys { - if x.Value == nil { - if y.Value == nil { - trues++ - } - continue - } - if constant.Compare(x.Value, ssabinop.Op, y.Value) { - trues++ - } - } - } - b := 
trues != 0 - if trues == 0 || trues == len(xs)*len(ys) { - j.Errorf(ssabinop, "binary expression is always %t for all possible values (%s %s %s)", - b, xs, ssabinop.Op, ys) - } - } - } - } -} - -func (c *Checker) CheckNilMaps(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { - for _, block := range ssafn.Blocks { - for _, ins := range block.Instrs { - mu, ok := ins.(*ssa.MapUpdate) - if !ok { - continue - } - c, ok := mu.Map.(*ssa.Const) - if !ok { - continue - } - if c.Value != nil { - continue - } - j.Errorf(mu, "assignment to nil map") - } - } - } -} - -func (c *Checker) CheckUnsignedComparison(j *lint.Job) { - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } - tx := j.Program.Info.TypeOf(expr.X) - basic, ok := tx.Underlying().(*types.Basic) - if !ok { - return true - } - if (basic.Info() & types.IsUnsigned) == 0 { - return true - } - lit, ok := expr.Y.(*ast.BasicLit) - if !ok || lit.Value != "0" { - return true - } - switch expr.Op { - case token.GEQ: - j.Errorf(expr, "unsigned values are always >= 0") - case token.LSS: - j.Errorf(expr, "unsigned values are never < 0") - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ssa.Const, bool) { - if visitedPhis == nil { - visitedPhis = map[string]bool{} - } - var ok bool - switch val := val.(type) { - case *ssa.Phi: - if visitedPhis[val.Name()] { - break - } - visitedPhis[val.Name()] = true - vals := val.Operands(nil) - for _, phival := range vals { - out, ok = consts(*phival, out, visitedPhis) - if !ok { - return nil, false - } - } - case *ssa.Const: - out = append(out, val) - case *ssa.Convert: - out, ok = consts(val.X, out, visitedPhis) - if !ok { - return nil, false - } - default: - return nil, false - } - if len(out) < 2 { - return out, true - } - uniq := []*ssa.Const{out[0]} - for _, val := range out[1:] { - if val.Value == uniq[len(uniq)-1].Value { - continue - } - uniq = append(uniq, val) - } - return uniq, true -} - -func (c *Checker) CheckLoopCondition(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.ForStmt) - if !ok { - return true - } - if loop.Init == nil || loop.Cond == nil || loop.Post == nil { - return true - } - init, ok := loop.Init.(*ast.AssignStmt) - if !ok || len(init.Lhs) != 1 || len(init.Rhs) != 1 { - return true - } - cond, ok := loop.Cond.(*ast.BinaryExpr) - if !ok { - return true - } - x, ok := cond.X.(*ast.Ident) - if !ok { - return true - } - lhs, ok := init.Lhs[0].(*ast.Ident) - if !ok { - return true - } - if x.Obj != lhs.Obj { - return true - } - if _, ok := loop.Post.(*ast.IncDecStmt); !ok { - return true - } - - ssafn := c.nodeFns[cond] - if ssafn == nil { - return true - } - v, isAddr := ssafn.ValueForExpr(cond.X) - if v == nil || isAddr { - return true - } - switch v := v.(type) { - case *ssa.Phi: - ops := v.Operands(nil) - if len(ops) != 2 { - return true - } - _, ok := (*ops[0]).(*ssa.Const) - if !ok { - return true - } - sigma, ok := (*ops[1]).(*ssa.Sigma) - if !ok { - return true - } - if sigma.X != v { - return true - } - case *ssa.UnOp: - return true - } - j.Errorf(cond, "variable in loop condition never changes") - - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckArgOverwritten(j *lint.Job) { - fn := func(node ast.Node) bool { - var typ *ast.FuncType - var body *ast.BlockStmt - switch fn := node.(type) { - case *ast.FuncDecl: - typ 
= fn.Type - body = fn.Body - case *ast.FuncLit: - typ = fn.Type - body = fn.Body - } - if body == nil { - return true - } - ssafn := c.nodeFns[node] - if ssafn == nil { - return true - } - if len(typ.Params.List) == 0 { - return true - } - for _, field := range typ.Params.List { - for _, arg := range field.Names { - obj := j.Program.Info.ObjectOf(arg) - var ssaobj *ssa.Parameter - for _, param := range ssafn.Params { - if param.Object() == obj { - ssaobj = param - break - } - } - if ssaobj == nil { - continue - } - refs := ssaobj.Referrers() - if refs == nil { - continue - } - if len(lint.FilterDebug(*refs)) != 0 { - continue - } - - assigned := false - ast.Inspect(body, func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return true - } - for _, lhs := range assign.Lhs { - ident, ok := lhs.(*ast.Ident) - if !ok { - continue - } - if j.Program.Info.ObjectOf(ident) == obj { - assigned = true - return false - } - } - return true - }) - if assigned { - j.Errorf(arg, "argument %s is overwritten before first use", arg) - } - } - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { - // This check detects some, but not all unconditional loop exits. - // We give up in the following cases: - // - // - a goto anywhere in the loop. The goto might skip over our - // return, and we don't check that it doesn't. - // - // - any nested, unlabelled continue, even if it is in another - // loop or closure. - fn := func(node ast.Node) bool { - var body *ast.BlockStmt - switch fn := node.(type) { - case *ast.FuncDecl: - body = fn.Body - case *ast.FuncLit: - body = fn.Body - default: - return true - } - if body == nil { - return true - } - labels := map[*ast.Object]ast.Stmt{} - ast.Inspect(body, func(node ast.Node) bool { - label, ok := node.(*ast.LabeledStmt) - if !ok { - return true - } - labels[label.Label.Obj] = label.Stmt - return true - }) - - ast.Inspect(body, func(node ast.Node) bool { - var loop ast.Node - var body *ast.BlockStmt - switch node := node.(type) { - case *ast.ForStmt: - body = node.Body - loop = node - case *ast.RangeStmt: - typ := j.Program.Info.TypeOf(node.X) - if _, ok := typ.Underlying().(*types.Map); ok { - // looping once over a map is a valid pattern for - // getting an arbitrary element. - return true - } - body = node.Body - loop = node - default: - return true - } - if len(body.List) < 2 { - // avoid flagging the somewhat common pattern of using - // a range loop to get the first element in a slice, - // or the first rune in a string. 
- return true - } - var unconditionalExit ast.Node - hasBranching := false - for _, stmt := range body.List { - switch stmt := stmt.(type) { - case *ast.BranchStmt: - switch stmt.Tok { - case token.BREAK: - if stmt.Label == nil || labels[stmt.Label.Obj] == loop { - unconditionalExit = stmt - } - case token.CONTINUE: - if stmt.Label == nil || labels[stmt.Label.Obj] == loop { - unconditionalExit = nil - return false - } - } - case *ast.ReturnStmt: - unconditionalExit = stmt - case *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.SwitchStmt, *ast.SelectStmt: - hasBranching = true - } - } - if unconditionalExit == nil || !hasBranching { - return false - } - ast.Inspect(body, func(node ast.Node) bool { - if branch, ok := node.(*ast.BranchStmt); ok { - - switch branch.Tok { - case token.GOTO: - unconditionalExit = nil - return false - case token.CONTINUE: - if branch.Label != nil && labels[branch.Label.Obj] != loop { - return true - } - unconditionalExit = nil - return false - } - } - return true - }) - if unconditionalExit != nil { - j.Errorf(unconditionalExit, "the surrounding loop is unconditionally terminated") - } - return true - }) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckNilContext(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - if len(call.Args) == 0 { - return true - } - if typ, ok := j.Program.Info.TypeOf(call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { - return true - } - sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature) - if !ok { - return true - } - if sig.Params().Len() == 0 { - return true - } - if types.TypeString(sig.Params().At(0).Type(), nil) != "context.Context" { - return true - } - j.Errorf(call.Args[0], - "do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckSeeker(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - if sel.Sel.Name != "Seek" { - return true - } - if len(call.Args) != 2 { - return true - } - arg0, ok := call.Args[0].(*ast.SelectorExpr) - if !ok { - return true - } - switch arg0.Sel.Name { - case "SeekStart", "SeekCurrent", "SeekEnd": - default: - return true - } - pkg, ok := arg0.X.(*ast.Ident) - if !ok { - return true - } - if pkg.Name != "io" { - return true - } - j.Errorf(call, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { - isAppend := func(ins ssa.Value) bool { - call, ok := ins.(*ssa.Call) - if !ok { - return false - } - if call.Call.IsInvoke() { - return false - } - if builtin, ok := call.Call.Value.(*ssa.Builtin); !ok || builtin.Name() != "append" { - return false - } - return true - } - - for _, ssafn := range j.Program.InitialFunctions { - for _, block := range ssafn.Blocks { - for _, ins := range block.Instrs { - val, ok := ins.(ssa.Value) - if !ok || !isAppend(val) { - continue - } - - isUsed := false - visited := map[ssa.Instruction]bool{} - var walkRefs func(refs []ssa.Instruction) - walkRefs = func(refs []ssa.Instruction) { - loop: - for _, ref := range refs { - if visited[ref] 
{ - continue - } - visited[ref] = true - if _, ok := ref.(*ssa.DebugRef); ok { - continue - } - switch ref := ref.(type) { - case *ssa.Phi: - walkRefs(*ref.Referrers()) - case *ssa.Sigma: - walkRefs(*ref.Referrers()) - case ssa.Value: - if !isAppend(ref) { - isUsed = true - } else { - walkRefs(*ref.Referrers()) - } - case ssa.Instruction: - isUsed = true - break loop - } - } - } - refs := val.Referrers() - if refs == nil { - continue - } - walkRefs(*refs) - if !isUsed { - j.Errorf(ins, "this result of append is never used, except maybe in other appends") - } - } - } - } -} - -func (c *Checker) CheckConcurrentTesting(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { - for _, block := range ssafn.Blocks { - for _, ins := range block.Instrs { - gostmt, ok := ins.(*ssa.Go) - if !ok { - continue - } - var fn *ssa.Function - switch val := gostmt.Call.Value.(type) { - case *ssa.Function: - fn = val - case *ssa.MakeClosure: - fn = val.Fn.(*ssa.Function) - default: - continue - } - if fn.Blocks == nil { - continue - } - for _, block := range fn.Blocks { - for _, ins := range block.Instrs { - call, ok := ins.(*ssa.Call) - if !ok { - continue - } - if call.Call.IsInvoke() { - continue - } - callee := call.Call.StaticCallee() - if callee == nil { - continue - } - recv := callee.Signature.Recv() - if recv == nil { - continue - } - if types.TypeString(recv.Type(), nil) != "*testing.common" { - continue - } - fn, ok := call.Call.StaticCallee().Object().(*types.Func) - if !ok { - continue - } - name := fn.Name() - switch name { - case "FailNow", "Fatal", "Fatalf", "SkipNow", "Skip", "Skipf": - default: - continue - } - j.Errorf(gostmt, "the goroutine calls T.%s, which must be called in the same goroutine as the test", name) - } - } - } - } - } -} - -func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { - node := c.funcDescs.CallGraph.CreateNode(ssafn) - for _, edge := range node.Out { - if edge.Callee.Func.RelString(nil) != "runtime.SetFinalizer" { - continue - } - arg0 := edge.Site.Common().Args[0] - if iface, ok := arg0.(*ssa.MakeInterface); ok { - arg0 = iface.X - } - unop, ok := arg0.(*ssa.UnOp) - if !ok { - continue - } - v, ok := unop.X.(*ssa.Alloc) - if !ok { - continue - } - arg1 := edge.Site.Common().Args[1] - if iface, ok := arg1.(*ssa.MakeInterface); ok { - arg1 = iface.X - } - mc, ok := arg1.(*ssa.MakeClosure) - if !ok { - continue - } - for _, b := range mc.Bindings { - if b == v { - pos := j.Program.DisplayPosition(mc.Fn.Pos()) - j.Errorf(edge.Site, "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos) - } - } - } - } -} - -func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { - for _, block := range ssafn.Blocks { - for _, ins := range block.Instrs { - ia, ok := ins.(*ssa.IndexAddr) - if !ok { - continue - } - if _, ok := ia.X.Type().Underlying().(*types.Slice); !ok { - continue - } - sr, ok1 := c.funcDescs.Get(ssafn).Ranges[ia.X].(vrp.SliceInterval) - idxr, ok2 := c.funcDescs.Get(ssafn).Ranges[ia.Index].(vrp.IntInterval) - if !ok1 || !ok2 || !sr.IsKnown() || !idxr.IsKnown() || sr.Length.Empty() || idxr.Empty() { - continue - } - if idxr.Lower.Cmp(sr.Length.Upper) >= 0 { - j.Errorf(ia, "index out of bounds") - } - } - } - } -} - -func (c *Checker) CheckDeferLock(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { - for _, block := range ssafn.Blocks { - instrs := lint.FilterDebug(block.Instrs) - if len(instrs) 
< 2 { - continue - } - for i, ins := range instrs[:len(instrs)-1] { - call, ok := ins.(*ssa.Call) - if !ok { - continue - } - if !lint.IsCallTo(call.Common(), "(*sync.Mutex).Lock") && !lint.IsCallTo(call.Common(), "(*sync.RWMutex).RLock") { - continue - } - nins, ok := instrs[i+1].(*ssa.Defer) - if !ok { - continue - } - if !lint.IsCallTo(&nins.Call, "(*sync.Mutex).Lock") && !lint.IsCallTo(&nins.Call, "(*sync.RWMutex).RLock") { - continue - } - if call.Common().Args[0] != nins.Call.Args[0] { - continue - } - name := shortCallName(call.Common()) - alt := "" - switch name { - case "Lock": - alt = "Unlock" - case "RLock": - alt = "RUnlock" - } - j.Errorf(nins, "deferring %s right after having locked already; did you mean to defer %s?", name, alt) - } - } - } -} - -func (c *Checker) CheckNaNComparison(j *lint.Job) { - isNaN := func(v ssa.Value) bool { - call, ok := v.(*ssa.Call) - if !ok { - return false - } - return lint.IsCallTo(call.Common(), "math.NaN") - } - for _, ssafn := range j.Program.InitialFunctions { - for _, block := range ssafn.Blocks { - for _, ins := range block.Instrs { - ins, ok := ins.(*ssa.BinOp) - if !ok { - continue - } - if isNaN(ins.X) || isNaN(ins.Y) { - j.Errorf(ins, "no value is equal to NaN, not even NaN itself") - } - } - } - } -} - -func (c *Checker) CheckInfiniteRecursion(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { - node := c.funcDescs.CallGraph.CreateNode(ssafn) - for _, edge := range node.Out { - if edge.Callee != node { - continue - } - if _, ok := edge.Site.(*ssa.Go); ok { - // Recursively spawning goroutines doesn't consume - // stack space infinitely, so don't flag it. - continue - } - - block := edge.Site.Block() - canReturn := false - for _, b := range ssafn.Blocks { - if block.Dominates(b) { - continue - } - if len(b.Instrs) == 0 { - continue - } - if _, ok := b.Instrs[len(b.Instrs)-1].(*ssa.Return); ok { - canReturn = true - break - } - } - if canReturn { - continue - } - j.Errorf(edge.Site, "infinite recursive call") - } - } -} - -func objectName(obj types.Object) string { - if obj == nil { - return "" - } - var name string - if obj.Pkg() != nil && obj.Pkg().Scope().Lookup(obj.Name()) == obj { - var s string - s = obj.Pkg().Path() - if s != "" { - name += s + "." 
- } - } - name += obj.Name() - return name -} - -func isName(j *lint.Job, expr ast.Expr, name string) bool { - var obj types.Object - switch expr := expr.(type) { - case *ast.Ident: - obj = j.Program.Info.ObjectOf(expr) - case *ast.SelectorExpr: - obj = j.Program.Info.ObjectOf(expr.Sel) - } - return objectName(obj) == name -} - -func (c *Checker) CheckLeakyTimeTick(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { - if j.IsInMain(ssafn) || j.IsInTest(ssafn) { - continue - } - for _, block := range ssafn.Blocks { - for _, ins := range block.Instrs { - call, ok := ins.(*ssa.Call) - if !ok || !lint.IsCallTo(call.Common(), "time.Tick") { - continue - } - if c.funcDescs.Get(call.Parent()).Infinite { - continue - } - j.Errorf(call, "using time.Tick leaks the underlying ticker, consider using it only in endless functions, tests and the main package, and use time.NewTicker here") - } - } - } -} - -func (c *Checker) CheckDoubleNegation(j *lint.Job) { - fn := func(node ast.Node) bool { - unary1, ok := node.(*ast.UnaryExpr) - if !ok { - return true - } - unary2, ok := unary1.X.(*ast.UnaryExpr) - if !ok { - return true - } - if unary1.Op != token.NOT || unary2.Op != token.NOT { - return true - } - j.Errorf(unary1, "negating a boolean twice has no effect; is this a typo?") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func hasSideEffects(node ast.Node) bool { - dynamic := false - ast.Inspect(node, func(node ast.Node) bool { - switch node := node.(type) { - case *ast.CallExpr: - dynamic = true - return false - case *ast.UnaryExpr: - if node.Op == token.ARROW { - dynamic = true - return false - } - } - return true - }) - return dynamic -} - -func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { - seen := map[ast.Node]bool{} - - var collectConds func(ifstmt *ast.IfStmt, inits []ast.Stmt, conds []ast.Expr) ([]ast.Stmt, []ast.Expr) - collectConds = func(ifstmt *ast.IfStmt, inits []ast.Stmt, conds []ast.Expr) ([]ast.Stmt, []ast.Expr) { - seen[ifstmt] = true - if ifstmt.Init != nil { - inits = append(inits, ifstmt.Init) - } - conds = append(conds, ifstmt.Cond) - if elsestmt, ok := ifstmt.Else.(*ast.IfStmt); ok { - return collectConds(elsestmt, inits, conds) - } - return inits, conds - } - fn := func(node ast.Node) bool { - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } - if seen[ifstmt] { - return true - } - inits, conds := collectConds(ifstmt, nil, nil) - if len(inits) > 0 { - return true - } - for _, cond := range conds { - if hasSideEffects(cond) { - return true - } - } - counts := map[string]int{} - for _, cond := range conds { - s := j.Render(cond) - counts[s]++ - if counts[s] == 2 { - j.Errorf(cond, "this condition occurs multiple times in this if/else if chain") - } - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { - for _, block := range ssafn.Blocks { - for _, ins := range block.Instrs { - ins, ok := ins.(*ssa.BinOp) - if !ok { - continue - } - - if c, ok := ins.Y.(*ssa.Const); !ok || c.Value == nil || c.Value.Kind() != constant.Int || c.Uint64() != 0 { - continue - } - switch ins.Op { - case token.AND, token.OR, token.XOR: - default: - // we do not flag shifts because too often, x<<0 is part - // of a pattern, x<<0, x<<8, x<<16, ... 
- continue - } - path, _ := astutil.PathEnclosingInterval(j.File(ins), ins.Pos(), ins.Pos()) - if len(path) == 0 { - continue - } - if node, ok := path[0].(*ast.BinaryExpr); !ok || !lint.IsZero(node.Y) { - continue - } - - switch ins.Op { - case token.AND: - j.Errorf(ins, "x & 0 always equals 0") - case token.OR, token.XOR: - j.Errorf(ins, "x %s 0 always equals x", ins.Op) - } - } - } - } -} - -func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } - sig, ok := j.Program.Info.TypeOf(call.Fun).(*types.Signature) - if !ok { - return true - } - n := sig.Params().Len() - var args []int - for i := 0; i < n; i++ { - typ := sig.Params().At(i).Type() - if types.TypeString(typ, nil) == "os.FileMode" { - args = append(args, i) - } - } - for _, i := range args { - lit, ok := call.Args[i].(*ast.BasicLit) - if !ok { - continue - } - if len(lit.Value) == 3 && - lit.Value[0] != '0' && - lit.Value[0] >= '0' && lit.Value[0] <= '7' && - lit.Value[1] >= '0' && lit.Value[1] <= '7' && - lit.Value[2] >= '0' && lit.Value[2] <= '7' { - - v, err := strconv.ParseInt(lit.Value, 10, 64) - if err != nil { - continue - } - j.Errorf(call.Args[i], "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value) - } - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckPureFunctions(j *lint.Job) { -fnLoop: - for _, ssafn := range j.Program.InitialFunctions { - if j.IsInTest(ssafn) { - params := ssafn.Signature.Params() - for i := 0; i < params.Len(); i++ { - param := params.At(i) - if types.TypeString(param.Type(), nil) == "*testing.B" { - // Ignore discarded pure functions in code related - // to benchmarks. Instead of matching BenchmarkFoo - // functions, we match any function accepting a - // *testing.B. Benchmarks sometimes call generic - // functions for doing the actual work, and - // checking for the parameter is a lot easier and - // faster than analyzing call trees. 
- continue fnLoop - } - } - } - - for _, b := range ssafn.Blocks { - for _, ins := range b.Instrs { - ins, ok := ins.(*ssa.Call) - if !ok { - continue - } - refs := ins.Referrers() - if refs == nil || len(lint.FilterDebug(*refs)) > 0 { - continue - } - callee := ins.Common().StaticCallee() - if callee == nil { - continue - } - if c.funcDescs.Get(callee).Pure && !c.funcDescs.Get(callee).Stub { - j.Errorf(ins, "%s is a pure function but its return value is ignored", callee.Name()) - continue - } - } - } - } -} - -func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) { - obj := j.Program.Info.ObjectOf(ident) - if obj.Pkg() == nil { - return false, "" - } - alt := c.deprecatedObjs[obj] - return alt != "", alt -} - -func selectorName(j *lint.Job, expr *ast.SelectorExpr) string { - sel := j.Program.Info.Selections[expr] - if sel == nil { - if x, ok := expr.X.(*ast.Ident); ok { - pkg, ok := j.Program.Info.ObjectOf(x).(*types.PkgName) - if !ok { - // This shouldn't happen - return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name) - } - return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name) - } - panic(fmt.Sprintf("unsupported selector: %v", expr)) - } - return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) -} - -func (c *Checker) enclosingFunc(sel *ast.SelectorExpr) *ssa.Function { - fn := c.nodeFns[sel] - if fn == nil { - return nil - } - for fn.Parent() != nil { - fn = fn.Parent() - } - return fn -} - -func (c *Checker) CheckDeprecated(j *lint.Job) { - fn := func(node ast.Node) bool { - sel, ok := node.(*ast.SelectorExpr) - if !ok { - return true - } - - obj := j.Program.Info.ObjectOf(sel.Sel) - if obj.Pkg() == nil { - return true - } - nodePkg := j.NodePackage(node).Pkg - if nodePkg == obj.Pkg() || obj.Pkg().Path()+"_test" == nodePkg.Path() { - // Don't flag stuff in our own package - return true - } - if ok, alt := c.isDeprecated(j, sel.Sel); ok { - // Look for the first available alternative, not the first - // version something was deprecated in. If a function was - // deprecated in Go 1.6, an alternative has been available - // already in 1.0, and we're targetting 1.2, it still - // makes sense to use the alternative from 1.0, to be - // future-proof. 
- minVersion := deprecated.Stdlib[selectorName(j, sel)].AlternativeAvailableSince - if !j.IsGoVersion(minVersion) { - return true - } - - if fn := c.enclosingFunc(sel); fn != nil { - if _, ok := c.deprecatedObjs[fn.Object()]; ok { - // functions that are deprecated may use deprecated - // symbols - return true - } - } - j.Errorf(sel, "%s is deprecated: %s", j.Render(sel), alt) - return true - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } -} - -func (c *Checker) callChecker(rules map[string]CallCheck) func(j *lint.Job) { - return func(j *lint.Job) { - c.checkCalls(j, rules) - } -} - -func (c *Checker) checkCalls(j *lint.Job, rules map[string]CallCheck) { - for _, ssafn := range j.Program.InitialFunctions { - node := c.funcDescs.CallGraph.CreateNode(ssafn) - for _, edge := range node.Out { - callee := edge.Callee.Func - obj, ok := callee.Object().(*types.Func) - if !ok { - continue - } - - r, ok := rules[obj.FullName()] - if !ok { - continue - } - var args []*Argument - ssaargs := edge.Site.Common().Args - if callee.Signature.Recv() != nil { - ssaargs = ssaargs[1:] - } - for _, arg := range ssaargs { - if iarg, ok := arg.(*ssa.MakeInterface); ok { - arg = iarg.X - } - vr := c.funcDescs.Get(edge.Site.Parent()).Ranges[arg] - args = append(args, &Argument{Value: Value{arg, vr}}) - } - call := &Call{ - Job: j, - Instr: edge.Site, - Args: args, - Checker: c, - Parent: edge.Site.Parent(), - } - r(call) - for idx, arg := range call.Args { - _ = idx - for _, e := range arg.invalids { - // path, _ := astutil.PathEnclosingInterval(f.File, edge.Site.Pos(), edge.Site.Pos()) - // if len(path) < 2 { - // continue - // } - // astcall, ok := path[0].(*ast.CallExpr) - // if !ok { - // continue - // } - // j.Errorf(astcall.Args[idx], "%s", e) - - j.Errorf(edge.Site, "%s", e) - } - } - for _, e := range call.invalids { - j.Errorf(call.Instr.Common(), "%s", e) - } - } - } -} - -func unwrapFunction(val ssa.Value) *ssa.Function { - switch val := val.(type) { - case *ssa.Function: - return val - case *ssa.MakeClosure: - return val.Fn.(*ssa.Function) - default: - return nil - } -} - -func shortCallName(call *ssa.CallCommon) string { - if call.IsInvoke() { - return "" - } - switch v := call.Value.(type) { - case *ssa.Function: - fn, ok := v.Object().(*types.Func) - if !ok { - return "" - } - return fn.Name() - case *ssa.Builtin: - return v.Name() - } - return "" -} - -func hasCallTo(block *ssa.BasicBlock, name string) bool { - for _, ins := range block.Instrs { - call, ok := ins.(*ssa.Call) - if !ok { - continue - } - if lint.IsCallTo(call.Common(), name) { - return true - } - } - return false -} - -// deref returns a pointer's element type; otherwise it returns typ. -func deref(typ types.Type) types.Type { - if p, ok := typ.Underlying().(*types.Pointer); ok { - return p.Elem() - } - return typ -} - -func (c *Checker) CheckWriterBufferModified(j *lint.Job) { - // TODO(dh): this might be a good candidate for taint analysis. 
- // Taint the argument as MUST_NOT_MODIFY, then propagate that - // through functions like bytes.Split - - for _, ssafn := range j.Program.InitialFunctions { - sig := ssafn.Signature - if ssafn.Name() != "Write" || sig.Recv() == nil || sig.Params().Len() != 1 || sig.Results().Len() != 2 { - continue - } - tArg, ok := sig.Params().At(0).Type().(*types.Slice) - if !ok { - continue - } - if basic, ok := tArg.Elem().(*types.Basic); !ok || basic.Kind() != types.Byte { - continue - } - if basic, ok := sig.Results().At(0).Type().(*types.Basic); !ok || basic.Kind() != types.Int { - continue - } - if named, ok := sig.Results().At(1).Type().(*types.Named); !ok || types.TypeString(named, nil) != "error" { - continue - } - - for _, block := range ssafn.Blocks { - for _, ins := range block.Instrs { - switch ins := ins.(type) { - case *ssa.Store: - addr, ok := ins.Addr.(*ssa.IndexAddr) - if !ok { - continue - } - if addr.X != ssafn.Params[1] { - continue - } - j.Errorf(ins, "io.Writer.Write must not modify the provided buffer, not even temporarily") - case *ssa.Call: - if !lint.IsCallTo(ins.Common(), "append") { - continue - } - if ins.Common().Args[0] != ssafn.Params[1] { - continue - } - j.Errorf(ins, "io.Writer.Write must not modify the provided buffer, not even temporarily") - } - } - } - } -} - -func loopedRegexp(name string) CallCheck { - return func(call *Call) { - if len(extractConsts(call.Args[0].Value.Value)) == 0 { - return - } - if !call.Checker.isInLoop(call.Instr.Block()) { - return - } - call.Invalid(fmt.Sprintf("calling %s in a loop has poor performance, consider using regexp.Compile", name)) - } -} - -func (c *Checker) CheckEmptyBranch(j *lint.Job) { - fn := func(node ast.Node) bool { - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } - ssafn := c.nodeFns[node] - if lint.IsExample(ssafn) { - return true - } - if ifstmt.Else != nil { - b, ok := ifstmt.Else.(*ast.BlockStmt) - if !ok || len(b.List) != 0 { - return true - } - j.Errorf(ifstmt.Else, "empty branch") - } - if len(ifstmt.Body.List) != 0 { - return true - } - j.Errorf(ifstmt, "empty branch") - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func (c *Checker) CheckMapBytesKey(j *lint.Job) { - for _, fn := range j.Program.InitialFunctions { - for _, b := range fn.Blocks { - insLoop: - for _, ins := range b.Instrs { - // find []byte -> string conversions - conv, ok := ins.(*ssa.Convert) - if !ok || conv.Type() != types.Universe.Lookup("string").Type() { - continue - } - if s, ok := conv.X.Type().(*types.Slice); !ok || s.Elem() != types.Universe.Lookup("byte").Type() { - continue - } - refs := conv.Referrers() - // need at least two (DebugRef) references: the - // conversion and the *ast.Ident - if refs == nil || len(*refs) < 2 { - continue - } - ident := false - // skip first reference, that's the conversion itself - for _, ref := range (*refs)[1:] { - switch ref := ref.(type) { - case *ssa.DebugRef: - if _, ok := ref.Expr.(*ast.Ident); !ok { - // the string seems to be used somewhere - // unexpected; the default branch should - // catch this already, but be safe - continue insLoop - } else { - ident = true - } - case *ssa.Lookup: - default: - // the string is used somewhere else than a - // map lookup - continue insLoop - } - } - - // the result of the conversion wasn't assigned to an - // identifier - if !ident { - continue - } - j.Errorf(conv, "m[string(key)] would be more efficient than k := string(key); m[k]") - } - } - } -} - -func (c *Checker) 
CheckRangeStringRunes(j *lint.Job) { - sharedcheck.CheckRangeStringRunes(c.nodeFns, j) -} - -func (c *Checker) CheckSelfAssignment(j *lint.Job) { - fn := func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return true - } - if assign.Tok != token.ASSIGN || len(assign.Lhs) != len(assign.Rhs) { - return true - } - for i, stmt := range assign.Lhs { - rlh := j.Render(stmt) - rrh := j.Render(assign.Rhs[i]) - if rlh == rrh { - j.Errorf(assign, "self-assignment of %s to %s", rrh, rlh) - } - } - return true - } - for _, f := range c.filterGenerated(j.Program.Files) { - ast.Inspect(f, fn) - } -} - -func buildTagsIdentical(s1, s2 []string) bool { - if len(s1) != len(s2) { - return false - } - s1s := make([]string, len(s1)) - copy(s1s, s1) - sort.Strings(s1s) - s2s := make([]string, len(s2)) - copy(s2s, s2) - sort.Strings(s2s) - for i, s := range s1s { - if s != s2s[i] { - return false - } - } - return true -} - -func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { - for _, f := range c.filterGenerated(job.Program.Files) { - constraints := buildTags(f) - for i, constraint1 := range constraints { - for j, constraint2 := range constraints { - if i >= j { - continue - } - if buildTagsIdentical(constraint1, constraint2) { - job.Errorf(f, "identical build constraints %q and %q", - strings.Join(constraint1, " "), - strings.Join(constraint2, " ")) - } - } - } - } -} diff --git a/vendor/honnef.co/go/tools/staticcheck/rules.go b/vendor/honnef.co/go/tools/staticcheck/rules.go deleted file mode 100644 index 60cc00c..0000000 --- a/vendor/honnef.co/go/tools/staticcheck/rules.go +++ /dev/null @@ -1,321 +0,0 @@ -package staticcheck - -import ( - "fmt" - "go/constant" - "go/types" - "net" - "net/url" - "regexp" - "sort" - "strconv" - "strings" - "time" - "unicode/utf8" - - "honnef.co/go/tools/lint" - "honnef.co/go/tools/ssa" - "honnef.co/go/tools/staticcheck/vrp" -) - -const ( - MsgInvalidHostPort = "invalid port or service name in host:port pair" - MsgInvalidUTF8 = "argument is not a valid UTF-8 encoded string" - MsgNonUniqueCutset = "cutset contains duplicate characters" -) - -type Call struct { - Job *lint.Job - Instr ssa.CallInstruction - Args []*Argument - - Checker *Checker - Parent *ssa.Function - - invalids []string -} - -func (c *Call) Invalid(msg string) { - c.invalids = append(c.invalids, msg) -} - -type Argument struct { - Value Value - invalids []string -} - -func (arg *Argument) Invalid(msg string) { - arg.invalids = append(arg.invalids, msg) -} - -type Value struct { - Value ssa.Value - Range vrp.Range -} - -type CallCheck func(call *Call) - -func extractConsts(v ssa.Value) []*ssa.Const { - switch v := v.(type) { - case *ssa.Const: - return []*ssa.Const{v} - case *ssa.MakeInterface: - return extractConsts(v.X) - default: - return nil - } -} - -func ValidateRegexp(v Value) error { - for _, c := range extractConsts(v.Value) { - if c.Value == nil { - continue - } - if c.Value.Kind() != constant.String { - continue - } - s := constant.StringVal(c.Value) - if _, err := regexp.Compile(s); err != nil { - return err - } - } - return nil -} - -func ValidateTimeLayout(v Value) error { - for _, c := range extractConsts(v.Value) { - if c.Value == nil { - continue - } - if c.Value.Kind() != constant.String { - continue - } - s := constant.StringVal(c.Value) - s = strings.Replace(s, "_", " ", -1) - s = strings.Replace(s, "Z", "-", -1) - _, err := time.Parse(s, s) - if err != nil { - return err - } - } - return nil -} - -func ValidateURL(v Value) error { - for _, c := range 
extractConsts(v.Value) { - if c.Value == nil { - continue - } - if c.Value.Kind() != constant.String { - continue - } - s := constant.StringVal(c.Value) - _, err := url.Parse(s) - if err != nil { - return fmt.Errorf("%q is not a valid URL: %s", s, err) - } - } - return nil -} - -func IntValue(v Value, z vrp.Z) bool { - r, ok := v.Range.(vrp.IntInterval) - if !ok || !r.IsKnown() { - return false - } - if r.Lower != r.Upper { - return false - } - if r.Lower.Cmp(z) == 0 { - return true - } - return false -} - -func InvalidUTF8(v Value) bool { - for _, c := range extractConsts(v.Value) { - if c.Value == nil { - continue - } - if c.Value.Kind() != constant.String { - continue - } - s := constant.StringVal(c.Value) - if !utf8.ValidString(s) { - return true - } - } - return false -} - -func UnbufferedChannel(v Value) bool { - r, ok := v.Range.(vrp.ChannelInterval) - if !ok || !r.IsKnown() { - return false - } - if r.Size.Lower.Cmp(vrp.NewZ(0)) == 0 && - r.Size.Upper.Cmp(vrp.NewZ(0)) == 0 { - return true - } - return false -} - -func Pointer(v Value) bool { - switch v.Value.Type().Underlying().(type) { - case *types.Pointer, *types.Interface: - return true - } - return false -} - -func ConvertedFromInt(v Value) bool { - conv, ok := v.Value.(*ssa.Convert) - if !ok { - return false - } - b, ok := conv.X.Type().Underlying().(*types.Basic) - if !ok { - return false - } - if (b.Info() & types.IsInteger) == 0 { - return false - } - return true -} - -func validEncodingBinaryType(j *lint.Job, typ types.Type) bool { - typ = typ.Underlying() - switch typ := typ.(type) { - case *types.Basic: - switch typ.Kind() { - case types.Uint8, types.Uint16, types.Uint32, types.Uint64, - types.Int8, types.Int16, types.Int32, types.Int64, - types.Float32, types.Float64, types.Complex64, types.Complex128, types.Invalid: - return true - case types.Bool: - return j.IsGoVersion(8) - } - return false - case *types.Struct: - n := typ.NumFields() - for i := 0; i < n; i++ { - if !validEncodingBinaryType(j, typ.Field(i).Type()) { - return false - } - } - return true - case *types.Array: - return validEncodingBinaryType(j, typ.Elem()) - case *types.Interface: - // we can't determine if it's a valid type or not - return true - } - return false -} - -func CanBinaryMarshal(j *lint.Job, v Value) bool { - typ := v.Value.Type().Underlying() - if ttyp, ok := typ.(*types.Pointer); ok { - typ = ttyp.Elem().Underlying() - } - if ttyp, ok := typ.(interface { - Elem() types.Type - }); ok { - if _, ok := ttyp.(*types.Pointer); !ok { - typ = ttyp.Elem() - } - } - - return validEncodingBinaryType(j, typ) -} - -func RepeatZeroTimes(name string, arg int) CallCheck { - return func(call *Call) { - arg := call.Args[arg] - if IntValue(arg.Value, vrp.NewZ(0)) { - arg.Invalid(fmt.Sprintf("calling %s with n == 0 will return no results, did you mean -1?", name)) - } - } -} - -func validateServiceName(s string) bool { - if len(s) < 1 || len(s) > 15 { - return false - } - if s[0] == '-' || s[len(s)-1] == '-' { - return false - } - if strings.Contains(s, "--") { - return false - } - hasLetter := false - for _, r := range s { - if (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') { - hasLetter = true - continue - } - if r >= '0' && r <= '9' { - continue - } - return false - } - return hasLetter -} - -func validatePort(s string) bool { - n, err := strconv.ParseInt(s, 10, 64) - if err != nil { - return validateServiceName(s) - } - return n >= 0 && n <= 65535 -} - -func ValidHostPort(v Value) bool { - for _, k := range extractConsts(v.Value) { - if k.Value == nil 
{ - continue - } - if k.Value.Kind() != constant.String { - continue - } - s := constant.StringVal(k.Value) - _, port, err := net.SplitHostPort(s) - if err != nil { - return false - } - // TODO(dh): check hostname - if !validatePort(port) { - return false - } - } - return true -} - -// ConvertedFrom reports whether value v was converted from type typ. -func ConvertedFrom(v Value, typ string) bool { - change, ok := v.Value.(*ssa.ChangeType) - return ok && types.TypeString(change.X.Type(), nil) == typ -} - -func UniqueStringCutset(v Value) bool { - for _, c := range extractConsts(v.Value) { - if c.Value == nil { - continue - } - if c.Value.Kind() != constant.String { - continue - } - s := constant.StringVal(c.Value) - rs := runeSlice(s) - if len(rs) < 2 { - continue - } - sort.Sort(rs) - for i, r := range rs[1:] { - if rs[i] == r { - return false - } - } - } - return true -} diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go b/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go deleted file mode 100644 index c8fbacb..0000000 --- a/vendor/honnef.co/go/tools/staticcheck/vrp/channel.go +++ /dev/null @@ -1,73 +0,0 @@ -package vrp - -import ( - "fmt" - - "honnef.co/go/tools/ssa" -) - -type ChannelInterval struct { - Size IntInterval -} - -func (c ChannelInterval) Union(other Range) Range { - i, ok := other.(ChannelInterval) - if !ok { - i = ChannelInterval{EmptyIntInterval} - } - if c.Size.Empty() || !c.Size.IsKnown() { - return i - } - if i.Size.Empty() || !i.Size.IsKnown() { - return c - } - return ChannelInterval{ - Size: c.Size.Union(i.Size).(IntInterval), - } -} - -func (c ChannelInterval) String() string { - return c.Size.String() -} - -func (c ChannelInterval) IsKnown() bool { - return c.Size.IsKnown() -} - -type MakeChannelConstraint struct { - aConstraint - Buffer ssa.Value -} -type ChannelChangeTypeConstraint struct { - aConstraint - X ssa.Value -} - -func NewMakeChannelConstraint(buffer, y ssa.Value) Constraint { - return &MakeChannelConstraint{NewConstraint(y), buffer} -} -func NewChannelChangeTypeConstraint(x, y ssa.Value) Constraint { - return &ChannelChangeTypeConstraint{NewConstraint(y), x} -} - -func (c *MakeChannelConstraint) Operands() []ssa.Value { return []ssa.Value{c.Buffer} } -func (c *ChannelChangeTypeConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} } - -func (c *MakeChannelConstraint) String() string { - return fmt.Sprintf("%s = make(chan, %s)", c.Y().Name, c.Buffer.Name()) -} -func (c *ChannelChangeTypeConstraint) String() string { - return fmt.Sprintf("%s = changetype(%s)", c.Y().Name, c.X.Name()) -} - -func (c *MakeChannelConstraint) Eval(g *Graph) Range { - i, ok := g.Range(c.Buffer).(IntInterval) - if !ok { - return ChannelInterval{NewIntInterval(NewZ(0), PInfinity)} - } - if i.Lower.Sign() == -1 { - i.Lower = NewZ(0) - } - return ChannelInterval{i} -} -func (c *ChannelChangeTypeConstraint) Eval(g *Graph) Range { return g.Range(c.X) } diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/int.go b/vendor/honnef.co/go/tools/staticcheck/vrp/int.go deleted file mode 100644 index 926bb7a..0000000 --- a/vendor/honnef.co/go/tools/staticcheck/vrp/int.go +++ /dev/null @@ -1,476 +0,0 @@ -package vrp - -import ( - "fmt" - "go/token" - "go/types" - "math/big" - - "honnef.co/go/tools/ssa" -) - -type Zs []Z - -func (zs Zs) Len() int { - return len(zs) -} - -func (zs Zs) Less(i int, j int) bool { - return zs[i].Cmp(zs[j]) == -1 -} - -func (zs Zs) Swap(i int, j int) { - zs[i], zs[j] = zs[j], zs[i] -} - -type Z struct { - infinity int8 - integer *big.Int 
-} - -func NewZ(n int64) Z { - return NewBigZ(big.NewInt(n)) -} - -func NewBigZ(n *big.Int) Z { - return Z{integer: n} -} - -func (z1 Z) Infinite() bool { - return z1.infinity != 0 -} - -func (z1 Z) Add(z2 Z) Z { - if z2.Sign() == -1 { - return z1.Sub(z2.Negate()) - } - if z1 == NInfinity { - return NInfinity - } - if z1 == PInfinity { - return PInfinity - } - if z2 == PInfinity { - return PInfinity - } - - if !z1.Infinite() && !z2.Infinite() { - n := &big.Int{} - n.Add(z1.integer, z2.integer) - return NewBigZ(n) - } - - panic(fmt.Sprintf("%s + %s is not defined", z1, z2)) -} - -func (z1 Z) Sub(z2 Z) Z { - if z2.Sign() == -1 { - return z1.Add(z2.Negate()) - } - if !z1.Infinite() && !z2.Infinite() { - n := &big.Int{} - n.Sub(z1.integer, z2.integer) - return NewBigZ(n) - } - - if z1 != PInfinity && z2 == PInfinity { - return NInfinity - } - if z1.Infinite() && !z2.Infinite() { - return Z{infinity: z1.infinity} - } - if z1 == PInfinity && z2 == PInfinity { - return PInfinity - } - panic(fmt.Sprintf("%s - %s is not defined", z1, z2)) -} - -func (z1 Z) Mul(z2 Z) Z { - if (z1.integer != nil && z1.integer.Sign() == 0) || - (z2.integer != nil && z2.integer.Sign() == 0) { - return NewBigZ(&big.Int{}) - } - - if z1.infinity != 0 || z2.infinity != 0 { - return Z{infinity: int8(z1.Sign() * z2.Sign())} - } - - n := &big.Int{} - n.Mul(z1.integer, z2.integer) - return NewBigZ(n) -} - -func (z1 Z) Negate() Z { - if z1.infinity == 1 { - return NInfinity - } - if z1.infinity == -1 { - return PInfinity - } - n := &big.Int{} - n.Neg(z1.integer) - return NewBigZ(n) -} - -func (z1 Z) Sign() int { - if z1.infinity != 0 { - return int(z1.infinity) - } - return z1.integer.Sign() -} - -func (z1 Z) String() string { - if z1 == NInfinity { - return "-∞" - } - if z1 == PInfinity { - return "∞" - } - return fmt.Sprintf("%d", z1.integer) -} - -func (z1 Z) Cmp(z2 Z) int { - if z1.infinity == z2.infinity && z1.infinity != 0 { - return 0 - } - if z1 == PInfinity { - return 1 - } - if z1 == NInfinity { - return -1 - } - if z2 == NInfinity { - return 1 - } - if z2 == PInfinity { - return -1 - } - return z1.integer.Cmp(z2.integer) -} - -func MaxZ(zs ...Z) Z { - if len(zs) == 0 { - panic("Max called with no arguments") - } - if len(zs) == 1 { - return zs[0] - } - ret := zs[0] - for _, z := range zs[1:] { - if z.Cmp(ret) == 1 { - ret = z - } - } - return ret -} - -func MinZ(zs ...Z) Z { - if len(zs) == 0 { - panic("Min called with no arguments") - } - if len(zs) == 1 { - return zs[0] - } - ret := zs[0] - for _, z := range zs[1:] { - if z.Cmp(ret) == -1 { - ret = z - } - } - return ret -} - -var NInfinity = Z{infinity: -1} -var PInfinity = Z{infinity: 1} -var EmptyIntInterval = IntInterval{true, PInfinity, NInfinity} - -func InfinityFor(v ssa.Value) IntInterval { - if b, ok := v.Type().Underlying().(*types.Basic); ok { - if (b.Info() & types.IsUnsigned) != 0 { - return NewIntInterval(NewZ(0), PInfinity) - } - } - return NewIntInterval(NInfinity, PInfinity) -} - -type IntInterval struct { - known bool - Lower Z - Upper Z -} - -func NewIntInterval(l, u Z) IntInterval { - if u.Cmp(l) == -1 { - return EmptyIntInterval - } - return IntInterval{known: true, Lower: l, Upper: u} -} - -func (i IntInterval) IsKnown() bool { - return i.known -} - -func (i IntInterval) Empty() bool { - return i.Lower == PInfinity && i.Upper == NInfinity -} - -func (i IntInterval) IsMaxRange() bool { - return i.Lower == NInfinity && i.Upper == PInfinity -} - -func (i1 IntInterval) Intersection(i2 IntInterval) IntInterval { - if !i1.IsKnown() { - return i2 - 
} - if !i2.IsKnown() { - return i1 - } - if i1.Empty() || i2.Empty() { - return EmptyIntInterval - } - i3 := NewIntInterval(MaxZ(i1.Lower, i2.Lower), MinZ(i1.Upper, i2.Upper)) - if i3.Lower.Cmp(i3.Upper) == 1 { - return EmptyIntInterval - } - return i3 -} - -func (i1 IntInterval) Union(other Range) Range { - i2, ok := other.(IntInterval) - if !ok { - i2 = EmptyIntInterval - } - if i1.Empty() || !i1.IsKnown() { - return i2 - } - if i2.Empty() || !i2.IsKnown() { - return i1 - } - return NewIntInterval(MinZ(i1.Lower, i2.Lower), MaxZ(i1.Upper, i2.Upper)) -} - -func (i1 IntInterval) Add(i2 IntInterval) IntInterval { - if i1.Empty() || i2.Empty() { - return EmptyIntInterval - } - l1, u1, l2, u2 := i1.Lower, i1.Upper, i2.Lower, i2.Upper - return NewIntInterval(l1.Add(l2), u1.Add(u2)) -} - -func (i1 IntInterval) Sub(i2 IntInterval) IntInterval { - if i1.Empty() || i2.Empty() { - return EmptyIntInterval - } - l1, u1, l2, u2 := i1.Lower, i1.Upper, i2.Lower, i2.Upper - return NewIntInterval(l1.Sub(u2), u1.Sub(l2)) -} - -func (i1 IntInterval) Mul(i2 IntInterval) IntInterval { - if i1.Empty() || i2.Empty() { - return EmptyIntInterval - } - x1, x2 := i1.Lower, i1.Upper - y1, y2 := i2.Lower, i2.Upper - return NewIntInterval( - MinZ(x1.Mul(y1), x1.Mul(y2), x2.Mul(y1), x2.Mul(y2)), - MaxZ(x1.Mul(y1), x1.Mul(y2), x2.Mul(y1), x2.Mul(y2)), - ) -} - -func (i1 IntInterval) String() string { - if !i1.IsKnown() { - return "[⊥, ⊥]" - } - if i1.Empty() { - return "{}" - } - return fmt.Sprintf("[%s, %s]", i1.Lower, i1.Upper) -} - -type IntArithmeticConstraint struct { - aConstraint - A ssa.Value - B ssa.Value - Op token.Token - Fn func(IntInterval, IntInterval) IntInterval -} - -type IntAddConstraint struct{ *IntArithmeticConstraint } -type IntSubConstraint struct{ *IntArithmeticConstraint } -type IntMulConstraint struct{ *IntArithmeticConstraint } - -type IntConversionConstraint struct { - aConstraint - X ssa.Value -} - -type IntIntersectionConstraint struct { - aConstraint - ranges Ranges - A ssa.Value - B ssa.Value - Op token.Token - I IntInterval - resolved bool -} - -type IntIntervalConstraint struct { - aConstraint - I IntInterval -} - -func NewIntArithmeticConstraint(a, b, y ssa.Value, op token.Token, fn func(IntInterval, IntInterval) IntInterval) *IntArithmeticConstraint { - return &IntArithmeticConstraint{NewConstraint(y), a, b, op, fn} -} -func NewIntAddConstraint(a, b, y ssa.Value) Constraint { - return &IntAddConstraint{NewIntArithmeticConstraint(a, b, y, token.ADD, IntInterval.Add)} -} -func NewIntSubConstraint(a, b, y ssa.Value) Constraint { - return &IntSubConstraint{NewIntArithmeticConstraint(a, b, y, token.SUB, IntInterval.Sub)} -} -func NewIntMulConstraint(a, b, y ssa.Value) Constraint { - return &IntMulConstraint{NewIntArithmeticConstraint(a, b, y, token.MUL, IntInterval.Mul)} -} -func NewIntConversionConstraint(x, y ssa.Value) Constraint { - return &IntConversionConstraint{NewConstraint(y), x} -} -func NewIntIntersectionConstraint(a, b ssa.Value, op token.Token, ranges Ranges, y ssa.Value) Constraint { - return &IntIntersectionConstraint{ - aConstraint: NewConstraint(y), - ranges: ranges, - A: a, - B: b, - Op: op, - } -} -func NewIntIntervalConstraint(i IntInterval, y ssa.Value) Constraint { - return &IntIntervalConstraint{NewConstraint(y), i} -} - -func (c *IntArithmeticConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} } -func (c *IntConversionConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} } -func (c *IntIntersectionConstraint) Operands() []ssa.Value { return 
[]ssa.Value{c.A} } -func (s *IntIntervalConstraint) Operands() []ssa.Value { return nil } - -func (c *IntArithmeticConstraint) String() string { - return fmt.Sprintf("%s = %s %s %s", c.Y().Name(), c.A.Name(), c.Op, c.B.Name()) -} -func (c *IntConversionConstraint) String() string { - return fmt.Sprintf("%s = %s(%s)", c.Y().Name(), c.Y().Type(), c.X.Name()) -} -func (c *IntIntersectionConstraint) String() string { - return fmt.Sprintf("%s = %s %s %s (%t branch)", c.Y().Name(), c.A.Name(), c.Op, c.B.Name(), c.Y().(*ssa.Sigma).Branch) -} -func (c *IntIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) } - -func (c *IntArithmeticConstraint) Eval(g *Graph) Range { - i1, i2 := g.Range(c.A).(IntInterval), g.Range(c.B).(IntInterval) - if !i1.IsKnown() || !i2.IsKnown() { - return IntInterval{} - } - return c.Fn(i1, i2) -} -func (c *IntConversionConstraint) Eval(g *Graph) Range { - s := &types.StdSizes{ - // XXX is it okay to assume the largest word size, or do we - // need to be platform specific? - WordSize: 8, - MaxAlign: 1, - } - fromI := g.Range(c.X).(IntInterval) - toI := g.Range(c.Y()).(IntInterval) - fromT := c.X.Type().Underlying().(*types.Basic) - toT := c.Y().Type().Underlying().(*types.Basic) - fromB := s.Sizeof(c.X.Type()) - toB := s.Sizeof(c.Y().Type()) - - if !fromI.IsKnown() { - return toI - } - if !toI.IsKnown() { - return fromI - } - - // uint -> sint/uint, M > N: [max(0, l1), min(2**N-1, u2)] - if (fromT.Info()&types.IsUnsigned != 0) && - toB > fromB { - - n := big.NewInt(1) - n.Lsh(n, uint(fromB*8)) - n.Sub(n, big.NewInt(1)) - return NewIntInterval( - MaxZ(NewZ(0), fromI.Lower), - MinZ(NewBigZ(n), toI.Upper), - ) - } - - // sint -> sint, M > N; [max(-∞, l1), min(2**N-1, u2)] - if (fromT.Info()&types.IsUnsigned == 0) && - (toT.Info()&types.IsUnsigned == 0) && - toB > fromB { - - n := big.NewInt(1) - n.Lsh(n, uint(fromB*8)) - n.Sub(n, big.NewInt(1)) - return NewIntInterval( - MaxZ(NInfinity, fromI.Lower), - MinZ(NewBigZ(n), toI.Upper), - ) - } - - return fromI -} -func (c *IntIntersectionConstraint) Eval(g *Graph) Range { - xi := g.Range(c.A).(IntInterval) - if !xi.IsKnown() { - return c.I - } - return xi.Intersection(c.I) -} -func (c *IntIntervalConstraint) Eval(*Graph) Range { return c.I } - -func (c *IntIntersectionConstraint) Futures() []ssa.Value { - return []ssa.Value{c.B} -} - -func (c *IntIntersectionConstraint) Resolve() { - r, ok := c.ranges[c.B].(IntInterval) - if !ok { - c.I = InfinityFor(c.Y()) - return - } - - switch c.Op { - case token.EQL: - c.I = r - case token.GTR: - c.I = NewIntInterval(r.Lower.Add(NewZ(1)), PInfinity) - case token.GEQ: - c.I = NewIntInterval(r.Lower, PInfinity) - case token.LSS: - // TODO(dh): do we need 0 instead of NInfinity for uints? 
- c.I = NewIntInterval(NInfinity, r.Upper.Sub(NewZ(1))) - case token.LEQ: - c.I = NewIntInterval(NInfinity, r.Upper) - case token.NEQ: - c.I = InfinityFor(c.Y()) - default: - panic("unsupported op " + c.Op.String()) - } -} - -func (c *IntIntersectionConstraint) IsKnown() bool { - return c.I.IsKnown() -} - -func (c *IntIntersectionConstraint) MarkUnresolved() { - c.resolved = false -} - -func (c *IntIntersectionConstraint) MarkResolved() { - c.resolved = true -} - -func (c *IntIntersectionConstraint) IsResolved() bool { - return c.resolved -} diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go b/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go deleted file mode 100644 index 40658dd..0000000 --- a/vendor/honnef.co/go/tools/staticcheck/vrp/slice.go +++ /dev/null @@ -1,273 +0,0 @@ -package vrp - -// TODO(dh): most of the constraints have implementations identical to -// that of strings. Consider reusing them. - -import ( - "fmt" - "go/types" - - "honnef.co/go/tools/ssa" -) - -type SliceInterval struct { - Length IntInterval -} - -func (s SliceInterval) Union(other Range) Range { - i, ok := other.(SliceInterval) - if !ok { - i = SliceInterval{EmptyIntInterval} - } - if s.Length.Empty() || !s.Length.IsKnown() { - return i - } - if i.Length.Empty() || !i.Length.IsKnown() { - return s - } - return SliceInterval{ - Length: s.Length.Union(i.Length).(IntInterval), - } -} -func (s SliceInterval) String() string { return s.Length.String() } -func (s SliceInterval) IsKnown() bool { return s.Length.IsKnown() } - -type SliceAppendConstraint struct { - aConstraint - A ssa.Value - B ssa.Value -} - -type SliceSliceConstraint struct { - aConstraint - X ssa.Value - Lower ssa.Value - Upper ssa.Value -} - -type ArraySliceConstraint struct { - aConstraint - X ssa.Value - Lower ssa.Value - Upper ssa.Value -} - -type SliceIntersectionConstraint struct { - aConstraint - X ssa.Value - I IntInterval -} - -type SliceLengthConstraint struct { - aConstraint - X ssa.Value -} - -type MakeSliceConstraint struct { - aConstraint - Size ssa.Value -} - -type SliceIntervalConstraint struct { - aConstraint - I IntInterval -} - -func NewSliceAppendConstraint(a, b, y ssa.Value) Constraint { - return &SliceAppendConstraint{NewConstraint(y), a, b} -} -func NewSliceSliceConstraint(x, lower, upper, y ssa.Value) Constraint { - return &SliceSliceConstraint{NewConstraint(y), x, lower, upper} -} -func NewArraySliceConstraint(x, lower, upper, y ssa.Value) Constraint { - return &ArraySliceConstraint{NewConstraint(y), x, lower, upper} -} -func NewSliceIntersectionConstraint(x ssa.Value, i IntInterval, y ssa.Value) Constraint { - return &SliceIntersectionConstraint{NewConstraint(y), x, i} -} -func NewSliceLengthConstraint(x, y ssa.Value) Constraint { - return &SliceLengthConstraint{NewConstraint(y), x} -} -func NewMakeSliceConstraint(size, y ssa.Value) Constraint { - return &MakeSliceConstraint{NewConstraint(y), size} -} -func NewSliceIntervalConstraint(i IntInterval, y ssa.Value) Constraint { - return &SliceIntervalConstraint{NewConstraint(y), i} -} - -func (c *SliceAppendConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} } -func (c *SliceSliceConstraint) Operands() []ssa.Value { - ops := []ssa.Value{c.X} - if c.Lower != nil { - ops = append(ops, c.Lower) - } - if c.Upper != nil { - ops = append(ops, c.Upper) - } - return ops -} -func (c *ArraySliceConstraint) Operands() []ssa.Value { - ops := []ssa.Value{c.X} - if c.Lower != nil { - ops = append(ops, c.Lower) - } - if c.Upper != nil { - ops = append(ops, c.Upper) 
- } - return ops -} -func (c *SliceIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} } -func (c *SliceLengthConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} } -func (c *MakeSliceConstraint) Operands() []ssa.Value { return []ssa.Value{c.Size} } -func (s *SliceIntervalConstraint) Operands() []ssa.Value { return nil } - -func (c *SliceAppendConstraint) String() string { - return fmt.Sprintf("%s = append(%s, %s)", c.Y().Name(), c.A.Name(), c.B.Name()) -} -func (c *SliceSliceConstraint) String() string { - var lname, uname string - if c.Lower != nil { - lname = c.Lower.Name() - } - if c.Upper != nil { - uname = c.Upper.Name() - } - return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname) -} -func (c *ArraySliceConstraint) String() string { - var lname, uname string - if c.Lower != nil { - lname = c.Lower.Name() - } - if c.Upper != nil { - uname = c.Upper.Name() - } - return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname) -} -func (c *SliceIntersectionConstraint) String() string { - return fmt.Sprintf("%s = %s.%t ⊓ %s", c.Y().Name(), c.X.Name(), c.Y().(*ssa.Sigma).Branch, c.I) -} -func (c *SliceLengthConstraint) String() string { - return fmt.Sprintf("%s = len(%s)", c.Y().Name(), c.X.Name()) -} -func (c *MakeSliceConstraint) String() string { - return fmt.Sprintf("%s = make(slice, %s)", c.Y().Name(), c.Size.Name()) -} -func (c *SliceIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) } - -func (c *SliceAppendConstraint) Eval(g *Graph) Range { - l1 := g.Range(c.A).(SliceInterval).Length - var l2 IntInterval - switch r := g.Range(c.B).(type) { - case SliceInterval: - l2 = r.Length - case StringInterval: - l2 = r.Length - default: - return SliceInterval{} - } - if !l1.IsKnown() || !l2.IsKnown() { - return SliceInterval{} - } - return SliceInterval{ - Length: l1.Add(l2), - } -} -func (c *SliceSliceConstraint) Eval(g *Graph) Range { - lr := NewIntInterval(NewZ(0), NewZ(0)) - if c.Lower != nil { - lr = g.Range(c.Lower).(IntInterval) - } - ur := g.Range(c.X).(SliceInterval).Length - if c.Upper != nil { - ur = g.Range(c.Upper).(IntInterval) - } - if !lr.IsKnown() || !ur.IsKnown() { - return SliceInterval{} - } - - ls := []Z{ - ur.Lower.Sub(lr.Lower), - ur.Upper.Sub(lr.Lower), - ur.Lower.Sub(lr.Upper), - ur.Upper.Sub(lr.Upper), - } - // TODO(dh): if we don't truncate lengths to 0 we might be able to - // easily detect slices with high < low. we'd need to treat -∞ - // specially, though. - for i, l := range ls { - if l.Sign() == -1 { - ls[i] = NewZ(0) - } - } - - return SliceInterval{ - Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)), - } -} -func (c *ArraySliceConstraint) Eval(g *Graph) Range { - lr := NewIntInterval(NewZ(0), NewZ(0)) - if c.Lower != nil { - lr = g.Range(c.Lower).(IntInterval) - } - var l int64 - switch typ := c.X.Type().(type) { - case *types.Array: - l = typ.Len() - case *types.Pointer: - l = typ.Elem().(*types.Array).Len() - } - ur := NewIntInterval(NewZ(l), NewZ(l)) - if c.Upper != nil { - ur = g.Range(c.Upper).(IntInterval) - } - if !lr.IsKnown() || !ur.IsKnown() { - return SliceInterval{} - } - - ls := []Z{ - ur.Lower.Sub(lr.Lower), - ur.Upper.Sub(lr.Lower), - ur.Lower.Sub(lr.Upper), - ur.Upper.Sub(lr.Upper), - } - // TODO(dh): if we don't truncate lengths to 0 we might be able to - // easily detect slices with high < low. we'd need to treat -∞ - // specially, though. 
- for i, l := range ls { - if l.Sign() == -1 { - ls[i] = NewZ(0) - } - } - - return SliceInterval{ - Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)), - } -} -func (c *SliceIntersectionConstraint) Eval(g *Graph) Range { - xi := g.Range(c.X).(SliceInterval) - if !xi.IsKnown() { - return c.I - } - return SliceInterval{ - Length: xi.Length.Intersection(c.I), - } -} -func (c *SliceLengthConstraint) Eval(g *Graph) Range { - i := g.Range(c.X).(SliceInterval).Length - if !i.IsKnown() { - return NewIntInterval(NewZ(0), PInfinity) - } - return i -} -func (c *MakeSliceConstraint) Eval(g *Graph) Range { - i, ok := g.Range(c.Size).(IntInterval) - if !ok { - return SliceInterval{NewIntInterval(NewZ(0), PInfinity)} - } - if i.Lower.Sign() == -1 { - i.Lower = NewZ(0) - } - return SliceInterval{i} -} -func (c *SliceIntervalConstraint) Eval(*Graph) Range { return SliceInterval{c.I} } diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/string.go b/vendor/honnef.co/go/tools/staticcheck/vrp/string.go deleted file mode 100644 index e05877f..0000000 --- a/vendor/honnef.co/go/tools/staticcheck/vrp/string.go +++ /dev/null @@ -1,258 +0,0 @@ -package vrp - -import ( - "fmt" - "go/token" - "go/types" - - "honnef.co/go/tools/ssa" -) - -type StringInterval struct { - Length IntInterval -} - -func (s StringInterval) Union(other Range) Range { - i, ok := other.(StringInterval) - if !ok { - i = StringInterval{EmptyIntInterval} - } - if s.Length.Empty() || !s.Length.IsKnown() { - return i - } - if i.Length.Empty() || !i.Length.IsKnown() { - return s - } - return StringInterval{ - Length: s.Length.Union(i.Length).(IntInterval), - } -} - -func (s StringInterval) String() string { - return s.Length.String() -} - -func (s StringInterval) IsKnown() bool { - return s.Length.IsKnown() -} - -type StringSliceConstraint struct { - aConstraint - X ssa.Value - Lower ssa.Value - Upper ssa.Value -} - -type StringIntersectionConstraint struct { - aConstraint - ranges Ranges - A ssa.Value - B ssa.Value - Op token.Token - I IntInterval - resolved bool -} - -type StringConcatConstraint struct { - aConstraint - A ssa.Value - B ssa.Value -} - -type StringLengthConstraint struct { - aConstraint - X ssa.Value -} - -type StringIntervalConstraint struct { - aConstraint - I IntInterval -} - -func NewStringSliceConstraint(x, lower, upper, y ssa.Value) Constraint { - return &StringSliceConstraint{NewConstraint(y), x, lower, upper} -} -func NewStringIntersectionConstraint(a, b ssa.Value, op token.Token, ranges Ranges, y ssa.Value) Constraint { - return &StringIntersectionConstraint{ - aConstraint: NewConstraint(y), - ranges: ranges, - A: a, - B: b, - Op: op, - } -} -func NewStringConcatConstraint(a, b, y ssa.Value) Constraint { - return &StringConcatConstraint{NewConstraint(y), a, b} -} -func NewStringLengthConstraint(x ssa.Value, y ssa.Value) Constraint { - return &StringLengthConstraint{NewConstraint(y), x} -} -func NewStringIntervalConstraint(i IntInterval, y ssa.Value) Constraint { - return &StringIntervalConstraint{NewConstraint(y), i} -} - -func (c *StringSliceConstraint) Operands() []ssa.Value { - vs := []ssa.Value{c.X} - if c.Lower != nil { - vs = append(vs, c.Lower) - } - if c.Upper != nil { - vs = append(vs, c.Upper) - } - return vs -} -func (c *StringIntersectionConstraint) Operands() []ssa.Value { return []ssa.Value{c.A} } -func (c StringConcatConstraint) Operands() []ssa.Value { return []ssa.Value{c.A, c.B} } -func (c *StringLengthConstraint) Operands() []ssa.Value { return []ssa.Value{c.X} } -func (s *StringIntervalConstraint) 
Operands() []ssa.Value { return nil } - -func (c *StringSliceConstraint) String() string { - var lname, uname string - if c.Lower != nil { - lname = c.Lower.Name() - } - if c.Upper != nil { - uname = c.Upper.Name() - } - return fmt.Sprintf("%s[%s:%s]", c.X.Name(), lname, uname) -} -func (c *StringIntersectionConstraint) String() string { - return fmt.Sprintf("%s = %s %s %s (%t branch)", c.Y().Name(), c.A.Name(), c.Op, c.B.Name(), c.Y().(*ssa.Sigma).Branch) -} -func (c StringConcatConstraint) String() string { - return fmt.Sprintf("%s = %s + %s", c.Y().Name(), c.A.Name(), c.B.Name()) -} -func (c *StringLengthConstraint) String() string { - return fmt.Sprintf("%s = len(%s)", c.Y().Name(), c.X.Name()) -} -func (c *StringIntervalConstraint) String() string { return fmt.Sprintf("%s = %s", c.Y().Name(), c.I) } - -func (c *StringSliceConstraint) Eval(g *Graph) Range { - lr := NewIntInterval(NewZ(0), NewZ(0)) - if c.Lower != nil { - lr = g.Range(c.Lower).(IntInterval) - } - ur := g.Range(c.X).(StringInterval).Length - if c.Upper != nil { - ur = g.Range(c.Upper).(IntInterval) - } - if !lr.IsKnown() || !ur.IsKnown() { - return StringInterval{} - } - - ls := []Z{ - ur.Lower.Sub(lr.Lower), - ur.Upper.Sub(lr.Lower), - ur.Lower.Sub(lr.Upper), - ur.Upper.Sub(lr.Upper), - } - // TODO(dh): if we don't truncate lengths to 0 we might be able to - // easily detect slices with high < low. we'd need to treat -∞ - // specially, though. - for i, l := range ls { - if l.Sign() == -1 { - ls[i] = NewZ(0) - } - } - - return StringInterval{ - Length: NewIntInterval(MinZ(ls...), MaxZ(ls...)), - } -} -func (c *StringIntersectionConstraint) Eval(g *Graph) Range { - var l IntInterval - switch r := g.Range(c.A).(type) { - case StringInterval: - l = r.Length - case IntInterval: - l = r - } - - if !l.IsKnown() { - return StringInterval{c.I} - } - return StringInterval{ - Length: l.Intersection(c.I), - } -} -func (c StringConcatConstraint) Eval(g *Graph) Range { - i1, i2 := g.Range(c.A).(StringInterval), g.Range(c.B).(StringInterval) - if !i1.Length.IsKnown() || !i2.Length.IsKnown() { - return StringInterval{} - } - return StringInterval{ - Length: i1.Length.Add(i2.Length), - } -} -func (c *StringLengthConstraint) Eval(g *Graph) Range { - i := g.Range(c.X).(StringInterval).Length - if !i.IsKnown() { - return NewIntInterval(NewZ(0), PInfinity) - } - return i -} -func (c *StringIntervalConstraint) Eval(*Graph) Range { return StringInterval{c.I} } - -func (c *StringIntersectionConstraint) Futures() []ssa.Value { - return []ssa.Value{c.B} -} - -func (c *StringIntersectionConstraint) Resolve() { - if (c.A.Type().Underlying().(*types.Basic).Info() & types.IsString) != 0 { - // comparing two strings - r, ok := c.ranges[c.B].(StringInterval) - if !ok { - c.I = NewIntInterval(NewZ(0), PInfinity) - return - } - switch c.Op { - case token.EQL: - c.I = r.Length - case token.GTR, token.GEQ: - c.I = NewIntInterval(r.Length.Lower, PInfinity) - case token.LSS, token.LEQ: - c.I = NewIntInterval(NewZ(0), r.Length.Upper) - case token.NEQ: - default: - panic("unsupported op " + c.Op.String()) - } - } else { - r, ok := c.ranges[c.B].(IntInterval) - if !ok { - c.I = NewIntInterval(NewZ(0), PInfinity) - return - } - // comparing two lengths - switch c.Op { - case token.EQL: - c.I = r - case token.GTR: - c.I = NewIntInterval(r.Lower.Add(NewZ(1)), PInfinity) - case token.GEQ: - c.I = NewIntInterval(r.Lower, PInfinity) - case token.LSS: - c.I = NewIntInterval(NInfinity, r.Upper.Sub(NewZ(1))) - case token.LEQ: - c.I = NewIntInterval(NInfinity, r.Upper) 
- case token.NEQ: - default: - panic("unsupported op " + c.Op.String()) - } - } -} - -func (c *StringIntersectionConstraint) IsKnown() bool { - return c.I.IsKnown() -} - -func (c *StringIntersectionConstraint) MarkUnresolved() { - c.resolved = false -} - -func (c *StringIntersectionConstraint) MarkResolved() { - c.resolved = true -} - -func (c *StringIntersectionConstraint) IsResolved() bool { - return c.resolved -} diff --git a/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go b/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go deleted file mode 100644 index cb17f04..0000000 --- a/vendor/honnef.co/go/tools/staticcheck/vrp/vrp.go +++ /dev/null @@ -1,1049 +0,0 @@ -package vrp - -// TODO(dh) widening and narrowing have a lot of code in common. Make -// it reusable. - -import ( - "fmt" - "go/constant" - "go/token" - "go/types" - "math/big" - "sort" - "strings" - - "honnef.co/go/tools/ssa" -) - -type Future interface { - Constraint - Futures() []ssa.Value - Resolve() - IsKnown() bool - MarkUnresolved() - MarkResolved() - IsResolved() bool -} - -type Range interface { - Union(other Range) Range - IsKnown() bool -} - -type Constraint interface { - Y() ssa.Value - isConstraint() - String() string - Eval(*Graph) Range - Operands() []ssa.Value -} - -type aConstraint struct { - y ssa.Value -} - -func NewConstraint(y ssa.Value) aConstraint { - return aConstraint{y} -} - -func (aConstraint) isConstraint() {} -func (c aConstraint) Y() ssa.Value { return c.y } - -type PhiConstraint struct { - aConstraint - Vars []ssa.Value -} - -func NewPhiConstraint(vars []ssa.Value, y ssa.Value) Constraint { - uniqm := map[ssa.Value]struct{}{} - for _, v := range vars { - uniqm[v] = struct{}{} - } - var uniq []ssa.Value - for v := range uniqm { - uniq = append(uniq, v) - } - return &PhiConstraint{ - aConstraint: NewConstraint(y), - Vars: uniq, - } -} - -func (c *PhiConstraint) Operands() []ssa.Value { - return c.Vars -} - -func (c *PhiConstraint) Eval(g *Graph) Range { - i := Range(nil) - for _, v := range c.Vars { - i = g.Range(v).Union(i) - } - return i -} - -func (c *PhiConstraint) String() string { - names := make([]string, len(c.Vars)) - for i, v := range c.Vars { - names[i] = v.Name() - } - return fmt.Sprintf("%s = φ(%s)", c.Y().Name(), strings.Join(names, ", ")) -} - -func isSupportedType(typ types.Type) bool { - switch typ := typ.Underlying().(type) { - case *types.Basic: - switch typ.Kind() { - case types.String, types.UntypedString: - return true - default: - if (typ.Info() & types.IsInteger) == 0 { - return false - } - } - case *types.Chan: - return true - case *types.Slice: - return true - default: - return false - } - return true -} - -func ConstantToZ(c constant.Value) Z { - s := constant.ToInt(c).ExactString() - n := &big.Int{} - n.SetString(s, 10) - return NewBigZ(n) -} - -func sigmaInteger(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint { - op := cond.Op - if !ins.Branch { - op = (invertToken(op)) - } - - switch op { - case token.EQL, token.GTR, token.GEQ, token.LSS, token.LEQ: - default: - return nil - } - var a, b ssa.Value - if (*ops[0]) == ins.X { - a = *ops[0] - b = *ops[1] - } else { - a = *ops[1] - b = *ops[0] - op = flipToken(op) - } - return NewIntIntersectionConstraint(a, b, op, g.ranges, ins) -} - -func sigmaString(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint { - op := cond.Op - if !ins.Branch { - op = (invertToken(op)) - } - - switch op { - case token.EQL, token.GTR, token.GEQ, token.LSS, token.LEQ: - default: - return nil - } - - if 
((*ops[0]).Type().Underlying().(*types.Basic).Info() & types.IsString) == 0 { - var a, b ssa.Value - call, ok := (*ops[0]).(*ssa.Call) - if ok && call.Common().Args[0] == ins.X { - a = *ops[0] - b = *ops[1] - } else { - a = *ops[1] - b = *ops[0] - op = flipToken(op) - } - return NewStringIntersectionConstraint(a, b, op, g.ranges, ins) - } - var a, b ssa.Value - if (*ops[0]) == ins.X { - a = *ops[0] - b = *ops[1] - } else { - a = *ops[1] - b = *ops[0] - op = flipToken(op) - } - return NewStringIntersectionConstraint(a, b, op, g.ranges, ins) -} - -func sigmaSlice(g *Graph, ins *ssa.Sigma, cond *ssa.BinOp, ops []*ssa.Value) Constraint { - // TODO(dh) sigmaSlice and sigmaString are a lot alike. Can they - // be merged? - // - // XXX support futures - - op := cond.Op - if !ins.Branch { - op = (invertToken(op)) - } - - k, ok := (*ops[1]).(*ssa.Const) - // XXX investigate in what cases this wouldn't be a Const - // - // XXX what if left and right are swapped? - if !ok { - return nil - } - - call, ok := (*ops[0]).(*ssa.Call) - if !ok { - return nil - } - builtin, ok := call.Common().Value.(*ssa.Builtin) - if !ok { - return nil - } - if builtin.Name() != "len" { - return nil - } - callops := call.Operands(nil) - - v := ConstantToZ(k.Value) - c := NewSliceIntersectionConstraint(*callops[1], IntInterval{}, ins).(*SliceIntersectionConstraint) - switch op { - case token.EQL: - c.I = NewIntInterval(v, v) - case token.GTR, token.GEQ: - off := int64(0) - if cond.Op == token.GTR { - off = 1 - } - c.I = NewIntInterval( - v.Add(NewZ(off)), - PInfinity, - ) - case token.LSS, token.LEQ: - off := int64(0) - if cond.Op == token.LSS { - off = -1 - } - c.I = NewIntInterval( - NInfinity, - v.Add(NewZ(off)), - ) - default: - return nil - } - return c -} - -func BuildGraph(f *ssa.Function) *Graph { - g := &Graph{ - Vertices: map[interface{}]*Vertex{}, - ranges: Ranges{}, - } - - var cs []Constraint - - ops := make([]*ssa.Value, 16) - seen := map[ssa.Value]bool{} - for _, block := range f.Blocks { - for _, ins := range block.Instrs { - ops = ins.Operands(ops[:0]) - for _, op := range ops { - if c, ok := (*op).(*ssa.Const); ok { - if seen[c] { - continue - } - seen[c] = true - if c.Value == nil { - switch c.Type().Underlying().(type) { - case *types.Slice: - cs = append(cs, NewSliceIntervalConstraint(NewIntInterval(NewZ(0), NewZ(0)), c)) - } - continue - } - switch c.Value.Kind() { - case constant.Int: - v := ConstantToZ(c.Value) - cs = append(cs, NewIntIntervalConstraint(NewIntInterval(v, v), c)) - case constant.String: - s := constant.StringVal(c.Value) - n := NewZ(int64(len(s))) - cs = append(cs, NewStringIntervalConstraint(NewIntInterval(n, n), c)) - } - } - } - } - } - for _, block := range f.Blocks { - for _, ins := range block.Instrs { - switch ins := ins.(type) { - case *ssa.Convert: - switch v := ins.Type().Underlying().(type) { - case *types.Basic: - if (v.Info() & types.IsInteger) == 0 { - continue - } - cs = append(cs, NewIntConversionConstraint(ins.X, ins)) - } - case *ssa.Call: - if static := ins.Common().StaticCallee(); static != nil { - if fn, ok := static.Object().(*types.Func); ok { - switch fn.FullName() { - case "bytes.Index", "bytes.IndexAny", "bytes.IndexByte", - "bytes.IndexFunc", "bytes.IndexRune", "bytes.LastIndex", - "bytes.LastIndexAny", "bytes.LastIndexByte", "bytes.LastIndexFunc", - "strings.Index", "strings.IndexAny", "strings.IndexByte", - "strings.IndexFunc", "strings.IndexRune", "strings.LastIndex", - "strings.LastIndexAny", "strings.LastIndexByte", "strings.LastIndexFunc": - // 
TODO(dh): instead of limiting by +∞, - // limit by the upper bound of the passed - // string - cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(-1), PInfinity), ins)) - case "bytes.Title", "bytes.ToLower", "bytes.ToTitle", "bytes.ToUpper", - "strings.Title", "strings.ToLower", "strings.ToTitle", "strings.ToUpper": - cs = append(cs, NewCopyConstraint(ins.Common().Args[0], ins)) - case "bytes.ToLowerSpecial", "bytes.ToTitleSpecial", "bytes.ToUpperSpecial", - "strings.ToLowerSpecial", "strings.ToTitleSpecial", "strings.ToUpperSpecial": - cs = append(cs, NewCopyConstraint(ins.Common().Args[1], ins)) - case "bytes.Compare", "strings.Compare": - cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(-1), NewZ(1)), ins)) - case "bytes.Count", "strings.Count": - // TODO(dh): instead of limiting by +∞, - // limit by the upper bound of the passed - // string. - cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(0), PInfinity), ins)) - case "bytes.Map", "bytes.TrimFunc", "bytes.TrimLeft", "bytes.TrimLeftFunc", - "bytes.TrimRight", "bytes.TrimRightFunc", "bytes.TrimSpace", - "strings.Map", "strings.TrimFunc", "strings.TrimLeft", "strings.TrimLeftFunc", - "strings.TrimRight", "strings.TrimRightFunc", "strings.TrimSpace": - // TODO(dh): lower = 0, upper = upper of passed string - case "bytes.TrimPrefix", "bytes.TrimSuffix", - "strings.TrimPrefix", "strings.TrimSuffix": - // TODO(dh) range between "unmodified" and len(cutset) removed - case "(*bytes.Buffer).Cap", "(*bytes.Buffer).Len", "(*bytes.Reader).Len", "(*bytes.Reader).Size": - cs = append(cs, NewIntIntervalConstraint(NewIntInterval(NewZ(0), PInfinity), ins)) - } - } - } - builtin, ok := ins.Common().Value.(*ssa.Builtin) - ops := ins.Operands(nil) - if !ok { - continue - } - switch builtin.Name() { - case "len": - switch op1 := (*ops[1]).Type().Underlying().(type) { - case *types.Basic: - if op1.Kind() == types.String || op1.Kind() == types.UntypedString { - cs = append(cs, NewStringLengthConstraint(*ops[1], ins)) - } - case *types.Slice: - cs = append(cs, NewSliceLengthConstraint(*ops[1], ins)) - } - - case "append": - cs = append(cs, NewSliceAppendConstraint(ins.Common().Args[0], ins.Common().Args[1], ins)) - } - case *ssa.BinOp: - ops := ins.Operands(nil) - basic, ok := (*ops[0]).Type().Underlying().(*types.Basic) - if !ok { - continue - } - switch basic.Kind() { - case types.Int, types.Int8, types.Int16, types.Int32, types.Int64, - types.Uint, types.Uint8, types.Uint16, types.Uint32, types.Uint64, types.UntypedInt: - fns := map[token.Token]func(ssa.Value, ssa.Value, ssa.Value) Constraint{ - token.ADD: NewIntAddConstraint, - token.SUB: NewIntSubConstraint, - token.MUL: NewIntMulConstraint, - // XXX support QUO, REM, SHL, SHR - } - fn, ok := fns[ins.Op] - if ok { - cs = append(cs, fn(*ops[0], *ops[1], ins)) - } - case types.String, types.UntypedString: - if ins.Op == token.ADD { - cs = append(cs, NewStringConcatConstraint(*ops[0], *ops[1], ins)) - } - } - case *ssa.Slice: - typ := ins.X.Type().Underlying() - switch typ := typ.(type) { - case *types.Basic: - cs = append(cs, NewStringSliceConstraint(ins.X, ins.Low, ins.High, ins)) - case *types.Slice: - cs = append(cs, NewSliceSliceConstraint(ins.X, ins.Low, ins.High, ins)) - case *types.Array: - cs = append(cs, NewArraySliceConstraint(ins.X, ins.Low, ins.High, ins)) - case *types.Pointer: - if _, ok := typ.Elem().(*types.Array); !ok { - continue - } - cs = append(cs, NewArraySliceConstraint(ins.X, ins.Low, ins.High, ins)) - } - case *ssa.Phi: - if 
!isSupportedType(ins.Type()) { - continue - } - ops := ins.Operands(nil) - dops := make([]ssa.Value, len(ops)) - for i, op := range ops { - dops[i] = *op - } - cs = append(cs, NewPhiConstraint(dops, ins)) - case *ssa.Sigma: - pred := ins.Block().Preds[0] - instrs := pred.Instrs - cond, ok := instrs[len(instrs)-1].(*ssa.If).Cond.(*ssa.BinOp) - ops := cond.Operands(nil) - if !ok { - continue - } - switch typ := ins.Type().Underlying().(type) { - case *types.Basic: - var c Constraint - switch typ.Kind() { - case types.Int, types.Int8, types.Int16, types.Int32, types.Int64, - types.Uint, types.Uint8, types.Uint16, types.Uint32, types.Uint64, types.UntypedInt: - c = sigmaInteger(g, ins, cond, ops) - case types.String, types.UntypedString: - c = sigmaString(g, ins, cond, ops) - } - if c != nil { - cs = append(cs, c) - } - case *types.Slice: - c := sigmaSlice(g, ins, cond, ops) - if c != nil { - cs = append(cs, c) - } - default: - //log.Printf("unsupported sigma type %T", typ) // XXX - } - case *ssa.MakeChan: - cs = append(cs, NewMakeChannelConstraint(ins.Size, ins)) - case *ssa.MakeSlice: - cs = append(cs, NewMakeSliceConstraint(ins.Len, ins)) - case *ssa.ChangeType: - switch ins.X.Type().Underlying().(type) { - case *types.Chan: - cs = append(cs, NewChannelChangeTypeConstraint(ins.X, ins)) - } - } - } - } - - for _, c := range cs { - if c == nil { - panic("nil constraint") - } - // If V is used in constraint C, then we create an edge V->C - for _, op := range c.Operands() { - g.AddEdge(op, c, false) - } - if c, ok := c.(Future); ok { - for _, op := range c.Futures() { - g.AddEdge(op, c, true) - } - } - // If constraint C defines variable V, then we create an edge - // C->V - g.AddEdge(c, c.Y(), false) - } - - g.FindSCCs() - g.sccEdges = make([][]Edge, len(g.SCCs)) - g.futures = make([][]Future, len(g.SCCs)) - for _, e := range g.Edges { - g.sccEdges[e.From.SCC] = append(g.sccEdges[e.From.SCC], e) - if !e.control { - continue - } - if c, ok := e.To.Value.(Future); ok { - g.futures[e.From.SCC] = append(g.futures[e.From.SCC], c) - } - } - return g -} - -func (g *Graph) Solve() Ranges { - var consts []Z - off := NewZ(1) - for _, n := range g.Vertices { - if c, ok := n.Value.(*ssa.Const); ok { - basic, ok := c.Type().Underlying().(*types.Basic) - if !ok { - continue - } - if (basic.Info() & types.IsInteger) != 0 { - z := ConstantToZ(c.Value) - consts = append(consts, z) - consts = append(consts, z.Add(off)) - consts = append(consts, z.Sub(off)) - } - } - - } - sort.Sort(Zs(consts)) - - for scc, vertices := range g.SCCs { - n := 0 - n = len(vertices) - if n == 1 { - g.resolveFutures(scc) - v := vertices[0] - if v, ok := v.Value.(ssa.Value); ok { - switch typ := v.Type().Underlying().(type) { - case *types.Basic: - switch typ.Kind() { - case types.String, types.UntypedString: - if !g.Range(v).(StringInterval).IsKnown() { - g.SetRange(v, StringInterval{NewIntInterval(NewZ(0), PInfinity)}) - } - default: - if !g.Range(v).(IntInterval).IsKnown() { - g.SetRange(v, InfinityFor(v)) - } - } - case *types.Chan: - if !g.Range(v).(ChannelInterval).IsKnown() { - g.SetRange(v, ChannelInterval{NewIntInterval(NewZ(0), PInfinity)}) - } - case *types.Slice: - if !g.Range(v).(SliceInterval).IsKnown() { - g.SetRange(v, SliceInterval{NewIntInterval(NewZ(0), PInfinity)}) - } - } - } - if c, ok := v.Value.(Constraint); ok { - g.SetRange(c.Y(), c.Eval(g)) - } - } else { - uses := g.uses(scc) - entries := g.entries(scc) - for len(entries) > 0 { - v := entries[len(entries)-1] - entries = entries[:len(entries)-1] - for _, use 
:= range uses[v] { - if g.widen(use, consts) { - entries = append(entries, use.Y()) - } - } - } - - g.resolveFutures(scc) - - // XXX this seems to be necessary, but shouldn't be. - // removing it leads to nil pointer derefs; investigate - // where we're not setting values correctly. - for _, n := range vertices { - if v, ok := n.Value.(ssa.Value); ok { - i, ok := g.Range(v).(IntInterval) - if !ok { - continue - } - if !i.IsKnown() { - g.SetRange(v, InfinityFor(v)) - } - } - } - - actives := g.actives(scc) - for len(actives) > 0 { - v := actives[len(actives)-1] - actives = actives[:len(actives)-1] - for _, use := range uses[v] { - if g.narrow(use) { - actives = append(actives, use.Y()) - } - } - } - } - // propagate scc - for _, edge := range g.sccEdges[scc] { - if edge.control { - continue - } - if edge.From.SCC == edge.To.SCC { - continue - } - if c, ok := edge.To.Value.(Constraint); ok { - g.SetRange(c.Y(), c.Eval(g)) - } - if c, ok := edge.To.Value.(Future); ok { - if !c.IsKnown() { - c.MarkUnresolved() - } - } - } - } - - for v, r := range g.ranges { - i, ok := r.(IntInterval) - if !ok { - continue - } - if (v.Type().Underlying().(*types.Basic).Info() & types.IsUnsigned) == 0 { - if i.Upper != PInfinity { - s := &types.StdSizes{ - // XXX is it okay to assume the largest word size, or do we - // need to be platform specific? - WordSize: 8, - MaxAlign: 1, - } - bits := (s.Sizeof(v.Type()) * 8) - 1 - n := big.NewInt(1) - n = n.Lsh(n, uint(bits)) - upper, lower := &big.Int{}, &big.Int{} - upper.Sub(n, big.NewInt(1)) - lower.Neg(n) - - if i.Upper.Cmp(NewBigZ(upper)) == 1 { - i = NewIntInterval(NInfinity, PInfinity) - } else if i.Lower.Cmp(NewBigZ(lower)) == -1 { - i = NewIntInterval(NInfinity, PInfinity) - } - } - } - - g.ranges[v] = i - } - - return g.ranges -} - -func VertexString(v *Vertex) string { - switch v := v.Value.(type) { - case Constraint: - return v.String() - case ssa.Value: - return v.Name() - case nil: - return "BUG: nil vertex value" - default: - panic(fmt.Sprintf("unexpected type %T", v)) - } -} - -type Vertex struct { - Value interface{} // one of Constraint or ssa.Value - SCC int - index int - lowlink int - stack bool - - Succs []Edge -} - -type Ranges map[ssa.Value]Range - -func (r Ranges) Get(x ssa.Value) Range { - if x == nil { - return nil - } - i, ok := r[x] - if !ok { - switch x := x.Type().Underlying().(type) { - case *types.Basic: - switch x.Kind() { - case types.String, types.UntypedString: - return StringInterval{} - default: - return IntInterval{} - } - case *types.Chan: - return ChannelInterval{} - case *types.Slice: - return SliceInterval{} - } - } - return i -} - -type Graph struct { - Vertices map[interface{}]*Vertex - Edges []Edge - SCCs [][]*Vertex - ranges Ranges - - // map SCCs to futures - futures [][]Future - // map SCCs to edges - sccEdges [][]Edge -} - -func (g Graph) Graphviz() string { - var lines []string - lines = append(lines, "digraph{") - ids := map[interface{}]int{} - i := 1 - for _, v := range g.Vertices { - ids[v] = i - shape := "box" - if _, ok := v.Value.(ssa.Value); ok { - shape = "oval" - } - lines = append(lines, fmt.Sprintf(`n%d [shape="%s", label=%q, colorscheme=spectral11, style="filled", fillcolor="%d"]`, - i, shape, VertexString(v), (v.SCC%11)+1)) - i++ - } - for _, e := range g.Edges { - style := "solid" - if e.control { - style = "dashed" - } - lines = append(lines, fmt.Sprintf(`n%d -> n%d [style="%s"]`, ids[e.From], ids[e.To], style)) - } - lines = append(lines, "}") - return strings.Join(lines, "\n") -} - -func (g *Graph) 
SetRange(x ssa.Value, r Range) { - g.ranges[x] = r -} - -func (g *Graph) Range(x ssa.Value) Range { - return g.ranges.Get(x) -} - -func (g *Graph) widen(c Constraint, consts []Z) bool { - setRange := func(i Range) { - g.SetRange(c.Y(), i) - } - widenIntInterval := func(oi, ni IntInterval) (IntInterval, bool) { - if !ni.IsKnown() { - return oi, false - } - nlc := NInfinity - nuc := PInfinity - for _, co := range consts { - if co.Cmp(ni.Lower) <= 0 { - nlc = co - break - } - } - for _, co := range consts { - if co.Cmp(ni.Upper) >= 0 { - nuc = co - break - } - } - - if !oi.IsKnown() { - return ni, true - } - if ni.Lower.Cmp(oi.Lower) == -1 && ni.Upper.Cmp(oi.Upper) == 1 { - return NewIntInterval(nlc, nuc), true - } - if ni.Lower.Cmp(oi.Lower) == -1 { - return NewIntInterval(nlc, oi.Upper), true - } - if ni.Upper.Cmp(oi.Upper) == 1 { - return NewIntInterval(oi.Lower, nuc), true - } - return oi, false - } - switch oi := g.Range(c.Y()).(type) { - case IntInterval: - ni := c.Eval(g).(IntInterval) - si, changed := widenIntInterval(oi, ni) - if changed { - setRange(si) - return true - } - return false - case StringInterval: - ni := c.Eval(g).(StringInterval) - si, changed := widenIntInterval(oi.Length, ni.Length) - if changed { - setRange(StringInterval{si}) - return true - } - return false - case SliceInterval: - ni := c.Eval(g).(SliceInterval) - si, changed := widenIntInterval(oi.Length, ni.Length) - if changed { - setRange(SliceInterval{si}) - return true - } - return false - default: - return false - } -} - -func (g *Graph) narrow(c Constraint) bool { - narrowIntInterval := func(oi, ni IntInterval) (IntInterval, bool) { - oLower := oi.Lower - oUpper := oi.Upper - nLower := ni.Lower - nUpper := ni.Upper - - if oLower == NInfinity && nLower != NInfinity { - return NewIntInterval(nLower, oUpper), true - } - if oUpper == PInfinity && nUpper != PInfinity { - return NewIntInterval(oLower, nUpper), true - } - if oLower.Cmp(nLower) == 1 { - return NewIntInterval(nLower, oUpper), true - } - if oUpper.Cmp(nUpper) == -1 { - return NewIntInterval(oLower, nUpper), true - } - return oi, false - } - switch oi := g.Range(c.Y()).(type) { - case IntInterval: - ni := c.Eval(g).(IntInterval) - si, changed := narrowIntInterval(oi, ni) - if changed { - g.SetRange(c.Y(), si) - return true - } - return false - case StringInterval: - ni := c.Eval(g).(StringInterval) - si, changed := narrowIntInterval(oi.Length, ni.Length) - if changed { - g.SetRange(c.Y(), StringInterval{si}) - return true - } - return false - case SliceInterval: - ni := c.Eval(g).(SliceInterval) - si, changed := narrowIntInterval(oi.Length, ni.Length) - if changed { - g.SetRange(c.Y(), SliceInterval{si}) - return true - } - return false - default: - return false - } -} - -func (g *Graph) resolveFutures(scc int) { - for _, c := range g.futures[scc] { - c.Resolve() - } -} - -func (g *Graph) entries(scc int) []ssa.Value { - var entries []ssa.Value - for _, n := range g.Vertices { - if n.SCC != scc { - continue - } - if v, ok := n.Value.(ssa.Value); ok { - // XXX avoid quadratic runtime - // - // XXX I cannot think of any code where the future and its - // variables aren't in the same SCC, in which case this - // code isn't very useful (the variables won't be resolved - // yet). Before we have a cross-SCC example, however, we - // can't really verify that this code is working - // correctly, or indeed doing anything useful. 
- for _, on := range g.Vertices { - if c, ok := on.Value.(Future); ok { - if c.Y() == v { - if !c.IsResolved() { - g.SetRange(c.Y(), c.Eval(g)) - c.MarkResolved() - } - break - } - } - } - if g.Range(v).IsKnown() { - entries = append(entries, v) - } - } - } - return entries -} - -func (g *Graph) uses(scc int) map[ssa.Value][]Constraint { - m := map[ssa.Value][]Constraint{} - for _, e := range g.sccEdges[scc] { - if e.control { - continue - } - if v, ok := e.From.Value.(ssa.Value); ok { - c := e.To.Value.(Constraint) - sink := c.Y() - if g.Vertices[sink].SCC == scc { - m[v] = append(m[v], c) - } - } - } - return m -} - -func (g *Graph) actives(scc int) []ssa.Value { - var actives []ssa.Value - for _, n := range g.Vertices { - if n.SCC != scc { - continue - } - if v, ok := n.Value.(ssa.Value); ok { - if _, ok := v.(*ssa.Const); !ok { - actives = append(actives, v) - } - } - } - return actives -} - -func (g *Graph) AddEdge(from, to interface{}, ctrl bool) { - vf, ok := g.Vertices[from] - if !ok { - vf = &Vertex{Value: from} - g.Vertices[from] = vf - } - vt, ok := g.Vertices[to] - if !ok { - vt = &Vertex{Value: to} - g.Vertices[to] = vt - } - e := Edge{From: vf, To: vt, control: ctrl} - g.Edges = append(g.Edges, e) - vf.Succs = append(vf.Succs, e) -} - -type Edge struct { - From, To *Vertex - control bool -} - -func (e Edge) String() string { - return fmt.Sprintf("%s -> %s", VertexString(e.From), VertexString(e.To)) -} - -func (g *Graph) FindSCCs() { - // use Tarjan to find the SCCs - - index := 1 - var s []*Vertex - - scc := 0 - var strongconnect func(v *Vertex) - strongconnect = func(v *Vertex) { - // set the depth index for v to the smallest unused index - v.index = index - v.lowlink = index - index++ - s = append(s, v) - v.stack = true - - for _, e := range v.Succs { - w := e.To - if w.index == 0 { - // successor w has not yet been visited; recurse on it - strongconnect(w) - if w.lowlink < v.lowlink { - v.lowlink = w.lowlink - } - } else if w.stack { - // successor w is in stack s and hence in the current scc - if w.index < v.lowlink { - v.lowlink = w.index - } - } - } - - if v.lowlink == v.index { - for { - w := s[len(s)-1] - s = s[:len(s)-1] - w.stack = false - w.SCC = scc - if w == v { - break - } - } - scc++ - } - } - for _, v := range g.Vertices { - if v.index == 0 { - strongconnect(v) - } - } - - g.SCCs = make([][]*Vertex, scc) - for _, n := range g.Vertices { - n.SCC = scc - n.SCC - 1 - g.SCCs[n.SCC] = append(g.SCCs[n.SCC], n) - } -} - -func invertToken(tok token.Token) token.Token { - switch tok { - case token.LSS: - return token.GEQ - case token.GTR: - return token.LEQ - case token.EQL: - return token.NEQ - case token.NEQ: - return token.EQL - case token.GEQ: - return token.LSS - case token.LEQ: - return token.GTR - default: - panic(fmt.Sprintf("unsupported token %s", tok)) - } -} - -func flipToken(tok token.Token) token.Token { - switch tok { - case token.LSS: - return token.GTR - case token.GTR: - return token.LSS - case token.EQL: - return token.EQL - case token.NEQ: - return token.NEQ - case token.GEQ: - return token.LEQ - case token.LEQ: - return token.GEQ - default: - panic(fmt.Sprintf("unsupported token %s", tok)) - } -} - -type CopyConstraint struct { - aConstraint - X ssa.Value -} - -func (c *CopyConstraint) String() string { - return fmt.Sprintf("%s = copy(%s)", c.Y().Name(), c.X.Name()) -} - -func (c *CopyConstraint) Eval(g *Graph) Range { - return g.Range(c.X) -} - -func (c *CopyConstraint) Operands() []ssa.Value { - return []ssa.Value{c.X} -} - -func 
NewCopyConstraint(x, y ssa.Value) Constraint { - return &CopyConstraint{ - aConstraint: aConstraint{ - y: y, - }, - X: x, - } -} diff --git a/vendor/honnef.co/go/tools/unused/unused.go b/vendor/honnef.co/go/tools/unused/unused.go deleted file mode 100644 index 21889e8..0000000 --- a/vendor/honnef.co/go/tools/unused/unused.go +++ /dev/null @@ -1,1064 +0,0 @@ -package unused // import "honnef.co/go/tools/unused" - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "io" - "path/filepath" - "strings" - - "honnef.co/go/tools/lint" - - "golang.org/x/tools/go/loader" - "golang.org/x/tools/go/types/typeutil" -) - -func NewLintChecker(c *Checker) *LintChecker { - l := &LintChecker{ - c: c, - } - return l -} - -type LintChecker struct { - c *Checker -} - -func (*LintChecker) Name() string { return "unused" } -func (*LintChecker) Prefix() string { return "U" } - -func (l *LintChecker) Init(*lint.Program) {} -func (l *LintChecker) Funcs() map[string]lint.Func { - return map[string]lint.Func{ - "U1000": l.Lint, - } -} - -func typString(obj types.Object) string { - switch obj := obj.(type) { - case *types.Func: - return "func" - case *types.Var: - if obj.IsField() { - return "field" - } - return "var" - case *types.Const: - return "const" - case *types.TypeName: - return "type" - default: - // log.Printf("%T", obj) - return "identifier" - } -} - -func (l *LintChecker) Lint(j *lint.Job) { - unused := l.c.Check(j.Program.Prog) - for _, u := range unused { - name := u.Obj.Name() - if sig, ok := u.Obj.Type().(*types.Signature); ok && sig.Recv() != nil { - switch sig.Recv().Type().(type) { - case *types.Named, *types.Pointer: - typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) - if len(typ) > 0 && typ[0] == '*' { - name = fmt.Sprintf("(%s).%s", typ, u.Obj.Name()) - } else if len(typ) > 0 { - name = fmt.Sprintf("%s.%s", typ, u.Obj.Name()) - } - } - } - j.Errorf(u.Obj, "%s %s is unused", typString(u.Obj), name) - } -} - -type graph struct { - roots []*graphNode - nodes map[interface{}]*graphNode -} - -func (g *graph) markUsedBy(obj, usedBy interface{}) { - objNode := g.getNode(obj) - usedByNode := g.getNode(usedBy) - if objNode.obj == usedByNode.obj { - return - } - usedByNode.uses[objNode] = struct{}{} -} - -var labelCounter = 1 - -func (g *graph) getNode(obj interface{}) *graphNode { - for { - if pt, ok := obj.(*types.Pointer); ok { - obj = pt.Elem() - } else { - break - } - } - _, ok := g.nodes[obj] - if !ok { - g.addObj(obj) - } - - return g.nodes[obj] -} - -func (g *graph) addObj(obj interface{}) { - if pt, ok := obj.(*types.Pointer); ok { - obj = pt.Elem() - } - node := &graphNode{obj: obj, uses: make(map[*graphNode]struct{}), n: labelCounter} - g.nodes[obj] = node - labelCounter++ - - if obj, ok := obj.(*types.Struct); ok { - n := obj.NumFields() - for i := 0; i < n; i++ { - field := obj.Field(i) - g.markUsedBy(obj, field) - } - } -} - -type graphNode struct { - obj interface{} - uses map[*graphNode]struct{} - used bool - quiet bool - n int -} - -type CheckMode int - -const ( - CheckConstants CheckMode = 1 << iota - CheckFields - CheckFunctions - CheckTypes - CheckVariables - - CheckAll = CheckConstants | CheckFields | CheckFunctions | CheckTypes | CheckVariables -) - -type Unused struct { - Obj types.Object - Position token.Position -} - -type Checker struct { - Mode CheckMode - WholeProgram bool - ConsiderReflection bool - Debug io.Writer - - graph *graph - - msCache typeutil.MethodSetCache - lprog *loader.Program - topmostCache map[*types.Scope]*types.Scope - 
interfaces []*types.Interface -} - -func NewChecker(mode CheckMode) *Checker { - return &Checker{ - Mode: mode, - graph: &graph{ - nodes: make(map[interface{}]*graphNode), - }, - topmostCache: make(map[*types.Scope]*types.Scope), - } -} - -func (c *Checker) checkConstants() bool { return (c.Mode & CheckConstants) > 0 } -func (c *Checker) checkFields() bool { return (c.Mode & CheckFields) > 0 } -func (c *Checker) checkFunctions() bool { return (c.Mode & CheckFunctions) > 0 } -func (c *Checker) checkTypes() bool { return (c.Mode & CheckTypes) > 0 } -func (c *Checker) checkVariables() bool { return (c.Mode & CheckVariables) > 0 } - -func (c *Checker) markFields(typ types.Type) { - structType, ok := typ.Underlying().(*types.Struct) - if !ok { - return - } - n := structType.NumFields() - for i := 0; i < n; i++ { - field := structType.Field(i) - c.graph.markUsedBy(field, typ) - } -} - -type Error struct { - Errors map[string][]error -} - -func (e Error) Error() string { - return fmt.Sprintf("errors in %d packages", len(e.Errors)) -} - -func (c *Checker) Check(lprog *loader.Program) []Unused { - var unused []Unused - c.lprog = lprog - if c.WholeProgram { - c.findExportedInterfaces() - } - for _, pkg := range c.lprog.InitialPackages() { - c.processDefs(pkg) - c.processUses(pkg) - c.processTypes(pkg) - c.processSelections(pkg) - c.processAST(pkg) - } - - for _, node := range c.graph.nodes { - obj, ok := node.obj.(types.Object) - if !ok { - continue - } - typNode, ok := c.graph.nodes[obj.Type()] - if !ok { - continue - } - node.uses[typNode] = struct{}{} - } - - roots := map[*graphNode]struct{}{} - for _, root := range c.graph.roots { - roots[root] = struct{}{} - } - markNodesUsed(roots) - c.markNodesQuiet() - - if c.Debug != nil { - c.printDebugGraph(c.Debug) - } - - for _, node := range c.graph.nodes { - if node.used || node.quiet { - continue - } - obj, ok := node.obj.(types.Object) - if !ok { - continue - } - found := false - if !false { - for _, pkg := range c.lprog.InitialPackages() { - if pkg.Pkg == obj.Pkg() { - found = true - break - } - } - } - if !found { - continue - } - - pos := c.lprog.Fset.Position(obj.Pos()) - if pos.Filename == "" || filepath.Base(pos.Filename) == "C" { - continue - } - generated := false - for _, file := range c.lprog.Package(obj.Pkg().Path()).Files { - if c.lprog.Fset.Position(file.Pos()).Filename != pos.Filename { - continue - } - if len(file.Comments) > 0 { - generated = isGenerated(file.Comments[0].Text()) - } - break - } - if generated { - continue - } - unused = append(unused, Unused{Obj: obj, Position: pos}) - } - return unused -} - -// isNoCopyType reports whether a type represents the NoCopy sentinel -// type. The NoCopy type is a named struct with no fields and exactly -// one method `func Lock()` that is empty. -// -// FIXME(dh): currently we're not checking that the function body is -// empty. 
-func isNoCopyType(typ types.Type) bool {
-	st, ok := typ.Underlying().(*types.Struct)
-	if !ok {
-		return false
-	}
-	if st.NumFields() != 0 {
-		return false
-	}
-
-	named, ok := typ.(*types.Named)
-	if !ok {
-		return false
-	}
-	if named.NumMethods() != 1 {
-		return false
-	}
-	meth := named.Method(0)
-	if meth.Name() != "Lock" {
-		return false
-	}
-	sig := meth.Type().(*types.Signature)
-	if sig.Params().Len() != 0 || sig.Results().Len() != 0 {
-		return false
-	}
-	return true
-}
-
-func (c *Checker) useNoCopyFields(typ types.Type) {
-	if st, ok := typ.Underlying().(*types.Struct); ok {
-		n := st.NumFields()
-		for i := 0; i < n; i++ {
-			field := st.Field(i)
-			if isNoCopyType(field.Type()) {
-				c.graph.markUsedBy(field, typ)
-				c.graph.markUsedBy(field.Type().(*types.Named).Method(0), field.Type())
-			}
-		}
-	}
-}
-
-func (c *Checker) useExportedFields(typ types.Type) {
-	if st, ok := typ.Underlying().(*types.Struct); ok {
-		n := st.NumFields()
-		for i := 0; i < n; i++ {
-			field := st.Field(i)
-			if field.Exported() {
-				c.graph.markUsedBy(field, typ)
-			}
-		}
-	}
-}
-
-func (c *Checker) useExportedMethods(typ types.Type) {
-	named, ok := typ.(*types.Named)
-	if !ok {
-		return
-	}
-	ms := typeutil.IntuitiveMethodSet(named, &c.msCache)
-	for i := 0; i < len(ms); i++ {
-		meth := ms[i].Obj()
-		if meth.Exported() {
-			c.graph.markUsedBy(meth, typ)
-		}
-	}
-
-	st, ok := named.Underlying().(*types.Struct)
-	if !ok {
-		return
-	}
-	n := st.NumFields()
-	for i := 0; i < n; i++ {
-		field := st.Field(i)
-		if !field.Anonymous() {
-			continue
-		}
-		ms := typeutil.IntuitiveMethodSet(field.Type(), &c.msCache)
-		for j := 0; j < len(ms); j++ {
-			if ms[j].Obj().Exported() {
-				c.graph.markUsedBy(field, typ)
-				break
-			}
-		}
-	}
-}
-
-func (c *Checker) processDefs(pkg *loader.PackageInfo) {
-	for _, obj := range pkg.Defs {
-		if obj == nil {
-			continue
-		}
-		c.graph.getNode(obj)
-
-		if obj, ok := obj.(*types.TypeName); ok {
-			c.graph.markUsedBy(obj.Type().Underlying(), obj.Type())
-			c.graph.markUsedBy(obj.Type(), obj) // TODO is this needed?
-			c.graph.markUsedBy(obj, obj.Type())
-
-			// We mark all exported fields as used. For normal
-			// operation, we have to. The user may use these fields
-			// without us knowing.
-			//
-			// TODO(dh): In whole-program mode, however, we mark them
-			// as used because of reflection (such as JSON
-			// marshaling). Strictly speaking, we would only need to
-			// mark them used if an instance of the type was
-			// accessible via an interface value.
-			if !c.WholeProgram || c.ConsiderReflection {
-				c.useExportedFields(obj.Type())
-			}
-
-			// TODO(dh): Traditionally we have not marked all exported
-			// methods as exported, even though they're strictly
-			// speaking accessible through reflection. We've done that
-			// because using methods just via reflection is rare, and
-			// not worth the false negatives. With the new -reflect
-			// flag, however, we should reconsider that choice.
-			if !c.WholeProgram {
-				c.useExportedMethods(obj.Type())
-			}
-		}
-
-		switch obj := obj.(type) {
-		case *types.Var, *types.Const, *types.Func, *types.TypeName:
-			if obj.Exported() {
-				// Exported variables and constants use their types,
-				// even if there's no expression using them in the
-				// checked program.
-				//
-				// Also operates on funcs and type names, but that's
-				// irrelevant/redundant.
-				c.graph.markUsedBy(obj.Type(), obj)
-			}
-			if obj.Name() == "_" {
-				node := c.graph.getNode(obj)
-				node.quiet = true
-				scope := c.topmostScope(pkg.Pkg.Scope().Innermost(obj.Pos()), pkg.Pkg)
-				if scope == pkg.Pkg.Scope() {
-					c.graph.roots = append(c.graph.roots, node)
-				} else {
-					c.graph.markUsedBy(obj, scope)
-				}
-			} else {
-				// Variables declared in functions are used. This is
-				// done so that arguments and return parameters are
-				// always marked as used.
-				if _, ok := obj.(*types.Var); ok {
-					if obj.Parent() != obj.Pkg().Scope() && obj.Parent() != nil {
-						c.graph.markUsedBy(obj, c.topmostScope(obj.Parent(), obj.Pkg()))
-						c.graph.markUsedBy(obj.Type(), obj)
-					}
-				}
-			}
-		}
-
-		if fn, ok := obj.(*types.Func); ok {
-			// A function uses its signature
-			c.graph.markUsedBy(fn, fn.Type())
-
-			// A function uses its return types
-			sig := fn.Type().(*types.Signature)
-			res := sig.Results()
-			n := res.Len()
-			for i := 0; i < n; i++ {
-				c.graph.markUsedBy(res.At(i).Type(), fn)
-			}
-		}
-
-		if obj, ok := obj.(interface {
-			Scope() *types.Scope
-			Pkg() *types.Package
-		}); ok {
-			scope := obj.Scope()
-			c.graph.markUsedBy(c.topmostScope(scope, obj.Pkg()), obj)
-		}
-
-		if c.isRoot(obj) {
-			node := c.graph.getNode(obj)
-			c.graph.roots = append(c.graph.roots, node)
-			if obj, ok := obj.(*types.PkgName); ok {
-				scope := obj.Pkg().Scope()
-				c.graph.markUsedBy(scope, obj)
-			}
-		}
-	}
-}
-
-func (c *Checker) processUses(pkg *loader.PackageInfo) {
-	for ident, usedObj := range pkg.Uses {
-		if _, ok := usedObj.(*types.PkgName); ok {
-			continue
-		}
-		pos := ident.Pos()
-		scope := pkg.Pkg.Scope().Innermost(pos)
-		scope = c.topmostScope(scope, pkg.Pkg)
-		if scope != pkg.Pkg.Scope() {
-			c.graph.markUsedBy(usedObj, scope)
-		}
-
-		switch usedObj.(type) {
-		case *types.Var, *types.Const:
-			c.graph.markUsedBy(usedObj.Type(), usedObj)
-		}
-	}
-}
-
-func (c *Checker) findExportedInterfaces() {
-	c.interfaces = []*types.Interface{types.Universe.Lookup("error").Type().(*types.Named).Underlying().(*types.Interface)}
-	var pkgs []*loader.PackageInfo
-	if c.WholeProgram {
-		for _, pkg := range c.lprog.AllPackages {
-			pkgs = append(pkgs, pkg)
-		}
-	} else {
-		pkgs = c.lprog.InitialPackages()
-	}
-
-	for _, pkg := range pkgs {
-		for _, tv := range pkg.Types {
-			iface, ok := tv.Type.(*types.Interface)
-			if !ok {
-				continue
-			}
-			if iface.NumMethods() == 0 {
-				continue
-			}
-			c.interfaces = append(c.interfaces, iface)
-		}
-	}
-}
-
-func (c *Checker) processTypes(pkg *loader.PackageInfo) {
-	named := map[*types.Named]*types.Pointer{}
-	var interfaces []*types.Interface
-	for _, tv := range pkg.Types {
-		if typ, ok := tv.Type.(interface {
-			Elem() types.Type
-		}); ok {
-			c.graph.markUsedBy(typ.Elem(), typ)
-		}
-
-		switch obj := tv.Type.(type) {
-		case *types.Named:
-			named[obj] = types.NewPointer(obj)
-			c.graph.markUsedBy(obj, obj.Underlying())
-			c.graph.markUsedBy(obj.Underlying(), obj)
-		case *types.Interface:
-			if obj.NumMethods() > 0 {
-				interfaces = append(interfaces, obj)
-			}
-		case *types.Struct:
-			c.useNoCopyFields(obj)
-			if pkg.Pkg.Name() != "main" && !c.WholeProgram {
-				c.useExportedFields(obj)
-			}
-		}
-	}
-
-	// Pretend that all types are meant to implement as many
-	// interfaces as possible.
-	//
-	// TODO(dh): For normal operations, that's the best we can do, as
-	// we have no idea what external users will do with our types. In
-	// whole-program mode, we could be more conservative, in two ways:
-	// 1) Only consider interfaces if a type has been assigned to one
-	// 2) Use SSA and flow analysis and determine the exact set of
-	// interfaces that is relevant.
-	fn := func(iface *types.Interface) {
-		for obj, objPtr := range named {
-			if !types.Implements(obj, iface) && !types.Implements(objPtr, iface) {
-				continue
-			}
-			ifaceMethods := make(map[string]struct{}, iface.NumMethods())
-			n := iface.NumMethods()
-			for i := 0; i < n; i++ {
-				meth := iface.Method(i)
-				ifaceMethods[meth.Name()] = struct{}{}
-			}
-			for _, obj := range []types.Type{obj, objPtr} {
-				ms := c.msCache.MethodSet(obj)
-				n := ms.Len()
-				for i := 0; i < n; i++ {
-					sel := ms.At(i)
-					meth := sel.Obj().(*types.Func)
-					_, found := ifaceMethods[meth.Name()]
-					if !found {
-						continue
-					}
-					c.graph.markUsedBy(meth.Type().(*types.Signature).Recv().Type(), obj) // embedded receiver
-					if len(sel.Index()) > 1 {
-						f := getField(obj, sel.Index()[0])
-						c.graph.markUsedBy(f, obj) // embedded receiver
-					}
-					c.graph.markUsedBy(meth, obj)
-				}
-			}
-		}
-	}
-
-	for _, iface := range interfaces {
-		fn(iface)
-	}
-	for _, iface := range c.interfaces {
-		fn(iface)
-	}
-}
-
-func (c *Checker) processSelections(pkg *loader.PackageInfo) {
-	fn := func(expr *ast.SelectorExpr, sel *types.Selection, offset int) {
-		scope := pkg.Pkg.Scope().Innermost(expr.Pos())
-		c.graph.markUsedBy(expr.X, c.topmostScope(scope, pkg.Pkg))
-		c.graph.markUsedBy(sel.Obj(), expr.X)
-		if len(sel.Index()) > 1 {
-			typ := sel.Recv()
-			indices := sel.Index()
-			for _, idx := range indices[:len(indices)-offset] {
-				obj := getField(typ, idx)
-				typ = obj.Type()
-				c.graph.markUsedBy(obj, expr.X)
-			}
-		}
-	}
-
-	for expr, sel := range pkg.Selections {
-		switch sel.Kind() {
-		case types.FieldVal:
-			fn(expr, sel, 0)
-		case types.MethodVal:
-			fn(expr, sel, 1)
-		}
-	}
-}
-
-func dereferenceType(typ types.Type) types.Type {
-	if typ, ok := typ.(*types.Pointer); ok {
-		return typ.Elem()
-	}
-	return typ
-}
-
-// processConversion marks fields as used if they're part of a type conversion.
-func (c *Checker) processConversion(pkg *loader.PackageInfo, node ast.Node) {
-	if node, ok := node.(*ast.CallExpr); ok {
-		callTyp := pkg.TypeOf(node.Fun)
-		var typDst *types.Struct
-		var ok bool
-		switch typ := callTyp.(type) {
-		case *types.Named:
-			typDst, ok = typ.Underlying().(*types.Struct)
-		case *types.Pointer:
-			typDst, ok = typ.Elem().Underlying().(*types.Struct)
-		default:
-			return
-		}
-		if !ok {
-			return
-		}
-
-		if typ, ok := pkg.TypeOf(node.Args[0]).(*types.Basic); ok && typ.Kind() == types.UnsafePointer {
-			// This is an unsafe conversion. Assume that all the
-			// fields are relevant (they are, because of memory
-			// layout)
-			n := typDst.NumFields()
-			for i := 0; i < n; i++ {
-				c.graph.markUsedBy(typDst.Field(i), typDst)
-			}
-			return
-		}
-
-		typSrc, ok := dereferenceType(pkg.TypeOf(node.Args[0])).Underlying().(*types.Struct)
-		if !ok {
-			return
-		}
-
-		// When we convert from type t1 to t2, were t1 and t2 are
-		// structs, all fields are relevant, as otherwise the
-		// conversion would fail.
-		//
-		// We mark t2's fields as used by t1's fields, and vice
-		// versa. That way, if no code actually refers to a field
-		// in either type, it's still correctly marked as unused.
-		// If a field is used in either struct, it's implicitly
-		// relevant in the other one, too.
-		//
-		// It works in a similar way for conversions between types
-		// of two packages, only that the extra information in the
-		// graph is redundant unless we're in whole program mode.
-		n := typDst.NumFields()
-		for i := 0; i < n; i++ {
-			fDst := typDst.Field(i)
-			fSrc := typSrc.Field(i)
-			c.graph.markUsedBy(fDst, fSrc)
-			c.graph.markUsedBy(fSrc, fDst)
-		}
-	}
-}
-
-// processCompositeLiteral marks fields as used if the struct is used
-// in a composite literal.
-func (c *Checker) processCompositeLiteral(pkg *loader.PackageInfo, node ast.Node) {
-	// XXX how does this actually work? wouldn't it match t{}?
-	if node, ok := node.(*ast.CompositeLit); ok {
-		typ := pkg.TypeOf(node)
-		if _, ok := typ.(*types.Named); ok {
-			typ = typ.Underlying()
-		}
-		if _, ok := typ.(*types.Struct); !ok {
-			return
-		}
-
-		if isBasicStruct(node.Elts) {
-			c.markFields(typ)
-		}
-	}
-}
-
-// processCgoExported marks functions as used if they're being
-// exported to cgo.
-func (c *Checker) processCgoExported(pkg *loader.PackageInfo, node ast.Node) {
-	if node, ok := node.(*ast.FuncDecl); ok {
-		if node.Doc == nil {
-			return
-		}
-		for _, cmt := range node.Doc.List {
-			if !strings.HasPrefix(cmt.Text, "//go:cgo_export_") {
-				return
-			}
-			obj := pkg.ObjectOf(node.Name)
-			c.graph.roots = append(c.graph.roots, c.graph.getNode(obj))
-		}
-	}
-}
-
-func (c *Checker) processVariableDeclaration(pkg *loader.PackageInfo, node ast.Node) {
-	if decl, ok := node.(*ast.GenDecl); ok {
-		for _, spec := range decl.Specs {
-			spec, ok := spec.(*ast.ValueSpec)
-			if !ok {
-				continue
-			}
-			for i, name := range spec.Names {
-				if i >= len(spec.Values) {
-					break
-				}
-				value := spec.Values[i]
-				fn := func(node ast.Node) bool {
-					if node3, ok := node.(*ast.Ident); ok {
-						obj := pkg.ObjectOf(node3)
-						if _, ok := obj.(*types.PkgName); ok {
-							return true
-						}
-						c.graph.markUsedBy(obj, pkg.ObjectOf(name))
-					}
-					return true
-				}
-				ast.Inspect(value, fn)
-			}
-		}
-	}
-}
-
-func (c *Checker) processArrayConstants(pkg *loader.PackageInfo, node ast.Node) {
-	if decl, ok := node.(*ast.ArrayType); ok {
-		ident, ok := decl.Len.(*ast.Ident)
-		if !ok {
-			return
-		}
-		c.graph.markUsedBy(pkg.ObjectOf(ident), pkg.TypeOf(decl))
-	}
-}
-
-func (c *Checker) processKnownReflectMethodCallers(pkg *loader.PackageInfo, node ast.Node) {
-	call, ok := node.(*ast.CallExpr)
-	if !ok {
-		return
-	}
-	sel, ok := call.Fun.(*ast.SelectorExpr)
-	if !ok {
-		return
-	}
-	if types.TypeString(pkg.TypeOf(sel.X), nil) != "*net/rpc.Server" {
-		x, ok := sel.X.(*ast.Ident)
-		if !ok {
-			return
-		}
-		pkgname, ok := pkg.ObjectOf(x).(*types.PkgName)
-		if !ok {
-			return
-		}
-		if pkgname.Imported().Path() != "net/rpc" {
-			return
-		}
-	}
-
-	var arg ast.Expr
-	switch sel.Sel.Name {
-	case "Register":
-		if len(call.Args) != 1 {
-			return
-		}
-		arg = call.Args[0]
-	case "RegisterName":
-		if len(call.Args) != 2 {
-			return
-		}
-		arg = call.Args[1]
-	}
-	typ := pkg.TypeOf(arg)
-	ms := types.NewMethodSet(typ)
-	for i := 0; i < ms.Len(); i++ {
-		c.graph.markUsedBy(ms.At(i).Obj(), typ)
-	}
-}
-
-func (c *Checker) processAST(pkg *loader.PackageInfo) {
-	fn := func(node ast.Node) bool {
-		c.processConversion(pkg, node)
-		c.processKnownReflectMethodCallers(pkg, node)
-		c.processCompositeLiteral(pkg, node)
-		c.processCgoExported(pkg, node)
-		c.processVariableDeclaration(pkg, node)
-		c.processArrayConstants(pkg, node)
-		return true
-	}
-	for _, file := range pkg.Files {
-		ast.Inspect(file, fn)
-	}
-}
-
-func isBasicStruct(elts []ast.Expr) bool {
-	for _, elt := range elts {
-		if _, ok := elt.(*ast.KeyValueExpr); !ok {
-			return true
-		}
-	}
-	return false
-}
-
-func isPkgScope(obj types.Object) bool {
-	return obj.Parent() == obj.Pkg().Scope()
-}
-
-func isMain(obj types.Object) bool {
-	if obj.Pkg().Name() != "main" {
-		return false
-	}
-	if obj.Name() != "main" {
-		return false
-	}
-	if !isPkgScope(obj) {
-		return false
-	}
-	if !isFunction(obj) {
-		return false
-	}
-	if isMethod(obj) {
-		return false
-	}
-	return true
-}
-
-func isFunction(obj types.Object) bool {
-	_, ok := obj.(*types.Func)
-	return ok
-}
-
-func isMethod(obj types.Object) bool {
-	if !isFunction(obj) {
-		return false
-	}
-	return obj.(*types.Func).Type().(*types.Signature).Recv() != nil
-}
-
-func isVariable(obj types.Object) bool {
-	_, ok := obj.(*types.Var)
-	return ok
-}
-
-func isConstant(obj types.Object) bool {
-	_, ok := obj.(*types.Const)
-	return ok
-}
-
-func isType(obj types.Object) bool {
-	_, ok := obj.(*types.TypeName)
-	return ok
-}
-
-func isField(obj types.Object) bool {
-	if obj, ok := obj.(*types.Var); ok && obj.IsField() {
-		return true
-	}
-	return false
-}
-
-func (c *Checker) checkFlags(v interface{}) bool {
-	obj, ok := v.(types.Object)
-	if !ok {
-		return false
-	}
-	if isFunction(obj) && !c.checkFunctions() {
-		return false
-	}
-	if isVariable(obj) && !c.checkVariables() {
-		return false
-	}
-	if isConstant(obj) && !c.checkConstants() {
-		return false
-	}
-	if isType(obj) && !c.checkTypes() {
-		return false
-	}
-	if isField(obj) && !c.checkFields() {
-		return false
-	}
-	return true
-}
-
-func (c *Checker) isRoot(obj types.Object) bool {
-	// - in local mode, main, init, tests, and non-test, non-main exported are roots
-	// - in global mode (not yet implemented), main, init and tests are roots
-
-	if _, ok := obj.(*types.PkgName); ok {
-		return true
-	}
-
-	if isMain(obj) || (isFunction(obj) && !isMethod(obj) && obj.Name() == "init") {
-		return true
-	}
-	if obj.Exported() {
-		f := c.lprog.Fset.Position(obj.Pos()).Filename
-		if strings.HasSuffix(f, "_test.go") {
-			return strings.HasPrefix(obj.Name(), "Test") ||
-				strings.HasPrefix(obj.Name(), "Benchmark") ||
-				strings.HasPrefix(obj.Name(), "Example")
-		}
-
-		// Package-level are used, except in package main
-		if isPkgScope(obj) && obj.Pkg().Name() != "main" && !c.WholeProgram {
-			return true
-		}
-	}
-	return false
-}
-
-func markNodesUsed(nodes map[*graphNode]struct{}) {
-	for node := range nodes {
-		wasUsed := node.used
-		node.used = true
-		if !wasUsed {
-			markNodesUsed(node.uses)
-		}
-	}
-}
-
-func (c *Checker) markNodesQuiet() {
-	for _, node := range c.graph.nodes {
-		if node.used {
-			continue
-		}
-		if obj, ok := node.obj.(types.Object); ok && !c.checkFlags(obj) {
-			node.quiet = true
-			continue
-		}
-		c.markObjQuiet(node.obj)
-	}
-}
-
-func (c *Checker) markObjQuiet(obj interface{}) {
-	switch obj := obj.(type) {
-	case *types.Named:
-		n := obj.NumMethods()
-		for i := 0; i < n; i++ {
-			meth := obj.Method(i)
-			node := c.graph.getNode(meth)
-			node.quiet = true
-			c.markObjQuiet(meth.Scope())
-		}
-	case *types.Struct:
-		n := obj.NumFields()
-		for i := 0; i < n; i++ {
-			field := obj.Field(i)
-			c.graph.nodes[field].quiet = true
-		}
-	case *types.Func:
-		c.markObjQuiet(obj.Scope())
-	case *types.Scope:
-		if obj == nil {
-			return
-		}
-		if obj.Parent() == types.Universe {
-			return
-		}
-		for _, name := range obj.Names() {
-			v := obj.Lookup(name)
-			if n, ok := c.graph.nodes[v]; ok {
-				n.quiet = true
-			}
-		}
-		n := obj.NumChildren()
-		for i := 0; i < n; i++ {
-			c.markObjQuiet(obj.Child(i))
-		}
-	}
-}
-
-func getField(typ types.Type, idx int) *types.Var {
-	switch obj := typ.(type) {
-	case *types.Pointer:
-		return getField(obj.Elem(), idx)
-	case *types.Named:
-		switch v := obj.Underlying().(type) {
-		case *types.Struct:
-			return v.Field(idx)
-		case *types.Pointer:
-			return getField(v.Elem(), idx)
-		default:
-			panic(fmt.Sprintf("unexpected type %s", typ))
-		}
-	case *types.Struct:
-		return obj.Field(idx)
-	}
-	return nil
-}
-
-func (c *Checker) topmostScope(scope *types.Scope, pkg *types.Package) (ret *types.Scope) {
-	if top, ok := c.topmostCache[scope]; ok {
-		return top
-	}
-	defer func() {
-		c.topmostCache[scope] = ret
-	}()
-	if scope == pkg.Scope() {
-		return scope
-	}
-	if scope.Parent().Parent() == pkg.Scope() {
-		return scope
-	}
-	return c.topmostScope(scope.Parent(), pkg)
-}
-
-func (c *Checker) printDebugGraph(w io.Writer) {
-	fmt.Fprintln(w, "digraph {")
-	fmt.Fprintln(w, "n0 [label = roots]")
-	for _, node := range c.graph.nodes {
-		s := fmt.Sprintf("%s (%T)", node.obj, node.obj)
-		s = strings.Replace(s, "\n", "", -1)
-		s = strings.Replace(s, `"`, "", -1)
-		fmt.Fprintf(w, `n%d [label = %q]`, node.n, s)
-		color := "black"
-		switch {
-		case node.used:
-			color = "green"
-		case node.quiet:
-			color = "orange"
-		case !c.checkFlags(node.obj):
-			color = "purple"
-		default:
-			color = "red"
-		}
-		fmt.Fprintf(w, "[color = %s]", color)
-		fmt.Fprintln(w)
-	}
-
-	for _, node1 := range c.graph.nodes {
-		for node2 := range node1.uses {
-			fmt.Fprintf(w, "n%d -> n%d\n", node1.n, node2.n)
-		}
-	}
-	for _, root := range c.graph.roots {
-		fmt.Fprintf(w, "n0 -> n%d\n", root.n)
-	}
-	fmt.Fprintln(w, "}")
-}
-
-func isGenerated(comment string) bool {
-	return strings.Contains(comment, "Code generated by") ||
-		strings.Contains(comment, "DO NOT EDIT")
-}
diff --git a/vendor/honnef.co/go/tools/version/version.go b/vendor/honnef.co/go/tools/version/version.go
deleted file mode 100644
index 9536482..0000000
--- a/vendor/honnef.co/go/tools/version/version.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package version
-
-import (
-	"fmt"
-	"os"
-	"path/filepath"
-)
-
-const Version = "2017.2.2"
-
-func Print() {
-	if Version == "devel" {
-		fmt.Printf("%s (no version)\n", filepath.Base(os.Args[0]))
-	} else {
-		fmt.Printf("%s %s\n", filepath.Base(os.Args[0]), Version)
-	}
-}