diff --git a/go.mod b/go.mod
index 3ca577df..5f8e1cd6 100644
--- a/go.mod
+++ b/go.mod
@@ -9,7 +9,7 @@ require (
 	github.com/aws/aws-sdk-go-v2/service/ec2 v1.29.0
 	github.com/aws/aws-sdk-go-v2/service/wafregional v1.12.3
 	github.com/aws/aws-sdk-go-v2/service/wafv2 v1.19.0
-	github.com/golangci/golangci-lint v1.50.0
+	github.com/golangci/golangci-lint v1.51.2
 	github.com/google/go-cmp v0.5.9
 	github.com/mikefarah/yq/v4 v4.24.4
 	github.com/onsi/ginkgo v1.16.5
@@ -30,11 +30,12 @@ require (
 )
 
 require (
-	4d63.com/gochecknoglobals v0.1.0 // indirect
-	github.com/Abirdcfly/dupword v0.0.7 // indirect
+	4d63.com/gocheckcompilerdirectives v1.2.1 // indirect
+	4d63.com/gochecknoglobals v0.2.1 // indirect
+	github.com/Abirdcfly/dupword v0.0.9 // indirect
 	github.com/Antonboom/errname v0.1.7 // indirect
 	github.com/Antonboom/nilnil v0.1.1 // indirect
-	github.com/BurntSushi/toml v1.2.0 // indirect
+	github.com/BurntSushi/toml v1.2.1 // indirect
 	github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect
 	github.com/GaijinEntertainment/go-exhaustruct/v2 v2.3.0 // indirect
 	github.com/Masterminds/semver v1.5.0 // indirect
@@ -43,7 +44,7 @@ require (
 	github.com/alexkohler/prealloc v1.0.0 // indirect
 	github.com/alingse/asasalint v0.0.11 // indirect
 	github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect
-	github.com/ashanbrown/forbidigo v1.3.0 // indirect
+	github.com/ashanbrown/forbidigo v1.4.0 // indirect
 	github.com/ashanbrown/makezero v1.1.1 // indirect
 	github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.10.0 // indirect
 	github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.9 // indirect
@@ -56,15 +57,15 @@ require (
 	github.com/beorn7/perks v1.0.1 // indirect
 	github.com/bkielbasa/cyclop v1.2.0 // indirect
 	github.com/blizzy78/varnamelen v0.8.0 // indirect
-	github.com/bombsimon/wsl/v3 v3.3.0 // indirect
+	github.com/bombsimon/wsl/v3 v3.4.0 // indirect
 	github.com/breml/bidichk v0.2.3 // indirect
 	github.com/breml/errchkjson v0.3.0 // indirect
 	github.com/butuzov/ireturn v0.1.1 // indirect
 	github.com/cespare/xxhash/v2 v2.2.0 // indirect
 	github.com/charithe/durationcheck v0.0.9 // indirect
-	github.com/chavacava/garif v0.0.0-20220630083739-93517212f375 // indirect
+	github.com/chavacava/garif v0.0.0-20221024190013-b3ef35877348 // indirect
 	github.com/curioswitch/go-reassign v0.2.0 // indirect
-	github.com/daixiang0/gci v0.8.0 // indirect
+	github.com/daixiang0/gci v0.9.1 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/denis-tingaikin/go-header v0.4.3 // indirect
 	github.com/elliotchance/orderedmap v1.4.0 // indirect
@@ -73,26 +74,26 @@ require (
 	github.com/ettle/strcase v0.1.1 // indirect
 	github.com/evanphx/json-patch v4.12.0+incompatible // indirect
 	github.com/evanphx/json-patch/v5 v5.6.0 // indirect
-	github.com/fatih/color v1.13.0 // indirect
+	github.com/fatih/color v1.14.1 // indirect
 	github.com/fatih/structtag v1.2.0 // indirect
 	github.com/firefart/nonamedreturns v1.0.4 // indirect
 	github.com/fsnotify/fsnotify v1.6.0 // indirect
 	github.com/fzipp/gocyclo v0.6.0 // indirect
-	github.com/go-critic/go-critic v0.6.5 // indirect
+	github.com/go-critic/go-critic v0.6.7 // indirect
 	github.com/go-errors/errors v1.0.1 // indirect
 	github.com/go-logr/logr v1.2.4 // indirect
 	github.com/go-logr/zapr v1.2.4 // indirect
 	github.com/go-openapi/jsonpointer v0.19.6 // indirect
 	github.com/go-openapi/jsonreference v0.20.1 // indirect
 	github.com/go-openapi/swag v0.22.3 // indirect
-	github.com/go-toolsmith/astcast v1.0.0 // indirect
-	github.com/go-toolsmith/astcopy v1.0.2 // indirect
-	github.com/go-toolsmith/astequal v1.0.3 // indirect
-	github.com/go-toolsmith/astfmt v1.0.0 // indirect
-	github.com/go-toolsmith/astp v1.0.0 // indirect
-	github.com/go-toolsmith/strparse v1.0.0 // indirect
-	github.com/go-toolsmith/typep v1.0.2 // indirect
-	github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b // indirect
+	github.com/go-toolsmith/astcast v1.1.0 // indirect
+	github.com/go-toolsmith/astcopy v1.0.3 // indirect
+	github.com/go-toolsmith/astequal v1.1.0 // indirect
+	github.com/go-toolsmith/astfmt v1.1.0 // indirect
+	github.com/go-toolsmith/astp v1.1.0 // indirect
+	github.com/go-toolsmith/strparse v1.1.0 // indirect
+	github.com/go-toolsmith/typep v1.1.0 // indirect
+	github.com/go-xmlfmt/xmlfmt v1.1.2 // indirect
 	github.com/gobuffalo/flect v0.2.5 // indirect
 	github.com/gobwas/glob v0.2.3 // indirect
 	github.com/goccy/go-yaml v1.9.5 // indirect
@@ -106,14 +107,14 @@ require (
 	github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2 // indirect
 	github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 // indirect
 	github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca // indirect
-	github.com/golangci/misspell v0.3.5 // indirect
+	github.com/golangci/misspell v0.4.0 // indirect
 	github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6 // indirect
 	github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 // indirect
 	github.com/google/gnostic v0.5.7-v3refs // indirect
 	github.com/google/gofuzz v1.2.0 // indirect
 	github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
 	github.com/google/uuid v1.3.0 // indirect
-	github.com/gordonklaus/ineffassign v0.0.0-20210914165742-4cc7213b9bc8 // indirect
+	github.com/gordonklaus/ineffassign v0.0.0-20230107090616-13ace0543b28 // indirect
 	github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
 	github.com/gostaticanalysis/comment v1.4.2 // indirect
 	github.com/gostaticanalysis/forcetypeassert v0.1.0 // indirect
@@ -133,15 +134,16 @@ require (
 	github.com/josharian/intern v1.0.0 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
 	github.com/julz/importas v0.1.0 // indirect
-	github.com/kisielk/errcheck v1.6.2 // indirect
+	github.com/junk1tm/musttag v0.4.5 // indirect
+	github.com/kisielk/errcheck v1.6.3 // indirect
 	github.com/kisielk/gotool v1.0.0 // indirect
-	github.com/kkHAIKE/contextcheck v1.1.2 // indirect
+	github.com/kkHAIKE/contextcheck v1.1.3 // indirect
 	github.com/kulti/thelper v0.6.3 // indirect
 	github.com/kunwardeep/paralleltest v1.0.6 // indirect
-	github.com/kyoh86/exportloopref v0.1.8 // indirect
+	github.com/kyoh86/exportloopref v0.1.11 // indirect
 	github.com/ldez/gomoddirectives v0.2.3 // indirect
-	github.com/ldez/tagliatelle v0.3.1 // indirect
-	github.com/leonklingele/grouper v1.1.0 // indirect
+	github.com/ldez/tagliatelle v0.4.0 // indirect
+	github.com/leonklingele/grouper v1.1.1 // indirect
 	github.com/lufeee/execinquery v1.2.1 // indirect
 	github.com/magiconair/properties v1.8.6 // indirect
 	github.com/mailru/easyjson v0.7.7 // indirect
@@ -149,11 +151,11 @@ require (
 	github.com/maratori/testpackage v1.1.0 // indirect
 	github.com/matoous/godox v0.0.0-20210227103229-6504466cf951 // indirect
 	github.com/mattn/go-colorable v0.1.13 // indirect
-	github.com/mattn/go-isatty v0.0.16 // indirect
+	github.com/mattn/go-isatty v0.0.17 // indirect
 	github.com/mattn/go-runewidth v0.0.9 // indirect
 	github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
 	github.com/mbilski/exhaustivestruct v1.2.0 // indirect
-	github.com/mgechev/revive v1.2.4 // indirect
+	github.com/mgechev/revive v1.2.5 // indirect
 	github.com/mitchellh/go-homedir v1.1.0 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
@@ -163,38 +165,38 @@ require (
 	github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
 	github.com/nakabonne/nestif v0.3.1 // indirect
 	github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 // indirect
-	github.com/nishanths/exhaustive v0.8.3 // indirect
+	github.com/nishanths/exhaustive v0.9.5 // indirect
 	github.com/nishanths/predeclared v0.2.2 // indirect
+	github.com/nunnatsa/ginkgolinter v0.8.1 // indirect
 	github.com/nxadm/tail v1.4.8 // indirect
 	github.com/olekukonko/tablewriter v0.0.5 // indirect
 	github.com/onsi/ginkgo/v2 v2.10.0 // indirect
 	github.com/pelletier/go-toml v1.9.5 // indirect
 	github.com/pelletier/go-toml/v2 v2.0.5 // indirect
-	github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d // indirect
 	github.com/pkg/errors v0.9.1 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
-	github.com/polyfloyd/go-errorlint v1.0.5 // indirect
+	github.com/polyfloyd/go-errorlint v1.1.0 // indirect
 	github.com/prometheus/client_golang v1.15.1 // indirect
 	github.com/prometheus/client_model v0.4.0 // indirect
 	github.com/prometheus/common v0.42.0 // indirect
 	github.com/prometheus/procfs v0.9.0 // indirect
-	github.com/quasilyte/go-ruleguard v0.3.18 // indirect
-	github.com/quasilyte/gogrep v0.0.0-20220828223005-86e4605de09f // indirect
+	github.com/quasilyte/go-ruleguard v0.3.19 // indirect
+	github.com/quasilyte/gogrep v0.5.0 // indirect
 	github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 // indirect
 	github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
-	github.com/ryancurrah/gomodguard v1.2.4 // indirect
-	github.com/ryanrolds/sqlclosecheck v0.3.0 // indirect
-	github.com/sanposhiho/wastedassign/v2 v2.0.6 // indirect
+	github.com/ryancurrah/gomodguard v1.3.0 // indirect
+	github.com/ryanrolds/sqlclosecheck v0.4.0 // indirect
+	github.com/sanposhiho/wastedassign/v2 v2.0.7 // indirect
 	github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
-	github.com/sashamelentyev/usestdlibvars v1.20.0 // indirect
-	github.com/securego/gosec/v2 v2.13.1 // indirect
+	github.com/sashamelentyev/usestdlibvars v1.23.0 // indirect
+	github.com/securego/gosec/v2 v2.15.0 // indirect
 	github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c // indirect
 	github.com/sirupsen/logrus v1.9.0 // indirect
 	github.com/sivchari/containedctx v1.0.2 // indirect
 	github.com/sivchari/nosnakecase v1.7.0 // indirect
-	github.com/sivchari/tenv v1.7.0 // indirect
+	github.com/sivchari/tenv v1.7.1 // indirect
 	github.com/sonatard/noctx v0.0.1 // indirect
-	github.com/sourcegraph/go-diff v0.6.1 // indirect
+	github.com/sourcegraph/go-diff v0.7.0 // indirect
 	github.com/spf13/afero v1.8.2 // indirect
 	github.com/spf13/cast v1.5.0 // indirect
 	github.com/spf13/jwalterweatherman v1.1.0 // indirect
@@ -205,14 +207,15 @@ require (
 	github.com/stretchr/objx v0.5.0 // indirect
 	github.com/stretchr/testify v1.8.2 // indirect
 	github.com/subosito/gotenv v1.4.1 // indirect
+	github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c // indirect
 	github.com/tdakkota/asciicheck v0.1.1 // indirect
 	github.com/tetafro/godot v1.4.11 // indirect
-	github.com/timakin/bodyclose v0.0.0-20210704033933-f49887972144 // indirect
+	github.com/timakin/bodyclose v0.0.0-20221125081123-e39cf3fc478e // indirect
 	github.com/timonwong/loggercheck v0.9.3 // indirect
 	github.com/timtadh/data-structures v0.5.3 // indirect
 	github.com/timtadh/lexmachine v0.2.2 // indirect
-	github.com/tomarrell/wrapcheck/v2 v2.6.2 // indirect
-	github.com/tommy-muehle/go-mnd/v2 v2.5.0 // indirect
+	github.com/tomarrell/wrapcheck/v2 v2.8.0 // indirect
+	github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
 	github.com/ultraware/funlen v0.0.3 // indirect
 	github.com/ultraware/whitespace v0.0.5 // indirect
 	github.com/uudashr/gocognit v1.0.6 // indirect
@@ -225,7 +228,7 @@ require (
 	go.uber.org/multierr v1.6.0 // indirect
 	go.uber.org/zap v1.24.0 // indirect
 	golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e // indirect
-	golang.org/x/exp/typeparams v0.0.0-20220827204233-334a2380cb91 // indirect
+	golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9 // indirect
 	golang.org/x/mod v0.10.0 // indirect
 	golang.org/x/net v0.10.0 // indirect
 	golang.org/x/oauth2 v0.5.0 // indirect
@@ -245,7 +248,7 @@ require (
 	gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
 	gopkg.in/yaml.v2 v2.4.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
-	honnef.co/go/tools v0.3.3 // indirect
+	honnef.co/go/tools v0.4.2 // indirect
 	k8s.io/apiextensions-apiserver v0.27.2 // indirect
 	k8s.io/component-base v0.27.2 // indirect
 	k8s.io/klog/v2 v2.90.1 // indirect
@@ -253,7 +256,7 @@ require (
 	mvdan.cc/gofumpt v0.4.0 // indirect
 	mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed // indirect
 	mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b // indirect
-	mvdan.cc/unparam v0.0.0-20220706161116-678bad134442 // indirect
+	mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d // indirect
 	sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect
 	sigs.k8s.io/kustomize/api v0.12.1 // indirect
 	sigs.k8s.io/kustomize/cmd/config v0.10.9 // indirect
diff --git a/go.sum b/go.sum
index 8ed7c153..02962f6c 100644
--- a/go.sum
+++ b/go.sum
@@ -1,5 +1,7 @@
-4d63.com/gochecknoglobals
v0.1.0 h1:zeZSRqj5yCg28tCkIV/z/lWbwvNm5qnKVS15PI8nhD0= -4d63.com/gochecknoglobals v0.1.0/go.mod h1:wfdC5ZjKSPr7CybKEcgJhUOgeAQW1+7WcyK8OvUilfo= +4d63.com/gocheckcompilerdirectives v1.2.1 h1:AHcMYuw56NPjq/2y615IGg2kYkBdTvOaojYCBcRE7MA= +4d63.com/gocheckcompilerdirectives v1.2.1/go.mod h1:yjDJSxmDTtIHHCqX0ufRYZDL6vQtMG7tJdKVeWwsqvs= +4d63.com/gochecknoglobals v0.2.1 h1:1eiorGsgHOFOuoOiJDy2psSrQbRdIHrlge0IJIkUgDc= +4d63.com/gochecknoglobals v0.2.1/go.mod h1:KRE8wtJB3CXCsb1xy421JfTHIIbmT3U5ruxw2Qu8fSU= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= @@ -38,15 +40,15 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/Abirdcfly/dupword v0.0.7 h1:z14n0yytA3wNO2gpCD/jVtp/acEXPGmYu0esewpBt6Q= -github.com/Abirdcfly/dupword v0.0.7/go.mod h1:K/4M1kj+Zh39d2aotRwypvasonOyAMH1c/IZJzE0dmk= +github.com/Abirdcfly/dupword v0.0.9 h1:MxprGjKq3yDBICXDgEEsyGirIXfMYXkLNT/agPsE1tk= +github.com/Abirdcfly/dupword v0.0.9/go.mod h1:PzmHVLLZ27MvHSzV7eFmMXSFArWXZPZmfuuziuUrf2g= github.com/Antonboom/errname v0.1.7 h1:mBBDKvEYwPl4WFFNwec1CZO096G6vzK9vvDQzAwkako= github.com/Antonboom/errname v0.1.7/go.mod h1:g0ONh16msHIPgJSGsecu1G/dcF2hlYR/0SddnIAGavU= github.com/Antonboom/nilnil v0.1.1 h1:PHhrh5ANKFWRBh7TdYmyyq2gyT2lotnvFvvFbylF81Q= github.com/Antonboom/nilnil v0.1.1/go.mod h1:L1jBqoWM7AOeTD+tSquifKSesRHs4ZdaxvZR+xdJEaI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v1.2.0 h1:Rt8g24XnyGTyglgET/PRUNlrUeu9F5L+7FilkXfZgs0= -github.com/BurntSushi/toml v1.2.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak= +github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= @@ -69,8 +71,8 @@ github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQ github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I= github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d h1:Byv0BzEl3/e6D5CLfI0j/7hiIEtvGVFPCZ7Ei2oq8iQ= github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= -github.com/ashanbrown/forbidigo v1.3.0 h1:VkYIwb/xxdireGAdJNZoo24O4lmnEWkactplBlWTShc= -github.com/ashanbrown/forbidigo v1.3.0/go.mod h1:vVW7PEdqEFqapJe95xHkTfB1+XvZXBFg8t0sG2FIxmI= +github.com/ashanbrown/forbidigo v1.4.0 h1:spdPbupaSqtWORq1Q4eHBoPBmHtwVyLKwaedbSLc5Sw= 
+github.com/ashanbrown/forbidigo v1.4.0/go.mod h1:IvgwB5Y4fzqSAj/WVXKWigoTkB0dzI2FBbpKWuh7ph8= github.com/ashanbrown/makezero v1.1.1 h1:iCQ87C0V0vSyO+M9E/FZYbu65auqH0lnsOkf5FcB28s= github.com/ashanbrown/makezero v1.1.1/go.mod h1:i1bJLCRSCHOcOa9Y6MyF2FTfMZMFdHvxKHxgO5Z1axI= github.com/aws/aws-sdk-go-v2 v1.13.0/go.mod h1:L6+ZpqHaLbAaxsqV0L4cvxZY7QupWJB4fhkf8LXvC7w= @@ -115,8 +117,8 @@ github.com/bkielbasa/cyclop v1.2.0 h1:7Jmnh0yL2DjKfw28p86YTd/B4lRGcNuu12sKE35sM7 github.com/bkielbasa/cyclop v1.2.0/go.mod h1:qOI0yy6A7dYC4Zgsa72Ppm9kONl0RoIlPbzot9mhmeI= github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M= github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k= -github.com/bombsimon/wsl/v3 v3.3.0 h1:Mka/+kRLoQJq7g2rggtgQsjuI/K5Efd87WX96EWFxjM= -github.com/bombsimon/wsl/v3 v3.3.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc= +github.com/bombsimon/wsl/v3 v3.4.0 h1:RkSxjT3tmlptwfgEgTgU+KYKLI35p/tviNXNXiL2aNU= +github.com/bombsimon/wsl/v3 v3.4.0/go.mod h1:KkIB+TXkqy6MvK9BDZVbZxKNYsE1/oLRJbIFtf14qqo= github.com/breml/bidichk v0.2.3 h1:qe6ggxpTfA8E75hdjWPZ581sY3a2lnl0IRxLQFelECI= github.com/breml/bidichk v0.2.3/go.mod h1:8u2C6DnAy0g2cEq+k/A2+tr9O1s+vHGxWn0LTc70T2A= github.com/breml/errchkjson v0.3.0 h1:YdDqhfqMT+I1vIxPSas44P+9Z9HzJwCeAzjB8PxP1xw= @@ -130,8 +132,8 @@ github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/charithe/durationcheck v0.0.9 h1:mPP4ucLrf/rKZiIG/a9IPXHGlh8p4CzgpyTy6EEutYk= github.com/charithe/durationcheck v0.0.9/go.mod h1:SSbRIBVfMjCi/kEB6K65XEA83D6prSM8ap1UCpNKtgg= -github.com/chavacava/garif v0.0.0-20220630083739-93517212f375 h1:E7LT642ysztPWE0dfz43cWOvMiF42DyTRC+eZIaO4yI= -github.com/chavacava/garif v0.0.0-20220630083739-93517212f375/go.mod h1:4m1Rv7xfuwWPNKXlThldNuJvutYM6J95wNuuVmn55To= +github.com/chavacava/garif v0.0.0-20221024190013-b3ef35877348 h1:cy5GCEZLUCshCGCRRUjxHrDUqkB4l5cuUt3ShEckQEo= +github.com/chavacava/garif v0.0.0-20221024190013-b3ef35877348/go.mod h1:f/miWtG3SSuTxKsNK3o58H1xl+XV6ZIfbC6p7lPPB8U= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -141,11 +143,10 @@ github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnht github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/cristalhq/acmd v0.8.1/go.mod h1:LG5oa43pE/BbxtfMoImHCQN++0Su7dzipdgBjMCBVDQ= github.com/curioswitch/go-reassign v0.2.0 h1:G9UZyOcpk/d7Gd6mqYgd8XYWFMw/znxwGDUstnC9DIo= github.com/curioswitch/go-reassign v0.2.0/go.mod h1:x6OpXuWvgfQaMGks2BZybTngWjT84hqJfKoO8Tt/Roc= -github.com/daixiang0/gci v0.8.0 h1:DzWYUm4+bc+taVUtuq1tsIMb/QFMMYgDIiykSoO98ZU= -github.com/daixiang0/gci 
v0.8.0/go.mod h1:EpVfrztufwVgQRXjnX4zuNinEpLj5OmMjtu/+MB0V0c= +github.com/daixiang0/gci v0.9.1 h1:jBrwBmBZTDsGsXiaCTLIe9diotp1X4X64zodFrh7l+c= +github.com/daixiang0/gci v0.9.1/go.mod h1:EpVfrztufwVgQRXjnX4zuNinEpLj5OmMjtu/+MB0V0c= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -171,8 +172,8 @@ github.com/evanphx/json-patch v4.12.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQL github.com/evanphx/json-patch/v5 v5.6.0 h1:b91NhWfaz02IuVxO9faSllyAtNXHMPkC5J8sJCLunww= github.com/evanphx/json-patch/v5 v5.6.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= -github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= -github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/fatih/color v1.14.1 h1:qfhVLaG5s+nCROl1zJsZRxFeYrHLqWroPOQ8BWiNb4w= +github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8WlgGZGg= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/firefart/nonamedreturns v1.0.4 h1:abzI1p7mAEPYuR4A+VLKn4eNDOycjYo2phmY9sfv40Y= @@ -184,8 +185,8 @@ github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4 github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo= github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= -github.com/go-critic/go-critic v0.6.5 h1:fDaR/5GWURljXwF8Eh31T2GZNz9X4jeboS912mWF8Uo= -github.com/go-critic/go-critic v0.6.5/go.mod h1:ezfP/Lh7MA6dBNn4c6ab5ALv3sKnZVLx37tr00uuaOY= +github.com/go-critic/go-critic v0.6.7 h1:1evPrElnLQ2LZtJfmNDzlieDhjnq36SLgNzisx06oPM= +github.com/go-critic/go-critic v0.6.7/go.mod h1:fYZUijFdcnxgx6wPjQA2QEjIRaNCT0gO8bhexy6/QmE= github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w= github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= @@ -212,30 +213,29 @@ github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvSc github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= -github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 
h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= -github.com/go-toolsmith/astcast v1.0.0 h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR6jE7g= -github.com/go-toolsmith/astcast v1.0.0/go.mod h1:mt2OdQTeAQcY4DQgPSArJjHCcOwlX+Wl/kwN+LbLGQ4= -github.com/go-toolsmith/astcopy v1.0.2 h1:YnWf5Rnh1hUudj11kei53kI57quN/VH6Hp1n+erozn0= -github.com/go-toolsmith/astcopy v1.0.2/go.mod h1:4TcEdbElGc9twQEYpVo/aieIXfHhiuLh4aLAck6dO7Y= -github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= +github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8= +github.com/go-toolsmith/astcast v1.1.0/go.mod h1:qdcuFWeGGS2xX5bLM/c3U9lewg7+Zu4mr+xPwZIB4ZU= +github.com/go-toolsmith/astcopy v1.0.3 h1:r0bgSRlMOAgO+BdQnVAcpMSMkrQCnV6ZJmIkrJgcJj0= +github.com/go-toolsmith/astcopy v1.0.3/go.mod h1:4TcEdbElGc9twQEYpVo/aieIXfHhiuLh4aLAck6dO7Y= github.com/go-toolsmith/astequal v1.0.2/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4= -github.com/go-toolsmith/astequal v1.0.3 h1:+LVdyRatFS+XO78SGV4I3TCEA0AC7fKEGma+fH+674o= github.com/go-toolsmith/astequal v1.0.3/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4= -github.com/go-toolsmith/astfmt v1.0.0 h1:A0vDDXt+vsvLEdbMFJAUBI/uTbRw1ffOPnxsILnFL6k= -github.com/go-toolsmith/astfmt v1.0.0/go.mod h1:cnWmsOAuq4jJY6Ct5YWlVLmcmLMn1JUPuQIHCY7CJDw= -github.com/go-toolsmith/astp v1.0.0 h1:alXE75TXgcmupDsMK1fRAy0YUzLzqPVvBKoyWV+KPXg= -github.com/go-toolsmith/astp v1.0.0/go.mod h1:RSyrtpVlfTFGDYRbrjyWP1pYu//tSFcvdYrA8meBmLI= +github.com/go-toolsmith/astequal v1.1.0 h1:kHKm1AWqClYn15R0K1KKE4RG614D46n+nqUQ06E1dTw= +github.com/go-toolsmith/astequal v1.1.0/go.mod h1:sedf7VIdCL22LD8qIvv7Nn9MuWJruQA/ysswh64lffQ= +github.com/go-toolsmith/astfmt v1.1.0 h1:iJVPDPp6/7AaeLJEruMsBUlOYCmvg0MoCfJprsOmcco= +github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4= +github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA= +github.com/go-toolsmith/astp v1.1.0/go.mod h1:0T1xFGz9hicKs8Z5MfAqSUitoUYS30pDMsRVIDHs8CA= github.com/go-toolsmith/pkgload v1.0.2-0.20220101231613-e814995d17c5 h1:eD9POs68PHkwrx7hAB78z1cb6PfGq/jyWn3wJywsH1o= -github.com/go-toolsmith/pkgload v1.0.2-0.20220101231613-e814995d17c5/go.mod h1:3NAwwmD4uY/yggRxoEjk/S00MIV3A+H7rrE3i87eYxM= -github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4= github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= -github.com/go-toolsmith/typep v1.0.2 h1:8xdsa1+FSIH/RhEkgnD1j2CJOy5mNllW1Q9tRiYwvlk= -github.com/go-toolsmith/typep v1.0.2/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= -github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo= -github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= +github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQiyP2Bvw= +github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ= +github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus= +github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig= 
+github.com/go-xmlfmt/xmlfmt v1.1.2 h1:Nea7b4icn8s57fTx1M5AI4qQT5HEM3rVUO8MuE6g80U= +github.com/go-xmlfmt/xmlfmt v1.1.2/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= github.com/gobuffalo/flect v0.2.5 h1:H6vvsv2an0lalEaCDRThvtBfmg44W/QHXBCYUXf/6S4= github.com/gobuffalo/flect v0.2.5/go.mod h1:1ZyCLIbg0YD7sDkzvFdPoOydPtD8y9JQnrOROolUcM8= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= @@ -286,14 +286,14 @@ github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe h1:6RGUuS7EGotKx6 github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe/go.mod h1:gjqyPShc/m8pEMpk0a3SeagVb0kaqvhscv+i9jI5ZhQ= github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2 h1:amWTbTGqOZ71ruzrdA+Nx5WA3tV1N0goTspwmKCQvBY= github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2/go.mod h1:9wOXstvyDRshQ9LggQuzBCGysxs3b6Uo/1MvYCR2NMs= -github.com/golangci/golangci-lint v1.50.0 h1:+Xmyt8rKLauNLp2gzcxKMN8VNGqGc5Avc2ZLTwIOpEA= -github.com/golangci/golangci-lint v1.50.0/go.mod h1:UqtDvK24R9OizqRF06foPX8opRMzQB0HQK90uI2JgKc= +github.com/golangci/golangci-lint v1.51.2 h1:yIcsT1X9ZYHdSpeWXRT1ORC/FPGSqDHbHsu9uk4FK7M= +github.com/golangci/golangci-lint v1.51.2/go.mod h1:KH9Q7/3glwpYSknxUgUyLlAv46A8fsSKo1hH2wDvkr8= github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA= github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg= github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA= github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o= -github.com/golangci/misspell v0.3.5 h1:pLzmVdl3VxTOncgzHcvLOKirdvcx/TydsClUQXTehjo= -github.com/golangci/misspell v0.3.5/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA= +github.com/golangci/misspell v0.4.0 h1:KtVB/hTK4bbL/S6bs64rYyk8adjmh1BygbBiaAiX+a0= +github.com/golangci/misspell v0.4.0/go.mod h1:W6O/bwV6lGDxUCChm2ykw9NQdd5bYd1Xkjo88UcWyJc= github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6 h1:DIPQnGy2Gv2FSA4B/hh8Q7xx3B7AIDk3DAMeHclH1vQ= github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6/go.mod h1:0AKcRCkMoKvUvlf89F6O7H2LYdhr1zBh736mBItOdRs= github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys= @@ -343,9 +343,8 @@ github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/gordonklaus/ineffassign v0.0.0-20210914165742-4cc7213b9bc8 h1:PVRE9d4AQKmbelZ7emNig1+NT27DUmKZn5qXxfio54U= -github.com/gordonklaus/ineffassign v0.0.0-20210914165742-4cc7213b9bc8/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0= -github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= +github.com/gordonklaus/ineffassign v0.0.0-20230107090616-13ace0543b28 
h1:9alfqbrhuD+9fLZ4iaAVwhlp5PEhmnBt7yvK2Oy5C1U= +github.com/gordonklaus/ineffassign v0.0.0-20230107090616-13ace0543b28/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0= github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= github.com/gostaticanalysis/analysisutil v0.1.0/go.mod h1:dMhHRU9KTiDcuLGdy87/2gTR8WruwYZrKdRq9m1O6uw= github.com/gostaticanalysis/analysisutil v0.7.1 h1:ZMCjoue3DtDWQ5WyU16YbjbQEQ3VuzwxALrpYd+HeKk= @@ -393,7 +392,6 @@ github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9Y github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= -github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= @@ -408,13 +406,15 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/julz/importas v0.1.0 h1:F78HnrsjY3cR7j0etXy5+TU1Zuy7Xt08X/1aJnH5xXY= github.com/julz/importas v0.1.0/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= +github.com/junk1tm/musttag v0.4.5 h1:d+mpJ1vn6WFEVKHwkgJiIedis1u/EawKOuUTygAUtCo= +github.com/junk1tm/musttag v0.4.5/go.mod h1:XkcL/9O6RmD88JBXb+I15nYRl9W4ExhgQeCBEhfMC8U= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/errcheck v1.6.2 h1:uGQ9xI8/pgc9iOoCe7kWQgRE6SBTrCGmTSf0LrEtY7c= -github.com/kisielk/errcheck v1.6.2/go.mod h1:nXw/i/MfnvRHqXa7XXmQMUB0oNFGuBrNI8d8NLy0LPw= +github.com/kisielk/errcheck v1.6.3 h1:dEKh+GLHcWm2oN34nMvDzn1sqI0i0WxPvrgiJA5JuM8= +github.com/kisielk/errcheck v1.6.3/go.mod h1:nXw/i/MfnvRHqXa7XXmQMUB0oNFGuBrNI8d8NLy0LPw= github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kkHAIKE/contextcheck v1.1.2 h1:BYUSG/GhMhqVz//yjl8IkBDlMEws+9DtCmkz18QO1gg= -github.com/kkHAIKE/contextcheck v1.1.2/go.mod h1:PG/cwd6c0705/LM0KTr1acO2gORUxkSVWyLJOFW5qoo= +github.com/kkHAIKE/contextcheck v1.1.3 h1:l4pNvrb8JSwRd51ojtcOxOeHJzHek+MtOyXbaR0uvmw= +github.com/kkHAIKE/contextcheck v1.1.3/go.mod h1:PG/cwd6c0705/LM0KTr1acO2gORUxkSVWyLJOFW5qoo= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= @@ -431,16 +431,15 @@ github.com/kulti/thelper v0.6.3 h1:ElhKf+AlItIu+xGnI990no4cE2+XaSu1ULymV2Yulxs= github.com/kulti/thelper v0.6.3/go.mod h1:DsqKShOvP40epevkFrvIwkCMNYxMeTNjdWL4dqWHZ6I= 
github.com/kunwardeep/paralleltest v1.0.6 h1:FCKYMF1OF2+RveWlABsdnmsvJrei5aoyZoaGS+Ugg8g= github.com/kunwardeep/paralleltest v1.0.6/go.mod h1:Y0Y0XISdZM5IKm3TREQMZ6iteqn1YuwCsJO/0kL9Zes= -github.com/kyoh86/exportloopref v0.1.8 h1:5Ry/at+eFdkX9Vsdw3qU4YkvGtzuVfzT4X7S77LoN/M= -github.com/kyoh86/exportloopref v0.1.8/go.mod h1:1tUcJeiioIs7VWe5gcOObrux3lb66+sBqGZrRkMwPgg= +github.com/kyoh86/exportloopref v0.1.11 h1:1Z0bcmTypkL3Q4k+IDHMWTcnCliEZcaPiIe0/ymEyhQ= +github.com/kyoh86/exportloopref v0.1.11/go.mod h1:qkV4UF1zGl6EkF1ox8L5t9SwyeBAZ3qLMd6up458uqA= github.com/ldez/gomoddirectives v0.2.3 h1:y7MBaisZVDYmKvt9/l1mjNCiSA1BVn34U0ObUcJwlhA= github.com/ldez/gomoddirectives v0.2.3/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0= -github.com/ldez/tagliatelle v0.3.1 h1:3BqVVlReVUZwafJUwQ+oxbx2BEX2vUG4Yu/NOfMiKiM= -github.com/ldez/tagliatelle v0.3.1/go.mod h1:8s6WJQwEYHbKZDsp/LjArytKOG8qaMrKQQ3mFukHs88= +github.com/ldez/tagliatelle v0.4.0 h1:sylp7d9kh6AdXN2DpVGHBRb5guTVAgOxqNGhbqc4b1c= +github.com/ldez/tagliatelle v0.4.0/go.mod h1:mNtTfrHy2haaBAw+VT7IBV6VXBThS7TCreYWbBcJ87I= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= -github.com/leonklingele/grouper v1.1.0 h1:tC2y/ygPbMFSBOs3DcyaEMKnnwH7eYKzohOtRrf0SAg= -github.com/leonklingele/grouper v1.1.0/go.mod h1:uk3I3uDfi9B6PeUjsCKi6ndcf63Uy7snXgR4yDYQVDY= -github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/leonklingele/grouper v1.1.1 h1:suWXRU57D4/Enn6pXR0QVqqWWrnJ9Osrz+5rjt8ivzU= +github.com/leonklingele/grouper v1.1.1/go.mod h1:uk3I3uDfi9B6PeUjsCKi6ndcf63Uy7snXgR4yDYQVDY= github.com/lufeee/execinquery v1.2.1 h1:hf0Ems4SHcUGBxpGN7Jz78z1ppVkP/837ZlETPCEtOM= github.com/lufeee/execinquery v1.2.1/go.mod h1:EC7DrEKView09ocscGHC+apXMIaorh4xqSxS/dy8SbM= github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= @@ -456,23 +455,21 @@ github.com/matoous/godox v0.0.0-20210227103229-6504466cf951/go.mod h1:1BELzlh859 github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng= +github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= -github.com/mattn/go-sqlite3 
v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/mbilski/exhaustivestruct v1.2.0 h1:wCBmUnSYufAHO6J4AVWY6ff+oxWxsVFrwgOdMUQePUo= github.com/mbilski/exhaustivestruct v1.2.0/go.mod h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc= -github.com/mgechev/revive v1.2.4 h1:+2Hd/S8oO2H0Ikq2+egtNwQsVhAeELHjxjIUFX5ajLI= -github.com/mgechev/revive v1.2.4/go.mod h1:iAWlQishqCuj4yhV24FTnKSXGpbAA+0SckXB8GQMX/Q= +github.com/mgechev/revive v1.2.5 h1:UF9AR8pOAuwNmhXj2odp4mxv9Nx2qUIwVz8ZsU+Mbec= +github.com/mgechev/revive v1.2.5/go.mod h1:nFOXent79jMTISAfOAasKfy0Z2Ejq0WX7Qn/KAdYopI= github.com/mikefarah/yq/v4 v4.24.4 h1:QpfftVB8sAMVI/b1sOywovcztsPFYr8iFzdsPXOVPnQ= github.com/mikefarah/yq/v4 v4.24.4/go.mod h1:S+m9R9Qq17v0Mg/DtaESrbvfvrgbrOEMlEsSN57huV0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= @@ -498,11 +495,12 @@ github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81 github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4Ngq6aY7OE= github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 h1:4kuARK6Y6FxaNu/BnU2OAaLF86eTVhP2hjTB6iMvItA= github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354/go.mod h1:KSVJerMDfblTH7p5MZaTt+8zaT2iEk3AkVb9PQdZuE8= -github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= -github.com/nishanths/exhaustive v0.8.3 h1:pw5O09vwg8ZaditDp/nQRqVnrMczSJDxRDJMowvhsrM= -github.com/nishanths/exhaustive v0.8.3/go.mod h1:qj+zJJUgJ76tR92+25+03oYUhzF4R7/2Wk7fGTfCHmg= +github.com/nishanths/exhaustive v0.9.5 h1:TzssWan6orBiLYVqewCG8faud9qlFntJE30ACpzmGME= +github.com/nishanths/exhaustive v0.9.5/go.mod h1:IbwrGdVMizvDcIxPYGVdQn5BqWJaOwpCvg4RGb8r/TA= github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk= github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c= +github.com/nunnatsa/ginkgolinter v0.8.1 h1:/y4o/0hV+ruUHj4xXh89xlFjoaitnI4LnkpuYs02q1c= +github.com/nunnatsa/ginkgolinter v0.8.1/go.mod h1:FYYLtszIdmzCH8XMaMPyxPVXZ7VCaIm55bA+gugx+14= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= @@ -534,8 +532,6 @@ github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3v github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml/v2 v2.0.5 h1:ipoSadvV8oGUjnUbMub59IDPPwfxF694nG/jwbMiyQg= github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas= -github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA= -github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod 
h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -544,8 +540,8 @@ github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/polyfloyd/go-errorlint v1.0.5 h1:AHB5JRCjlmelh9RrLxT9sgzpalIwwq4hqE8EkwIwKdY= -github.com/polyfloyd/go-errorlint v1.0.5/go.mod h1:APVvOesVSAnne5SClsPxPdfvZTVDojXh1/G3qb5wjGI= +github.com/polyfloyd/go-errorlint v1.1.0 h1:VKoEFg5yxSgJ2yFPVhxW7oGz+f8/OVcuMeNvcPIi6Eg= +github.com/polyfloyd/go-errorlint v1.1.0/go.mod h1:Uss7Bc/izYG0leCMRx3WVlrpqWedSZk7V/FUQW6VJ6U= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= @@ -572,15 +568,10 @@ github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1 github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.9.0 h1:wzCHvIvM5SxWqYvwgVL7yJY8Lz3PKn49KQtpgMYJfhI= github.com/prometheus/procfs v0.9.0/go.mod h1:+pB4zwohETzFnmlpe6yd2lSc+0/46IYZRB/chUwxUZY= -github.com/quasilyte/go-ruleguard v0.3.1-0.20210203134552-1b5a410e1cc8/go.mod h1:KsAh3x0e7Fkpgs+Q9pNLS5XpFSvYCEVl5gP9Pp1xp30= -github.com/quasilyte/go-ruleguard v0.3.18 h1:sd+abO1PEI9fkYennwzHn9kl3nqP6M5vE7FiOzZ+5CE= -github.com/quasilyte/go-ruleguard v0.3.18/go.mod h1:lOIzcYlgxrQ2sGJ735EHXmf/e9MJ516j16K/Ifcttvs= -github.com/quasilyte/go-ruleguard/dsl v0.3.0/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= -github.com/quasilyte/go-ruleguard/dsl v0.3.21/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= -github.com/quasilyte/go-ruleguard/rules v0.0.0-20201231183845-9e62ed36efe1/go.mod h1:7JTjp89EGyU1d6XfBiXihJNG37wB2VRkd125Q1u7Plc= -github.com/quasilyte/go-ruleguard/rules v0.0.0-20211022131956-028d6511ab71/go.mod h1:4cgAphtvu7Ftv7vOT2ZOYhC6CvBxZixcasr8qIOTA50= -github.com/quasilyte/gogrep v0.0.0-20220828223005-86e4605de09f h1:6Gtn2i04RD0gVyYf2/IUMTIs+qYleBt4zxDqkLTcu4U= -github.com/quasilyte/gogrep v0.0.0-20220828223005-86e4605de09f/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng= +github.com/quasilyte/go-ruleguard v0.3.19 h1:tfMnabXle/HzOb5Xe9CUZYWXKfkS1KwRmZyPmD9nVcc= +github.com/quasilyte/go-ruleguard v0.3.19/go.mod h1:lHSn69Scl48I7Gt9cX3VrbsZYvYiBYszZOZW4A+oTEw= +github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo= +github.com/quasilyte/gogrep v0.5.0/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng= github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 h1:L8QM9bvf68pVdQ3bCFZMDmnt9yqcMBro1pC7F+IPYMY= github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95/go.mod 
h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs= @@ -588,18 +579,18 @@ github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8 github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryancurrah/gomodguard v1.2.4 h1:CpMSDKan0LtNGGhPrvupAoLeObRFjND8/tU1rEOtBp4= -github.com/ryancurrah/gomodguard v1.2.4/go.mod h1:+Kem4VjWwvFpUJRJSwa16s1tBJe+vbv02+naTow2f6M= -github.com/ryanrolds/sqlclosecheck v0.3.0 h1:AZx+Bixh8zdUBxUA1NxbxVAS78vTPq4rCb8OUZI9xFw= -github.com/ryanrolds/sqlclosecheck v0.3.0/go.mod h1:1gREqxyTGR3lVtpngyFo3hZAgk0KCtEdgEkHwDbigdA= -github.com/sanposhiho/wastedassign/v2 v2.0.6 h1:+6/hQIHKNJAUixEj6EmOngGIisyeI+T3335lYTyxRoA= -github.com/sanposhiho/wastedassign/v2 v2.0.6/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI= +github.com/ryancurrah/gomodguard v1.3.0 h1:q15RT/pd6UggBXVBuLps8BXRvl5GPBcwVA7BJHMLuTw= +github.com/ryancurrah/gomodguard v1.3.0/go.mod h1:ggBxb3luypPEzqVtq33ee7YSN35V28XeGnid8dnni50= +github.com/ryanrolds/sqlclosecheck v0.4.0 h1:i8SX60Rppc1wRuyQjMciLqIzV3xnoHB7/tXbr6RGYNI= +github.com/ryanrolds/sqlclosecheck v0.4.0/go.mod h1:TBRRjzL31JONc9i4XMinicuo+s+E8yKZ5FN8X3G6CKQ= +github.com/sanposhiho/wastedassign/v2 v2.0.7 h1:J+6nrY4VW+gC9xFzUc+XjPD3g3wF3je/NsJFwFK7Uxc= +github.com/sanposhiho/wastedassign/v2 v2.0.7/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI= github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tMEOsumirXcOJqAw= github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ= -github.com/sashamelentyev/usestdlibvars v1.20.0 h1:K6CXjqqtSYSsuyRDDC7Sjn6vTMLiSJa4ZmDkiokoqtw= -github.com/sashamelentyev/usestdlibvars v1.20.0/go.mod h1:0GaP+ecfZMXShS0A94CJn6aEuPRILv8h/VuWI9n1ygg= -github.com/securego/gosec/v2 v2.13.1 h1:7mU32qn2dyC81MH9L2kefnQyRMUarfDER3iQyMHcjYM= -github.com/securego/gosec/v2 v2.13.1/go.mod h1:EO1sImBMBWFjOTFzMWfTRrZW6M15gm60ljzrmy/wtHo= +github.com/sashamelentyev/usestdlibvars v1.23.0 h1:01h+/2Kd+NblNItNeux0veSL5cBF1jbEOPrEhDzGYq0= +github.com/sashamelentyev/usestdlibvars v1.23.0/go.mod h1:YPwr/Y1LATzHI93CqoPUN/2BzGQ/6N/cl/KwgR0B/aU= +github.com/securego/gosec/v2 v2.15.0 h1:v4Ym7FF58/jlykYmmhZ7mTm7FQvN/setNm++0fgIAtw= +github.com/securego/gosec/v2 v2.15.0/go.mod h1:VOjTrZOkUtSDt2QLSJmQBMWnvwiQPEjg0l+5juIqGk8= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU= github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs= @@ -614,12 +605,12 @@ github.com/sivchari/containedctx v1.0.2 h1:0hLQKpgC53OVF1VT7CeoFHk9YKstur1XOgfYI github.com/sivchari/containedctx v1.0.2/go.mod h1:PwZOeqm4/DLoJOqMSIJs3aKqXRX4YO+uXww087KZ7Bw= github.com/sivchari/nosnakecase v1.7.0 h1:7QkpWIRMe8x25gckkFd2A5Pi6Ymo0qgr4JrhGt95do8= github.com/sivchari/nosnakecase v1.7.0/go.mod 
h1:CwDzrzPea40/GB6uynrNLiorAlgFRvRbFSgJx2Gs+QY= -github.com/sivchari/tenv v1.7.0 h1:d4laZMBK6jpe5PWepxlV9S+LC0yXqvYHiq8E6ceoVVE= -github.com/sivchari/tenv v1.7.0/go.mod h1:64yStXKSOxDfX47NlhVwND4dHwfZDdbp2Lyl018Icvg= +github.com/sivchari/tenv v1.7.1 h1:PSpuD4bu6fSmtWMxSGWcvqUUgIn7k3yOJhOIzVWn8Ak= +github.com/sivchari/tenv v1.7.1/go.mod h1:64yStXKSOxDfX47NlhVwND4dHwfZDdbp2Lyl018Icvg= github.com/sonatard/noctx v0.0.1 h1:VC1Qhl6Oxx9vvWo3UDgrGXYCeKCe3Wbw7qAWL6FrmTY= github.com/sonatard/noctx v0.0.1/go.mod h1:9D2D/EoULe8Yy2joDHJj7bv3sZoq9AaSb8B4lqBjiZI= -github.com/sourcegraph/go-diff v0.6.1 h1:hmA1LzxW0n1c3Q4YbrFgg4P99GSnebYa3x8gr0HZqLQ= -github.com/sourcegraph/go-diff v0.6.1/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= +github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0= +github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= github.com/spf13/afero v1.8.2 h1:xehSyVa0YnHWsJ49JFljMpg1HX19V6NDZ1fkm1Xznbo= github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= @@ -655,6 +646,8 @@ github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs= github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= +github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c h1:+aPplBwWcHBo6q9xrfWdMrT9o4kltkmmvpemgIjep/8= +github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c/go.mod h1:SbErYREK7xXdsRiigaQiQkI9McGRzYMvlKYaP3Nimdk= github.com/tdakkota/asciicheck v0.1.1 h1:PKzG7JUTUmVspQTDqtkX9eSiLGossXTybutHwTXuO0A= github.com/tdakkota/asciicheck v0.1.1/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM= github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA= @@ -663,18 +656,18 @@ github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpR github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY= github.com/tetafro/godot v1.4.11 h1:BVoBIqAf/2QdbFmSwAWnaIqDivZdOV0ZRwEm6jivLKw= github.com/tetafro/godot v1.4.11/go.mod h1:LR3CJpxDVGlYOWn3ZZg1PgNZdTUvzsZWu8xaEohUpn8= -github.com/timakin/bodyclose v0.0.0-20210704033933-f49887972144 h1:kl4KhGNsJIbDHS9/4U9yQo1UcPQM0kOMJHn29EoH/Ro= -github.com/timakin/bodyclose v0.0.0-20210704033933-f49887972144/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= +github.com/timakin/bodyclose v0.0.0-20221125081123-e39cf3fc478e h1:MV6KaVu/hzByHP0UvJ4HcMGE/8a6A4Rggc/0wx2AvJo= +github.com/timakin/bodyclose v0.0.0-20221125081123-e39cf3fc478e/go.mod h1:27bSVNWSBOHm+qRp1T9qzaIpsWEP6TbUnei/43HK+PQ= github.com/timonwong/loggercheck v0.9.3 h1:ecACo9fNiHxX4/Bc02rW2+kaJIAMAes7qJ7JKxt0EZI= github.com/timonwong/loggercheck v0.9.3/go.mod h1:wUqnk9yAOIKtGA39l1KLE9Iz0QiTocu/YZoOf+OzFdw= github.com/timtadh/data-structures v0.5.3 h1:F2tEjoG9qWIyUjbvXVgJqEOGJPMIiYn7U5W5mE+i/vQ= github.com/timtadh/data-structures v0.5.3/go.mod h1:9R4XODhJ8JdWFEI8P/HJKqxuJctfBQw6fDibMQny2oU= 
github.com/timtadh/lexmachine v0.2.2 h1:g55RnjdYazm5wnKv59pwFcBJHOyvTPfDEoz21s4PHmY= github.com/timtadh/lexmachine v0.2.2/go.mod h1:GBJvD5OAfRn/gnp92zb9KTgHLB7akKyxmVivoYCcjQI= -github.com/tomarrell/wrapcheck/v2 v2.6.2 h1:3dI6YNcrJTQ/CJQ6M/DUkc0gnqYSIk6o0rChn9E/D0M= -github.com/tomarrell/wrapcheck/v2 v2.6.2/go.mod h1:ao7l5p0aOlUNJKI0qVwB4Yjlqutd0IvAB9Rdwyilxvg= -github.com/tommy-muehle/go-mnd/v2 v2.5.0 h1:iAj0a8e6+dXSL7Liq0aXPox36FiN1dBbjA6lt9fl65s= -github.com/tommy-muehle/go-mnd/v2 v2.5.0/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= +github.com/tomarrell/wrapcheck/v2 v2.8.0 h1:qDzbir0xmoE+aNxGCPrn+rUSxAX+nG6vREgbbXAR81I= +github.com/tomarrell/wrapcheck/v2 v2.8.0/go.mod h1:ao7l5p0aOlUNJKI0qVwB4Yjlqutd0IvAB9Rdwyilxvg= +github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw= +github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= github.com/ultraware/funlen v0.0.3 h1:5ylVWm8wsNwH5aWo9438pwvsK0QiqVuUrt9bn7S/iLA= github.com/ultraware/funlen v0.0.3/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= github.com/ultraware/whitespace v0.0.5 h1:hh+/cpIcopyMYbZNVov9iSxvJU3OYQg78Sfaqzi/CzI= @@ -721,6 +714,7 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -734,8 +728,8 @@ golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMk golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e h1:+WEEuIdZHnUeJJmEUjyYC2gfUMj69yZXw17EnHg/otA= golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e/go.mod h1:Kr81I6Kryrl9sr8s2FK3vxD90NdsKWRuOIl2O4CvYbA= golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/exp/typeparams v0.0.0-20220827204233-334a2380cb91 h1:Ic/qN6TEifvObMGQy72k0n1LlJr7DjWWEi+MOsDOiSk= -golang.org/x/exp/typeparams v0.0.0-20220827204233-334a2380cb91/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9 h1:6WHiuFL9FNjg8RljAaT7FNUuKDbvMqS1i5cr2OE2sLQ= +golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -763,6 +757,8 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod 
v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI= +golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.10.0 h1:lFO9qtOdlre5W1jxS3r/4szv2/6iXxScdzjoBMXNhYk= golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -804,6 +800,10 @@ golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96b golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= +golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= +golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -830,6 +830,7 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI= golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -884,7 +885,6 @@ golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -894,10 +894,18 @@ golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= +golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= +golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/term v0.8.0 h1:n5xxQn2i3PC0yLAbjTpNT85q/Kgzcr2gIoX9OrJUols= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -908,6 +916,9 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -925,7 +936,6 @@ golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3 golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190321232350-e250d351ecad/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -935,7 +945,6 @@ golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190916130336-e45ffcd953cc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -965,25 +974,20 @@ golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200625211823-6506e20df31f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200812195022-5ae4c3c160a0/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= -golang.org/x/tools v0.0.0-20201002184944-ecd9fd270d5d/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201230224404-63754364767c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= @@ -992,11 +996,14 @@ golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0t golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= golang.org/x/tools 
v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.9-0.20211228192929-ee1ca4ffc4da/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA= +golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= +golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ= +golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k= golang.org/x/tools v0.9.3 h1:Gn1I8+64MsuTb/HpH+LmQtNas23LhUVr3rYZ0eKuaMM= golang.org/x/tools v0.9.3/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1105,7 +1112,6 @@ gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLks gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= @@ -1137,8 +1143,8 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.3.3 h1:oDx7VAwstgpYpb3wv0oxiZlxY+foCpRAwY7Vk6XpAgA= -honnef.co/go/tools v0.3.3/go.mod h1:jzwdWgg7Jdq75wlfblQxO4neNaFFSvgc1tD5Wv8U0Yw= +honnef.co/go/tools v0.4.2 h1:6qXr+R5w+ktL5UkwEbPp+fEvfyoMPche6GkOpGHZcLc= +honnef.co/go/tools v0.4.2/go.mod h1:36ZgoUOrqOk1GxwHhyryEkq8FQWkUO2xGuSMhUCcdvA= k8s.io/api v0.27.3 h1:yR6oQXXnUEBWEWcvPWS0jQL575KoAboQPfJAuKNrw5Y= k8s.io/api v0.27.3/go.mod h1:C4BNvZnQOF7JA/0Xed2S+aUyJSfTGkGFxLXz9MnpIpg= k8s.io/apiextensions-apiserver v0.27.2 h1:iwhyoeS4xj9Y7v8YExhUwbVuBhMr3Q4bd/laClBV6Bo= @@ -1161,8 +1167,8 @@ mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wp mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= -mvdan.cc/unparam v0.0.0-20220706161116-678bad134442 h1:seuXWbRB1qPrS3NQnHmFKLJLtskWyueeIzmLXghMGgk= -mvdan.cc/unparam v0.0.0-20220706161116-678bad134442/go.mod h1:F/Cxw/6mVrNKqrR2YjFf5CaW0Bw4RL8RfbEf4GRggJk= 
+mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d h1:3rvTIIM22r9pvXk+q3swxUQAQOxksVMGK7sml4nG57w= +mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d/go.mod h1:IeHQjmn6TOD+e4Z3RFiZMMsLVL+A96Nvptar8Fj71is= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/vendor/4d63.com/gocheckcompilerdirectives/LICENSE b/vendor/4d63.com/gocheckcompilerdirectives/LICENSE new file mode 100644 index 00000000..3f12625b --- /dev/null +++ b/vendor/4d63.com/gocheckcompilerdirectives/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Leigh McCulloch + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/4d63.com/gocheckcompilerdirectives/checkcompilerdirectives/checkcompilerdirectives.go b/vendor/4d63.com/gocheckcompilerdirectives/checkcompilerdirectives/checkcompilerdirectives.go new file mode 100644 index 00000000..19948c45 --- /dev/null +++ b/vendor/4d63.com/gocheckcompilerdirectives/checkcompilerdirectives/checkcompilerdirectives.go @@ -0,0 +1,105 @@ +package checkcompilerdirectives + +import ( + "strings" + + "golang.org/x/tools/go/analysis" +) + +func Analyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "gocheckcompilerdirectives", + Doc: "Checks that go compiler directive comments (//go:) are valid.", + Run: run, + } +} + +func run(pass *analysis.Pass) (interface{}, error) { + for _, file := range pass.Files { + for _, group := range file.Comments { + for _, comment := range group.List { + text := comment.Text + if !strings.HasPrefix(text, "//") { + continue + } + start := 2 + spaces := 0 + for _, c := range text[start:] { + if c == ' ' { + spaces++ + continue + } + break + } + start += spaces + if !strings.HasPrefix(text[start:], "go:") { + continue + } + start += 3 + end := strings.Index(text[start:], " ") + if end == -1 { + continue + } + directive := text[start : start+end] + if len(directive) == 0 { + continue + } + prefix := text[:start+end] + // Leading whitespace will cause the go directive to be ignored + // by the compiler with no error, causing it not to work. This + // is an easy mistake. + if spaces > 0 { + pass.ReportRangef(comment, "compiler directive contains space: %s", prefix) + } + // If the directive is unknown it will be ignored by the + // compiler with no error. This is an easy mistake to make, + // especially if you typo a directive. 
+ if !isKnown(directive) { + pass.ReportRangef(comment, "compiler directive unrecognized: %s", prefix) + } + } + } + } + return nil, nil +} + +func isKnown(directive string) bool { + for _, k := range known { + if directive == k { + return true + } + } + return false +} + +var known = []string{ + // Found by running the following command on the source of go. + // git grep -o -E -h '//go:[a-z_]+' -- ':!**/*_test.go' ':!test/' ':!**/testdata/**' | sort -u + "binary", + "build", + "buildsomethingelse", + "cgo_dynamic_linker", + "cgo_export_dynamic", + "cgo_export_static", + "cgo_import_dynamic", + "cgo_import_static", + "cgo_ldflag", + "cgo_unsafe_args", + "embed", + "generate", + "linkname", + "name", + "nocheckptr", + "noescape", + "noinline", + "nointerface", + "norace", + "nosplit", + "notinheap", + "nowritebarrier", + "nowritebarrierrec", + "systemstack", + "uintptrescapes", + "uintptrkeepalive", + "yeswritebarrierrec", +} diff --git a/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go b/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go index 9ae889d4..edf9193e 100644 --- a/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go +++ b/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go @@ -5,6 +5,7 @@ import ( "fmt" "go/ast" "go/token" + "go/types" "strings" "golang.org/x/tools/go/analysis" @@ -48,12 +49,12 @@ func flags() flag.FlagSet { return *flags } -func isAllowed(cm ast.CommentMap, v ast.Node) bool { +func isAllowed(cm ast.CommentMap, v ast.Node, ti *types.Info) bool { switch i := v.(type) { case *ast.GenDecl: return hasEmbedComment(cm, i) case *ast.Ident: - return i.Name == "_" || i.Name == "version" || looksLikeError(i) || identHasEmbedComment(cm, i) + return i.Name == "_" || i.Name == "version" || isError(i, ti) || identHasEmbedComment(cm, i) case *ast.CallExpr: if expr, ok := i.Fun.(*ast.SelectorExpr); ok { return isAllowedSelectorExpression(expr) @@ -86,10 +87,14 @@ func isAllowedSelectorExpression(v *ast.SelectorExpr) bool { return false } +// isError reports whether the AST identifier looks like +// an error and implements the error interface. +func isError(i *ast.Ident, ti *types.Info) bool { + return looksLikeError(i) && implementsError(i, ti) +} + // looksLikeError returns true if the AST identifier starts // with 'err' or 'Err', or false otherwise. -// -// TODO: https://github.com/leighmcculloch/gochecknoglobals/issues/5 func looksLikeError(i *ast.Ident) bool { prefix := "err" if i.IsExported() { @@ -98,6 +103,14 @@ func looksLikeError(i *ast.Ident) bool { return strings.HasPrefix(i.Name, prefix) } +// implementsError reports whether the AST identifier +// implements the error interface. 
+func implementsError(i *ast.Ident, ti *types.Info) bool { + t := ti.TypeOf(i) + et := types.Universe.Lookup("error").Type().Underlying().(*types.Interface) + return types.Implements(t, et) +} + func identHasEmbedComment(cm ast.CommentMap, i *ast.Ident) bool { if i.Obj == nil { return false @@ -146,7 +159,7 @@ func checkNoGlobals(pass *analysis.Pass) (interface{}, error) { if genDecl.Tok != token.VAR { continue } - if isAllowed(fileCommentMap, genDecl) { + if isAllowed(fileCommentMap, genDecl, pass.TypesInfo) { continue } for _, spec := range genDecl.Specs { @@ -154,7 +167,7 @@ func checkNoGlobals(pass *analysis.Pass) (interface{}, error) { onlyAllowedValues := false for _, vn := range valueSpec.Values { - if isAllowed(fileCommentMap, vn) { + if isAllowed(fileCommentMap, vn, pass.TypesInfo) { onlyAllowedValues = true continue } @@ -168,7 +181,7 @@ func checkNoGlobals(pass *analysis.Pass) (interface{}, error) { } for _, vn := range valueSpec.Names { - if isAllowed(fileCommentMap, vn) { + if isAllowed(fileCommentMap, vn, pass.TypesInfo) { continue } diff --git a/vendor/github.com/Abirdcfly/dupword/README.md b/vendor/github.com/Abirdcfly/dupword/README.md index f12c53e2..6917acae 100644 --- a/vendor/github.com/Abirdcfly/dupword/README.md +++ b/vendor/github.com/Abirdcfly/dupword/README.md @@ -138,7 +138,7 @@ $ dupword --keyword=the,and,a -fix ./... ## TODO -- [ ] add this linter to golangci-lint +- [x] add this linter to golangci-lint - [ ] rewrite the detection logic to make it more efficient ## Limitation diff --git a/vendor/github.com/Abirdcfly/dupword/dupword.go b/vendor/github.com/Abirdcfly/dupword/dupword.go index 7b8eb712..508caca5 100644 --- a/vendor/github.com/Abirdcfly/dupword/dupword.go +++ b/vendor/github.com/Abirdcfly/dupword/dupword.go @@ -289,6 +289,9 @@ func (a *analyzer) Check(raw string) (update string, keyword string, find bool) // e.g. %s, should not be reported. func ExcludeWords(word string) (exclude bool) { firstRune, _ := utf8.DecodeRuneInString(word) + if unicode.IsDigit(firstRune) { + return true + } if unicode.IsPunct(firstRune) { return true } diff --git a/vendor/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go index 09523315..0ca1dc4f 100644 --- a/vendor/github.com/BurntSushi/toml/decode.go +++ b/vendor/github.com/BurntSushi/toml/decode.go @@ -21,7 +21,9 @@ type Unmarshaler interface { UnmarshalTOML(interface{}) error } -// Unmarshal decodes the contents of `data` in TOML format into a pointer `v`. +// Unmarshal decodes the contents of data in TOML format into a pointer v. +// +// See [Decoder] for a description of the decoding process. func Unmarshal(data []byte, v interface{}) error { _, err := NewDecoder(bytes.NewReader(data)).Decode(v) return err @@ -29,13 +31,12 @@ func Unmarshal(data []byte, v interface{}) error { // Decode the TOML data in to the pointer v. // -// See the documentation on Decoder for a description of the decoding process. +// See [Decoder] for a description of the decoding process. func Decode(data string, v interface{}) (MetaData, error) { return NewDecoder(strings.NewReader(data)).Decode(v) } -// DecodeFile is just like Decode, except it will automatically read the -// contents of the file at path and decode it for you. +// DecodeFile reads the contents of a file and decodes it with [Decode]. 
func DecodeFile(path string, v interface{}) (MetaData, error) { fp, err := os.Open(path) if err != nil { @@ -48,7 +49,7 @@ func DecodeFile(path string, v interface{}) (MetaData, error) { // Primitive is a TOML value that hasn't been decoded into a Go value. // // This type can be used for any value, which will cause decoding to be delayed. -// You can use the PrimitiveDecode() function to "manually" decode these values. +// You can use [PrimitiveDecode] to "manually" decode these values. // // NOTE: The underlying representation of a `Primitive` value is subject to // change. Do not rely on it. @@ -70,15 +71,15 @@ const ( // Decoder decodes TOML data. // -// TOML tables correspond to Go structs or maps (dealer's choice – they can be -// used interchangeably). +// TOML tables correspond to Go structs or maps; they can be used +// interchangeably, but structs offer better type safety. // // TOML table arrays correspond to either a slice of structs or a slice of maps. // -// TOML datetimes correspond to Go time.Time values. Local datetimes are parsed -// in the local timezone. +// TOML datetimes correspond to [time.Time]. Local datetimes are parsed in the +// local timezone. // -// time.Duration types are treated as nanoseconds if the TOML value is an +// [time.Duration] types are treated as nanoseconds if the TOML value is an // integer, or they're parsed with time.ParseDuration() if they're strings. // // All other TOML types (float, string, int, bool and array) correspond to the @@ -90,7 +91,7 @@ const ( // UnmarshalText method. See the Unmarshaler example for a demonstration with // email addresses. // -// Key mapping +// ### Key mapping // // TOML keys can map to either keys in a Go map or field names in a Go struct. // The special `toml` struct tag can be used to map TOML keys to struct fields @@ -168,17 +169,16 @@ func (dec *Decoder) Decode(v interface{}) (MetaData, error) { return md, md.unify(p.mapping, rv) } -// PrimitiveDecode is just like the other `Decode*` functions, except it -// decodes a TOML value that has already been parsed. Valid primitive values -// can *only* be obtained from values filled by the decoder functions, -// including this method. (i.e., `v` may contain more `Primitive` -// values.) +// PrimitiveDecode is just like the other Decode* functions, except it decodes a +// TOML value that has already been parsed. Valid primitive values can *only* be +// obtained from values filled by the decoder functions, including this method. +// (i.e., v may contain more [Primitive] values.) // -// Meta data for primitive values is included in the meta data returned by -// the `Decode*` functions with one exception: keys returned by the Undecoded -// method will only reflect keys that were decoded. Namely, any keys hidden -// behind a Primitive will be considered undecoded. Executing this method will -// update the undecoded keys in the meta data. (See the example.) +// Meta data for primitive values is included in the meta data returned by the +// Decode* functions with one exception: keys returned by the Undecoded method +// will only reflect keys that were decoded. Namely, any keys hidden behind a +// Primitive will be considered undecoded. Executing this method will update the +// undecoded keys in the meta data. (See the example.) 
func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error { md.context = primValue.context defer func() { md.context = nil }() diff --git a/vendor/github.com/BurntSushi/toml/decode_go116.go b/vendor/github.com/BurntSushi/toml/decode_go116.go index eddfb641..086d0b68 100644 --- a/vendor/github.com/BurntSushi/toml/decode_go116.go +++ b/vendor/github.com/BurntSushi/toml/decode_go116.go @@ -7,8 +7,8 @@ import ( "io/fs" ) -// DecodeFS is just like Decode, except it will automatically read the contents -// of the file at `path` from a fs.FS instance. +// DecodeFS reads the contents of a file from [fs.FS] and decodes it with +// [Decode]. func DecodeFS(fsys fs.FS, path string, v interface{}) (MetaData, error) { fp, err := fsys.Open(path) if err != nil { diff --git a/vendor/github.com/BurntSushi/toml/doc.go b/vendor/github.com/BurntSushi/toml/doc.go index 099c4a77..81a7c0fe 100644 --- a/vendor/github.com/BurntSushi/toml/doc.go +++ b/vendor/github.com/BurntSushi/toml/doc.go @@ -1,13 +1,11 @@ -/* -Package toml implements decoding and encoding of TOML files. - -This package supports TOML v1.0.0, as listed on https://toml.io - -There is also support for delaying decoding with the Primitive type, and -querying the set of keys in a TOML document with the MetaData type. - -The github.com/BurntSushi/toml/cmd/tomlv package implements a TOML validator, -and can be used to verify if TOML document is valid. It can also be used to -print the type of each key. -*/ +// Package toml implements decoding and encoding of TOML files. +// +// This package supports TOML v1.0.0, as specified at https://toml.io +// +// There is also support for delaying decoding with the Primitive type, and +// querying the set of keys in a TOML document with the MetaData type. +// +// The github.com/BurntSushi/toml/cmd/tomlv package implements a TOML validator, +// and can be used to verify if TOML document is valid. It can also be used to +// print the type of each key. package toml diff --git a/vendor/github.com/BurntSushi/toml/encode.go b/vendor/github.com/BurntSushi/toml/encode.go index dc8568d1..930e1d52 100644 --- a/vendor/github.com/BurntSushi/toml/encode.go +++ b/vendor/github.com/BurntSushi/toml/encode.go @@ -79,12 +79,12 @@ type Marshaler interface { // Encoder encodes a Go to a TOML document. // // The mapping between Go values and TOML values should be precisely the same as -// for the Decode* functions. +// for [Decode]. // // time.Time is encoded as a RFC 3339 string, and time.Duration as its string // representation. // -// The toml.Marshaler and encoder.TextMarshaler interfaces are supported to +// The [Marshaler] and [encoding.TextMarshaler] interfaces are supported to // encoding the value as custom TOML. // // If you want to write arbitrary binary data then you will need to use @@ -130,7 +130,7 @@ func NewEncoder(w io.Writer) *Encoder { } } -// Encode writes a TOML representation of the Go value to the Encoder's writer. +// Encode writes a TOML representation of the Go value to the [Encoder]'s writer. // // An error is returned if the value given cannot be encoded to a valid TOML // document. 
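The [Decoder] and [Encoder] doc rewrites above describe how TOML tables map onto Go structs through `toml` struct tags. A minimal round-trip sketch of that behaviour, using the canonical github.com/BurntSushi/toml import path; the config struct and its field names are illustrative, not from this repository:

```go
package main

import (
	"fmt"
	"os"

	"github.com/BurntSushi/toml"
)

// Illustrative shape: TOML keys bind to fields via the `toml` tag.
type config struct {
	Name  string `toml:"name"`
	Ports []int  `toml:"ports"`
}

func main() {
	doc := "name = \"api\"\nports = [8080, 8443]\n"

	// Decode a TOML document into the struct.
	var c config
	if _, err := toml.Decode(doc, &c); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", c)

	// Round-trip back to TOML with the Encoder documented above.
	if err := toml.NewEncoder(os.Stdout).Encode(c); err != nil {
		panic(err)
	}
}
```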
@@ -261,7 +261,7 @@ func (enc *Encoder) eElement(rv reflect.Value) { enc.eElement(reflect.ValueOf(v)) return } - encPanic(errors.New(fmt.Sprintf("Unable to convert \"%s\" to neither int64 nor float64", n))) + encPanic(fmt.Errorf("unable to convert %q to int64 or float64", n)) } switch rv.Kind() { @@ -504,7 +504,8 @@ func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { if opts.name != "" { keyName = opts.name } - if opts.omitempty && isEmpty(fieldVal) { + + if opts.omitempty && enc.isEmpty(fieldVal) { continue } if opts.omitzero && isZero(fieldVal) { @@ -648,12 +649,26 @@ func isZero(rv reflect.Value) bool { return false } -func isEmpty(rv reflect.Value) bool { +func (enc *Encoder) isEmpty(rv reflect.Value) bool { switch rv.Kind() { case reflect.Array, reflect.Slice, reflect.Map, reflect.String: return rv.Len() == 0 case reflect.Struct: - return reflect.Zero(rv.Type()).Interface() == rv.Interface() + if rv.Type().Comparable() { + return reflect.Zero(rv.Type()).Interface() == rv.Interface() + } + // Need to also check if all the fields are empty, otherwise something + // like this with uncomparable types will always return true: + // + // type a struct{ field b } + // type b struct{ s []string } + // s := a{field: b{s: []string{"AAA"}}} + for i := 0; i < rv.NumField(); i++ { + if !enc.isEmpty(rv.Field(i)) { + return false + } + } + return true case reflect.Bool: return !rv.Bool() } @@ -668,16 +683,15 @@ func (enc *Encoder) newline() { // Write a key/value pair: // -// key = +// key = // // This is also used for "k = v" in inline tables; so something like this will // be written in three calls: // -// ┌────────────────────┐ -// │ ┌───┐ ┌─────┐│ -// v v v v vv -// key = {k = v, k2 = v2} -// +// ┌───────────────────┐ +// │ ┌───┐ ┌────┐│ +// v v v v vv +// key = {k = 1, k2 = 2} func (enc *Encoder) writeKeyValue(key Key, val reflect.Value, inline bool) { if len(key) == 0 { encPanic(errNoKey) diff --git a/vendor/github.com/BurntSushi/toml/error.go b/vendor/github.com/BurntSushi/toml/error.go index 2ac24e77..f4f390e6 100644 --- a/vendor/github.com/BurntSushi/toml/error.go +++ b/vendor/github.com/BurntSushi/toml/error.go @@ -5,57 +5,60 @@ import ( "strings" ) -// ParseError is returned when there is an error parsing the TOML syntax. -// -// For example invalid syntax, duplicate keys, etc. +// ParseError is returned when there is an error parsing the TOML syntax such as +// invalid syntax, duplicate keys, etc. // // In addition to the error message itself, you can also print detailed location -// information with context by using ErrorWithPosition(): +// information with context by using [ErrorWithPosition]: // -// toml: error: Key 'fruit' was already created and cannot be used as an array. +// toml: error: Key 'fruit' was already created and cannot be used as an array. 
// -// At line 4, column 2-7: +// At line 4, column 2-7: // -// 2 | fruit = [] -// 3 | -// 4 | [[fruit]] # Not allowed -// ^^^^^ +// 2 | fruit = [] +// 3 | +// 4 | [[fruit]] # Not allowed +// ^^^^^ // -// Furthermore, the ErrorWithUsage() can be used to print the above with some -// more detailed usage guidance: +// [ErrorWithUsage] can be used to print the above with some more detailed usage +// guidance: // -// toml: error: newlines not allowed within inline tables +// toml: error: newlines not allowed within inline tables // -// At line 1, column 18: +// At line 1, column 18: // -// 1 | x = [{ key = 42 # -// ^ +// 1 | x = [{ key = 42 # +// ^ // -// Error help: +// Error help: // -// Inline tables must always be on a single line: +// Inline tables must always be on a single line: // -// table = {key = 42, second = 43} +// table = {key = 42, second = 43} // -// It is invalid to split them over multiple lines like so: +// It is invalid to split them over multiple lines like so: // -// # INVALID -// table = { -// key = 42, -// second = 43 -// } +// # INVALID +// table = { +// key = 42, +// second = 43 +// } // -// Use regular for this: +// Use regular for this: // -// [table] -// key = 42 -// second = 43 +// [table] +// key = 42 +// second = 43 type ParseError struct { Message string // Short technical message. Usage string // Longer message with usage guidance; may be blank. Position Position // Position of the error LastKey string // Last parsed key, may be blank. - Line int // Line the error occurred. Deprecated: use Position. + + // Line the error occurred. + // + // Deprecated: use [Position]. + Line int err error input string @@ -83,7 +86,7 @@ func (pe ParseError) Error() string { // ErrorWithUsage() returns the error with detailed location context. // -// See the documentation on ParseError. +// See the documentation on [ParseError]. func (pe ParseError) ErrorWithPosition() string { if pe.input == "" { // Should never happen, but just in case. return pe.Error() @@ -124,7 +127,7 @@ func (pe ParseError) ErrorWithPosition() string { // ErrorWithUsage() returns the error with detailed location context and usage // guidance. // -// See the documentation on ParseError. +// See the documentation on [ParseError]. func (pe ParseError) ErrorWithUsage() string { m := pe.ErrorWithPosition() if u, ok := pe.err.(interface{ Usage() string }); ok && u.Usage() != "" { diff --git a/vendor/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go index 28ed4dd3..d4d70871 100644 --- a/vendor/github.com/BurntSushi/toml/lex.go +++ b/vendor/github.com/BurntSushi/toml/lex.go @@ -771,7 +771,7 @@ func lexRawString(lx *lexer) stateFn { } // lexMultilineRawString consumes a raw string. Nothing can be escaped in such -// a string. It assumes that the beginning "'''" has already been consumed and +// a string. It assumes that the beginning ''' has already been consumed and // ignored. func lexMultilineRawString(lx *lexer) stateFn { r := lx.next() diff --git a/vendor/github.com/BurntSushi/toml/meta.go b/vendor/github.com/BurntSushi/toml/meta.go index d284f2a0..71847a04 100644 --- a/vendor/github.com/BurntSushi/toml/meta.go +++ b/vendor/github.com/BurntSushi/toml/meta.go @@ -71,7 +71,7 @@ func (md *MetaData) Keys() []Key { // Undecoded returns all keys that have not been decoded in the order in which // they appear in the original TOML document. // -// This includes keys that haven't been decoded because of a Primitive value. 
+// This includes keys that haven't been decoded because of a [Primitive] value. // Once the Primitive value is decoded, the keys will be considered decoded. // // Also note that decoding into an empty interface will result in no decoding, @@ -89,7 +89,7 @@ func (md *MetaData) Undecoded() []Key { return undecoded } -// Key represents any TOML key, including key groups. Use (MetaData).Keys to get +// Key represents any TOML key, including key groups. Use [MetaData.Keys] to get // values of this type. type Key []string diff --git a/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go b/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go index 17740faa..9b376540 100644 --- a/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go +++ b/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go @@ -7,6 +7,7 @@ import ( "go/ast" "go/printer" "go/token" + "go/types" "log" "regexp" "strings" @@ -16,6 +17,7 @@ import ( type Issue interface { Details() string + Pos() token.Pos Position() token.Position String() string } @@ -23,6 +25,7 @@ type Issue interface { type UsedIssue struct { identifier string pattern string + pos token.Pos position token.Position customMsg string } @@ -39,6 +42,10 @@ func (a UsedIssue) Position() token.Position { return a.position } +func (a UsedIssue) Pos() token.Pos { + return a.pos +} + func (a UsedIssue) String() string { return toString(a) } func toString(i UsedIssue) string { @@ -91,19 +98,43 @@ type visitor struct { linter *Linter comments []*ast.CommentGroup - fset *token.FileSet - issues []Issue + runConfig RunConfig + issues []Issue } +// Deprecated: Run was the original entrypoint before RunWithConfig was introduced to support +// additional match patterns that need additional information. func (l *Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) { - var issues []Issue //nolint:prealloc // we don't know how many there will be + return l.RunWithConfig(RunConfig{Fset: fset}, nodes...) +} + +// RunConfig provides information that the linter needs for different kinds +// of match patterns. Ideally, all fields should get set. More fields may get +// added in the future as needed. +type RunConfig struct { + // FSet is required. + Fset *token.FileSet + + // TypesInfo is needed for expanding source code expressions. + // Nil disables that step, i.e. patterns match the literal source code. + TypesInfo *types.Info + + // DebugLog is used to print debug messages. May be nil. + DebugLog func(format string, args ...interface{}) +} + +func (l *Linter) RunWithConfig(config RunConfig, nodes ...ast.Node) ([]Issue, error) { + if config.DebugLog == nil { + config.DebugLog = func(format string, args ...interface{}) {} + } + var issues []Issue for _, node := range nodes { var comments []*ast.CommentGroup isTestFile := false isWholeFileExample := false if file, ok := node.(*ast.File); ok { comments = file.Comments - fileName := fset.Position(file.Pos()).Filename + fileName := config.Fset.Position(file.Pos()).Filename isTestFile = strings.HasSuffix(fileName, "_test.go") // From https://blog.golang.org/examples, a "whole file example" is: @@ -139,7 +170,7 @@ func (l *Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) { cfg: l.cfg, isTestFile: isTestFile, linter: l, - fset: fset, + runConfig: config, comments: comments, } ast.Walk(&visitor, node) @@ -157,40 +188,169 @@ func (v *visitor) Visit(node ast.Node) ast.Visitor { return nil } return v + // The following two are handled below. 
case *ast.SelectorExpr: case *ast.Ident: + // Everything else isn't. default: return v } + + // The text as it appears in the source is always used because issues + // use that. It's used for matching unless usage of type information + // is enabled. + srcText := v.textFor(node) + matchTexts, pkgText := v.expandMatchText(node, srcText) + v.runConfig.DebugLog("%s: match %v, package %q", v.runConfig.Fset.Position(node.Pos()), matchTexts, pkgText) for _, p := range v.linter.patterns { - if p.pattern.MatchString(v.textFor(node)) && !v.permit(node) { + if p.matches(matchTexts) && + (p.Package == "" || p.pkgRe.MatchString(pkgText)) && + !v.permit(node) { v.issues = append(v.issues, UsedIssue{ - identifier: v.textFor(node), - pattern: p.pattern.String(), - position: v.fset.Position(node.Pos()), - customMsg: p.msg, + identifier: srcText, // Always report the expression as it appears in the source code. + pattern: p.re.String(), + pos: node.Pos(), + position: v.runConfig.Fset.Position(node.Pos()), + customMsg: p.Msg, }) } } return nil } +// textFor returns the expression as it appears in the source code (for +// example, .). func (v *visitor) textFor(node ast.Node) string { buf := new(bytes.Buffer) - if err := printer.Fprint(buf, v.fset, node); err != nil { - log.Fatalf("ERROR: unable to print node at %s: %s", v.fset.Position(node.Pos()), err) + if err := printer.Fprint(buf, v.runConfig.Fset, node); err != nil { + log.Fatalf("ERROR: unable to print node at %s: %s", v.runConfig.Fset.Position(node.Pos()), err) } return buf.String() } +// expandMatchText expands the selector in a selector expression to the full package +// name and (for variables) the type: +// +// - example.com/some/pkg.Function +// - example.com/some/pkg.CustomType.Method +// +// It updates the text to match against and fills the package string if possible, +// otherwise it just returns. +func (v *visitor) expandMatchText(node ast.Node, srcText string) (matchTexts []string, pkgText string) { + // The text to match against is the literal source code if we cannot + // come up with something different. + matchText := srcText + + if v.runConfig.TypesInfo == nil { + return []string{matchText}, pkgText + } + + location := v.runConfig.Fset.Position(node.Pos()) + + switch node := node.(type) { + case *ast.Ident: + object, ok := v.runConfig.TypesInfo.Uses[node] + if !ok { + // No information about the identifier. Should + // not happen, but perhaps there were compile + // errors? + v.runConfig.DebugLog("%s: unknown identifier %q", location, srcText) + return []string{matchText}, pkgText + } + if pkg := object.Pkg(); pkg != nil { + pkgText = pkg.Path() + v.runConfig.DebugLog("%s: identifier: %q -> %q in package %q", location, srcText, matchText, pkgText) + // match either with or without package name + return []string{pkg.Name() + "." + srcText, srcText}, pkgText + } else { + v.runConfig.DebugLog("%s: identifier: %q -> %q without package", location, srcText, matchText) + } + return []string{matchText}, pkgText + case *ast.SelectorExpr: + selector := node.X + field := node.Sel.Name + + // If we are lucky, the entire selector expression has a known + // type. We don't care about the value. + selectorText := v.textFor(node) + if typeAndValue, ok := v.runConfig.TypesInfo.Types[selector]; ok { + m, p, ok := pkgFromType(typeAndValue.Type) + if !ok { + v.runConfig.DebugLog("%s: selector %q with supported type %T", location, selectorText, typeAndValue.Type) + } + matchText = m + "." 
+ field + pkgText = p + v.runConfig.DebugLog("%s: selector %q with supported type %q: %q -> %q, package %q", location, selectorText, typeAndValue.Type.String(), srcText, matchText, pkgText) + return []string{matchText}, pkgText + } + // Some expressions need special treatment. + switch selector := selector.(type) { + case *ast.Ident: + object, ok := v.runConfig.TypesInfo.Uses[selector] + if !ok { + // No information about the identifier. Should + // not happen, but perhaps there were compile + // errors? + v.runConfig.DebugLog("%s: unknown selector identifier %q", location, selectorText) + return []string{matchText}, pkgText + } + switch object := object.(type) { + case *types.PkgName: + pkgText = object.Imported().Path() + matchText = object.Imported().Name() + "." + field + v.runConfig.DebugLog("%s: selector %q is package: %q -> %q, package %q", location, selectorText, srcText, matchText, pkgText) + return []string{matchText}, pkgText + case *types.Var: + m, p, ok := pkgFromType(object.Type()) + if !ok { + v.runConfig.DebugLog("%s: selector %q is variable with unsupported type %T", location, selectorText, object.Type()) + } + matchText = m + "." + field + pkgText = p + v.runConfig.DebugLog("%s: selector %q is variable of type %q: %q -> %q, package %q", location, selectorText, object.Type().String(), srcText, matchText, pkgText) + default: + // Something else? + v.runConfig.DebugLog("%s: selector %q is identifier with unsupported type %T", location, selectorText, object) + } + default: + v.runConfig.DebugLog("%s: selector %q of unsupported type %T", location, selectorText, selector) + } + return []string{matchText}, pkgText + default: + v.runConfig.DebugLog("%s: unsupported type %T", location, node) + return []string{matchText}, pkgText + } +} + +// pkgFromType tries to determine `.` and the full +// package path. This only needs to work for types of a selector in a selector +// expression. +func pkgFromType(t types.Type) (typeStr, pkgStr string, ok bool) { + if ptr, ok := t.(*types.Pointer); ok { + t = ptr.Elem() + } + + switch t := t.(type) { + case *types.Named: + obj := t.Obj() + pkg := obj.Pkg() + if pkg == nil { + return "", "", false + } + return pkg.Name() + "." + obj.Name(), pkg.Path(), true + default: + return "", "", false + } +} + func (v *visitor) permit(node ast.Node) bool { if v.cfg.IgnorePermitDirectives { return false } - nodePos := v.fset.Position(node.Pos()) - var nolint = regexp.MustCompile(fmt.Sprintf(`^//\s?permit:%s\b`, regexp.QuoteMeta(v.textFor(node)))) + nodePos := v.runConfig.Fset.Position(node.Pos()) + nolint := regexp.MustCompile(fmt.Sprintf(`^//\s?permit:%s\b`, regexp.QuoteMeta(v.textFor(node)))) for _, c := range v.comments { - commentPos := v.fset.Position(c.Pos()) + commentPos := v.runConfig.Fset.Position(c.Pos()) if commentPos.Line == nodePos.Line && len(c.List) > 0 && nolint.MatchString(c.List[0].Text) { return true } diff --git a/vendor/github.com/ashanbrown/forbidigo/forbidigo/patterns.go b/vendor/github.com/ashanbrown/forbidigo/forbidigo/patterns.go index c2364882..2692dcd2 100644 --- a/vendor/github.com/ashanbrown/forbidigo/forbidigo/patterns.go +++ b/vendor/github.com/ashanbrown/forbidigo/forbidigo/patterns.go @@ -5,39 +5,123 @@ import ( "regexp" "regexp/syntax" "strings" + + "gopkg.in/yaml.v2" ) +// pattern matches code that is not supposed to be used. type pattern struct { - pattern *regexp.Regexp - msg string + re, pkgRe *regexp.Regexp + + // Pattern is the regular expression string that is used for matching. 
+ // It gets matched against the literal source code text or the expanded + // text, depending on the mode in which the analyzer runs. + Pattern string `yaml:"p"` + + // Package is a regular expression for the full package path of + // an imported item. Ignored unless the analyzer is configured to + // determine that information. + Package string `yaml:"pkg,omitempty"` + + // Msg gets printed in addition to the normal message if a match is + // found. + Msg string `yaml:"msg,omitempty"` +} + +// A yamlPattern pattern in a YAML string may be represented either by a string +// (the traditional regular expression syntax) or a struct (for more complex +// patterns). +type yamlPattern pattern + +func (p *yamlPattern) UnmarshalYAML(unmarshal func(interface{}) error) error { + // Try struct first. It's unlikely that a regular expression string + // is valid YAML for a struct. + var ptrn pattern + if err := unmarshal(&ptrn); err != nil { + errStr := err.Error() + // Didn't work, try plain string. + var ptrn string + if err := unmarshal(&ptrn); err != nil { + return fmt.Errorf("pattern is neither a regular expression string (%s) nor a Pattern struct (%s)", err.Error(), errStr) + } + p.Pattern = ptrn + } else { + *p = yamlPattern(ptrn) + } + return ((*pattern)(p)).validate() } +var _ yaml.Unmarshaler = &yamlPattern{} + +// parse accepts a regular expression or, if the string starts with { or contains a line break, a +// JSON or YAML representation of a Pattern. func parse(ptrn string) (*pattern, error) { - ptrnRe, err := regexp.Compile(ptrn) + pattern := &pattern{} + + if strings.HasPrefix(strings.TrimSpace(ptrn), "{") || + strings.Contains(ptrn, "\n") { + // Embedded JSON or YAML. We can decode both with the YAML decoder. + if err := yaml.UnmarshalStrict([]byte(ptrn), pattern); err != nil { + return nil, fmt.Errorf("parsing as JSON or YAML failed: %v", err) + } + } else { + pattern.Pattern = ptrn + } + + if err := pattern.validate(); err != nil { + return nil, err + } + return pattern, nil +} + +func (p *pattern) validate() error { + ptrnRe, err := regexp.Compile(p.Pattern) if err != nil { - return nil, fmt.Errorf("unable to compile pattern `%s`: %s", ptrn, err) + return fmt.Errorf("unable to compile source code pattern `%s`: %s", p.Pattern, err) } - re, err := syntax.Parse(ptrn, syntax.Perl) + re, err := syntax.Parse(p.Pattern, syntax.Perl) if err != nil { - return nil, fmt.Errorf("unable to parse pattern `%s`: %s", ptrn, err) + return fmt.Errorf("unable to parse source code pattern `%s`: %s", p.Pattern, err) } msg := extractComment(re) - return &pattern{pattern: ptrnRe, msg: msg}, nil + if msg != "" { + p.Msg = msg + } + p.re = ptrnRe + + if p.Package != "" { + pkgRe, err := regexp.Compile(p.Package) + if err != nil { + return fmt.Errorf("unable to compile package pattern `%s`: %s", p.Package, err) + } + p.pkgRe = pkgRe + } + + return nil +} + +func (p *pattern) matches(matchTexts []string) bool { + for _, text := range matchTexts { + if p.re.MatchString(text) { + return true + } + } + return false } // Traverse the leaf submatches in the regex tree and extract a comment, if any // is present. 
func extractComment(re *syntax.Regexp) string { for _, sub := range re.Sub { + subStr := sub.String() + if strings.HasPrefix(subStr, "#") { + return strings.TrimSpace(strings.TrimPrefix(sub.String(), "#")) + } if len(sub.Sub) > 0 { if comment := extractComment(sub); comment != "" { return comment } } - subStr := sub.String() - if strings.HasPrefix(subStr, "#") { - return strings.TrimSpace(strings.TrimPrefix(subStr, "#")) - } } return "" } diff --git a/vendor/github.com/bombsimon/wsl/v3/.golangci.yml b/vendor/github.com/bombsimon/wsl/v3/.golangci.yml new file mode 100644 index 00000000..336ad4bc --- /dev/null +++ b/vendor/github.com/bombsimon/wsl/v3/.golangci.yml @@ -0,0 +1,78 @@ +--- +run: + deadline: 1m + issues-exit-code: 1 + tests: true + skip-dirs: + - vendor$ + +output: + format: colored-line-number + print-issued-lines: false + +linters-settings: + gocognit: + min-complexity: 10 + + depguard: + list-type: blacklist + include-go-root: false + packages: + - github.com/davecgh/go-spew/spew + + misspell: + locale: US + + gocritic: + # Enable multiple checks by tags, run `GL_DEBUG=gocritic golangci-lint run` + # to see all tags and checks. Empty list by default. See + # https://github.com/go-critic/go-critic#usage -> section "Tags". + enabled-tags: + - diagnostic + - experimental + - opinionated + - performance + - style + +linters: + enable-all: true + disable: + - cyclop + - deadcode + - dupl + - dupword + - exhaustivestruct + - exhaustruct + - forbidigo + - funlen + - gci + - gocognit + - gocyclo + - godox + - golint + - gomnd + - ifshort + - interfacer + - lll + - maintidx + - maligned + - nakedret + - nestif + - nlreturn + - nosnakecase + - paralleltest + - prealloc + - scopelint + - structcheck + - testpackage + - varcheck + - varnamelen + fast: false + + +issues: + exclude-use-default: true + max-issues-per-linter: 0 + max-same-issues: 0 + +# vim: set sw=2 ts=2 et: diff --git a/vendor/github.com/bombsimon/wsl/v3/.travis.yml b/vendor/github.com/bombsimon/wsl/v3/.travis.yml deleted file mode 100644 index 5e2e26ed..00000000 --- a/vendor/github.com/bombsimon/wsl/v3/.travis.yml +++ /dev/null @@ -1,25 +0,0 @@ ---- -language: go - -go: - - 1.13.x - - 1.12.x - - 1.11.x - -env: - global: - - GO111MODULE=on - -install: - - go get -v golang.org/x/tools/cmd/cover github.com/mattn/goveralls - -script: - - go test -v -covermode=count -coverprofile=coverage.out - -after_script: - - $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci - -notifications: - email: false - -# vim: set ts=2 sw=2 et: diff --git a/vendor/github.com/bombsimon/wsl/v3/README.md b/vendor/github.com/bombsimon/wsl/v3/README.md index 9812f94a..8ff74392 100644 --- a/vendor/github.com/bombsimon/wsl/v3/README.md +++ b/vendor/github.com/bombsimon/wsl/v3/README.md @@ -3,7 +3,7 @@ [![forthebadge](https://forthebadge.com/images/badges/made-with-go.svg)](https://forthebadge.com) [![forthebadge](https://forthebadge.com/images/badges/built-with-love.svg)](https://forthebadge.com) -[![Build Status](https://travis-ci.org/bombsimon/wsl.svg?branch=master)](https://travis-ci.org/bombsimon/wsl) +[![GitHub Actions](https://github.com/bombsimon/wsl/actions/workflows/go.yml/badge.svg)](https://github.com/bombsimon/wsl/actions/workflows/go.yml) [![Coverage Status](https://coveralls.io/repos/github/bombsimon/wsl/badge.svg?branch=master)](https://coveralls.io/github/bombsimon/wsl?branch=master) WSL is a linter 
that enforces a very **non scientific** vision of how to make @@ -28,7 +28,7 @@ make something configurable! You can do that by using: ```sh -go get -u github.com/bombsimon/wsl/cmd/... +go get -u github.com/bombsimon/wsl/v3/cmd/... ``` ### By golangci-lint (CI automation) diff --git a/vendor/github.com/bombsimon/wsl/v3/wsl.go b/vendor/github.com/bombsimon/wsl/v3/wsl.go index 313b5278..1b139c04 100644 --- a/vendor/github.com/bombsimon/wsl/v3/wsl.go +++ b/vendor/github.com/bombsimon/wsl/v3/wsl.go @@ -5,12 +5,12 @@ import ( "go/ast" "go/parser" "go/token" - "io/ioutil" + "os" "reflect" "strings" ) -// Error reason strings +// Error reason strings. const ( reasonMustCuddleErrCheck = "if statements that check an error must be cuddled with the statement that assigned the error" reasonOnlyCuddleIfWithAssign = "if statements should only be cuddled with assignments" @@ -44,7 +44,7 @@ const ( reasonShortDeclNotExclusive = "short declaration should cuddle only with other short declarations" ) -// Warning strings +// Warning strings. const ( warnTypeNotImplement = "type not implemented" warnStmtNotImplemented = "stmt type not implemented" @@ -176,7 +176,7 @@ type Configuration struct { ForceExclusiveShortDeclarations bool } -// DefaultConfig returns default configuration +// DefaultConfig returns default configuration. func DefaultConfig() Configuration { return Configuration{ StrictAppend: true, @@ -216,6 +216,8 @@ type Processor struct { } // NewProcessor will create a Processor. +// +//nolint:gocritic // It's fine to copy config struct func NewProcessorWithConfig(cfg Configuration) *Processor { return &Processor{ result: []Result{}, @@ -230,10 +232,11 @@ func NewProcessor() *Processor { // ProcessFiles takes a string slice with file names (full paths) and lints // them. -// nolint: gocritic +// +//nolint:gocritic // Don't want named returns func (p *Processor) ProcessFiles(filenames []string) ([]Result, []string) { for _, filename := range filenames { - data, err := ioutil.ReadFile(filename) + data, err := os.ReadFile(filename) if err != nil { panic(err) } @@ -247,7 +250,6 @@ func (p *Processor) ProcessFiles(filenames []string) ([]Result, []string) { func (p *Processor) process(filename string, data []byte) { fileSet := token.NewFileSet() file, err := parser.ParseFile(fileSet, filename, data, parser.ParseComments) - // If the file is not parsable let's add a syntax error and move on. if err != nil { p.result = append(p.result, Result{ @@ -292,7 +294,6 @@ func (p *Processor) parseBlockBody(ident *ast.Ident, block *ast.BlockStmt) { // parseBlockStatements will parse all the statements found in the body of a // node. A list of Result is returned. -// nolint: gocognit func (p *Processor) parseBlockStatements(statements []ast.Stmt) { for i, stmt := range statements { // Start by checking if this statement is another block (other than if, @@ -335,7 +336,7 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) { var calledOnLineAbove []string // Check if the previous statement spans over multiple lines. - var cuddledWithMultiLineAssignment = cuddledWithLastStmt && p.nodeStart(previousStatement) != p.nodeStart(stmt)-1 + cuddledWithMultiLineAssignment := cuddledWithLastStmt && p.nodeStart(previousStatement) != p.nodeStart(stmt)-1 // Ensure previous line is not a multi line assignment and if not get // rightAndLeftHandSide assigned variables. 
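The wsl.go changes above keep the library entry points intact while swapping io/ioutil for os. A minimal sketch of driving those entry points directly — NewProcessorWithConfig, DefaultConfig and ProcessFiles all appear in this diff, while the file path and the canonical github.com/bombsimon/wsl/v3 import path are assumptions for illustration:

```go
package main

import (
	"fmt"

	"github.com/bombsimon/wsl/v3"
)

func main() {
	// Build a processor with the library's default configuration.
	processor := wsl.NewProcessorWithConfig(wsl.DefaultConfig())

	// ProcessFiles lints the given paths; the second return value carries
	// processor warnings (for example, statement types it does not implement).
	results, warnings := processor.ProcessFiles([]string{"main.go"})
	for _, w := range warnings {
		fmt.Println("warning:", w)
	}
	for _, r := range results {
		fmt.Printf("%+v\n", r)
	}
}
```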
@@ -397,8 +398,7 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) { // t.X = true // return t // } - // nolint: gocritic - if i == len(statements)-1 && i == 1 { + if len(statements) == 2 && i == 1 { if p.nodeEnd(stmt)-p.nodeStart(previousStatement) <= 2 { return true } @@ -672,6 +672,22 @@ func (p *Processor) parseBlockStatements(statements []ast.Stmt) { continue } + if c, ok := t.Call.Fun.(*ast.SelectorExpr); ok { + goCallArgs := append(p.findLHS(c.X), p.findRHS(c.X)...) + + if atLeastOneInListsMatch(calledOnLineAbove, goCallArgs) { + continue + } + } + + if c, ok := t.Call.Fun.(*ast.FuncLit); ok { + goCallArgs := append(p.findLHS(c.Body), p.findRHS(c.Body)...) + + if atLeastOneInListsMatch(assignedOnLineAbove, goCallArgs) { + continue + } + } + if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { p.addError(t.Pos(), reasonGoFuncWithoutAssign) } @@ -845,8 +861,7 @@ func (p *Processor) findRHS(node ast.Node) []string { case *ast.BasicLit, *ast.SelectStmt, *ast.ChanType, *ast.LabeledStmt, *ast.DeclStmt, *ast.BranchStmt, *ast.TypeSpec, *ast.ArrayType, *ast.CaseClause, - *ast.CommClause, *ast.KeyValueExpr, *ast.MapType, - *ast.FuncLit: + *ast.CommClause, *ast.MapType, *ast.FuncLit: // Nothing to add to RHS case *ast.Ident: return []string{t.Name} @@ -905,6 +920,9 @@ func (p *Processor) findRHS(node ast.Node) []string { rhs = append(rhs, p.findRHS(t.X)...) rhs = append(rhs, p.findRHS(t.Low)...) rhs = append(rhs, p.findRHS(t.High)...) + case *ast.KeyValueExpr: + rhs = p.findRHS(t.Key) + rhs = append(rhs, p.findRHS(t.Value)...) default: if x, ok := maybeX(t); ok { return p.findRHS(x) @@ -1002,7 +1020,6 @@ func atLeastOneInListsMatch(listOne, listTwo []string) bool { // findLeadingAndTrailingWhitespaces will find leading and trailing whitespaces // in a node. The method takes comments in consideration which will make the // parser more gentle. 
-// nolint: gocognit func (p *Processor) findLeadingAndTrailingWhitespaces(ident *ast.Ident, stmt, nextStatement ast.Node) { var ( allowedLinesBeforeFirstStatement = 1 @@ -1094,7 +1111,7 @@ func (p *Processor) findLeadingAndTrailingWhitespaces(ident *ast.Ident, stmt, ne if seenCommentGroups > 1 { allowedLinesBeforeFirstStatement += seenCommentGroups - 1 } else if seenCommentGroups == 1 { - allowedLinesBeforeFirstStatement += 1 + allowedLinesBeforeFirstStatement++ } } @@ -1205,7 +1222,7 @@ func (p *Processor) nodeStart(node ast.Node) int { } func (p *Processor) nodeEnd(node ast.Node) int { - var line = p.fileSet.Position(node.End()).Line + line := p.fileSet.Position(node.End()).Line if isEmptyLabeledStmt(node) { return p.fileSet.Position(node.Pos()).Line diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/gci.go b/vendor/github.com/daixiang0/gci/pkg/gci/gci.go index 2980e7d7..7418db20 100644 --- a/vendor/github.com/daixiang0/gci/pkg/gci/gci.go +++ b/vendor/github.com/daixiang0/gci/pkg/gci/gci.go @@ -121,7 +121,7 @@ func LoadFormatGoFile(file io.FileObj, cfg config.Config) (src, dist []byte, err return src, src, nil } - imports, headEnd, tailStart, err := parse.ParseFile(src, file.Path()) + imports, headEnd, tailStart, cStart, cEnd, err := parse.ParseFile(src, file.Path()) if err != nil { if errors.Is(err, parse.NoImportError{}) { return src, src, nil @@ -139,23 +139,6 @@ func LoadFormatGoFile(file io.FileObj, cfg config.Config) (src, dist []byte, err return nil, nil, err } - var head []byte - if src[headEnd-1] == '\t' { - head = src[:headEnd] - } else { - // handle multiple import blocks - // cover `import ` to `import (` - head = make([]byte, headEnd) - copy(head, src[:headEnd]) - head = append(head, []byte{40, 10, 9}...) - } - - tail := src[tailStart:] - // for test - if len(tail) == 0 { - tail = []byte(")\n") - } - firstWithIndex := true var body []byte @@ -173,8 +156,30 @@ func LoadFormatGoFile(file io.FileObj, cfg config.Config) (src, dist []byte, err } } - if tail[0] != utils.Linebreak { - body = append(body, utils.Linebreak) + head := make([]byte, headEnd) + copy(head, src[:headEnd]) + tail := make([]byte, len(src)-tailStart) + copy(tail, src[tailStart:]) + + head = append(head, utils.Linebreak) + // ensure C + if cStart != 0 { + head = append(head, src[cStart:cEnd]...) + head = append(head, utils.Linebreak) + } + + // add beginning of import block + head = append(head, `import (`...) + head = append(head, utils.Linebreak) + // add end of import block + body = append(body, []byte{utils.RightParenthesis, utils.Linebreak}...) 
+ + log.L().Debug(fmt.Sprintf("head:\n%s", head)) + log.L().Debug(fmt.Sprintf("body:\n%s", body)) + if len(tail) > 20 { + log.L().Debug(fmt.Sprintf("tail:\n%s", tail[:20])) + } else { + log.L().Debug(fmt.Sprintf("tail:\n%s", tail)) } var totalLen int @@ -187,7 +192,7 @@ func LoadFormatGoFile(file io.FileObj, cfg config.Config) (src, dist []byte, err for _, s := range slices { i += copy(dist[i:], s) } - + log.L().Debug(fmt.Sprintf("raw:\n%s", dist)) dist, err = goFormat.Source(dist) if err != nil { return nil, nil, err diff --git a/vendor/github.com/daixiang0/gci/pkg/parse/parse.go b/vendor/github.com/daixiang0/gci/pkg/parse/parse.go index df4cfdce..33d6e170 100644 --- a/vendor/github.com/daixiang0/gci/pkg/parse/parse.go +++ b/vendor/github.com/daixiang0/gci/pkg/parse/parse.go @@ -70,44 +70,96 @@ func getImports(imp *ast.ImportSpec) (start, end int, name string) { return } -func ParseFile(src []byte, filename string) (ImportList, int, int, error) { +func ParseFile(src []byte, filename string) (ImportList, int, int, int, int, error) { fileSet := token.NewFileSet() f, err := parser.ParseFile(fileSet, filename, src, parser.ParseComments) if err != nil { - return nil, 0, 0, err + return nil, 0, 0, 0, 0, err } if len(f.Imports) == 0 { - return nil, 0, 0, NoImportError{} + return nil, 0, 0, 0, 0, NoImportError{} } - var headEnd, tailStart int - - var data ImportList - for i, imp := range f.Imports { - if imp.Path.Value == C { - continue - } - - start, end, name := getImports(imp) + var ( + // headEnd means the start of import block + headEnd int + // tailStart means the end + 1 of import block + tailStart int + // cStart means the start of C import block + cStart int + // cEnd means the end of C import block + cEnd int + data ImportList + ) - if headEnd == 0 { - headEnd = start - } - if i == len(f.Imports)-1 { - tailStart = end + for index, decl := range f.Decls { + switch decl.(type) { + // skip BadDecl and FuncDecl + case *ast.GenDecl: + genDecl := decl.(*ast.GenDecl) + + if genDecl.Tok == token.IMPORT { + // there are two cases, both end with linebreak: + // 1. + // import ( + // "xxxx" + // ) + // 2. + // import "xxx" + if headEnd == 0 { + headEnd = int(decl.Pos()) - 1 + } + tailStart = int(decl.End()) + + for _, spec := range genDecl.Specs { + imp := spec.(*ast.ImportSpec) + // there are only one C import block + // ensure C import block is the first import block + if imp.Path.Value == C { + /* + common case: + + // #include + import "C" + + notice that decl.Pos() == genDecl.Pos() > genDecl.Doc.Pos() + */ + if genDecl.Doc != nil { + cStart = int(genDecl.Doc.Pos()) - 1 + // if C import block is the first, update headEnd + if index == 0 { + headEnd = cStart + } + } else { + /* + special case: + + import "C" + */ + cStart = int(decl.Pos()) - 1 + } + + cEnd = int(decl.End()) + + continue + } + + start, end, name := getImports(imp) + + data = append(data, &GciImports{ + Start: start, + End: end, + Name: name, + Path: strings.Trim(imp.Path.Value, `"`), + }) + } + } } - - data = append(data, &GciImports{ - Start: start, - End: end, - Name: name, - Path: strings.Trim(imp.Path.Value, `"`), - }) } sort.Sort(data) - return data, headEnd, tailStart, nil + return data, headEnd, tailStart, cStart, cEnd, nil } // IsGeneratedFileByComment reports whether the source file is generated code. 
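For context, the rewritten `ParseFile` above starts reporting the cgo import block separately (`cStart`/`cEnd`, including its `genDecl.Doc` comment) so that `gci` can re-emit it ahead of the regrouped `import (` block. A minimal sketch of the source layout this handling is aimed at; the file contents below are illustrative only and not taken from the diff:

```go
// Package main shows the import layout gci's C-import handling targets:
// a cgo import whose doc comment must stay attached, followed by a
// regular import block that gci is free to regroup.
package main

/*
#include <stdio.h>
#include <stdlib.h>
*/
import "C"

import (
	"fmt"
	"unsafe"
)

func main() {
	// C.puts comes from the cgo block; fmt and unsafe come from the
	// grouped block that gci sorts into sections.
	msg := C.CString("hello from cgo")
	defer C.free(unsafe.Pointer(msg))
	C.puts(msg)
	fmt.Println("hello from Go")
}
```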
@@ -118,10 +170,11 @@ func IsGeneratedFileByComment(in string) bool { const ( genCodeGenerated = "code generated" genDoNotEdit = "do not edit" - genAutoFile = "autogenerated file" // easyjson + genAutoFile = "autogenerated file" // easyjson + genAutoGenerated = "automatically generated" // genny ) - markers := []string{genCodeGenerated, genDoNotEdit, genAutoFile} + markers := []string{genCodeGenerated, genDoNotEdit, genAutoFile, genAutoGenerated} in = strings.ToLower(in) for _, marker := range markers { if strings.Contains(in, marker) { diff --git a/vendor/github.com/daixiang0/gci/pkg/section/errors.go b/vendor/github.com/daixiang0/gci/pkg/section/errors.go index 1aa42ecd..0a120913 100644 --- a/vendor/github.com/daixiang0/gci/pkg/section/errors.go +++ b/vendor/github.com/daixiang0/gci/pkg/section/errors.go @@ -25,9 +25,9 @@ func (s SectionParsingError) Is(err error) bool { return ok } -var MissingParameterClosingBracketsError = fmt.Errorf("section parameter is missing closing %q", utils.ParameterClosingBrackets) +var MissingParameterClosingBracketsError = fmt.Errorf("section parameter is missing closing %q", utils.RightParenthesis) -var MoreThanOneOpeningQuotesError = fmt.Errorf("found more than one %q parameter start sequences", utils.ParameterClosingBrackets) +var MoreThanOneOpeningQuotesError = fmt.Errorf("found more than one %q parameter start sequences", utils.RightParenthesis) var SectionTypeDoesNotAcceptParametersError = errors.New("section type does not accept a parameter") diff --git a/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go b/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go index 95c0d225..62decfe1 100644 --- a/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go +++ b/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go @@ -1,6 +1,6 @@ package section -// Code generated based on go1.19.1. DO NOT EDIT. +// Code generated based on go1.19.2. DO NOT EDIT. var standardPackages = map[string]struct{}{ "archive/tar": {}, diff --git a/vendor/github.com/daixiang0/gci/pkg/utils/constants.go b/vendor/github.com/daixiang0/gci/pkg/utils/constants.go index b1de2ea2..0e7cce75 100644 --- a/vendor/github.com/daixiang0/gci/pkg/utils/constants.go +++ b/vendor/github.com/daixiang0/gci/pkg/utils/constants.go @@ -4,8 +4,8 @@ const ( Indent = '\t' Linebreak = '\n' - SectionSeparator = ":" + Colon = ":" - ParameterOpeningBrackets = "(" - ParameterClosingBrackets = ")" + LeftParenthesis = '(' + RightParenthesis = ')' ) diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md index 5152bf59..be82827c 100644 --- a/vendor/github.com/fatih/color/README.md +++ b/vendor/github.com/fatih/color/README.md @@ -7,7 +7,6 @@ suits you. ![Color](https://user-images.githubusercontent.com/438920/96832689-03b3e000-13f4-11eb-9803-46f4c4de3406.jpg) - ## Install ```bash @@ -124,17 +123,17 @@ fmt.Println("All text will now be bold magenta.") ``` ### Disable/Enable color - + There might be a case where you want to explicitly disable/enable color output. the `go-isatty` package will automatically disable color output for non-tty output streams (for example if the output were piped directly to `less`). The `color` package also disables color output if the [`NO_COLOR`](https://no-color.org) environment -variable is set (regardless of its value). +variable is set to a non-empty string. 
-`Color` has support to disable/enable colors programatically both globally and +`Color` has support to disable/enable colors programmatically both globally and for single color definitions. For example suppose you have a CLI app and a -`--no-color` bool flag. You can easily disable the color output with: +`-no-color` bool flag. You can easily disable the color output with: ```go var flagNoColor = flag.Bool("no-color", false, "Disable color output") @@ -167,11 +166,10 @@ To output color in GitHub Actions (or other CI systems that support ANSI colors) * Save/Return previous values * Evaluate fmt.Formatter interface - ## Credits - * [Fatih Arslan](https://github.com/fatih) - * Windows support via @mattn: [colorable](https://github.com/mattn/go-colorable) +* [Fatih Arslan](https://github.com/fatih) +* Windows support via @mattn: [colorable](https://github.com/mattn/go-colorable) ## License diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go index 98a60f3c..889f9e77 100644 --- a/vendor/github.com/fatih/color/color.go +++ b/vendor/github.com/fatih/color/color.go @@ -19,10 +19,10 @@ var ( // set (regardless of its value). This is a global option and affects all // colors. For more control over each color block use the methods // DisableColor() individually. - NoColor = noColorExists() || os.Getenv("TERM") == "dumb" || + NoColor = noColorIsSet() || os.Getenv("TERM") == "dumb" || (!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd())) - // Output defines the standard output of the print functions. By default + // Output defines the standard output of the print functions. By default, // os.Stdout is used. Output = colorable.NewColorableStdout() @@ -35,10 +35,9 @@ var ( colorsCacheMu sync.Mutex // protects colorsCache ) -// noColorExists returns true if the environment variable NO_COLOR exists. -func noColorExists() bool { - _, exists := os.LookupEnv("NO_COLOR") - return exists +// noColorIsSet returns true if the environment variable NO_COLOR is set to a non-empty string. +func noColorIsSet() bool { + return os.Getenv("NO_COLOR") != "" } // Color defines a custom color object which is defined by SGR parameters. @@ -120,7 +119,7 @@ func New(value ...Attribute) *Color { params: make([]Attribute, 0), } - if noColorExists() { + if noColorIsSet() { c.noColor = boolPtr(true) } @@ -152,7 +151,7 @@ func (c *Color) Set() *Color { return c } - fmt.Fprintf(Output, c.format()) + fmt.Fprint(Output, c.format()) return c } @@ -164,16 +163,21 @@ func (c *Color) unset() { Unset() } -func (c *Color) setWriter(w io.Writer) *Color { +// SetWriter is used to set the SGR sequence with the given io.Writer. This is +// a low-level function, and users should use the higher-level functions, such +// as color.Fprint, color.Print, etc. +func (c *Color) SetWriter(w io.Writer) *Color { if c.isNoColorSet() { return c } - fmt.Fprintf(w, c.format()) + fmt.Fprint(w, c.format()) return c } -func (c *Color) unsetWriter(w io.Writer) { +// UnsetWriter resets all escape attributes and clears the output with the give +// io.Writer. Usually should be called after SetWriter(). 
+func (c *Color) UnsetWriter(w io.Writer) { if c.isNoColorSet() { return } @@ -192,20 +196,14 @@ func (c *Color) Add(value ...Attribute) *Color { return c } -func (c *Color) prepend(value Attribute) { - c.params = append(c.params, 0) - copy(c.params[1:], c.params[0:]) - c.params[0] = value -} - // Fprint formats using the default formats for its operands and writes to w. // Spaces are added between operands when neither is a string. // It returns the number of bytes written and any write error encountered. // On Windows, users should wrap w with colorable.NewColorable() if w is of // type *os.File. func (c *Color) Fprint(w io.Writer, a ...interface{}) (n int, err error) { - c.setWriter(w) - defer c.unsetWriter(w) + c.SetWriter(w) + defer c.UnsetWriter(w) return fmt.Fprint(w, a...) } @@ -227,8 +225,8 @@ func (c *Color) Print(a ...interface{}) (n int, err error) { // On Windows, users should wrap w with colorable.NewColorable() if w is of // type *os.File. func (c *Color) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { - c.setWriter(w) - defer c.unsetWriter(w) + c.SetWriter(w) + defer c.UnsetWriter(w) return fmt.Fprintf(w, format, a...) } @@ -248,8 +246,8 @@ func (c *Color) Printf(format string, a ...interface{}) (n int, err error) { // On Windows, users should wrap w with colorable.NewColorable() if w is of // type *os.File. func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) { - c.setWriter(w) - defer c.unsetWriter(w) + c.SetWriter(w) + defer c.UnsetWriter(w) return fmt.Fprintln(w, a...) } @@ -396,7 +394,7 @@ func (c *Color) DisableColor() { } // EnableColor enables the color output. Use it in conjunction with -// DisableColor(). Otherwise this method has no side effects. +// DisableColor(). Otherwise, this method has no side effects. func (c *Color) EnableColor() { c.noColor = boolPtr(false) } diff --git a/vendor/github.com/fatih/color/doc.go b/vendor/github.com/fatih/color/doc.go index 04541de7..9491ad54 100644 --- a/vendor/github.com/fatih/color/doc.go +++ b/vendor/github.com/fatih/color/doc.go @@ -5,106 +5,105 @@ that suits you. Use simple and default helper functions with predefined foreground colors: - color.Cyan("Prints text in cyan.") + color.Cyan("Prints text in cyan.") - // a newline will be appended automatically - color.Blue("Prints %s in blue.", "text") + // a newline will be appended automatically + color.Blue("Prints %s in blue.", "text") - // More default foreground colors.. - color.Red("We have red") - color.Yellow("Yellow color too!") - color.Magenta("And many others ..") + // More default foreground colors.. + color.Red("We have red") + color.Yellow("Yellow color too!") + color.Magenta("And many others ..") - // Hi-intensity colors - color.HiGreen("Bright green color.") - color.HiBlack("Bright black means gray..") - color.HiWhite("Shiny white color!") + // Hi-intensity colors + color.HiGreen("Bright green color.") + color.HiBlack("Bright black means gray..") + color.HiWhite("Shiny white color!") -However there are times where custom color mixes are required. Below are some +However, there are times when custom color mixes are required. Below are some examples to create custom color objects and use the print functions of each separate color object. 
- // Create a new color object - c := color.New(color.FgCyan).Add(color.Underline) - c.Println("Prints cyan text with an underline.") + // Create a new color object + c := color.New(color.FgCyan).Add(color.Underline) + c.Println("Prints cyan text with an underline.") - // Or just add them to New() - d := color.New(color.FgCyan, color.Bold) - d.Printf("This prints bold cyan %s\n", "too!.") + // Or just add them to New() + d := color.New(color.FgCyan, color.Bold) + d.Printf("This prints bold cyan %s\n", "too!.") - // Mix up foreground and background colors, create new mixes! - red := color.New(color.FgRed) + // Mix up foreground and background colors, create new mixes! + red := color.New(color.FgRed) - boldRed := red.Add(color.Bold) - boldRed.Println("This will print text in bold red.") + boldRed := red.Add(color.Bold) + boldRed.Println("This will print text in bold red.") - whiteBackground := red.Add(color.BgWhite) - whiteBackground.Println("Red text with White background.") + whiteBackground := red.Add(color.BgWhite) + whiteBackground.Println("Red text with White background.") - // Use your own io.Writer output - color.New(color.FgBlue).Fprintln(myWriter, "blue color!") + // Use your own io.Writer output + color.New(color.FgBlue).Fprintln(myWriter, "blue color!") - blue := color.New(color.FgBlue) - blue.Fprint(myWriter, "This will print text in blue.") + blue := color.New(color.FgBlue) + blue.Fprint(myWriter, "This will print text in blue.") You can create PrintXxx functions to simplify even more: - // Create a custom print function for convenient - red := color.New(color.FgRed).PrintfFunc() - red("warning") - red("error: %s", err) + // Create a custom print function for convenient + red := color.New(color.FgRed).PrintfFunc() + red("warning") + red("error: %s", err) - // Mix up multiple attributes - notice := color.New(color.Bold, color.FgGreen).PrintlnFunc() - notice("don't forget this...") + // Mix up multiple attributes + notice := color.New(color.Bold, color.FgGreen).PrintlnFunc() + notice("don't forget this...") You can also FprintXxx functions to pass your own io.Writer: - blue := color.New(FgBlue).FprintfFunc() - blue(myWriter, "important notice: %s", stars) - - // Mix up with multiple attributes - success := color.New(color.Bold, color.FgGreen).FprintlnFunc() - success(myWriter, don't forget this...") + blue := color.New(FgBlue).FprintfFunc() + blue(myWriter, "important notice: %s", stars) + // Mix up with multiple attributes + success := color.New(color.Bold, color.FgGreen).FprintlnFunc() + success(myWriter, don't forget this...") Or create SprintXxx functions to mix strings with other non-colorized strings: - yellow := New(FgYellow).SprintFunc() - red := New(FgRed).SprintFunc() + yellow := New(FgYellow).SprintFunc() + red := New(FgRed).SprintFunc() - fmt.Printf("this is a %s and this is %s.\n", yellow("warning"), red("error")) + fmt.Printf("this is a %s and this is %s.\n", yellow("warning"), red("error")) - info := New(FgWhite, BgGreen).SprintFunc() - fmt.Printf("this %s rocks!\n", info("package")) + info := New(FgWhite, BgGreen).SprintFunc() + fmt.Printf("this %s rocks!\n", info("package")) Windows support is enabled by default. All Print functions work as intended. 
-However only for color.SprintXXX functions, user should use fmt.FprintXXX and +However, only for color.SprintXXX functions, user should use fmt.FprintXXX and set the output to color.Output: - fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS")) + fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS")) - info := New(FgWhite, BgGreen).SprintFunc() - fmt.Fprintf(color.Output, "this %s rocks!\n", info("package")) + info := New(FgWhite, BgGreen).SprintFunc() + fmt.Fprintf(color.Output, "this %s rocks!\n", info("package")) Using with existing code is possible. Just use the Set() method to set the standard output to the given parameters. That way a rewrite of an existing code is not required. - // Use handy standard colors. - color.Set(color.FgYellow) + // Use handy standard colors. + color.Set(color.FgYellow) - fmt.Println("Existing text will be now in Yellow") - fmt.Printf("This one %s\n", "too") + fmt.Println("Existing text will be now in Yellow") + fmt.Printf("This one %s\n", "too") - color.Unset() // don't forget to unset + color.Unset() // don't forget to unset - // You can mix up parameters - color.Set(color.FgMagenta, color.Bold) - defer color.Unset() // use it in your function + // You can mix up parameters + color.Set(color.FgMagenta, color.Bold) + defer color.Unset() // use it in your function - fmt.Println("All text will be now bold magenta.") + fmt.Println("All text will be now bold magenta.") There might be a case where you want to disable color output (for example to pipe the standard output of your app to somewhere else). `Color` has support to @@ -112,24 +111,24 @@ disable colors both globally and for single color definition. For example suppose you have a CLI app and a `--no-color` bool flag. You can easily disable the color output with: - var flagNoColor = flag.Bool("no-color", false, "Disable color output") + var flagNoColor = flag.Bool("no-color", false, "Disable color output") - if *flagNoColor { - color.NoColor = true // disables colorized output - } + if *flagNoColor { + color.NoColor = true // disables colorized output + } You can also disable the color by setting the NO_COLOR environment variable to any value. It also has support for single color definitions (local). 
You can disable/enable color output on the fly: - c := color.New(color.FgCyan) - c.Println("Prints cyan text") + c := color.New(color.FgCyan) + c.Println("Prints cyan text") - c.DisableColor() - c.Println("This is printed without any color") + c.DisableColor() + c.Println("This is printed without any color") - c.EnableColor() - c.Println("This prints again cyan...") + c.EnableColor() + c.Println("This prints again cyan...") */ package color diff --git a/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go index 149f0ac8..d3d139a0 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go @@ -114,30 +114,43 @@ func (c *badCondChecker) checkForStmt(stmt *ast.ForStmt) { iter := astcast.ToIdent(init.Lhs[0]) cond := astcast.ToBinaryExpr(stmt.Cond) - if cond.Op != token.GTR || !astequal.Expr(iter, cond.X) { + + var i, n ast.Expr + var op token.Token + switch { + case cond.Op == token.GTR && astequal.Expr(iter, cond.X): + i = cond.X + n = cond.Y + op = token.LSS + case cond.Op == token.LSS && astequal.Expr(iter, cond.Y): + i = cond.Y + n = cond.X + op = token.GTR + default: return } - if !typep.SideEffectFree(c.ctx.TypesInfo, cond.Y) { + + if !typep.SideEffectFree(c.ctx.TypesInfo, n) { return } post := astcast.ToIncDecStmt(stmt.Post) - if post.Tok != token.INC || !astequal.Expr(iter, post.X) { + if post.Tok != token.INC || !astequal.Expr(iter, i) { return } - mutated := lintutil.CouldBeMutated(c.ctx.TypesInfo, stmt.Body, cond.Y) || + mutated := lintutil.CouldBeMutated(c.ctx.TypesInfo, stmt.Body, n) || lintutil.CouldBeMutated(c.ctx.TypesInfo, stmt.Body, iter) if mutated { return } - c.warnForStmt(stmt, cond) + c.warnForStmt(stmt, op, cond) } -func (c *badCondChecker) warnForStmt(cause ast.Node, cond *ast.BinaryExpr) { +func (c *badCondChecker) warnForStmt(cause ast.Node, op token.Token, cond *ast.BinaryExpr) { suggest := astcopy.BinaryExpr(cond) - suggest.Op = token.LSS + suggest.Op = op c.ctx.Warn(cause, "`%s` in loop; probably meant `%s`?", cond, suggest) } diff --git a/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go index e0d4b748..8a359000 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go @@ -365,7 +365,7 @@ func (c *badRegexpChecker) checkCharClassDups(cc syntax.Expr) { } // 2. Sort ranges, O(nlogn). 
- sort.Slice(ranges, func(i, j int) bool { + sort.SliceStable(ranges, func(i, j int) bool { return ranges[i].low < ranges[j].low }) diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go index 83de5052..ad16e3b3 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go @@ -54,5 +54,5 @@ func (c *dupBranchBodyChecker) checkIf(stmt *ast.IfStmt) { } func (c *dupBranchBodyChecker) warnIf(cause ast.Node) { - c.ctx.Warn(cause, "both branches in if statement has same body") + c.ctx.Warn(cause, "both branches in if statement have same body") } diff --git a/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go b/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go index 910be180..742652a1 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go @@ -5,7 +5,6 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/framework/linter" - "golang.org/x/exp/typeparams" ) func init() { @@ -50,9 +49,6 @@ func (c *hugeParamChecker) checkParams(params []*ast.Field) { for _, p := range params { for _, id := range p.Names { typ := c.ctx.TypeOf(id) - if _, ok := typ.(*typeparams.TypeParam); ok { - continue - } size, ok := c.ctx.SizeOf(typ) if ok && size >= c.sizeThreshold { c.warn(id, size) diff --git a/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go index b1fcf414..c3d127c5 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go @@ -11,6 +11,12 @@ func init() { var info linter.CheckerInfo info.Name = "ifElseChain" info.Tags = []string{"style"} + info.Params = linter.CheckerParams{ + "minThreshold": { + Value: 2, + Usage: "min number of if-else blocks that makes the warning trigger", + }, + } info.Summary = "Detects repeated if-else statements and suggests to replace them with switch statement" info.Before = ` if cond1 { @@ -35,7 +41,10 @@ will trigger suggestion to use switch statement. 
See [EffectiveGo#switch](https://golang.org/doc/effective_go.html#switch).` collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { - return astwalk.WalkerForStmt(&ifElseChainChecker{ctx: ctx}), nil + return astwalk.WalkerForStmt(&ifElseChainChecker{ + ctx: ctx, + minThreshold: info.Params.Int("minThreshold"), + }), nil }) } @@ -45,6 +54,8 @@ type ifElseChainChecker struct { cause *ast.IfStmt visited map[*ast.IfStmt]bool + + minThreshold int } func (c *ifElseChainChecker) EnterFunc(fn *ast.FuncDecl) bool { @@ -66,8 +77,7 @@ func (c *ifElseChainChecker) VisitStmt(stmt ast.Stmt) { } func (c *ifElseChainChecker) checkIfStmt(stmt *ast.IfStmt) { - const minThreshold = 2 - if c.countIfelseLen(stmt) >= minThreshold { + if c.countIfelseLen(stmt) >= c.minThreshold { c.warn() } } @@ -75,11 +85,12 @@ func (c *ifElseChainChecker) checkIfStmt(stmt *ast.IfStmt) { func (c *ifElseChainChecker) countIfelseLen(stmt *ast.IfStmt) int { count := 0 for { + if stmt.Init != nil { + return 0 // Give up + } + switch e := stmt.Else.(type) { case *ast.IfStmt: - if e.Init != nil { - return 0 // Give up - } // Else if. stmt = e count++ diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go index 47de589a..5fcce6a2 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go @@ -7,9 +7,9 @@ import ( // LocalDefVisitor visits every name definitions inside a function. // // Next elements are considered as name definitions: -// - Function parameters (input, output, receiver) -// - Every LHS of ":=" assignment that defines a new name -// - Every local var/const declaration. +// - Function parameters (input, output, receiver) +// - Every LHS of ":=" assignment that defines a new name +// - Every local var/const declaration. // // NOTE: this visitor is experimental. // This is also why it lives in a separate file. 
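For reference, the ifElseChain change above turns the previously hard-coded threshold into a `minThreshold` checker parameter (default 2). A small sketch of the chain shape it flags and the switch rewrite it suggests, mirroring the checker's own Before/After examples; the `handle` function and the `cond1`/`cond2` names are illustrative only:

```go
package main

import "fmt"

func handle(cond1, cond2 bool) {
	// The shape ifElseChain warns about once the chain length
	// reaches minThreshold (default 2):
	if cond1 {
		fmt.Println("branch 1")
	} else if cond2 {
		fmt.Println("branch 2")
	} else {
		fmt.Println("default branch")
	}

	// The switch form the checker suggests instead:
	switch {
	case cond1:
		fmt.Println("branch 1")
	case cond2:
		fmt.Println("branch 2")
	default:
		fmt.Println("default branch")
	}
}

func main() {
	handle(false, true)
}
```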
diff --git a/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go b/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go index 7e564b70..04a3474f 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go @@ -7,6 +7,7 @@ import ( "github.com/go-critic/go-critic/checkers/internal/lintutil" "github.com/go-critic/go-critic/framework/linter" "github.com/go-toolsmith/astcast" + "golang.org/x/exp/typeparams" "golang.org/x/tools/go/ast/astutil" ) @@ -33,6 +34,10 @@ func (c *newDerefChecker) VisitExpr(expr ast.Expr) { call := astcast.ToCallExpr(deref.X) if astcast.ToIdent(call.Fun).Name == "new" { typ := c.ctx.TypeOf(call.Args[0]) + // allow *new(T) if T is a type parameter, see #1272 for details + if typeparams.IsTypeParam(typ) { + return + } zv := lintutil.ZeroValueOf(astutil.Unparen(call.Args[0]), typ) if zv != nil { c.warn(expr, zv) diff --git a/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go b/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go index eafc549d..37f46965 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go @@ -5,7 +5,6 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/framework/linter" - "golang.org/x/exp/typeparams" ) func init() { @@ -66,10 +65,8 @@ func (c *rangeValCopyChecker) VisitStmt(stmt ast.Stmt) { if typ == nil { return } - if _, ok := typ.(*typeparams.TypeParam); ok { - return - } - if size, ok := c.ctx.SizeOf(typ); ok && size >= c.sizeThreshold { + size, ok := c.ctx.SizeOf(typ) + if ok && size >= c.sizeThreshold { c.warn(rng, size) } } diff --git a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go index 35c6a644..000007a3 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go @@ -274,7 +274,7 @@ func (c *ruleguardChecker) WalkFile(f *ast.File) { func runRuleguardEngine(ctx *linter.CheckerContext, f *ast.File, e *ruleguard.Engine, runCtx *ruleguard.RunContext) { type ruleguardReport struct { - node ast.Node + pos token.Pos message string fix linter.QuickFix } @@ -284,7 +284,7 @@ func runRuleguardEngine(ctx *linter.CheckerContext, f *ast.File, e *ruleguard.En // TODO(quasilyte): investigate whether we should add a rule name as // a message prefix here. 
r := ruleguardReport{ - node: data.Node, + pos: data.Node.Pos(), message: data.Message, } fix := data.Suggestion @@ -310,9 +310,9 @@ func runRuleguardEngine(ctx *linter.CheckerContext, f *ast.File, e *ruleguard.En }) for _, report := range reports { if report.fix.Replacement != nil { - ctx.WarnFixable(report.node, report.fix, "%s", report.message) + ctx.WarnFixableWithPos(report.pos, report.fix, "%s", report.message) } else { - ctx.Warn(report.node, "%s", report.message) + ctx.WarnWithPos(report.pos, "%s", report.message) } } } diff --git a/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go b/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go index b5dc5823..f0b147a6 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go +++ b/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go @@ -2379,6 +2379,63 @@ var PrecompiledRules = &ir.File{ }, }, }, + { + Line: 777, + Name: "uncheckedInlineErr", + MatcherName: "m", + DocTags: []string{"diagnostic", "experimental"}, + DocSummary: "Detects unchecked errors in if statements", + DocBefore: "if err := expr(); err2 != nil { /*...*/ }", + DocAfter: "if err := expr(); err != nil { /*...*/ }", + Rules: []ir.Rule{{ + Line: 778, + SyntaxPatterns: []ir.PatternString{ + {Line: 779, Value: "if $err := $_($*_); $err2 != nil { $*_ }"}, + {Line: 780, Value: "if $err = $_($*_); $err2 != nil { $*_ }"}, + {Line: 781, Value: "if $*_, $err := $_($*_); $err2 != nil { $*_ }"}, + {Line: 782, Value: "if $*_, $err = $_($*_); $err2 != nil { $*_ }"}, + }, + ReportTemplate: "$err error is unchecked, maybe intended to check it instead of $err2", + WhereExpr: ir.FilterExpr{ + Line: 783, + Op: ir.FilterAndOp, + Src: "m[\"err\"].Type.Implements(\"error\") && m[\"err2\"].Type.Implements(\"error\") &&\n\tm[\"err\"].Text != m[\"err2\"].Text", + Args: []ir.FilterExpr{ + { + Line: 783, + Op: ir.FilterAndOp, + Src: "m[\"err\"].Type.Implements(\"error\") && m[\"err2\"].Type.Implements(\"error\")", + Args: []ir.FilterExpr{ + { + Line: 783, + Op: ir.FilterVarTypeImplementsOp, + Src: "m[\"err\"].Type.Implements(\"error\")", + Value: "err", + Args: []ir.FilterExpr{{Line: 783, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}}, + }, + { + Line: 783, + Op: ir.FilterVarTypeImplementsOp, + Src: "m[\"err2\"].Type.Implements(\"error\")", + Value: "err2", + Args: []ir.FilterExpr{{Line: 783, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}}, + }, + }, + }, + { + Line: 784, + Op: ir.FilterNeqOp, + Src: "m[\"err\"].Text != m[\"err2\"].Text", + Args: []ir.FilterExpr{ + {Line: 784, Op: ir.FilterVarTextOp, Src: "m[\"err\"].Text", Value: "err"}, + {Line: 784, Op: ir.FilterVarTextOp, Src: "m[\"err2\"].Text", Value: "err2"}, + }, + }, + }, + }, + LocationVar: "err", + }}, + }, }, } diff --git a/vendor/github.com/go-critic/go-critic/framework/linter/linter.go b/vendor/github.com/go-critic/go-critic/framework/linter/linter.go index 750ff7cd..8573ace3 100644 --- a/vendor/github.com/go-critic/go-critic/framework/linter/linter.go +++ b/vendor/github.com/go-critic/go-critic/framework/linter/linter.go @@ -6,7 +6,6 @@ import ( "go/types" "github.com/go-toolsmith/astfmt" - "golang.org/x/exp/typeparams" ) // CheckerCollection provides additional information for a group of checkers. @@ -141,9 +140,7 @@ type QuickFix struct { // Warning represents issue that is found by checker. 
type Warning struct { - // Node is an AST node that caused warning to trigger. - // Can be used to obtain proper error location. - Node ast.Node + Pos token.Pos // Text is warning message without source location info. Text string @@ -279,17 +276,27 @@ type CheckerContext struct { // Warn adds a Warning to checker output. func (ctx *CheckerContext) Warn(node ast.Node, format string, args ...interface{}) { + ctx.WarnWithPos(node.Pos(), format, args...) +} + +// WarnFixable emits a warning with a fix suggestion provided by the caller. +func (ctx *CheckerContext) WarnFixable(node ast.Node, fix QuickFix, format string, args ...interface{}) { + ctx.WarnFixableWithPos(node.Pos(), fix, format, args...) +} + +// WarnWithPos adds a Warning to checker output. Useful for ruleguard's Report func. +func (ctx *CheckerContext) WarnWithPos(pos token.Pos, format string, args ...interface{}) { ctx.warnings = append(ctx.warnings, Warning{ Text: ctx.printer.Sprintf(format, args...), - Node: node, + Pos: pos, }) } -// WarnFixable emits a warning with a fix suggestion provided by the caller. -func (ctx *CheckerContext) WarnFixable(node ast.Node, fix QuickFix, format string, args ...interface{}) { +// WarnFixableWithPos adds a Warning to checker output. Useful for ruleguard's Report func. +func (ctx *CheckerContext) WarnFixableWithPos(pos token.Pos, fix QuickFix, format string, args ...interface{}) { ctx.warnings = append(ctx.warnings, Warning{ Text: ctx.printer.Sprintf(format, args...), - Node: node, + Pos: pos, Suggestion: fix, }) } @@ -319,7 +326,10 @@ func (ctx *CheckerContext) TypeOf(x ast.Expr) types.Type { // // Unlike SizesInfo.SizeOf, it will not panic on generic types. func (ctx *CheckerContext) SizeOf(typ types.Type) (int64, bool) { - if _, ok := typ.(*typeparams.TypeParam); ok { + if _, ok := typ.(*types.TypeParam); ok { + return 0, false + } + if named, ok := typ.(*types.Named); ok && named.TypeParams() != nil { return 0, false } return ctx.SizesInfo.Sizeof(typ), true diff --git a/vendor/github.com/go-toolsmith/astcast/.travis.yml b/vendor/github.com/go-toolsmith/astcast/.travis.yml deleted file mode 100644 index c32ac006..00000000 --- a/vendor/github.com/go-toolsmith/astcast/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: go -go: - - 1.x -install: - - # Prevent default install action "go get -t -v ./...". -script: - - go get -t -v ./... - - go tool vet . - - go test -v -race ./... diff --git a/vendor/github.com/go-toolsmith/astcast/README.md b/vendor/github.com/go-toolsmith/astcast/README.md index b618da46..19ca0e71 100644 --- a/vendor/github.com/go-toolsmith/astcast/README.md +++ b/vendor/github.com/go-toolsmith/astcast/README.md @@ -1,15 +1,19 @@ -[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/astcast)](https://goreportcard.com/report/github.com/go-toolsmith/astcast) -[![GoDoc](https://godoc.org/github.com/go-toolsmith/astcast?status.svg)](https://godoc.org/github.com/go-toolsmith/astcast) - # astcast -Package astcast wraps type assertion operations in such way that you don't have +[![build-img]][build-url] +[![pkg-img]][pkg-url] +[![reportcard-img]][reportcard-url] +[![version-img]][version-url] + +Package `astcast` wraps type assertion operations in such way that you don't have to worry about nil pointer results anymore. 
## Installation +Go version 1.16+ + ```bash -go get -v github.com/go-toolsmith/astcast +go get github.com/go-toolsmith/astcast ``` ## Example @@ -84,3 +88,16 @@ func main() { fmt.Printf("%T %s\n", bar, bar.Name) } ``` + +## License + +[MIT License](LICENSE). + +[build-img]: https://github.com/go-toolsmith/astcast/workflows/build/badge.svg +[build-url]: https://github.com/go-toolsmith/astcast/actions +[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/astcast +[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/astcast +[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/astcast +[reportcard-url]: https://goreportcard.com/report/go-toolsmith/astcast +[version-img]: https://img.shields.io/github/v/release/go-toolsmith/astcast +[version-url]: https://github.com/go-toolsmith/astcast/releases diff --git a/vendor/github.com/go-toolsmith/astcopy/astcopy.go b/vendor/github.com/go-toolsmith/astcopy/astcopy.go index 91e1f310..72bc58ce 100644 --- a/vendor/github.com/go-toolsmith/astcopy/astcopy.go +++ b/vendor/github.com/go-toolsmith/astcopy/astcopy.go @@ -346,21 +346,6 @@ func FieldList(x *ast.FieldList) *ast.FieldList { return &cp } -// FuncType returns x deep copy. -// Copy of nil argument is nil. -func FuncType(x *ast.FuncType) *ast.FuncType { - if x == nil { - return nil - } - cp := *x - cp.Params = FieldList(x.Params) - cp.Results = FieldList(x.Results) - if typeParams := typeparams.ForFuncType(x); typeParams != nil { - *typeparams.ForFuncType(&cp) = *FieldList(typeParams) - } - return &cp -} - // InterfaceType returns x deep copy. // Copy of nil argument is nil. func InterfaceType(x *ast.InterfaceType) *ast.InterfaceType { @@ -435,23 +420,6 @@ func ValueSpec(x *ast.ValueSpec) *ast.ValueSpec { return &cp } -// TypeSpec returns x deep copy. -// Copy of nil argument is nil. -func TypeSpec(x *ast.TypeSpec) *ast.TypeSpec { - if x == nil { - return nil - } - cp := *x - cp.Name = Ident(x.Name) - cp.Type = copyExpr(x.Type) - cp.Doc = CommentGroup(x.Doc) - cp.Comment = CommentGroup(x.Comment) - if typeParams := typeparams.ForTypeSpec(x); typeParams != nil { - *typeparams.ForTypeSpec(&cp) = *FieldList(typeParams) - } - return &cp -} - // Spec returns x deep copy. // Copy of nil argument is nil. func Spec(x ast.Spec) ast.Spec { diff --git a/vendor/github.com/go-toolsmith/astcopy/astcopy_go117.go b/vendor/github.com/go-toolsmith/astcopy/astcopy_go117.go new file mode 100644 index 00000000..1b748bae --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcopy/astcopy_go117.go @@ -0,0 +1,30 @@ +//go:build !go1.18 +// +build !go1.18 + +package astcopy + +// FuncType returns x deep copy. +// Copy of nil argument is nil. +func FuncType(x *ast.FuncType) *ast.FuncType { + if x == nil { + return nil + } + cp := *x + cp.Params = FieldList(x.Params) + cp.Results = FieldList(x.Results) + return &cp +} + +// TypeSpec returns x deep copy. +// Copy of nil argument is nil. 
+func TypeSpec(x *ast.TypeSpec) *ast.TypeSpec { + if x == nil { + return nil + } + cp := *x + cp.Name = Ident(x.Name) + cp.Type = copyExpr(x.Type) + cp.Doc = CommentGroup(x.Doc) + cp.Comment = CommentGroup(x.Comment) + return &cp +} diff --git a/vendor/github.com/go-toolsmith/astcopy/astcopy_go118.go b/vendor/github.com/go-toolsmith/astcopy/astcopy_go118.go new file mode 100644 index 00000000..72f800ac --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcopy/astcopy_go118.go @@ -0,0 +1,36 @@ +//go:build go1.18 +// +build go1.18 + +package astcopy + +import ( + "go/ast" +) + +// FuncType returns x deep copy. +// Copy of nil argument is nil. +func FuncType(x *ast.FuncType) *ast.FuncType { + if x == nil { + return nil + } + cp := *x + cp.Params = FieldList(x.Params) + cp.Results = FieldList(x.Results) + cp.TypeParams = FieldList(x.TypeParams) + return &cp +} + +// TypeSpec returns x deep copy. +// Copy of nil argument is nil. +func TypeSpec(x *ast.TypeSpec) *ast.TypeSpec { + if x == nil { + return nil + } + cp := *x + cp.Name = Ident(x.Name) + cp.Type = copyExpr(x.Type) + cp.Doc = CommentGroup(x.Doc) + cp.Comment = CommentGroup(x.Comment) + cp.TypeParams = FieldList(x.TypeParams) + return &cp +} diff --git a/vendor/github.com/go-toolsmith/astequal/.travis.yml b/vendor/github.com/go-toolsmith/astequal/.travis.yml deleted file mode 100644 index 8994d395..00000000 --- a/vendor/github.com/go-toolsmith/astequal/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: go -go: - - 1.x -install: - - # Prevent default install action "go get -t -v ./...". -script: - - go get -t -v ./... - - go tool vet . - - go test -v -race ./... \ No newline at end of file diff --git a/vendor/github.com/go-toolsmith/astequal/README.md b/vendor/github.com/go-toolsmith/astequal/README.md index b14f80f6..db5e3a8c 100644 --- a/vendor/github.com/go-toolsmith/astequal/README.md +++ b/vendor/github.com/go-toolsmith/astequal/README.md @@ -1,14 +1,16 @@ -[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/astequal)](https://goreportcard.com/report/github.com/go-toolsmith/astequal) -[![GoDoc](https://godoc.org/github.com/go-toolsmith/astequal?status.svg)](https://godoc.org/github.com/go-toolsmith/astequal) -[![Build Status](https://travis-ci.org/go-toolsmith/astequal.svg?branch=master)](https://travis-ci.org/go-toolsmith/astequal) - - # astequal -Package astequal provides AST (deep) equallity check operations. +[![build-img]][build-url] +[![pkg-img]][pkg-url] +[![reportcard-img]][reportcard-url] +[![version-img]][version-url] + +Package `astequal` provides AST (deep) equallity check operations. ## Installation: +Go version 1.16+ + ```bash go get github.com/go-toolsmith/astequal ``` @@ -65,3 +67,16 @@ BenchmarkEqualExpr/astequal.Expr-8 5000000 298 ns/op 0 B/op 0 BenchmarkEqualExpr/astequal.Node-8 3000000 409 ns/op 0 B/op 0 allocs/op BenchmarkEqualExpr/reflect.DeepEqual-8 50000 38898 ns/op 10185 B/op 156 allocs/op ``` + +## License + +[MIT License](LICENSE). 
+ +[build-img]: https://github.com/go-toolsmith/astequal/workflows/build/badge.svg +[build-url]: https://github.com/go-toolsmith/astequal/actions +[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/astequal +[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/astequal +[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/astequal +[reportcard-url]: https://goreportcard.com/report/go-toolsmith/astequal +[version-img]: https://img.shields.io/github/v/release/go-toolsmith/astequal +[version-url]: https://github.com/go-toolsmith/astequal/releases diff --git a/vendor/github.com/go-toolsmith/astfmt/.travis.yml b/vendor/github.com/go-toolsmith/astfmt/.travis.yml deleted file mode 100644 index c32ac006..00000000 --- a/vendor/github.com/go-toolsmith/astfmt/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: go -go: - - 1.x -install: - - # Prevent default install action "go get -t -v ./...". -script: - - go get -t -v ./... - - go tool vet . - - go test -v -race ./... diff --git a/vendor/github.com/go-toolsmith/astfmt/README.md b/vendor/github.com/go-toolsmith/astfmt/README.md index 954c92bf..00f790fd 100644 --- a/vendor/github.com/go-toolsmith/astfmt/README.md +++ b/vendor/github.com/go-toolsmith/astfmt/README.md @@ -1,13 +1,16 @@ -[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/strparse)](https://goreportcard.com/report/github.com/go-toolsmith/strparse) -[![GoDoc](https://godoc.org/github.com/go-toolsmith/strparse?status.svg)](https://godoc.org/github.com/go-toolsmith/strparse) - - # astfmt -Package astfmt implements ast.Node formatting with fmt-like API. +[![build-img]][build-url] +[![pkg-img]][pkg-url] +[![reportcard-img]][reportcard-url] +[![version-img]][version-url] + +Package `astfmt` implements ast.Node formatting with fmt-like API. ## Installation +Go version 1.16+ + ```bash go get github.com/go-toolsmith/astfmt ``` @@ -37,3 +40,16 @@ func Example() { pp.Println(x) // => foo(bar(baz(1 + 2))) } ``` + +## License + +[MIT License](LICENSE). + +[build-img]: https://github.com/go-toolsmith/astfmt/workflows/build/badge.svg +[build-url]: https://github.com/go-toolsmith/astfmt/actions +[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/astfmt +[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/astfmt +[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/astfmt +[reportcard-url]: https://goreportcard.com/report/go-toolsmith/astfmt +[version-img]: https://img.shields.io/github/v/release/go-toolsmith/astfmt +[version-url]: https://github.com/go-toolsmith/astfmt/releases diff --git a/vendor/github.com/go-toolsmith/astp/.gitignore b/vendor/github.com/go-toolsmith/astp/.gitignore deleted file mode 100644 index 1f6187ec..00000000 --- a/vendor/github.com/go-toolsmith/astp/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -bin -pkg -src/main -tmp \ No newline at end of file diff --git a/vendor/github.com/go-toolsmith/astp/.travis.yml b/vendor/github.com/go-toolsmith/astp/.travis.yml deleted file mode 100644 index 8994d395..00000000 --- a/vendor/github.com/go-toolsmith/astp/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: go -go: - - 1.x -install: - - # Prevent default install action "go get -t -v ./...". -script: - - go get -t -v ./... - - go tool vet . - - go test -v -race ./... 
\ No newline at end of file diff --git a/vendor/github.com/go-toolsmith/astp/README.md b/vendor/github.com/go-toolsmith/astp/README.md index 7313c6ab..cf5197e8 100644 --- a/vendor/github.com/go-toolsmith/astp/README.md +++ b/vendor/github.com/go-toolsmith/astp/README.md @@ -1,14 +1,16 @@ -[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/astp)](https://goreportcard.com/report/github.com/go-toolsmith/astp) -[![GoDoc](https://godoc.org/github.com/go-toolsmith/astp?status.svg)](https://godoc.org/github.com/go-toolsmith/astp) -[![Build Status](https://travis-ci.org/go-toolsmith/astp.svg?branch=master)](https://travis-ci.org/go-toolsmith/astp) - - # astp -Package astp provides AST predicates. +[![build-img]][build-url] +[![pkg-img]][pkg-url] +[![reportcard-img]][reportcard-url] +[![version-img]][version-url] + +Package `astp` provides AST predicates. ## Installation: +Go version 1.16+ + ```bash go get github.com/go-toolsmith/astp ``` @@ -37,3 +39,16 @@ func main() { } } ``` + +## License + +[MIT License](LICENSE). + +[build-img]: https://github.com/go-toolsmith/astp/workflows/build/badge.svg +[build-url]: https://github.com/go-toolsmith/astp/actions +[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/astp +[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/astp +[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/astp +[reportcard-url]: https://goreportcard.com/report/go-toolsmith/astp +[version-img]: https://img.shields.io/github/v/release/go-toolsmith/astp +[version-url]: https://github.com/go-toolsmith/astp/releases diff --git a/vendor/github.com/go-toolsmith/strparse/.travis.yml b/vendor/github.com/go-toolsmith/strparse/.travis.yml deleted file mode 100644 index 8994d395..00000000 --- a/vendor/github.com/go-toolsmith/strparse/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: go -go: - - 1.x -install: - - # Prevent default install action "go get -t -v ./...". -script: - - go get -t -v ./... - - go tool vet . - - go test -v -race ./... \ No newline at end of file diff --git a/vendor/github.com/go-toolsmith/strparse/README.md b/vendor/github.com/go-toolsmith/strparse/README.md index ae80a539..ac04d516 100644 --- a/vendor/github.com/go-toolsmith/strparse/README.md +++ b/vendor/github.com/go-toolsmith/strparse/README.md @@ -1,15 +1,17 @@ -[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/strparse)](https://goreportcard.com/report/github.com/go-toolsmith/strparse) -[![GoDoc](https://godoc.org/github.com/go-toolsmith/strparse?status.svg)](https://godoc.org/github.com/go-toolsmith/strparse) -[![Build Status](https://travis-ci.org/go-toolsmith/strparse.svg?branch=master)](https://travis-ci.org/go-toolsmith/strparse) - - # strparse -Package strparse provides convenience wrappers around `go/parser` for simple +[![build-img]][build-url] +[![pkg-img]][pkg-url] +[![reportcard-img]][reportcard-url] +[![version-img]][version-url] + +Package `strparse` provides convenience wrappers around `go/parser` for simple expression, statement and declaretion parsing from string. 
## Installation +Go version 1.16+ + ```bash go get github.com/go-toolsmith/strparse ``` @@ -20,8 +22,8 @@ go get github.com/go-toolsmith/strparse package main import ( - "go-toolsmith/astequal" - "go-toolsmith/strparse" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/strparse" ) func main() { @@ -30,5 +32,17 @@ func main() { y := strparse.Expr(` 1+f( v[0].X ) `) fmt.Println(astequal.Expr(x, y)) // => true } - ``` + +## License + +[MIT License](LICENSE). + +[build-img]: https://github.com/go-toolsmith/strparse/workflows/build/badge.svg +[build-url]: https://github.com/go-toolsmith/strparse/actions +[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/strparse +[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/strparse +[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/strparse +[reportcard-url]: https://goreportcard.com/report/go-toolsmith/strparse +[version-img]: https://img.shields.io/github/v/release/go-toolsmith/strparse +[version-url]: https://github.com/go-toolsmith/strparse/releases diff --git a/vendor/github.com/go-toolsmith/typep/.travis.yml b/vendor/github.com/go-toolsmith/typep/.travis.yml deleted file mode 100644 index d3ff3cca..00000000 --- a/vendor/github.com/go-toolsmith/typep/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: go -go: - - 1.x -install: - - # Prevent default install action "go get -t -v ./...". -script: - - go get -t -v ./... - - go vet ./... - - go test -v -race ./... diff --git a/vendor/github.com/go-toolsmith/typep/README.md b/vendor/github.com/go-toolsmith/typep/README.md index f7979148..77478c44 100644 --- a/vendor/github.com/go-toolsmith/typep/README.md +++ b/vendor/github.com/go-toolsmith/typep/README.md @@ -1,15 +1,18 @@ -[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/typep)](https://goreportcard.com/report/github.com/go-toolsmith/typep) -[![GoDoc](https://godoc.org/github.com/go-toolsmith/typep?status.svg)](https://godoc.org/github.com/go-toolsmith/typep) -[![Build Status](https://travis-ci.org/go-toolsmith/typep.svg?branch=master)](https://travis-ci.org/go-toolsmith/typep) - # typep -Package typep provides type predicates. +[![build-img]][build-url] +[![pkg-img]][pkg-url] +[![reportcard-img]][reportcard-url] +[![version-img]][version-url] + +Package `typep` provides type predicates. ## Installation: +Go version 1.16+ + ```bash -go get -v github.com/go-toolsmith/typep +go get github.com/go-toolsmith/typep ``` ## Example @@ -29,9 +32,23 @@ func main() { intTyp := types.Typ[types.Int] ptr := types.NewPointer(intTyp) arr := types.NewArray(intTyp, 64) + fmt.Println(typep.HasFloatProp(floatTyp)) // => true fmt.Println(typep.HasFloatProp(intTyp)) // => false fmt.Println(typep.IsPointer(ptr)) // => true fmt.Println(typep.IsArray(arr)) // => true } ``` + +## License + +[MIT License](LICENSE). 
+ +[build-img]: https://github.com/go-toolsmith/typep/workflows/build/badge.svg +[build-url]: https://github.com/go-toolsmith/typep/actions +[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/typep +[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/typep +[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/typep +[reportcard-url]: https://goreportcard.com/report/go-toolsmith/typep +[version-img]: https://img.shields.io/github/v/release/go-toolsmith/typep +[version-url]: https://github.com/go-toolsmith/typep/releases diff --git a/vendor/github.com/go-toolsmith/typep/safeExpr.go b/vendor/github.com/go-toolsmith/typep/safe_expr.go similarity index 100% rename from vendor/github.com/go-toolsmith/typep/safeExpr.go rename to vendor/github.com/go-toolsmith/typep/safe_expr.go diff --git a/vendor/github.com/go-toolsmith/typep/simplePredicates.go b/vendor/github.com/go-toolsmith/typep/simple_predicates.go similarity index 99% rename from vendor/github.com/go-toolsmith/typep/simplePredicates.go rename to vendor/github.com/go-toolsmith/typep/simple_predicates.go index 3bc9c29c..61e7d5b7 100644 --- a/vendor/github.com/go-toolsmith/typep/simplePredicates.go +++ b/vendor/github.com/go-toolsmith/typep/simple_predicates.go @@ -1,4 +1,4 @@ -// Code generated by simplePredicates_generate.go; DO NOT EDIT +// Code generated by simple_predicates_generate.go; DO NOT EDIT package typep diff --git a/vendor/github.com/go-xmlfmt/xmlfmt/README.md b/vendor/github.com/go-xmlfmt/xmlfmt/README.md index 4eb6d69a..9f661ea2 100644 --- a/vendor/github.com/go-xmlfmt/xmlfmt/README.md +++ b/vendor/github.com/go-xmlfmt/xmlfmt/README.md @@ -3,7 +3,6 @@ [![MIT License](http://img.shields.io/badge/License-MIT-blue.svg)](LICENSE) [![Go Doc](https://img.shields.io/badge/godoc-reference-4b68a3.svg)](https://godoc.org/github.com/go-xmlfmt/xmlfmt) [![Go Report Card](https://goreportcard.com/badge/github.com/go-xmlfmt/xmlfmt)](https://goreportcard.com/report/github.com/go-xmlfmt/xmlfmt) -[![Codeship Status](https://codeship.com/projects/c49f02b0-a384-0134-fb20-2e0351080565/status?branch=master)](https://codeship.com/projects/190297) ## Synopsis @@ -15,9 +14,18 @@ package main import "github.com/go-xmlfmt/xmlfmt" func main() { - xml1 := `aSome org-or-otherWouldnt you like to knowPatCalifia` + xml1 := `aSome org-or-otherWouldnt you like to knowPatCalifia` x := xmlfmt.FormatXML(xml1, "\t", " ") print(x) + + // If the XML Comments have nested tags in them + xml1 = ` Fred + + 23456 ` + x = xmlfmt.FormatXML(xml1, "", " ", true) + print(x) } ``` @@ -27,48 +35,107 @@ Output: ```xml - a - + a - Some org-or-other - - Wouldnt you like to know - + Some org-or-other + Wouldnt you like to know - Pat - - Califia - + Pat + Califia + + + + Fred + + 23456 + ``` There is no XML decoding and encoding involved, only pure regular expression matching and replacing. So it is much faster than going through decoding and encoding procedures. Moreover, the exact XML source string is preserved, instead of being changed by the encoder. This is why this package exists in the first place. +Note that + +- the default line ending is handled by the package automatically now. For Windows it's `CRLF`, and standard for anywhere else. No need to change the default line ending now. +- the case of XML comments nested within XML comments is ***not*** supported. 
Please avoid them or use any other tools to correct them before using this package. +- don't turn on the `nestedTagsInComments` parameter blindly, as the code has become 10+ times more complicated because of it. + ## Command To use it on command line, check out [xmlfmt](https://github.com/AntonioSun/xmlfmt): ``` -$ xmlfmt -XML Formatter -built on 2019-12-08 +$ xmlfmt -V +xmlfmt - XML Formatter +Copyright (C) 2016-2022, Antonio Sun The xmlfmt will format the XML string without rewriting the document -Options: - - -h, --help display help information - -f, --file *The xml file to read from (or stdin) - -p, --prefix each element begins on a new line and this prefix - -i, --indent[= ] indent string for nested elements +Built on 2022-02-06 +Version 1.1.1 + +$ xmlfmt +the required flag `-f, --file' was not specified + +Usage: + xmlfmt [OPTIONS] + +Application Options: + -f, --file= The xml file to read from (or "-" for stdin) [$XMLFMT_FILEI] + -p, --prefix= Each element begins on a new line and this prefix [$XMLFMT_PREFIX] + -i, --indent= Indent string for nested elements (default: ) [$XMLFMT_INDENT] + -n, --nested Nested tags in comments [$XMLFMT_NESTED] + -v, --verbose Verbose mode (Multiple -v options increase the verbosity) + -V, --version Show program version and exit + +Help Options: + -h, --help Show this help message + + +$ curl -sL https://pastebin.com/raw/z3euQ5PR | xmlfmt -f - + + + + a + + + + + Some org-or-other + Wouldnt you like to know + + + Pat + Califia + + + + + +$ curl -sL https://pastebin.com/raw/Zs0qy0qz | tee /tmp/xmlfmt.xml | xmlfmt -f - -n + + + Fred + + 23456 + + +$ XMLFMT_NESTED=true XMLFMT_PREFIX='|' xmlfmt -f /tmp/xmlfmt.xml + +| +| +| Fred +| +| 23456 +| ``` @@ -76,7 +143,7 @@ Options: ### The format -The Go XML Formatter is not called XML Beautifier because the result is not *exactly* as what people would expect -- some, but not all, closing tags stays on the same line, just as shown above. Having been looking at the result and thinking over it, I now think it is actually a better way to present it, as those closing tags on the same line are better stay that way in my opinion. I.e., +The Go XML Formatter is not called XML Beautifier because the result is not *exactly* as what people would expect -- most of the closing tags stays on the same line, just as shown above. Having been looking at the result and thinking over it, I now think it is actually a better way to present it, as those closing tags on the same line are better stay that way in my opinion. I.e., When it comes to very big XML strings, which is what I’m dealing every day, saving spaces by not allowing those closing tags taking extra lines is plus instead of negative to me. @@ -175,4 +242,4 @@ echo ']+?)(/?)>`) - // NL is the newline string used in XML output, define for DOS-convenient. - NL = "\r\n" + // NL is the newline string used in XML output. + NL = "\n" ) -// FormatXML will (purly) reformat the XML string in a readable way, without any rewriting/altering the structure -func FormatXML(xmls, prefix, indent string) string { - src := regexp.MustCompile(`(?s)>\s+<`).ReplaceAllString(xmls, "><") +func init() { + // define NL for Windows + if runtime.GOOS == "windows" { + NL = "\r\n" + } +} +// FormatXML will (purly) reformat the XML string in a readable way, without any rewriting/altering the structure. +// If your XML Comments have nested tags in them, or you're not 100% sure otherwise, pass `true` as the third parameter to this function. 
But don't turn it on blindly, as the code has become ten times more complicated because of it. +func FormatXML(xmls, prefix, indent string, nestedTagsInComments ...bool) string { + nestedTagsInComment := false + if len(nestedTagsInComments) > 0 { + nestedTagsInComment = nestedTagsInComments[0] + } + reXmlComments := regexp.MustCompile(`(?s)()`) + src := regexp.MustCompile(`(?s)>\s+<`).ReplaceAllString(xmls, "><") + if nestedTagsInComment { + src = reXmlComments.ReplaceAllStringFunc(src, func(m string) string { + parts := reXmlComments.FindStringSubmatch(m) + p2 := regexp.MustCompile(`\r*\n`).ReplaceAllString(parts[2], " ") + return parts[1] + html.EscapeString(p2) + parts[3] + }) + } rf := replaceTag(prefix, indent) - return (prefix + reg.ReplaceAllStringFunc(src, rf)) + r := prefix + reg.ReplaceAllStringFunc(src, rf) + if nestedTagsInComment { + r = reXmlComments.ReplaceAllStringFunc(r, func(m string) string { + parts := reXmlComments.FindStringSubmatch(m) + return parts[1] + html.UnescapeString(parts[2]) + parts[3] + }) + } + + return r } // replaceTag returns a closure function to do 's/(?<=>)\s+(?=<)//g; s(<(/?)([^>]+?)(/?)>)($indent+=$3?0:$1?-1:1;"<$1$2$3>"."\n".(" "x$indent))ge' as in Perl // and deal with comments as well func replaceTag(prefix, indent string) func(string) string { indentLevel := 0 + lastEndElem := true return func(m string) string { // head elem if strings.HasPrefix(m, "") { + lastEndElem = true return NL + prefix + strings.Repeat(indent, indentLevel) + m } // comment elem @@ -45,12 +76,17 @@ func replaceTag(prefix, indent string) func(string) string { // end elem if strings.HasPrefix(m, " maxLineLen { res = append(res, result.Issue{ @@ -100,7 +118,6 @@ func getLLLIssuesForFile(filename string, maxLineLen int, tabSpaces string) ([]r FromLinter: lllName, }) } - lineNumber++ } if err := scanner.Err(); err != nil { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go new file mode 100644 index 00000000..75500b5a --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go @@ -0,0 +1,29 @@ +package golinters + +import ( + "github.com/junk1tm/musttag" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewMustTag(setting *config.MustTagSettings) *goanalysis.Linter { + var funcs []musttag.Func + + if setting != nil { + for _, fn := range setting.Functions { + funcs = append(funcs, musttag.Func{ + Name: fn.Name, + Tag: fn.Tag, + ArgPos: fn.ArgPos, + }) + } + } + + a := musttag.New(funcs...) + + return goanalysis. + NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, nil). 
+ WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md index 9f4604d1..1643df7a 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md @@ -1,7 +1,7 @@ # nolintlint -nolintlint is a Go static analysis tool to find ill-formed or insufficiently explained `//nolint` directives for golangci -(or any other linter, using th ) +nolintlint is a Go static analysis tool to find ill-formed or insufficiently explained `//nolint` directives for golangci-lint +(or any other linter, using this package) ## Purpose diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go index 275670e1..67c14cbd 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go @@ -11,8 +11,9 @@ import ( func NewTagliatelle(settings *config.TagliatelleSettings) *goanalysis.Linter { cfg := tagliatelle.Config{ Rules: map[string]string{ - "json": "camel", - "yaml": "camel", + "json": "camel", + "yaml": "camel", + "header": "header", }, } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go index 35b43601..d4646905 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go @@ -63,25 +63,19 @@ func runUnused(pass *analysis.Pass) ([]goanalysis.Issue, error) { return nil, err } - sr := unused.Serialize(pass, res.(unused.Result), pass.Fset) - used := make(map[string]bool) - for _, obj := range sr.Used { + for _, obj := range res.(unused.Result).Used { used[fmt.Sprintf("%s %d %s", obj.Position.Filename, obj.Position.Line, obj.Name)] = true } var issues []goanalysis.Issue // Inspired by https://github.com/dominikh/go-tools/blob/d694aadcb1f50c2d8ac0a1dd06217ebb9f654764/lintcmd/lint.go#L177-L197 - for _, object := range sr.Unused { + for _, object := range res.(unused.Result).Unused { if object.Kind == "type param" { continue } - if object.InGenerated { - continue - } - key := fmt.Sprintf("%s %d %s", object.Position.Filename, object.Position.Line, object.Name) if used[key] { continue diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go index 8c4858d8..0ea4b563 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go @@ -18,13 +18,14 @@ func NewUseStdlibVars(cfg *config.UseStdlibVarsSettings) *goanalysis.Linter { analyzer.CryptoHashFlag: cfg.CryptoHash, analyzer.HTTPMethodFlag: cfg.HTTPMethod, analyzer.HTTPStatusCodeFlag: cfg.HTTPStatusCode, - analyzer.OSDevNullFlag: cfg.OSDevNullFlag, - analyzer.RPCDefaultPathFlag: cfg.DefaultRPCPathFlag, - analyzer.SQLIsolationLevelFlag: cfg.SQLIsolationLevelFlag, + analyzer.OSDevNullFlag: cfg.OSDevNull, + analyzer.RPCDefaultPathFlag: cfg.DefaultRPCPath, + analyzer.SQLIsolationLevelFlag: cfg.SQLIsolationLevel, + 
analyzer.SyslogPriorityFlag: cfg.SyslogPriority, analyzer.TimeLayoutFlag: cfg.TimeLayout, analyzer.TimeMonthFlag: cfg.TimeMonth, analyzer.TimeWeekdayFlag: cfg.TimeWeekday, - analyzer.TLSSignatureSchemeFlag: cfg.TLSSignatureSchemeFlag, + analyzer.TLSSignatureSchemeFlag: cfg.TLSSignatureScheme, } } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go index 1d595622..4bc70244 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go @@ -32,6 +32,9 @@ func NewWSL(settings *config.WSLSettings) *goanalysis.Linter { conf.AllowCuddleDeclaration = settings.AllowCuddleDeclaration conf.AllowCuddleWithCalls = settings.AllowCuddleWithCalls conf.AllowCuddleWithRHS = settings.AllowCuddleWithRHS + conf.ForceCuddleErrCheckAndAssign = settings.ForceCuddleErrCheckAndAssign + conf.ErrorVariableNames = settings.ErrorVariableNames + conf.ForceExclusiveShortDeclarations = settings.ForceExclusiveShortDeclarations } analyzer := &analysis.Analyzer{ diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go index 3867025f..6f406f7d 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go @@ -118,6 +118,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { forbidigoCfg *config.ForbidigoSettings funlenCfg *config.FunlenSettings gciCfg *config.GciSettings + ginkgolinterCfg *config.GinkgoLinterSettings gocognitCfg *config.GocognitSettings goconstCfg *config.GoConstSettings gocriticCfg *config.GoCriticSettings @@ -146,6 +147,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { makezeroCfg *config.MakezeroSettings malignedCfg *config.MalignedSettings misspellCfg *config.MisspellSettings + musttagCfg *config.MustTagSettings nakedretCfg *config.NakedretSettings nestifCfg *config.NestifSettings nilNilCfg *config.NilNilSettings @@ -194,6 +196,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { forbidigoCfg = &m.cfg.LintersSettings.Forbidigo funlenCfg = &m.cfg.LintersSettings.Funlen gciCfg = &m.cfg.LintersSettings.Gci + ginkgolinterCfg = &m.cfg.LintersSettings.GinkgoLinter gocognitCfg = &m.cfg.LintersSettings.Gocognit goconstCfg = &m.cfg.LintersSettings.Goconst gocriticCfg = &m.cfg.LintersSettings.Gocritic @@ -222,6 +225,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { makezeroCfg = &m.cfg.LintersSettings.Makezero malignedCfg = &m.cfg.LintersSettings.Maligned misspellCfg = &m.cfg.LintersSettings.Misspell + musttagCfg = &m.cfg.LintersSettings.MustTag nakedretCfg = &m.cfg.LintersSettings.Nakedret nestifCfg = &m.cfg.LintersSettings.Nestif nilNilCfg = &m.cfg.LintersSettings.NilNil @@ -430,15 +434,26 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithPresets(linter.PresetFormatting, linter.PresetImport). WithURL("https://github.com/daixiang0/gci"), + linter.NewConfig(golinters.NewGinkgoLinter(ginkgolinterCfg)). + WithSince("v1.51.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/nunnatsa/ginkgolinter"), + + linter.NewConfig(golinters.NewGoCheckCompilerDirectives()). + WithSince("v1.51.0"). + WithPresets(linter.PresetBugs). 
+ WithURL("https://github.com/leighmcculloch/gocheckcompilerdirectives"), + linter.NewConfig(golinters.NewGochecknoglobals()). WithSince("v1.12.0"). WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). WithURL("https://github.com/leighmcculloch/gochecknoglobals"), linter.NewConfig(golinters.NewGochecknoinits()). WithSince("v1.12.0"). - WithPresets(linter.PresetStyle). - WithURL("https://github.com/leighmcculloch/gochecknoinits"), + WithPresets(linter.PresetStyle), linter.NewConfig(golinters.NewGocognit(gocognitCfg)). WithSince("v1.20.0"). @@ -482,7 +497,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithSince("v1.0.0"). WithPresets(linter.PresetFormatting). WithAutoFix(). - WithURL("https://golang.org/cmd/gofmt/"), + WithURL("https://pkg.go.dev/cmd/gofmt"), linter.NewConfig(golinters.NewGofumpt(gofumptCfg)). WithSince("v1.28.0"). @@ -499,7 +514,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithSince("v1.20.0"). WithPresets(linter.PresetFormatting, linter.PresetImport). WithAutoFix(). - WithURL("https://godoc.org/golang.org/x/tools/cmd/goimports"), + WithURL("https://pkg.go.dev/golang.org/x/tools/cmd/goimports"), linter.NewConfig(golinters.NewGolint(golintCfg)). WithSince("v1.0.0"). @@ -547,7 +562,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithLoadForGoAnalysis(). WithPresets(linter.PresetBugs, linter.PresetMetaLinter). WithAlternativeNames("vet", "vetshadow"). - WithURL("https://golang.org/cmd/vet/"), + WithURL("https://pkg.go.dev/cmd/vet"), linter.NewConfig(golinters.NewGrouper(grouperCfg)). WithSince("v1.44.0"). @@ -624,6 +639,12 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithAutoFix(). WithURL("https://github.com/client9/misspell"), + linter.NewConfig(golinters.NewMustTag(musttagCfg)). + WithSince("v1.51.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle, linter.PresetBugs). + WithURL("https://github.com/junk1tm/musttag"), + linter.NewConfig(golinters.NewNakedret(nakedretCfg)). WithSince("v1.19.0"). WithPresets(linter.PresetStyle). @@ -724,8 +745,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithSince("v1.28.0"). WithPresets(linter.PresetBugs, linter.PresetSQL). WithLoadForGoAnalysis(). - WithURL("https://github.com/ryanrolds/sqlclosecheck"). - WithNoopFallback(m.cfg), + WithURL("https://github.com/ryanrolds/sqlclosecheck"), linter.NewConfig(golinters.NewStaticcheck(staticcheckCfg)). WithSince("v1.0.0"). 
@@ -895,6 +915,10 @@ func linterConfigsToMap(lcs []*linter.Config) map[string]*linter.Config { func (m Manager) GetAllLinterConfigsForPreset(p string) []*linter.Config { var ret []*linter.Config for _, lc := range m.GetAllSupportedLinterConfigs() { + if lc.IsDeprecated() { + continue + } + for _, ip := range lc.InPresets { if p == ip { ret = append(ret, lc) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go index 0bac1062..c4e1e176 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go @@ -160,7 +160,17 @@ func (cl *ContextLoader) debugPrintLoadedPackages(pkgs []*packages.Package) { func (cl *ContextLoader) parseLoadedPackagesErrors(pkgs []*packages.Package) error { for _, pkg := range pkgs { + var errs []packages.Error for _, err := range pkg.Errors { + // quick fix: skip error related to `go list` invocation by packages.Load() + // The behavior has been changed between go1.19 and go1.20, the error is now inside the JSON content. + // https://github.com/golangci/golangci-lint/pull/3414#issuecomment-1364756303 + if strings.Contains(err.Msg, "# command-line-arguments") { + continue + } + + errs = append(errs, err) + if strings.Contains(err.Msg, "no Go files") { return errors.Wrapf(exitcodes.ErrNoGoFiles, "package %s", pkg.PkgPath) } @@ -169,6 +179,8 @@ func (cl *ContextLoader) parseLoadedPackagesErrors(pkgs []*packages.Package) err return errors.Wrap(exitcodes.ErrFailure, err.Msg) } } + + pkg.Errors = errs } return nil diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go index b21635f0..f285b731 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go @@ -58,7 +58,7 @@ func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env, es *lint var extra string if lc.Deprecation.Replacement != "" { - extra = fmt.Sprintf(" Replaced by %s.", lc.Deprecation.Replacement) + extra = fmt.Sprintf("Replaced by %s.", lc.Deprecation.Replacement) } log.Warnf("The linter '%s' is deprecated (since %s) due to: %s %s", name, lc.Deprecation.Since, lc.Deprecation.Message, extra) @@ -208,6 +208,7 @@ func (r Runner) Run(ctx context.Context, linters []*linter.Config, lintCtx *lint return } + issues = append(issues, linterIssues...) }) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go index 710f084a..62c521ea 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go @@ -13,12 +13,12 @@ import ( const envDebug = "GL_DEBUG" const ( - DebugKeyAutogenExclude = "autogen_exclude" + DebugKeyAutogenExclude = "autogen_exclude" // Debugs a filter excluding autogenerated source code. DebugKeyBinSalt = "bin_salt" DebugKeyConfigReader = "config_reader" DebugKeyEmpty = "" DebugKeyEnabledLinters = "enabled_linters" - DebugKeyEnv = "env" + DebugKeyEnv = "env" // Debugs `go env` command. 
DebugKeyExcludeRules = "exclude_rules" DebugKeyExec = "exec" DebugKeyFilenameUnadjuster = "filename_unadjuster" @@ -27,7 +27,7 @@ const ( DebugKeyLintersContext = "linters_context" DebugKeyLintersDB = "lintersdb" DebugKeyLintersOutput = "linters_output" - DebugKeyLoader = "loader" + DebugKeyLoader = "loader" // Debugs packages loading (including `go/packages` internal debugging). DebugKeyMaxFromLinter = "max_from_linter" DebugKeyMaxSameIssues = "max_same_issues" DebugKeyPkgCache = "pkgcache" @@ -55,10 +55,10 @@ const ( ) const ( - DebugKeyGoCritic = "gocritic" - DebugKeyMegacheck = "megacheck" - DebugKeyNolint = "nolint" - DebugKeyRevive = "revive" + DebugKeyGoCritic = "gocritic" // Debugs `go-critic` linter. + DebugKeyMegacheck = "megacheck" // Debugs `staticcheck` related linters. + DebugKeyNolint = "nolint" // Debugs a filter excluding issues by `//nolint` comments. + DebugKeyRevive = "revive" // Debugs `revice` linter. ) func getEnabledDebugs() map[string]bool { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go index bb347bd2..307a8e7a 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go @@ -12,6 +12,8 @@ import ( "github.com/golangci/golangci-lint/pkg/result" ) +const defaultCheckstyleSeverity = "error" + type checkstyleOutput struct { XMLName xml.Name `xml:"checkstyle"` Version string `xml:"version,attr"` @@ -31,8 +33,6 @@ type checkstyleError struct { Source string `xml:"source,attr"` } -const defaultCheckstyleSeverity = "error" - type Checkstyle struct { w io.Writer } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go index 8127632e..8a90f145 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go @@ -9,8 +9,12 @@ import ( "github.com/golangci/golangci-lint/pkg/result" ) -// CodeClimateIssue is a subset of the Code Climate spec - https://github.com/codeclimate/spec/blob/master/SPEC.md#data-types -// It is just enough to support GitLab CI Code Quality - https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html +const defaultCodeClimateSeverity = "critical" + +// CodeClimateIssue is a subset of the Code Climate spec. +// https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#data-types +// It is just enough to support GitLab CI Code Quality. 
+// https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html type CodeClimateIssue struct { Description string `json:"description"` Severity string `json:"severity,omitempty"` @@ -40,6 +44,7 @@ func (p CodeClimate) Print(ctx context.Context, issues []result.Issue) error { codeClimateIssue.Location.Path = issue.Pos.Filename codeClimateIssue.Location.Lines.Begin = issue.Pos.Line codeClimateIssue.Fingerprint = issue.Fingerprint() + codeClimateIssue.Severity = defaultCodeClimateSeverity if issue.Severity != "" { codeClimateIssue.Severity = issue.Severity diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go index 4a126bde..ffef4910 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go @@ -26,7 +26,7 @@ func NewTab(printLinterName bool, log logutils.Log, w io.Writer) *Tab { } } -func (p Tab) SprintfColored(ca color.Attribute, format string, args ...interface{}) string { +func (p *Tab) SprintfColored(ca color.Attribute, format string, args ...interface{}) string { c := color.New(ca) return c.Sprintf(format, args...) } @@ -45,7 +45,7 @@ func (p *Tab) Print(ctx context.Context, issues []result.Issue) error { return nil } -func (p Tab) printIssue(i *result.Issue, w io.Writer) { +func (p *Tab) printIssue(i *result.Issue, w io.Writer) { text := p.SprintfColored(color.FgRed, "%s", i.Text) if p.printLinterName { text = fmt.Sprintf("%s\t%s", i.FromLinter, text) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go index c8960e0e..d59391b2 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go @@ -31,7 +31,7 @@ func NewText(printIssuedLine, useColors, printLinterName bool, log logutils.Log, } } -func (p Text) SprintfColored(ca color.Attribute, format string, args ...interface{}) string { +func (p *Text) SprintfColored(ca color.Attribute, format string, args ...interface{}) string { if !p.useColors { return fmt.Sprintf(format, args...) 
} @@ -55,7 +55,7 @@ func (p *Text) Print(ctx context.Context, issues []result.Issue) error { return nil } -func (p Text) printIssue(i *result.Issue) { +func (p *Text) printIssue(i *result.Issue) { text := p.SprintfColored(color.FgRed, "%s", strings.TrimSpace(i.Text)) if p.printLinterName { text += fmt.Sprintf(" (%s)", i.FromLinter) @@ -67,7 +67,7 @@ func (p Text) printIssue(i *result.Issue) { fmt.Fprintf(p.w, "%s: %s\n", pos, text) } -func (p Text) printSourceCode(i *result.Issue) { +func (p *Text) printSourceCode(i *result.Issue) { for _, line := range i.SourceLines { fmt.Fprintln(p.w, line) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go index f75ebaf5..5e41fd6a 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go @@ -32,7 +32,7 @@ func NewAutogeneratedExclude() *AutogeneratedExclude { var _ Processor = &AutogeneratedExclude{} -func (p AutogeneratedExclude) Name() string { +func (p *AutogeneratedExclude) Name() string { return "autogenerated_exclude" } @@ -70,7 +70,7 @@ func (p *AutogeneratedExclude) shouldPassIssue(i *result.Issue) (bool, error) { } // isGenerated reports whether the source file is generated code. -// Using a bit laxer rules than https://golang.org/s/generatedcode to +// Using a bit laxer rules than https://go.dev/s/generatedcode to // match more generated code. See #48 and #72. func isGeneratedFileByComment(doc string) bool { const ( @@ -130,4 +130,4 @@ func getDoc(filePath string) (string, error) { return strings.Join(docLines, "\n"), nil } -func (p AutogeneratedExclude) Finish() {} +func (p *AutogeneratedExclude) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go index e9f474b9..62533b81 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go @@ -44,7 +44,7 @@ func createRules(rules []ExcludeRule, prefix string) []excludeRule { parsedRule.source = regexp.MustCompile(prefix + rule.Source) } if rule.Path != "" { - path := normalizePathInRegex(rule.Path) + path := fsutils.NormalizePathInRegex(rule.Path) parsedRule.path = regexp.MustCompile(path) } parsedRules = append(parsedRules, parsedRule) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go index 96540245..2aaafbf5 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go @@ -97,7 +97,7 @@ func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *Filename } } -func (p FilenameUnadjuster) Name() string { +func (p *FilenameUnadjuster) Name() string { return "filename_unadjuster" } @@ -128,4 +128,4 @@ func (p *FilenameUnadjuster) Process(issues []result.Issue) ([]result.Issue, err }), nil } -func (FilenameUnadjuster) Finish() {} +func (p *FilenameUnadjuster) Finish() {} diff --git 
a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go index c58666c5..649ed86a 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go @@ -24,7 +24,7 @@ func NewMaxFromLinter(limit int, log logutils.Log, cfg *config.Config) *MaxFromL } } -func (p MaxFromLinter) Name() string { +func (p *MaxFromLinter) Name() string { return "max_from_linter" } @@ -44,7 +44,7 @@ func (p *MaxFromLinter) Process(issues []result.Issue) ([]result.Issue, error) { }), nil } -func (p MaxFromLinter) Finish() { +func (p *MaxFromLinter) Finish() { walkStringToIntMapSortedByValue(p.lc, func(linter string, count int) { if count > p.limit { p.log.Infof("%d/%d issues from linter %s were hidden, use --max-issues-per-linter", diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go index e36446c9..64182e3e 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go @@ -31,7 +31,7 @@ func NewMaxPerFileFromLinter(cfg *config.Config) *MaxPerFileFromLinter { } } -func (p MaxPerFileFromLinter) Name() string { +func (p *MaxPerFileFromLinter) Name() string { return "max_per_file_from_linter" } @@ -56,4 +56,4 @@ func (p *MaxPerFileFromLinter) Process(issues []result.Issue) ([]result.Issue, e }), nil } -func (p MaxPerFileFromLinter) Finish() {} +func (p *MaxPerFileFromLinter) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go index 84fdf0c0..391ae5fa 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go @@ -28,7 +28,7 @@ func NewMaxSameIssues(limit int, log logutils.Log, cfg *config.Config) *MaxSameI } } -func (MaxSameIssues) Name() string { +func (p *MaxSameIssues) Name() string { return "max_same_issues" } @@ -48,7 +48,7 @@ func (p *MaxSameIssues) Process(issues []result.Issue) ([]result.Issue, error) { }), nil } -func (p MaxSameIssues) Finish() { +func (p *MaxSameIssues) Finish() { walkStringToIntMapSortedByValue(p.tc, func(text string, count int) { if count > p.limit { p.log.Infof("%d/%d issues with text %q were hidden, use --max-same-issues", diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go index 492dfd72..181d3bf1 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go @@ -85,7 +85,7 @@ func NewNolint(log logutils.Log, dbManager *lintersdb.Manager, enabledLinters ma var _ Processor = &Nolint{} -func (p Nolint) Name() string { +func (p *Nolint) Name() string { return "nolint" } @@ -284,7 +284,7 @@ func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *to return buildRange(linters) } 
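Since the `nolint` processor and the `nolintlint` linter touched in this diff both deal with suppression comments, here is a small hedged reminder of the directive form they expect; the specific linter name and reason are illustrative only.

```go
package example

import "os"

// A machine-readable directive names the suppressed linter and carries an
// explanation after a second "//".
//
//nolint:errcheck // best-effort cleanup, the error is not actionable here
func cleanup(f *os.File) {
	f.Close()
}
```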
-func (p Nolint) Finish() { +func (p *Nolint) Finish() { if len(p.unknownLintersSet) == 0 { return } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_unix.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_unix.go deleted file mode 100644 index b0c7c338..00000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_unix.go +++ /dev/null @@ -1,8 +0,0 @@ -//go:build !windows - -package processors - -// normalizePathInRegex it's a noop function on Unix. -func normalizePathInRegex(path string) string { - return path -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_windows.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_windows.go deleted file mode 100644 index 7f3e3622..00000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_windows.go +++ /dev/null @@ -1,19 +0,0 @@ -//go:build windows - -package processors - -import ( - "path/filepath" - "regexp" - "strings" -) - -var separatorToReplace = regexp.QuoteMeta(string(filepath.Separator)) - -// normalizePathInRegex normalizes path in regular expressions. -// noop on Unix. -// This replacing should be safe because "/" are disallowed in Windows -// https://docs.microsoft.com/windows/win32/fileio/naming-a-file -func normalizePathInRegex(path string) string { - return strings.ReplaceAll(path, "/", separatorToReplace) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go index 4077b340..85c1866a 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go @@ -49,7 +49,7 @@ func createSeverityRules(rules []SeverityRule, prefix string) []severityRule { parsedRule.source = regexp.MustCompile(prefix + rule.Source) } if rule.Path != "" { - path := normalizePathInRegex(rule.Path) + path := fsutils.NormalizePathInRegex(rule.Path) parsedRule.path = regexp.MustCompile(path) } parsedRules = append(parsedRules, parsedRule) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go index d657c5a0..11ab9910 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go @@ -7,6 +7,7 @@ import ( "github.com/pkg/errors" + "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/result" ) @@ -31,7 +32,7 @@ const goFileSuffix = ".go" func NewSkipDirs(patterns []string, log logutils.Log, runArgs []string) (*SkipDirs, error) { var patternsRe []*regexp.Regexp for _, p := range patterns { - p = normalizePathInRegex(p) + p = fsutils.NormalizePathInRegex(p) patternRe, err := regexp.Compile(p) if err != nil { return nil, errors.Wrapf(err, "can't compile regexp %q", p) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go index 1e2ca7ae..b7b86bed 100644 --- 
a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go @@ -4,6 +4,7 @@ import ( "fmt" "regexp" + "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/result" ) @@ -16,7 +17,7 @@ var _ Processor = (*SkipFiles)(nil) func NewSkipFiles(patterns []string) (*SkipFiles, error) { var patternsRe []*regexp.Regexp for _, p := range patterns { - p = normalizePathInRegex(p) + p = fsutils.NormalizePathInRegex(p) patternRe, err := regexp.Compile(p) if err != nil { return nil, fmt.Errorf("can't compile regexp %q: %s", p, err) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go index 17167dde..dc0e1e8c 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go @@ -22,7 +22,7 @@ func NewUniqByLine(cfg *config.Config) *UniqByLine { var _ Processor = &UniqByLine{} -func (p UniqByLine) Name() string { +func (p *UniqByLine) Name() string { return "uniq_by_line" } @@ -55,4 +55,4 @@ func (p *UniqByLine) Process(issues []result.Issue) ([]result.Issue, error) { }), nil } -func (p UniqByLine) Finish() {} +func (p *UniqByLine) Finish() {} diff --git a/vendor/github.com/golangci/misspell/.gitignore b/vendor/github.com/golangci/misspell/.gitignore index b1b707e3..5e5c368f 100644 --- a/vendor/github.com/golangci/misspell/.gitignore +++ b/vendor/github.com/golangci/misspell/.gitignore @@ -2,6 +2,9 @@ dist/ bin/ vendor/ +.idea/ +/misspell + # editor turds *~ *.gz diff --git a/vendor/github.com/golangci/misspell/.golangci.yml b/vendor/github.com/golangci/misspell/.golangci.yml new file mode 100644 index 00000000..1a53216a --- /dev/null +++ b/vendor/github.com/golangci/misspell/.golangci.yml @@ -0,0 +1,106 @@ +run: + timeout: 2m + skip-files: [] + +linters-settings: + govet: + enable-all: true + disable: + - fieldalignment + - shadow # FIXME(ldez) must be fixed + gocyclo: + min-complexity: 16 + goconst: + min-len: 3 + min-occurrences: 3 + misspell: + locale: US + funlen: + lines: -1 + statements: 40 + gofumpt: + extra-rules: true + depguard: + list-type: blacklist + include-go-root: false + packages: + - github.com/pkg/errors + godox: + keywords: + - FIXME + gocritic: + enabled-tags: + - diagnostic + - style + - performance + disabled-checks: + - sloppyReassign + - rangeValCopy + - octalLiteral + - paramTypeCombine # already handle by gofumpt.extra-rules + - exitAfterDefer # FIXME(ldez) must be fixed + - ifElseChain # FIXME(ldez) must be fixed + settings: + hugeParam: + sizeThreshold: 100 + forbidigo: + forbid: + - '^print(ln)?$' + - '^panic$' + - '^spew\.Print(f|ln)?$' + - '^spew\.Dump$' + +linters: + enable-all: true + disable: + - deadcode # deprecated + - exhaustivestruct # deprecated + - golint # deprecated + - ifshort # deprecated + - interfacer # deprecated + - maligned # deprecated + - nosnakecase # deprecated + - scopelint # deprecated + - scopelint # deprecated + - structcheck # deprecated + - varcheck # deprecated + - execinquery # not relevant (SQL) + - rowserrcheck # not relevant (SQL) + - sqlclosecheck # not relevant (SQL) + - cyclop # duplicate of gocyclo + - dupl + - exhaustive + - exhaustruct + - forbidigo + 
- gochecknoglobals + - gochecknoinits + - goerr113 + - gomnd + - lll + - nilnil + - nlreturn + - paralleltest + - prealloc + - testpackage + - tparallel + - varnamelen + - wrapcheck + - wsl + - misspell + - gosec # FIXME(ldez) must be fixed + - errcheck # FIXME(ldez) must be fixed + - nonamedreturns # FIXME(ldez) must be fixed + - nakedret # FIXME(ldez) must be fixed + +issues: + exclude-use-default: false + max-per-linter: 0 + max-same-issues: 0 + exclude: + - 'ST1000: at least one file in a package should have a package comment' + - 'package-comments: should have a package comment' + exclude-rules: + - path: .*_test.go + linters: + - funlen + - goconst diff --git a/vendor/github.com/golangci/misspell/.travis.yml b/vendor/github.com/golangci/misspell/.travis.yml deleted file mode 100644 index e63e6c2b..00000000 --- a/vendor/github.com/golangci/misspell/.travis.yml +++ /dev/null @@ -1,20 +0,0 @@ -sudo: required -dist: trusty -group: edge -language: go -go: - - "1.10" -git: - depth: 1 - -script: - - ./scripts/travis.sh - -# calls goreleaser when a new tag is pushed -deploy: -- provider: script - skip_cleanup: true - script: curl -sL http://git.io/goreleaser | bash - on: - tags: true - condition: $TRAVIS_OS_NAME = linux diff --git a/vendor/github.com/golangci/misspell/Dockerfile b/vendor/github.com/golangci/misspell/Dockerfile index b8ea37b4..ce55fe61 100644 --- a/vendor/github.com/golangci/misspell/Dockerfile +++ b/vendor/github.com/golangci/misspell/Dockerfile @@ -8,9 +8,6 @@ RUN apk add --no-cache git make # these are my standard testing / linting tools RUN /bin/true \ - && go get -u github.com/golang/dep/cmd/dep \ - && go get -u github.com/alecthomas/gometalinter \ - && gometalinter --install \ && rm -rf /go/src /go/pkg # # * SCOWL word list diff --git a/vendor/github.com/golangci/misspell/Gopkg.lock b/vendor/github.com/golangci/misspell/Gopkg.lock deleted file mode 100644 index 90ed4511..00000000 --- a/vendor/github.com/golangci/misspell/Gopkg.lock +++ /dev/null @@ -1,24 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. - - -[[projects]] - name = "github.com/gobwas/glob" - packages = [ - ".", - "compiler", - "match", - "syntax", - "syntax/ast", - "syntax/lexer", - "util/runes", - "util/strings" - ] - revision = "5ccd90ef52e1e632236f7326478d4faa74f99438" - version = "v0.2.3" - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - inputs-digest = "087ea4c49358ea8258ad9edfe514cd5ce9975c889c258e5ec7b5d2b720aae113" - solver-name = "gps-cdcl" - solver-version = 1 diff --git a/vendor/github.com/golangci/misspell/Gopkg.toml b/vendor/github.com/golangci/misspell/Gopkg.toml deleted file mode 100644 index e9b8e6a4..00000000 --- a/vendor/github.com/golangci/misspell/Gopkg.toml +++ /dev/null @@ -1,34 +0,0 @@ -# Gopkg.toml example -# -# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html -# for detailed Gopkg.toml documentation. 
-# -# required = ["github.com/user/thing/cmd/thing"] -# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] -# -# [[constraint]] -# name = "github.com/user/project" -# version = "1.0.0" -# -# [[constraint]] -# name = "github.com/user/project2" -# branch = "dev" -# source = "github.com/myfork/project2" -# -# [[override]] -# name = "github.com/x/y" -# version = "2.4.0" -# -# [prune] -# non-go = false -# go-tests = true -# unused-packages = true - - -[[constraint]] - name = "github.com/gobwas/glob" - version = "0.2.3" - -[prune] - go-tests = true - unused-packages = true diff --git a/vendor/github.com/golangci/misspell/LICENSE b/vendor/github.com/golangci/misspell/LICENSE index 423e1f9e..bfcfcd30 100644 --- a/vendor/github.com/golangci/misspell/LICENSE +++ b/vendor/github.com/golangci/misspell/LICENSE @@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/vendor/github.com/golangci/misspell/Makefile b/vendor/github.com/golangci/misspell/Makefile index 862ab77b..e64a84b5 100644 --- a/vendor/github.com/golangci/misspell/Makefile +++ b/vendor/github.com/golangci/misspell/Makefile @@ -1,13 +1,18 @@ CONTAINER=nickg/misspell +default: lint test build + install: ## install misspell into GOPATH/bin go install ./cmd/misspell -build: hooks ## build and lint misspell - ./scripts/build.sh +build: ## build and lint misspell + go build ./cmd/misspell test: ## run all tests - go test . + go test -v . + +lint: ## run linter + golangci-lint run # real publishing is done only by travis publish: ## test goreleaser @@ -39,8 +44,8 @@ clean: ## clean up time ci: ## run test like travis-ci does, requires docker docker run --rm \ - -v $(PWD):/go/src/github.com/client9/misspell \ - -w /go/src/github.com/client9/misspell \ + -v $(PWD):/go/src/github.com/golangci/misspell \ + -w /go/src/github.com/golangci/misspell \ ${CONTAINER} \ make build falsepositives @@ -52,16 +57,10 @@ docker-pull: ## pull latest test image docker-console: ## log into the test image docker run --rm -it \ - -v $(PWD):/go/src/github.com/client9/misspell \ - -w /go/src/github.com/client9/misspell \ + -v $(PWD):/go/src/github.com/golangci/misspell \ + -w /go/src/github.com/golangci/misspell \ ${CONTAINER} sh -.git/hooks/pre-commit: scripts/pre-commit.sh - cp -f scripts/pre-commit.sh .git/hooks/pre-commit -.git/hooks/commit-msg: scripts/commit-msg.sh - cp -f scripts/commit-msg.sh .git/hooks/commit-msg -hooks: .git/hooks/pre-commit .git/hooks/commit-msg ## install git precommit hooks - .PHONY: help ci console docker-build bench # https://www.client9.com/self-documenting-makefiles/ @@ -69,6 +68,6 @@ help: @awk -F ':|##' '/^[^\t].+?:.*?##/ {\ printf "\033[36m%-30s\033[0m %s\n", $$1, $$NF \ }' $(MAKEFILE_LIST) -.DEFAULT_GOAL=help +.DEFAULT_GOAL=default .PHONY=help diff --git a/vendor/github.com/golangci/misspell/README.md b/vendor/github.com/golangci/misspell/README.md index 5b68af04..cccd0499 100644 --- a/vendor/github.com/golangci/misspell/README.md +++ b/vendor/github.com/golangci/misspell/README.md @@ -19,7 +19,7 @@ Both will install as `./bin/misspell`. 
You can adjust the download location usi If you use [Go](https://golang.org/), the best way to run `misspell` is by using [gometalinter](#gometalinter). Otherwise, install `misspell` the old-fashioned way: ``` -go get -u github.com/client9/misspell/cmd/misspell +go install github.com/client9/misspell/cmd/misspell@latest ``` and misspell will be in your `GOPATH` diff --git a/vendor/github.com/golangci/misspell/ascii.go b/vendor/github.com/golangci/misspell/ascii.go index 1430718d..d60af5a8 100644 --- a/vendor/github.com/golangci/misspell/ascii.go +++ b/vendor/github.com/golangci/misspell/ascii.go @@ -1,7 +1,7 @@ package misspell -// ByteToUpper converts an ascii byte to upper cases -// Uses a branchless algorithm +// ByteToUpper converts an ascii byte to upper cases. +// Uses a branch-less algorithm. func ByteToUpper(x byte) byte { b := byte(0x80) | x c := b - byte(0x61) @@ -10,8 +10,8 @@ func ByteToUpper(x byte) byte { return x - (e >> 2) } -// ByteToLower converts an ascii byte to lower case -// uses a branchless algorithm +// ByteToLower converts an ascii byte to lower case. +// Uses a branch-less algorithm. func ByteToLower(eax byte) byte { ebx := eax&byte(0x7f) + byte(0x25) ebx = ebx&byte(0x7f) + byte(0x1a) @@ -19,7 +19,7 @@ func ByteToLower(eax byte) byte { return eax + ebx } -// ByteEqualFold does ascii compare, case insensitive +// ByteEqualFold does ascii compare, case insensitive. func ByteEqualFold(a, b byte) bool { return a == b || ByteToLower(a) == ByteToLower(b) } @@ -27,7 +27,7 @@ func ByteEqualFold(a, b byte) bool { // StringEqualFold ASCII case-insensitive comparison // golang toUpper/toLower for both bytes and strings // appears to be Unicode based which is super slow -// based from https://codereview.appspot.com/5180044/patch/14007/21002 +// based from https://codereview.appspot.com/5180044/patch/14007/21002. func StringEqualFold(s1, s2 string) bool { if len(s1) != len(s2) { return false @@ -47,9 +47,7 @@ func StringEqualFold(s1, s2 string) bool { return true } -// StringHasPrefixFold is similar to strings.HasPrefix but comparison -// is done ignoring ASCII case. -// / +// StringHasPrefixFold is similar to strings.HasPrefix but comparison is done ignoring ASCII case. func StringHasPrefixFold(s1, s2 string) bool { // prefix is bigger than input --> false if len(s1) < len(s2) { diff --git a/vendor/github.com/golangci/misspell/case.go b/vendor/github.com/golangci/misspell/case.go index 2ea3850d..88ad44fa 100644 --- a/vendor/github.com/golangci/misspell/case.go +++ b/vendor/github.com/golangci/misspell/case.go @@ -4,10 +4,10 @@ import ( "strings" ) -// WordCase is an enum of various word casing styles +// WordCase is an enum of various word casing styles. type WordCase int -// Various WordCase types.. likely to be not correct +// Various WordCase types... likely to be not correct. const ( CaseUnknown WordCase = iota CaseLower @@ -15,7 +15,7 @@ const ( CaseTitle ) -// CaseStyle returns what case style a word is in +// CaseStyle returns what case style a word is in. 
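A tiny usage sketch of the ASCII helpers whose comments were reworded above; the import path matches the vendored fork in this diff, and the printed values are what the branch-less implementations are expected to produce.

```go
package main

import (
	"fmt"

	"github.com/golangci/misspell"
)

func main() {
	fmt.Printf("%c %c\n", misspell.ByteToLower('A'), misspell.ByteToUpper('z')) // a Z
	fmt.Println(misspell.ByteEqualFold('G', 'g'))                               // true
	fmt.Println(misspell.StringEqualFold("Color", "cOLOR"))                     // true
}
```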
func CaseStyle(word string) WordCase { upperCount := 0 lowerCount := 0 @@ -42,11 +42,10 @@ func CaseStyle(word string) WordCase { return CaseUnknown } -// CaseVariations returns -// If AllUpper or First-Letter-Only is upcased: add the all upper case version -// If AllLower, add the original, the title and upcase forms -// If Mixed, return the original, and the all upcase form -// +// CaseVariations returns: +// If AllUpper or First-Letter-Only is upcased: add the all upper case version. +// If AllLower, add the original, the title and upcase forms. +// If Mixed, return the original, and the all upcase form. func CaseVariations(word string, style WordCase) []string { switch style { case CaseLower: diff --git a/vendor/github.com/golangci/misspell/goreleaser.yml b/vendor/github.com/golangci/misspell/goreleaser.yml index 560cb381..b4c8c099 100644 --- a/vendor/github.com/golangci/misspell/goreleaser.yml +++ b/vendor/github.com/golangci/misspell/goreleaser.yml @@ -1,6 +1,3 @@ -# goreleaser.yml -# https://github.com/goreleaser/goreleaser - project_name: misspell builds: @@ -14,22 +11,18 @@ builds: - windows goarch: - amd64 + - arm64 env: - CGO_ENABLED=0 - ignore: - - goos: darwin - goarch: 386 - - goos: windows - goarch: 386 -archive: - name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}" - replacements: - amd64: 64bit - 386: 32bit - darwin: mac - files: - - none* +archives: + - name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}" + replacements: + amd64: 64bit + 386: 32bit + darwin: mac + files: + - LICENSE checksum: name_template: "{{ .ProjectName }}_{{ .Version }}_checksums.txt" diff --git a/vendor/github.com/golangci/misspell/legal.go b/vendor/github.com/golangci/misspell/legal.go index 20076974..4f9bcfc6 100644 --- a/vendor/github.com/golangci/misspell/legal.go +++ b/vendor/github.com/golangci/misspell/legal.go @@ -3,7 +3,7 @@ package misspell // Legal provides licensing info. const Legal = ` -Execept where noted below, the source code for misspell is +Except where noted below, the source code for misspell is copyright Nick Galbreath and distribution is allowed under a MIT license. See the following for details: @@ -44,5 +44,4 @@ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -` +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.` diff --git a/vendor/github.com/golangci/misspell/mime.go b/vendor/github.com/golangci/misspell/mime.go index 9db4902b..76a96cfd 100644 --- a/vendor/github.com/golangci/misspell/mime.go +++ b/vendor/github.com/golangci/misspell/mime.go @@ -4,21 +4,18 @@ import ( "bytes" "fmt" "io" - "io/ioutil" "net/http" "os" "path/filepath" "strings" ) -// The number of possible binary formats is very large -// items that might be checked into a repo or be an -// artifact of a build. Additions welcome. +// The number of possible binary formats is very large items that might be checked into a repo or be an artifact of a build. +// Additions welcome. // -// Golang's internal table is very small and can't be -// relied on. Even then things like ".js" have a mime -// type of "application/javascipt" which isn't very helpful. 
-// "[x]" means we have sniff test and suffix test should be eliminated +// Golang's internal table is very small and can't be relied on. +// Even then things like ".js" have a mime type of "application/javascript" which isn't very helpful. +// "[x]" means we have sniff test and suffix test should be eliminated. var binary = map[string]bool{ ".a": true, // [ ] archive ".bin": true, // [ ] binary @@ -52,12 +49,10 @@ var binary = map[string]bool{ ".zip": true, // [x] archive } -// isBinaryFilename returns true if the file is likely to be binary +// isBinaryFilename returns true if the file is likely to be binary. // -// Better heuristics could be done here, in particular a binary -// file is unlikely to be UTF-8 encoded. However this is cheap -// and will solve the immediate need of making sure common -// binary formats are not corrupted by mistake. +// Better heuristics could be done here, in particular a binary file is unlikely to be UTF-8 encoded. +// However, this is cheap and will solve the immediate need of making sure common binary formats are not corrupted by mistake. func isBinaryFilename(s string) bool { return binary[strings.ToLower(filepath.Ext(s))] } @@ -70,8 +65,8 @@ var scm = map[string]bool{ "CVS": true, } -// isSCMPath returns true if the path is likely part of a (private) SCM -// directory. E.g. ./git/something = true +// isSCMPath returns true if the path is likely part of a (private) SCM directory. +// E.g. ./git/something = true. func isSCMPath(s string) bool { // hack for .git/COMMIT_EDITMSG and .git/TAG_EDITMSG // normally we don't look at anything in .git @@ -128,29 +123,30 @@ func isTextFile(raw []byte) bool { } } - // allow any text/ type with utf-8 encoding - // DetectContentType sometimes returns charset=utf-16 for XML stuff - // in which case ignore. + // allow any text/ type with utf-8 encoding. + // DetectContentType sometimes returns charset=utf-16 for XML stuff in which case ignore. mime := http.DetectContentType(raw) return strings.HasPrefix(mime, "text/") && strings.HasSuffix(mime, "charset=utf-8") } -// ReadTextFile returns the contents of a file, first testing if it is a text file -// returns ("", nil) if not a text file -// returns ("", error) if error -// returns (string, nil) if text +// ReadTextFile returns the contents of a file, first testing if it is a text file: // -// unfortunately, in worse case, this does -// 1 stat -// 1 open,read,close of 512 bytes -// 1 more stat,open, read everything, close (via ioutil.ReadAll) -// This could be kinder to the filesystem. +// returns ("", nil) if not a text file +// returns ("", error) if error +// returns (string, nil) if text +// +// unfortunately, in worse case, this does: +// +// 1 stat +// 1 open,read,close of 512 bytes +// 1 more stat,open, read everything, close (via io.ReadAll) +// This could be kinder to the filesystem. // // This uses some heuristics of the file's extension (e.g. .zip, .txt) and // uses a sniffer to determine if the file is text or not. // Using file extensions isn't great, but probably // good enough for real-world use. -// Golang's built in sniffer is problematic for differnet reasons. It's +// Golang's built-in sniffer is problematic for different reasons. It's // optimized for HTML, and is very limited in detection. It would be good // to explicitly add some tests for ELF/DWARF formats to make sure we never // corrupt binary files. 
@@ -164,9 +160,8 @@ func ReadTextFile(filename string) (string, error) { } fstat, err := os.Stat(filename) - if err != nil { - return "", fmt.Errorf("Unable to stat %q: %s", filename, err) + return "", fmt.Errorf("unable to stat %q: %w", filename, err) } // directory: nothing to do. @@ -181,26 +176,26 @@ func ReadTextFile(filename string) (string, error) { if fstat.Size() > 50000 { fin, err := os.Open(filename) if err != nil { - return "", fmt.Errorf("Unable to open large file %q: %s", filename, err) + return "", fmt.Errorf("unable to open large file %q: %w", filename, err) } defer fin.Close() buf := make([]byte, 512) _, err = io.ReadFull(fin, buf) if err != nil { - return "", fmt.Errorf("Unable to read 512 bytes from %q: %s", filename, err) + return "", fmt.Errorf("unable to read 512 bytes from %q: %w", filename, err) } if !isTextFile(buf) { return "", nil } - // set so we don't double check this file + // set so we don't double-check this file isText = true } // read in whole file - raw, err := ioutil.ReadFile(filename) + raw, err := os.ReadFile(filename) if err != nil { - return "", fmt.Errorf("Unable to read all %q: %s", filename, err) + return "", fmt.Errorf("unable to read all %q: %w", filename, err) } if !isText && !isTextFile(raw) { diff --git a/vendor/github.com/golangci/misspell/notwords.go b/vendor/github.com/golangci/misspell/notwords.go index 06d0d5a5..a250cf7f 100644 --- a/vendor/github.com/golangci/misspell/notwords.go +++ b/vendor/github.com/golangci/misspell/notwords.go @@ -13,10 +13,10 @@ var ( ) // RemovePath attempts to strip away embedded file system paths, e.g. -// /foo/bar or /static/myimg.png // -// TODO: windows style +// /foo/bar or /static/myimg.png // +// TODO: windows style. func RemovePath(s string) string { out := bytes.Buffer{} var idx int @@ -57,28 +57,28 @@ func RemovePath(s string) string { return out.String() } -// replaceWithBlanks returns a string with the same number of spaces as the input +// replaceWithBlanks returns a string with the same number of spaces as the input. func replaceWithBlanks(s string) string { return strings.Repeat(" ", len(s)) } -// RemoveEmail remove email-like strings, e.g. "nickg+junk@xfoobar.com", "nickg@xyz.abc123.biz" +// RemoveEmail remove email-like strings, e.g. "nickg+junk@xfoobar.com", "nickg@xyz.abc123.biz". func RemoveEmail(s string) string { return reEmail.ReplaceAllStringFunc(s, replaceWithBlanks) } -// RemoveHost removes host-like strings "foobar.com" "abc123.fo1231.biz" +// RemoveHost removes host-like strings "foobar.com" "abc123.fo1231.biz". func RemoveHost(s string) string { return reHost.ReplaceAllStringFunc(s, replaceWithBlanks) } -// RemoveBackslashEscapes removes characters that are preceeded by a backslash -// commonly found in printf format stringd "\nto" +// RemoveBackslashEscapes removes characters that are preceded by a backslash. +// commonly found in printf format string "\nto". func removeBackslashEscapes(s string) string { return reBackslash.ReplaceAllStringFunc(s, replaceWithBlanks) } -// RemoveNotWords blanks out all the not words +// RemoveNotWords blanks out all the not words. 
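The notwords.go helpers above (RemovePath, RemoveEmail, RemoveHost) all rely on the same trick: a match is replaced by an equal number of spaces, so the overall string length, and therefore later column reporting, is unchanged. A minimal sketch of the pattern, with a deliberately simplified regex that is not the vendored one:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Simplified email pattern for illustration only; the vendored reEmail differs.
var reEmail = regexp.MustCompile(`[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}`)

// blankOutEmails keeps the result exactly as long as the input, so byte
// offsets computed later still line up with the original text.
func blankOutEmails(s string) string {
	return reEmail.ReplaceAllStringFunc(s, func(m string) string {
		return strings.Repeat(" ", len(m))
	})
}

func main() {
	fmt.Printf("%q\n", blankOutEmails("contact nickg@example.com for details"))
}
```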
func RemoveNotWords(s string) string { // do most selective/specific first return removeBackslashEscapes(RemoveHost(RemoveEmail(RemovePath(StripURL(s))))) diff --git a/vendor/github.com/golangci/misspell/replace.go b/vendor/github.com/golangci/misspell/replace.go index a99bbcc5..68d904b0 100644 --- a/vendor/github.com/golangci/misspell/replace.go +++ b/vendor/github.com/golangci/misspell/replace.go @@ -27,7 +27,7 @@ func inArray(haystack []string, needle string) bool { var wordRegexp = regexp.MustCompile(`[a-zA-Z0-9']+`) -// Diff is datastructure showing what changed in a single line +// Diff is datastructures showing what changed in a single line. type Diff struct { Filename string FullLine string @@ -37,7 +37,7 @@ type Diff struct { Corrected string } -// Replacer is the main struct for spelling correction +// Replacer is the main struct for spelling correction. type Replacer struct { Replacements []string Debug bool @@ -45,7 +45,7 @@ type Replacer struct { corrected map[string]string } -// New creates a new default Replacer using the main rule list +// New creates a new default Replacer using the main rule list. func New() *Replacer { r := Replacer{ Replacements: DictMain, @@ -54,31 +54,31 @@ func New() *Replacer { return &r } -// RemoveRule deletes existings rules. -// TODO: make inplace to save memory +// RemoveRule deletes existing rules. +// TODO: make in place to save memory. func (r *Replacer) RemoveRule(ignore []string) { - newwords := make([]string, 0, len(r.Replacements)) + newWords := make([]string, 0, len(r.Replacements)) for i := 0; i < len(r.Replacements); i += 2 { if inArray(ignore, r.Replacements[i]) { continue } - newwords = append(newwords, r.Replacements[i:i+2]...) + newWords = append(newWords, r.Replacements[i:i+2]...) } r.engine = nil - r.Replacements = newwords + r.Replacements = newWords } // AddRuleList appends new rules. // Input is in the same form as Strings.Replacer: [ old1, new1, old2, new2, ....] -// Note: does not check for duplictes +// Note: does not check for duplicates. func (r *Replacer) AddRuleList(additions []string) { r.engine = nil r.Replacements = append(r.Replacements, additions...) } -// Compile compiles the rules. Required before using the Replace functions +// Compile compiles the rules. +// Required before using the Replace functions. func (r *Replacer) Compile() { - r.corrected = make(map[string]string, len(r.Replacements)/2) for i := 0; i < len(r.Replacements); i += 2 { r.corrected[r.Replacements[i]] = r.Replacements[i+1] @@ -92,11 +92,14 @@ extract words from each line1 replace word -> newword if word == new-word - continue + + continue + if new-word in list of replacements - continue -new word not original, and not in list of replacements - some substring got mixed up. UNdo + + continue + +new word not original, and not in list of replacements some substring got mixed up. UNdo. */ func (r *Replacer) recheckLine(s string, lineNum int, buf io.Writer, next func(Diff)) { first := 0 @@ -136,9 +139,8 @@ func (r *Replacer) recheckLine(s string, lineNum int, buf io.Writer, next func(D io.WriteString(buf, s[first:]) } -// ReplaceGo is a specialized routine for correcting Golang source -// files. Currently only checks comments, not identifiers for -// spelling. +// ReplaceGo is a specialized routine for correcting Golang source files. +// Currently only checks comments, not identifiers for spelling. 
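Since replace.go is the core of the checker, here is a short hedged usage sketch of the Replacer API shown above. New, Compile, and Replace are taken directly from the diff; the sample misspellings and the exact Diff fields are assumptions, so the diffs are printed generically.

```go
package main

import (
	"fmt"

	"github.com/golangci/misspell"
)

func main() {
	r := misspell.New() // default Replacer backed by the main rule list
	r.Compile()         // required before calling the Replace functions

	corrected, diffs := r.Replace("this is definately the wrong langauge")
	fmt.Println(corrected)
	for _, d := range diffs {
		fmt.Printf("%+v\n", d) // each Diff records the original and corrected word
	}
}
```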
func (r *Replacer) ReplaceGo(input string) (string, []Diff) { var s scanner.Scanner s.Init(strings.NewReader(input)) @@ -169,7 +171,7 @@ Loop: return input, nil } if lastPos < len(input) { - output = output + input[lastPos:] + output += input[lastPos:] } diffs := make([]Diff, 0, 8) buf := bytes.NewBuffer(make([]byte, 0, max(len(input), len(output))+100)) @@ -187,11 +189,9 @@ Loop: } return buf.String(), diffs - } -// Replace is corrects misspellings in input, returning corrected version -// along with a list of diffs. +// Replace is corrects misspellings in input, returning corrected version along with a list of diffs. func (r *Replacer) Replace(input string) (string, []Diff) { output := r.engine.Replace(input) if input == output { @@ -215,8 +215,8 @@ func (r *Replacer) Replace(input string) (string, []Diff) { return buf.String(), diffs } -// ReplaceReader applies spelling corrections to a reader stream. Diffs are -// emitted through a callback. +// ReplaceReader applies spelling corrections to a reader stream. +// Diffs are emitted through a callback. func (r *Replacer) ReplaceReader(raw io.Reader, w io.Writer, next func(Diff)) error { var ( err error @@ -239,7 +239,7 @@ func (r *Replacer) ReplaceReader(raw io.Reader, w io.Writer, next func(Diff)) er io.WriteString(w, line) continue } - // but it can be inaccurate, so we need to double check + // but it can be inaccurate, so we need to double-check r.recheckLine(line, lineNum, w, next) } return nil diff --git a/vendor/github.com/golangci/misspell/stringreplacer.go b/vendor/github.com/golangci/misspell/stringreplacer.go index 3151eceb..73ca9a56 100644 --- a/vendor/github.com/golangci/misspell/stringreplacer.go +++ b/vendor/github.com/golangci/misspell/stringreplacer.go @@ -6,7 +6,6 @@ package misspell import ( "io" - // "log" "strings" ) @@ -38,7 +37,7 @@ func (r *StringReplacer) Replace(s string) string { } // WriteString writes s to w with all replacements performed. -func (r *StringReplacer) WriteString(w io.Writer, s string) (n int, err error) { +func (r *StringReplacer) WriteString(w io.Writer, s string) (int, error) { return r.r.WriteString(w, s) } @@ -46,14 +45,14 @@ func (r *StringReplacer) WriteString(w io.Writer, s string) (n int, err error) { // and values may be empty. For example, the trie containing keys "ax", "ay", // "bcbc", "x" and "xy" could have eight nodes: // -// n0 - -// n1 a- -// n2 .x+ -// n3 .y+ -// n4 b- -// n5 .cbc+ -// n6 x+ -// n7 .y+ +// n0 - +// n1 a- +// n2 .x+ +// n3 .y+ +// n4 b- +// n5 .cbc+ +// n6 x+ +// n7 .y+ // // n0 is the root node, and its children are n1, n4 and n6; n1's children are // n2 and n3; n4's child is n5; n6's child is n7. Nodes n0, n1 and n4 (marked @@ -103,6 +102,7 @@ func (t *trieNode) add(key, val string, priority int, r *genericReplacer) { return } + //nolint:nestif // TODO(ldez) must be fixed. if t.prefix != "" { // Need to split the prefix among multiple nodes. var n int // length of the longest common prefix @@ -157,42 +157,6 @@ func (t *trieNode) add(key, val string, priority int, r *genericReplacer) { } } -func (r *genericReplacer) lookup(s string, ignoreRoot bool) (val string, keylen int, found bool) { - // Iterate down the trie to the end, and grab the value and keylen with - // the highest priority. 
- bestPriority := 0 - node := &r.root - n := 0 - for node != nil { - if node.priority > bestPriority && !(ignoreRoot && node == &r.root) { - bestPriority = node.priority - val = node.value - keylen = n - found = true - } - - if s == "" { - break - } - if node.table != nil { - index := r.mapping[ByteToLower(s[0])] - if int(index) == r.tableSize { - break - } - node = node.table[index] - s = s[1:] - n++ - } else if node.prefix != "" && StringHasPrefixFold(s, node.prefix) { - n += len(node.prefix) - s = s[len(node.prefix):] - node = node.next - } else { - break - } - } - return -} - // genericReplacer is the fully generic algorithm. // It's used as a fallback when nothing faster can be used. type genericReplacer struct { @@ -236,38 +200,40 @@ func makeGenericReplacer(oldnew []string) *genericReplacer { return r } -type appendSliceWriter []byte - -// Write writes to the buffer to satisfy io.Writer. -func (w *appendSliceWriter) Write(p []byte) (int, error) { - *w = append(*w, p...) - return len(p), nil -} - -// WriteString writes to the buffer without string->[]byte->string allocations. -func (w *appendSliceWriter) WriteString(s string) (int, error) { - *w = append(*w, s...) - return len(s), nil -} - -type stringWriterIface interface { - WriteString(string) (int, error) -} - -type stringWriter struct { - w io.Writer -} - -func (w stringWriter) WriteString(s string) (int, error) { - return w.w.Write([]byte(s)) -} +func (r *genericReplacer) lookup(s string, ignoreRoot bool) (val string, keylen int, found bool) { + // Iterate down the trie to the end, and grab the value and keylen with + // the highest priority. + bestPriority := 0 + node := &r.root + n := 0 + for node != nil { + if node.priority > bestPriority && !(ignoreRoot && node == &r.root) { + bestPriority = node.priority + val = node.value + keylen = n + found = true + } -func getStringWriter(w io.Writer) stringWriterIface { - sw, ok := w.(stringWriterIface) - if !ok { - sw = stringWriter{w} + if s == "" { + break + } + if node.table != nil { + index := r.mapping[ByteToLower(s[0])] + if int(index) == r.tableSize { + break + } + node = node.table[index] + s = s[1:] + n++ + } else if node.prefix != "" && StringHasPrefixFold(s, node.prefix) { + n += len(node.prefix) + s = s[len(node.prefix):] + node = node.next + } else { + break + } } - return sw + return } func (r *genericReplacer) Replace(s string) string { @@ -276,6 +242,7 @@ func (r *genericReplacer) Replace(s string) string { return string(buf) } +//nolint:gocognit // TODO(ldez) must be fixed. func (r *genericReplacer) WriteString(w io.Writer, s string) (n int, err error) { sw := getStringWriter(w) var last, wn int @@ -316,7 +283,7 @@ func (r *genericReplacer) WriteString(w io.Writer, s string) (n int, err error) if err != nil { return } - //log.Printf("%d: Going to correct %q with %q", i, s[i:i+keylen], val) + // debug helper: log.Printf("%d: Going to correct %q with %q", i, s[i:i+keylen], val) wn, err = sw.WriteString(val) n += wn if err != nil { @@ -334,3 +301,33 @@ func (r *genericReplacer) WriteString(w io.Writer, s string) (n int, err error) } return } + +type appendSliceWriter []byte + +// Write writes to the buffer to satisfy io.Writer. +func (w *appendSliceWriter) Write(p []byte) (int, error) { + *w = append(*w, p...) + return len(p), nil +} + +// WriteString writes to the buffer without string->[]byte->string allocations. +func (w *appendSliceWriter) WriteString(s string) (int, error) { + *w = append(*w, s...) 
+ return len(s), nil +} + +type stringWriter struct { + w io.Writer +} + +func (w stringWriter) WriteString(s string) (int, error) { + return w.w.Write([]byte(s)) +} + +func getStringWriter(w io.Writer) io.StringWriter { + sw, ok := w.(io.StringWriter) + if !ok { + sw = stringWriter{w} + } + return sw +} diff --git a/vendor/github.com/golangci/misspell/url.go b/vendor/github.com/golangci/misspell/url.go index 1a259f5f..203b91a7 100644 --- a/vendor/github.com/golangci/misspell/url.go +++ b/vendor/github.com/golangci/misspell/url.go @@ -7,11 +7,12 @@ import ( // Regexp for URL https://mathiasbynens.be/demo/url-regex // // original @imme_emosol (54 chars) has trouble with dashes in hostname -// @(https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?$@iS -var reURL = regexp.MustCompile(`(?i)(https?|ftp)://(-\.)?([^\s/?\.#]+\.?)+(/[^\s]*)?`) +// @(https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?$@iS. +var reURL = regexp.MustCompile(`(?i)(https?|ftp)://(-\.)?([^\s/?.#]+\.?)+(/\S*)?`) // StripURL attemps to replace URLs with blank spaces, e.g. -// "xxx http://foo.com/ yyy -> "xxx yyyy" +// +// "xxx http://foo.com/ yyy -> "xxx yyyy". func StripURL(s string) string { return reURL.ReplaceAllStringFunc(s, replaceWithBlanks) } diff --git a/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go b/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go index c7b4fa97..3d7b18d7 100644 --- a/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go +++ b/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go @@ -60,6 +60,7 @@ type builder struct { block *block vars map[*ast.Object]*variable results []*ast.FieldList + defers []bool breaks branchStack continues branchStack gotos branchStack @@ -181,6 +182,9 @@ func (bld *builder) Visit(n ast.Node) ast.Visitor { } brek.setDestination(bld.newBlock(exits...)) bld.breaks.pop() + case *ast.DeferStmt: + bld.defers[len(bld.defers)-1] = true + return bld case *ast.LabeledStmt: bld.gotos.get(n.Label).setDestination(bld.newBlock(bld.block)) bld.labelStmt = n @@ -360,6 +364,7 @@ func (bld *builder) fun(typ *ast.FuncType, body *ast.BlockStmt) { v.fundept++ } bld.results = append(bld.results, typ.Results) + bld.defers = append(bld.defers, false) b := bld.block bld.newBlock() @@ -369,6 +374,7 @@ func (bld *builder) fun(typ *ast.FuncType, body *ast.BlockStmt) { bld.block = b bld.results = bld.results[:len(bld.results)-1] + bld.defers = bld.defers[:len(bld.defers)-1] for _, v := range bld.vars { v.fundept-- } @@ -422,8 +428,11 @@ func (bld *builder) swtch(stmt ast.Stmt, cases []ast.Stmt) { bld.breaks.pop() } -// An operation that might panic marks named function results as used. +// If an operation might panic and be recovered, mark named function results as used. 
func (bld *builder) maybePanic() { + if len(bld.defers) == 0 || !bld.defers[len(bld.defers)-1] { + return + } if len(bld.results) == 0 { return } diff --git a/vendor/github.com/junk1tm/musttag/.golangci.yml b/vendor/github.com/junk1tm/musttag/.golangci.yml new file mode 100644 index 00000000..641471cc --- /dev/null +++ b/vendor/github.com/junk1tm/musttag/.golangci.yml @@ -0,0 +1,23 @@ +linters: + disable-all: true + enable: + # enabled by default: + - errcheck + - gosimple + - govet + - ineffassign + - staticcheck + - typecheck + - unused + # disabled by default: + - gocritic + - gofumpt + +linters-settings: + gocritic: + enabled-tags: + - diagnostic + - style + - performance + - experimental + - opinionated diff --git a/vendor/github.com/junk1tm/musttag/.goreleaser.yml b/vendor/github.com/junk1tm/musttag/.goreleaser.yml new file mode 100644 index 00000000..6f85d818 --- /dev/null +++ b/vendor/github.com/junk1tm/musttag/.goreleaser.yml @@ -0,0 +1,30 @@ +builds: + - main: ./cmd/musttag + env: + - CGO_ENABLED=0 + flags: + - -trimpath + ldflags: + - -s -w -X main.version={{.Version}} + targets: + - darwin_amd64 + - darwin_arm64 + - linux_amd64 + - windows_amd64 + +archives: + - replacements: + darwin: macOS + format_overrides: + - goos: windows + format: zip + +brews: + - tap: + owner: junk1tm + name: homebrew-tap + branch: main + token: "{{ .Env.HOMEBREW_TAP_GITHUB_TOKEN }}" + homepage: "https://github.com/junk1tm/musttag" + description: "A Go linter that enforces field tags in (un)marshaled structs" + license: "MIT" diff --git a/vendor/github.com/junk1tm/musttag/LICENSE b/vendor/github.com/junk1tm/musttag/LICENSE new file mode 100644 index 00000000..38baef8d --- /dev/null +++ b/vendor/github.com/junk1tm/musttag/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 junk1tm + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
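With the defers bookkeeping added above, maybePanic only treats named results as used when the enclosing function actually registers a defer (which could recover and return them). A hedged illustration of the difference, not taken from the ineffassign test suite:

```go
package example

import "errors"

// Without a defer, the write to err before the panic can never reach a
// caller, so under the new check it can be reported as ineffectual.
func withoutDefer() (err error) {
	err = errors.New("doomed") // nothing can recover and return this value
	panic("boom")
}

// With a defer that recovers, the named result survives the panic and is
// returned to the caller, so the same assignment stays un-flagged.
func withDefer() (err error) {
	defer func() {
		_ = recover()
	}()
	err = errors.New("recovered")
	panic("boom")
}
```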
diff --git a/vendor/github.com/junk1tm/musttag/README.md b/vendor/github.com/junk1tm/musttag/README.md new file mode 100644 index 00000000..c04eae7c --- /dev/null +++ b/vendor/github.com/junk1tm/musttag/README.md @@ -0,0 +1,93 @@ +# musttag + +[![ci](https://github.com/junk1tm/musttag/actions/workflows/go.yml/badge.svg)](https://github.com/junk1tm/musttag/actions/workflows/go.yml) +[![docs](https://pkg.go.dev/badge/github.com/junk1tm/musttag.svg)](https://pkg.go.dev/github.com/junk1tm/musttag) +[![report](https://goreportcard.com/badge/github.com/junk1tm/musttag)](https://goreportcard.com/report/github.com/junk1tm/musttag) +[![codecov](https://codecov.io/gh/junk1tm/musttag/branch/main/graph/badge.svg)](https://codecov.io/gh/junk1tm/musttag) + +A Go linter that enforces field tags in (un)marshaled structs + +## 📌 About + +`musttag` checks that exported fields of a struct passed to a `Marshal`-like function are annotated with the relevant tag: + +```go +// BAD: +var user struct { + Name string +} +data, err := json.Marshal(user) + +// GOOD: +var user struct { + Name string `json:"name"` +} +data, err := json.Marshal(user) +``` + +The rational from [Uber Style Guide][1]: + +> The serialized form of the structure is a contract between different systems. +> Changes to the structure of the serialized form, including field names, break this contract. +> Specifying field names inside tags makes the contract explicit, +> and it guards against accidentally breaking the contract by refactoring or renaming fields. + +## 🚀 Features + +`musttag` supports these packages out of the box: + +* `encoding/json` +* `encoding/xml` +* `gopkg.in/yaml.v3` +* `github.com/BurntSushi/toml` +* `github.com/mitchellh/mapstructure` +* ...and any [custom one](#custom-packages) + +## 📦 Install + +### Go + +```shell +go install github.com/junk1tm/musttag/cmd/musttag@latest +``` + +### Brew + +```shell +brew install junk1tm/tap/musttag +``` + +### Manual + +Download a prebuilt binary from the [Releases][2] page. + +## 📋 Usage + +As a standalone binary: + +```shell +musttag ./... +``` + +Via `go vet`: + +```shell +go vet -vettool=$(which musttag) ./... +``` + +### Custom packages + +The `-fn=name:tag:argpos` flag can be used to report functions from custom packages, where + +* `name` is the full name of the function, including the package +* `tag` is the struct tag whose presence should be ensured +* `argpos` is the position of the argument to check + +For example, to support the `sqlx.Get` function: + +```shell +musttag -fn="github.com/jmoiron/sqlx.Get:db:1" ./... +``` + +[1]: https://github.com/uber-go/guide/blob/master/style.md#use-field-tags-in-marshaled-structs +[2]: https://github.com/junk1tm/musttag/releases diff --git a/vendor/github.com/junk1tm/musttag/musttag.go b/vendor/github.com/junk1tm/musttag/musttag.go new file mode 100644 index 00000000..43440226 --- /dev/null +++ b/vendor/github.com/junk1tm/musttag/musttag.go @@ -0,0 +1,254 @@ +// Package musttag implements the musttag analyzer. +package musttag + +import ( + "flag" + "go/ast" + "go/token" + "go/types" + "reflect" + "strconv" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" +) + +// Func describes a function call to look for, e.g. json.Marshal. 
+type Func struct { + Name string // Name is the full name of the function, including the package. + Tag string // Tag is the struct tag whose presence should be ensured. + ArgPos int // ArgPos is the position of the argument to check. +} + +// builtin is a set of functions supported out of the box. +var builtin = []Func{ + {Name: "encoding/json.Marshal", Tag: "json", ArgPos: 0}, + {Name: "encoding/json.MarshalIndent", Tag: "json", ArgPos: 0}, + {Name: "encoding/json.Unmarshal", Tag: "json", ArgPos: 1}, + {Name: "(*encoding/json.Encoder).Encode", Tag: "json", ArgPos: 0}, + {Name: "(*encoding/json.Decoder).Decode", Tag: "json", ArgPos: 0}, + + {Name: "encoding/xml.Marshal", Tag: "xml", ArgPos: 0}, + {Name: "encoding/xml.MarshalIndent", Tag: "xml", ArgPos: 0}, + {Name: "encoding/xml.Unmarshal", Tag: "xml", ArgPos: 1}, + {Name: "(*encoding/xml.Encoder).Encode", Tag: "xml", ArgPos: 0}, + {Name: "(*encoding/xml.Decoder).Decode", Tag: "xml", ArgPos: 0}, + {Name: "(*encoding/xml.Encoder).EncodeElement", Tag: "xml", ArgPos: 0}, + {Name: "(*encoding/xml.Decoder).DecodeElement", Tag: "xml", ArgPos: 0}, + + {Name: "gopkg.in/yaml.v3.Marshal", Tag: "yaml", ArgPos: 0}, + {Name: "gopkg.in/yaml.v3.Unmarshal", Tag: "yaml", ArgPos: 1}, + {Name: "(*gopkg.in/yaml.v3.Encoder).Encode", Tag: "yaml", ArgPos: 0}, + {Name: "(*gopkg.in/yaml.v3.Decoder).Decode", Tag: "yaml", ArgPos: 0}, + + {Name: "github.com/BurntSushi/toml.Unmarshal", Tag: "toml", ArgPos: 1}, + {Name: "github.com/BurntSushi/toml.Decode", Tag: "toml", ArgPos: 1}, + {Name: "github.com/BurntSushi/toml.DecodeFS", Tag: "toml", ArgPos: 2}, + {Name: "github.com/BurntSushi/toml.DecodeFile", Tag: "toml", ArgPos: 1}, + {Name: "(*github.com/BurntSushi/toml.Encoder).Encode", Tag: "toml", ArgPos: 0}, + {Name: "(*github.com/BurntSushi/toml.Decoder).Decode", Tag: "toml", ArgPos: 0}, + + {Name: "github.com/mitchellh/mapstructure.Decode", Tag: "mapstructure", ArgPos: 1}, + {Name: "github.com/mitchellh/mapstructure.DecodeMetadata", Tag: "mapstructure", ArgPos: 1}, + {Name: "github.com/mitchellh/mapstructure.WeakDecode", Tag: "mapstructure", ArgPos: 1}, + {Name: "github.com/mitchellh/mapstructure.WeakDecodeMetadata", Tag: "mapstructure", ArgPos: 1}, +} + +// flags creates a flag set for the analyzer. +// The funcs slice will be filled with custom functions passed via CLI flags. +func flags(funcs *[]Func) flag.FlagSet { + fs := flag.NewFlagSet("musttag", flag.ContinueOnError) + fs.Func("fn", "report custom function (name:tag:argpos)", func(s string) error { + parts := strings.Split(s, ":") + if len(parts) != 3 || parts[0] == "" || parts[1] == "" { + return strconv.ErrSyntax + } + pos, err := strconv.Atoi(parts[2]) + if err != nil { + return err + } + *funcs = append(*funcs, Func{ + Name: parts[0], + Tag: parts[1], + ArgPos: pos, + }) + return nil + }) + return *fs +} + +// New creates a new musttag analyzer. +// To report a custom function provide its description via Func, +// it will be added to the builtin ones. 
+func New(funcs ...Func) *analysis.Analyzer { + var flagFuncs []Func + return &analysis.Analyzer{ + Name: "musttag", + Doc: "enforce field tags in (un)marshaled structs", + Flags: flags(&flagFuncs), + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: func(pass *analysis.Pass) (any, error) { + l := len(builtin) + len(funcs) + len(flagFuncs) + m := make(map[string]Func, l) + toMap := func(slice []Func) { + for _, fn := range slice { + m[fn.Name] = fn + } + } + toMap(builtin) + toMap(funcs) + toMap(flagFuncs) + return run(pass, m) + }, + } +} + +// for tests only. +var ( + // should the same struct be reported only once for the same tag? + reportOnce = true + + // reportf is a wrapper for pass.Reportf (as a variable, so it could be mocked in tests). + reportf = func(pass *analysis.Pass, pos token.Pos, fn Func) { + // TODO(junk1tm): print the name of the struct type as well? + pass.Reportf(pos, "exported fields should be annotated with the %q tag", fn.Tag) + } +) + +// run starts the analysis. +func run(pass *analysis.Pass, funcs map[string]Func) (any, error) { + type report struct { + pos token.Pos // the position for report. + tag string // the missing struct tag. + } + + // store previous reports to prevent reporting + // the same struct more than once (if reportOnce is true). + reports := make(map[report]struct{}) + + walk := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + filter := []ast.Node{(*ast.CallExpr)(nil)} + + walk.Preorder(filter, func(n ast.Node) { + call, ok := n.(*ast.CallExpr) + if !ok { + return // not a function call. + } + + callee := typeutil.StaticCallee(pass.TypesInfo, call) + if callee == nil { + return // not a static call. + } + + fn, ok := funcs[callee.FullName()] + if !ok { + return // the function is not supported. + } + + if len(call.Args) <= fn.ArgPos { + return // TODO(junk1tm): return a proper error. + } + + arg := call.Args[fn.ArgPos] + if unary, ok := arg.(*ast.UnaryExpr); ok { + arg = unary.X // e.g. json.Marshal(&foo) + } + + initialPos := token.NoPos + switch arg := arg.(type) { + case *ast.Ident: // e.g. json.Marshal(foo) + if arg.Obj == nil { + return // e.g. json.Marshal(nil) + } + initialPos = arg.Obj.Pos() + case *ast.CompositeLit: // e.g. json.Marshal(struct{}{}) + initialPos = arg.Pos() + } + + t := pass.TypesInfo.TypeOf(arg) + s, ok := parseStruct(t, initialPos) + if !ok { + return // not a struct argument. + } + + reportPos, ok := checkStruct(s, fn.Tag, make(map[string]struct{})) + if ok { + return // nothing to report. + } + + r := report{reportPos, fn.Tag} + if _, ok := reports[r]; ok && reportOnce { + return // already reported. + } + + reportf(pass, reportPos, fn) + reports[r] = struct{}{} + }) + + return nil, nil +} + +// structInfo expands types.Struct with its position in the source code. +// If the struct is anonymous, Pos points to the corresponding identifier. +type structInfo struct { + *types.Struct + Pos token.Pos +} + +// parseStruct parses the given types.Type, returning the underlying struct type. +// If it's a named type, the result will contain the position of its declaration, +// or the given token.Pos otherwise. +func parseStruct(t types.Type, pos token.Pos) (*structInfo, bool) { + for { + // unwrap pointers (if any) first. + ptr, ok := t.(*types.Pointer) + if !ok { + break + } + t = ptr.Elem() + } + + switch t := t.(type) { + case *types.Named: // a struct of the named type. 
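Since New accepts extra Func descriptions, the analyzer can also be wired up programmatically instead of via the -fn flag. A sketch (not part of the patch) that registers the same sqlx.Get example used in the README and runs it through the standard singlechecker driver:

```go
package main

import (
	"github.com/junk1tm/musttag"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	sqlxGet := musttag.Func{
		Name:   "github.com/jmoiron/sqlx.Get", // full name, including the package
		Tag:    "db",                           // struct tag whose presence should be ensured
		ArgPos: 1,                              // position of the argument to check
	}
	singlechecker.Main(musttag.New(sqlxGet))
}
```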
+ if s, ok := t.Underlying().(*types.Struct); ok { + return &structInfo{Struct: s, Pos: t.Obj().Pos()}, true + } + case *types.Struct: // an anonymous struct. + return &structInfo{Struct: t, Pos: pos}, true + } + + return nil, false +} + +// checkStruct recursively checks the given struct and returns the position for report, +// in case one of its fields is missing the tag. +func checkStruct(s *structInfo, tag string, visited map[string]struct{}) (token.Pos, bool) { + visited[s.String()] = struct{}{} + for i := 0; i < s.NumFields(); i++ { + if !s.Field(i).Exported() { + continue + } + + st := reflect.StructTag(s.Tag(i)) + if _, ok := st.Lookup(tag); !ok && !s.Field(i).Embedded() { + return s.Pos, false + } + + t := s.Field(i).Type() + nested, ok := parseStruct(t, s.Pos) // TODO(junk1tm): or s.Field(i).Pos()? + if !ok { + continue + } + if _, ok := visited[nested.String()]; ok { + continue + } + if pos, ok := checkStruct(nested, tag, visited); !ok { + return pos, false + } + } + + return token.NoPos, true +} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go index 0a4067f9..a5ee3711 100644 --- a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go +++ b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go @@ -569,73 +569,98 @@ func (v *visitor) Visit(node ast.Node) ast.Visitor { if !v.ignoreCall(stmt.Call) && v.callReturnsError(stmt.Call) { v.addErrorAtPosition(stmt.Call.Lparen, stmt.Call) } + case *ast.GenDecl: + if stmt.Tok != token.VAR { + break + } + + for _, spec := range stmt.Specs { + vspec := spec.(*ast.ValueSpec) + + if len(vspec.Values) == 0 { + // ignore declarations w/o assignments + continue + } + + var lhs []ast.Expr + for _, name := range vspec.Names { + lhs = append(lhs, ast.Expr(name)) + } + v.checkAssignment(lhs, vspec.Values) + } + case *ast.AssignStmt: - if len(stmt.Rhs) == 1 { - // single value on rhs; check against lhs identifiers - if call, ok := stmt.Rhs[0].(*ast.CallExpr); ok { - if !v.blank { - break - } - if v.ignoreCall(call) { - break - } - isError := v.errorsByArg(call) - for i := 0; i < len(stmt.Lhs); i++ { - if id, ok := stmt.Lhs[i].(*ast.Ident); ok { - // We shortcut calls to recover() because errorsByArg can't - // check its return types for errors since it returns interface{}. - if id.Name == "_" && (v.isRecover(call) || isError[i]) { - v.addErrorAtPosition(id.NamePos, call) - } + v.checkAssignment(stmt.Lhs, stmt.Rhs) + + default: + } + return v +} + +func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) { + if len(rhs) == 1 { + // single value on rhs; check against lhs identifiers + if call, ok := rhs[0].(*ast.CallExpr); ok { + if !v.blank { + return + } + if v.ignoreCall(call) { + return + } + isError := v.errorsByArg(call) + for i := 0; i < len(lhs); i++ { + if id, ok := lhs[i].(*ast.Ident); ok { + // We shortcut calls to recover() because errorsByArg can't + // check its return types for errors since it returns interface{}. 
+ if id.Name == "_" && (v.isRecover(call) || isError[i]) { + v.addErrorAtPosition(id.NamePos, call) } } - } else if assert, ok := stmt.Rhs[0].(*ast.TypeAssertExpr); ok { - if !v.asserts { - break - } - if assert.Type == nil { - // type switch - break - } - if len(stmt.Lhs) < 2 { - // assertion result not read - v.addErrorAtPosition(stmt.Rhs[0].Pos(), nil) - } else if id, ok := stmt.Lhs[1].(*ast.Ident); ok && v.blank && id.Name == "_" { - // assertion result ignored - v.addErrorAtPosition(id.NamePos, nil) - } } - } else { - // multiple value on rhs; in this case a call can't return - // multiple values. Assume len(stmt.Lhs) == len(stmt.Rhs) - for i := 0; i < len(stmt.Lhs); i++ { - if id, ok := stmt.Lhs[i].(*ast.Ident); ok { - if call, ok := stmt.Rhs[i].(*ast.CallExpr); ok { - if !v.blank { - continue - } - if v.ignoreCall(call) { - continue - } - if id.Name == "_" && v.callReturnsError(call) { - v.addErrorAtPosition(id.NamePos, call) - } - } else if assert, ok := stmt.Rhs[i].(*ast.TypeAssertExpr); ok { - if !v.asserts { - continue - } - if assert.Type == nil { - // Shouldn't happen anyway, no multi assignment in type switches - continue - } - v.addErrorAtPosition(id.NamePos, nil) + } else if assert, ok := rhs[0].(*ast.TypeAssertExpr); ok { + if !v.asserts { + return + } + if assert.Type == nil { + // type switch + return + } + if len(lhs) < 2 { + // assertion result not read + v.addErrorAtPosition(rhs[0].Pos(), nil) + } else if id, ok := lhs[1].(*ast.Ident); ok && v.blank && id.Name == "_" { + // assertion result ignored + v.addErrorAtPosition(id.NamePos, nil) + } + } + } else { + // multiple value on rhs; in this case a call can't return + // multiple values. Assume len(lhs) == len(rhs) + for i := 0; i < len(lhs); i++ { + if id, ok := lhs[i].(*ast.Ident); ok { + if call, ok := rhs[i].(*ast.CallExpr); ok { + if !v.blank { + continue + } + if v.ignoreCall(call) { + continue + } + if id.Name == "_" && v.callReturnsError(call) { + v.addErrorAtPosition(id.NamePos, call) + } + } else if assert, ok := rhs[i].(*ast.TypeAssertExpr); ok { + if !v.asserts { + continue } + if assert.Type == nil { + // Shouldn't happen anyway, no multi assignment in type switches + continue + } + v.addErrorAtPosition(id.NamePos, nil) } } } - default: } - return v } func isErrorType(t types.Type) bool { diff --git a/vendor/github.com/kkHAIKE/contextcheck/README.md b/vendor/github.com/kkHAIKE/contextcheck/README.md index a383228a..2cc7b2e4 100644 --- a/vendor/github.com/kkHAIKE/contextcheck/README.md +++ b/vendor/github.com/kkHAIKE/contextcheck/README.md @@ -3,7 +3,7 @@ # contextcheck -`contextcheck` is a static analysis tool, it is used to check the function whether use a non-inherited context, which will result in a broken call link. +`contextcheck` is a static analysis tool, it is used to check whether the function uses a non-inherited context, which will result in a broken call link. 
For example: diff --git a/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go b/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go index e4a79b0d..c9ad0101 100644 --- a/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go +++ b/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go @@ -24,7 +24,7 @@ var pkgprefix string func NewAnalyzer(cfg Configuration) *analysis.Analyzer { analyzer := &analysis.Analyzer{ Name: "contextcheck", - Doc: "check the function whether use a non-inherited context", + Doc: "check whether the function uses a non-inherited context", Run: NewRun(nil, cfg.DisableFact), Requires: []*analysis.Analyzer{ buildssa.Analyzer, @@ -97,14 +97,25 @@ type runner struct { disableFact bool } +func getPkgRoot(pkg string) string { + arr := strings.Split(pkg, "/") + if len(arr) < 3 { + return arr[0] + } + if strings.IndexByte(arr[0], '.') == -1 { + return arr[0] + } + return strings.Join(arr[:3], "/") +} + func NewRun(pkgs []*packages.Package, disableFact bool) func(pass *analysis.Pass) (interface{}, error) { m := make(map[string]bool) for _, pkg := range pkgs { - m[strings.Split(pkg.PkgPath, "/")[0]] = true + m[getPkgRoot(pkg.PkgPath)] = true } return func(pass *analysis.Pass) (interface{}, error) { // skip different repo - if len(m) > 0 && !m[strings.Split(pass.Pkg.Path(), "/")[0]] { + if len(m) > 0 && !m[getPkgRoot(pass.Pkg.Path())] { return nil, nil } if len(m) == 0 && pkgprefix != "" && !strings.HasPrefix(pass.Pkg.Path(), pkgprefix) { diff --git a/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml b/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml index 22ff4404..95d44aaa 100644 --- a/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml +++ b/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml @@ -1,43 +1,51 @@ +# yaml-language-server: $schema=https://goreleaser.com/static/schema.json + project_name: exportloopref -release: - github: - owner: kyoh86 - name: exportloopref -brews: -- install: | - bin.install "exportloopref" - github: - owner: kyoh86 - name: homebrew-tap - folder: Formula - homepage: https://github.com/kyoh86/exportloopref - description: An analyzer that finds exporting pointers for loop variables. builds: -- goos: - - linux - - darwin - - windows - goarch: - - amd64 - - "386" - main: ./cmd/exportloopref - ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}} - binary: exportloopref + - id: default + goos: + - linux + - darwin + - windows + goarch: + - amd64 + - arm64 + - "386" + main: ./cmd/exportloopref + binary: exportloopref +brews: + - install: | + bin.install "exportloopref" + tap: + owner: kyoh86 + name: homebrew-tap + folder: Formula + homepage: https://github.com/kyoh86/exportloopref + description: An analyzer that finds exporting pointers for loop variables. + license: MIT +nfpms: + - builds: + - default + maintainer: kyoh86 + homepage: https://github.com/kyoh86/exportloopref + description: An analyzer that finds exporting pointers for loop variables. 
+ license: MIT + formats: + - apk + - deb + - rpm archives: -- id: gzip - format: tar.gz - format_overrides: - - goos: windows - format: zip - name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}" - files: - - licence* - - LICENCE* - - license* - - LICENSE* - - readme* - - README* - - changelog* - - CHANGELOG* -snapshot: - name_template: SNAPSHOT-{{ .Commit }} + - id: gzip + format: tar.gz + format_overrides: + - goos: windows + format: zip + files: + - licence* + - LICENCE* + - license* + - LICENSE* + - readme* + - README* + - changelog* + - CHANGELOG* diff --git a/vendor/github.com/kyoh86/exportloopref/README.md b/vendor/github.com/kyoh86/exportloopref/README.md index 5c019c73..0f581ffc 100644 --- a/vendor/github.com/kyoh86/exportloopref/README.md +++ b/vendor/github.com/kyoh86/exportloopref/README.md @@ -1,6 +1,8 @@ # exportloopref An analyzer that finds exporting pointers for loop variables. +![](https://repository-images.githubusercontent.com/256768552/a1c5bb80-dd73-11eb-9453-e520f517e730) +Pin them all! [![PkgGoDev](https://pkg.go.dev/badge/kyoh86/exportloopref)](https://pkg.go.dev/kyoh86/exportloopref) [![Go Report Card](https://goreportcard.com/badge/github.com/kyoh86/exportloopref)](https://goreportcard.com/report/github.com/kyoh86/exportloopref) @@ -9,7 +11,7 @@ An analyzer that finds exporting pointers for loop variables. ## What's this? -Sample problem code from: https://github.com/kyoh86/exportloopref/blob/master/testdata/src/simple/simple.go +Sample problem code from: https://github.com/kyoh86/exportloopref/blob/main/testdata/src/simple/simple.go ```go package main @@ -109,7 +111,7 @@ func printp(p *int) { } ``` -ref: https://github.com/kyoh86/exportloopref/blob/master/testdata/src/fixed/fixed.go +ref: https://github.com/kyoh86/exportloopref/blob/main/testdata/src/fixed/fixed.go ## Sensing policy @@ -120,7 +122,7 @@ e.g. ```go var s Foo -for _, p := []int{10, 11, 12, 13} { +for _, p := range []int{10, 11, 12, 13} { s.Bar(&p) // If s stores the pointer, it will be bug. 
} ``` diff --git a/vendor/github.com/kyoh86/exportloopref/exportloopref.go b/vendor/github.com/kyoh86/exportloopref/exportloopref.go index 4d1671a0..d071d5c3 100644 --- a/vendor/github.com/kyoh86/exportloopref/exportloopref.go +++ b/vendor/github.com/kyoh86/exportloopref/exportloopref.go @@ -17,21 +17,15 @@ var Analyzer = &analysis.Analyzer{ Run: run, RunDespiteErrors: true, Requires: []*analysis.Analyzer{inspect.Analyzer}, - // ResultType reflect.Type - // FactTypes []Fact -} - -func init() { - // Analyzer.Flags.StringVar(&v, "name", "default", "description") } func run(pass *analysis.Pass) (interface{}, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) search := &Searcher{ - Stats: map[token.Pos]struct{}{}, - Vars: map[token.Pos]map[token.Pos]struct{}{}, - Types: pass.TypesInfo.Types, + LoopVars: map[token.Pos]struct{}{}, + LocalVars: map[token.Pos]map[token.Pos]struct{}{}, + Pass: pass, } nodeFilter := []ast.Node{ @@ -42,50 +36,60 @@ func run(pass *analysis.Pass) (interface{}, error) { (*ast.UnaryExpr)(nil), } - inspect.WithStack(nodeFilter, func(n ast.Node, push bool, stack []ast.Node) bool { - id, insert, digg := search.Check(n, stack) - if id != nil { - dMsg := fmt.Sprintf("exporting a pointer for the loop variable %s", id.Name) - fMsg := fmt.Sprintf("loop variable %s should be pinned", id.Name) - var suggest []analysis.SuggestedFix - if insert != token.NoPos { - suggest = []analysis.SuggestedFix{{ - Message: fMsg, - TextEdits: []analysis.TextEdit{{ - Pos: insert, - End: insert, - NewText: []byte(fmt.Sprintf("%[1]s := %[1]s\n", id.Name)), - }}, - }} - } - d := analysis.Diagnostic{Pos: id.Pos(), - End: id.End(), - Message: dMsg, - Category: "exportloopref", - SuggestedFixes: suggest, - } - pass.Report(d) - } - return digg - }) + inspect.WithStack(nodeFilter, search.CheckAndReport) return nil, nil } type Searcher struct { - // Statement variables : map to collect positions that - // variables are declared like below. + // LoopVars is positions that loop-variables are declared like below. // - for , := range ... - // - var int - // - D := ... - Stats map[token.Pos]struct{} - // Local variables maps loop-position, decl-location to ignore - // safe pointers for variable which declared in the loop. - Vars map[token.Pos]map[token.Pos]struct{} - Types map[ast.Expr]types.TypeAndValue + // - for := ; ; + LoopVars map[token.Pos]struct{} + // LocalVars is positions of loops and the variables declared in them. + // Use this to determine if a point assignment is an export outside the loop. + LocalVars map[token.Pos]map[token.Pos]struct{} + + Pass *analysis.Pass +} + +// CheckAndReport inspects each node with stack. +// It is implemented as the I/F of the "golang.org/x/tools/go/analysis/passes/inspect".Analysis.WithStack. +func (s *Searcher) CheckAndReport(n ast.Node, push bool, stack []ast.Node) bool { + id, insert, digg := s.Check(n, stack) + if id == nil { + // no prob. 
+ return digg + } + + // suggests fix + var suggest []analysis.SuggestedFix + if insert != token.NoPos { + suggest = []analysis.SuggestedFix{{ + Message: fmt.Sprintf("loop variable %s should be pinned", id.Name), + TextEdits: []analysis.TextEdit{{ + Pos: insert, + End: insert, + NewText: []byte(fmt.Sprintf("%[1]s := %[1]s\n", id.Name)), + }}, + }} + } + + // report a diagnostic + d := analysis.Diagnostic{Pos: id.Pos(), + End: id.End(), + Message: fmt.Sprintf("exporting a pointer for the loop variable %s", id.Name), + Category: "exportloopref", + SuggestedFixes: suggest, + } + s.Pass.Report(d) + return digg } -func (s *Searcher) Check(n ast.Node, stack []ast.Node) (*ast.Ident, token.Pos, bool) { +// Check each node and stack, whether it exports loop variables or not. +// Finding export, report the *ast.Ident of exported loop variable, +// and token.Pos to insert assignment to fix the diagnostic. +func (s *Searcher) Check(n ast.Node, stack []ast.Node) (loopVar *ast.Ident, insertPos token.Pos, digg bool) { switch typed := n.(type) { case *ast.RangeStmt: s.parseRangeStmt(typed) @@ -102,72 +106,92 @@ func (s *Searcher) Check(n ast.Node, stack []ast.Node) (*ast.Ident, token.Pos, b return nil, token.NoPos, true } +// parseRangeStmt will check range statement (i.e. `for , := range ...`), +// and collect positions of and . func (s *Searcher) parseRangeStmt(n *ast.RangeStmt) { - s.addStat(n.Key) - s.addStat(n.Value) + s.storeLoopVars(n.Key) + s.storeLoopVars(n.Value) } +// parseForStmt will check for statement (i.e. `for := ; ; `), +// and collect positions of . func (s *Searcher) parseForStmt(n *ast.ForStmt) { switch post := n.Post.(type) { case *ast.AssignStmt: // e.g. for p = head; p != nil; p = p.next for _, lhs := range post.Lhs { - s.addStat(lhs) + s.storeLoopVars(lhs) } case *ast.IncDecStmt: // e.g. for i := 0; i < n; i++ - s.addStat(post.X) + s.storeLoopVars(post.X) } } -func (s *Searcher) addStat(expr ast.Expr) { +func (s *Searcher) storeLoopVars(expr ast.Expr) { if id, ok := expr.(*ast.Ident); ok { - s.Stats[id.Pos()] = struct{}{} + s.LoopVars[id.Pos()] = struct{}{} } } +// parseDeclStmt will parse declaring statement (i.e. `var`, `type`, `const`), +// and store the position if it is "var" declaration and is in any loop. func (s *Searcher) parseDeclStmt(n *ast.DeclStmt, stack []ast.Node) { + genDecl, ok := n.Decl.(*ast.GenDecl) + if !ok { + // (dead branch) + // if the Decl is not GenDecl (i.e. `var`, `type` or `const` statement), it is ignored + return + } + if genDecl.Tok != token.VAR { + // if the Decl is not `var` (may be `type` or `const`), it is ignored + return + } + loop, _ := s.innermostLoop(stack) if loop == nil { return } - // Register declaring variables - if genDecl, ok := n.Decl.(*ast.GenDecl); ok && genDecl.Tok == token.VAR { - for _, spec := range genDecl.Specs { - for _, name := range spec.(*ast.ValueSpec).Names { - s.addVar(loop, name) - } + // Register declared variables + for _, spec := range genDecl.Specs { + for _, name := range spec.(*ast.ValueSpec).Names { + s.storeLocalVar(loop, name) } } } +// parseDeclStmt will parse assignment statement (i.e. ` = `), +// and store the position if it is . 
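The SuggestedFix built above inserts the classic pin ("p := p\n") at the start of the loop body. A self-contained before/after sketch of the pattern the analyzer targets, reusing the README's Bar example with assumed types:

```go
package main

import "fmt"

type store struct{ ptrs []*int }

func (s *store) Bar(p *int) { s.ptrs = append(s.ptrs, p) }

func main() {
	var s store
	for _, p := range []int{10, 11, 12, 13} {
		p := p // the pin inserted by the suggested fix
		s.Bar(&p)
	}
	for _, p := range s.ptrs {
		// With the pin each iteration exports its own copy: 10 11 12 13.
		// Without it, under pre-Go 1.22 loop semantics, all four pointers
		// would alias the single loop variable and print 13 four times.
		fmt.Println(*p)
	}
}
```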
func (s *Searcher) parseAssignStmt(n *ast.AssignStmt, stack []ast.Node) { + if n.Tok != token.DEFINE { + // if the statement is simple assignment (without definement), it is ignored + return + } + loop, _ := s.innermostLoop(stack) if loop == nil { return } // Find statements declaring local variable - if n.Tok == token.DEFINE { - for _, h := range n.Lhs { - s.addVar(loop, h) - } + for _, h := range n.Lhs { + s.storeLocalVar(loop, h) } } -func (s *Searcher) addVar(loop ast.Node, expr ast.Expr) { +func (s *Searcher) storeLocalVar(loop ast.Node, expr ast.Expr) { loopPos := loop.Pos() id, ok := expr.(*ast.Ident) if !ok { return } - vars, ok := s.Vars[loopPos] + vars, ok := s.LocalVars[loopPos] if !ok { vars = map[token.Pos]struct{}{} } vars[id.Obj.Pos()] = struct{}{} - s.Vars[loopPos] = vars + s.LocalVars[loopPos] = vars } func insertionPosition(block *ast.BlockStmt) token.Pos { @@ -189,13 +213,15 @@ func (s *Searcher) innermostLoop(stack []ast.Node) (ast.Node, token.Pos) { return nil, token.NoPos } +// checkUnaryExpr check unary expression (i.e. like `-x`, `*p` or `&v`) and stack. +// THIS IS THE ESSENTIAL PART OF THIS PARSER. func (s *Searcher) checkUnaryExpr(n *ast.UnaryExpr, stack []ast.Node) (*ast.Ident, token.Pos, bool) { - loop, insert := s.innermostLoop(stack) - if loop == nil { + if n.Op != token.AND { return nil, token.NoPos, true } - if n.Op != token.AND { + loop, insert := s.innermostLoop(stack) + if loop == nil { return nil, token.NoPos, true } @@ -207,7 +233,7 @@ func (s *Searcher) checkUnaryExpr(n *ast.UnaryExpr, stack []ast.Node) (*ast.Iden // If the identity is not the loop statement variable, // it will not be reported. - if _, isStat := s.Stats[id.Obj.Pos()]; !isStat { + if _, isDecl := s.LoopVars[id.Obj.Pos()]; !isDecl { return nil, token.NoPos, true } @@ -266,12 +292,15 @@ func (s *Searcher) checkUnaryExpr(n *ast.UnaryExpr, stack []ast.Node) (*ast.Iden } func (s *Searcher) isVar(loop ast.Node, expr ast.Expr) bool { - vars := s.Vars[loop.Pos()] // map[token.Pos]struct{} + vars := s.LocalVars[loop.Pos()] // map[token.Pos]struct{} if vars == nil { return false } switch typed := expr.(type) { case (*ast.Ident): + if typed.Obj == nil { + return false // global var in another file (ref: #13) + } _, isVar := vars[typed.Obj.Pos()] return isVar case (*ast.IndexExpr): // like X[Y], check X @@ -287,7 +316,7 @@ func (s *Searcher) getIdentity(expr ast.Expr) *ast.Ident { switch typed := expr.(type) { case *ast.SelectorExpr: // Ignore if the parent is pointer ref (fix for #2) - if _, ok := s.Types[typed.X].Type.(*types.Pointer); ok { + if _, ok := s.Pass.TypesInfo.Types[typed.X].Type.(*types.Pointer); ok { return nil } diff --git a/vendor/github.com/ldez/tagliatelle/.golangci.yml b/vendor/github.com/ldez/tagliatelle/.golangci.yml index 53313e30..ec5c5c76 100644 --- a/vendor/github.com/ldez/tagliatelle/.golangci.yml +++ b/vendor/github.com/ldez/tagliatelle/.golangci.yml @@ -24,7 +24,7 @@ linters-settings: gofumpt: extra-rules: true depguard: - list-type: blacklist + list-type: denylist include-go-root: false packages: - github.com/sirupsen/logrus @@ -46,12 +46,19 @@ linters-settings: linters: enable-all: true disable: + - deadcode # deprecated + - exhaustivestruct # deprecated - golint # deprecated - - maligned # deprecated + - ifshort # deprecated - interfacer # deprecated + - maligned # deprecated + - nosnakecase # deprecated - scopelint # deprecated + - structcheck # deprecated + - varcheck # deprecated - sqlclosecheck # not relevant (SQL) - 
rowserrcheck # not relevant (SQL) + - execinquery # not relevant (SQL) - cyclop # duplicate of gocyclo - lll - dupl @@ -61,7 +68,7 @@ linters: - goerr113 - wrapcheck - exhaustive - - exhaustivestruct + - exhaustruct - testpackage - tparallel - paralleltest @@ -71,9 +78,11 @@ linters: - varnamelen - nilnil - errchkjson + - nonamedreturns issues: exclude-use-default: false max-per-linter: 0 max-same-issues: 0 - exclude: [] + exclude: + - 'package-comments: should have a package comment' diff --git a/vendor/github.com/ldez/tagliatelle/readme.md b/vendor/github.com/ldez/tagliatelle/readme.md index 85849eab..7bd728dd 100644 --- a/vendor/github.com/ldez/tagliatelle/readme.md +++ b/vendor/github.com/ldez/tagliatelle/readme.md @@ -15,6 +15,7 @@ Supported string casing: - `goPascal` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). - `goKebab` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). - `goSnake` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). +- `header` - `upper` - `lower` @@ -70,6 +71,18 @@ Supported string casing: | NameJSON | name-json | name-JSON | | UneTête | une-tête | une-tête | +| Source | Header Case | +|----------------|------------------| +| GooID | Goo-Id | +| HTTPStatusCode | Http-Status-Code | +| FooBAR | Foo-Bar | +| URL | Url | +| ID | Id | +| hostIP | Host-Ip | +| JSON | Json | +| JSONName | Json-Name | +| NameJSON | Name-Json | +| UneTête | Une-Tête | ## Examples @@ -82,3 +95,81 @@ type Foo struct { Value string `json:"val,omitempty"`// must be "value" } ``` + +## What this tool is about + +This tool is about validating tags according to rules you define. +The tool also allows to fix tags according to the rules you defined. + +This tool is not intended to validate the fact a tag in valid or not. +To do that, you can use `go vet`, or use [golangci-lint](https://golangci-lint.run) ["go vet"](https://golangci-lint.run/usage/linters/#govet) linter. + +## How to use the tool + +### As a golangci-lint linter + +Define the rules, you want via your [golangci-lint](https://golangci-lint.run) configuration file: + +```yaml +linters-settings: + tagliatelle: + # Check the struck tag name case. + case: + # Use the struct field name to check the name of the struct tag. + # Default: false + use-field-name: true + rules: + # Any struct tag type can be used. + # Support string case: `camel`, `pascal`, `kebab`, `snake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower` + json: camel + yaml: camel + xml: camel +``` + +More information here https://golangci-lint.run/usage/linters/#tagliatelle + +### Install and run it from the binary + +Not recommended. + +```shell +go install github.com/ldez/tagliatelle/cmd/tagliatelle@latest +``` + +then launch it manually. 
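Given the new `header` casing listed above, a rule such as `header: header` in the tagliatelle configuration would expect tag values in the Header Case column of the table. A hypothetical struct that satisfies it:

```go
package example

// ProbeResult uses tag values in Header-Case, matching the conversions
// shown in the table above (HTTPStatusCode -> Http-Status-Code, etc.).
type ProbeResult struct {
	HTTPStatusCode int    `header:"Http-Status-Code"`
	HostIP         string `header:"Host-Ip"`
	JSONName       string `header:"Json-Name"`
}
```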
+ +## Rules + +Here are the default rules for the well known and used tags, when using tagliatelle as a binary or [golangci-lint linter](https://golangci-lint.run/usage/linters/#tagliatelle): + +- `json`: `camel` +- `yaml`: `camel` +- `xml`: `camel` +- `bson`: `camel` +- `avro`: `snake` +- `header`: `header` + +### Custom Rules + +The tool is not limited to the tags used in example, you can use it to validate any tag. + +You can add your own tag, for example `whatever` and tells the tool you want to use `kebab`. + +This option is only available via [golangci-lint](https://golangci-lint.run). + +```yaml +linters-settings: + tagliatelle: + # Check the struck tag name case. + case: + # Use the struct field name to check the name of the struct tag. + # Default: false + use-field-name: true + rules: + # Any struct tag type can be used. + # Support string case: `camel`, `pascal`, `kebab`, `snake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower` + json: camel + yaml: camel + xml: camel + whatever: kebab +``` diff --git a/vendor/github.com/ldez/tagliatelle/tagliatelle.go b/vendor/github.com/ldez/tagliatelle/tagliatelle.go index 53e77d1c..c4653763 100644 --- a/vendor/github.com/ldez/tagliatelle/tagliatelle.go +++ b/vendor/github.com/ldez/tagliatelle/tagliatelle.go @@ -200,6 +200,8 @@ func getConverter(c string) (func(s string) string, error) { return strcase.ToGoKebab, nil case "goSnake": return strcase.ToGoSnake, nil + case "header": + return toHeader, nil case "upper": return strings.ToUpper, nil case "lower": @@ -208,3 +210,7 @@ func getConverter(c string) (func(s string) string, error) { return nil, fmt.Errorf("unsupported case: %s", c) } } + +func toHeader(s string) string { + return strcase.ToCase(s, strcase.TitleCase, '-') +} diff --git a/vendor/github.com/leonklingele/grouper/LICENSE b/vendor/github.com/leonklingele/grouper/LICENSE index 15bc112b..f288702d 100644 --- a/vendor/github.com/leonklingele/grouper/LICENSE +++ b/vendor/github.com/leonklingele/grouper/LICENSE @@ -1,22 +1,23 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 - GNU AFFERO GENERAL PUBLIC LICENSE - Version 3, 19 November 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. + Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble - The GNU Affero General Public License is a free, copyleft license for -software and other kinds of works, specifically designed to ensure -cooperation with the community in the case of network server software. + The GNU General Public License is a free, copyleft license for +software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, -our General Public Licenses are intended to guarantee your freedom to +the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free -software for all its users. +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. When we speak of free software, we are referring to freedom, not price. 
Our General Public Licenses are designed to make sure that you @@ -25,34 +26,44 @@ them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. - Developers that use our General Public Licenses protect your rights -with two steps: (1) assert copyright on the software, and (2) offer -you this License which gives you legal permission to copy, distribute -and/or modify the software. - - A secondary benefit of defending all users' freedom is that -improvements made in alternate versions of the program, if they -receive widespread use, become available for other developers to -incorporate. Many developers of free software are heartened and -encouraged by the resulting cooperation. However, in the case of -software used on network servers, this result may fail to come about. -The GNU General Public License permits making a modified version and -letting the public access it on a server without ever releasing its -source code to the public. - - The GNU Affero General Public License is designed specifically to -ensure that, in such cases, the modified source code becomes available -to the community. It requires the operator of a network server to -provide the source code of the modified version running there to the -users of that server. Therefore, public use of a modified version, on -a publicly accessible server, gives the public access to the source -code of the modified version. - - An older license, called the Affero General Public License and -published by Affero, was designed to accomplish similar goals. This is -a different license, not a version of the Affero GPL, but Affero has -released a new version of the Affero GPL which permits relicensing under -this license. + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. @@ -61,7 +72,7 @@ modification follow. 0. Definitions. - "This License" refers to version 3 of the GNU Affero General Public License. + "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. @@ -538,45 +549,35 @@ to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. - 13. Remote Network Interaction; Use with the GNU General Public License. - - Notwithstanding any other provision of this License, if you modify the -Program, your modified version must prominently offer all users -interacting with it remotely through a computer network (if your version -supports such interaction) an opportunity to receive the Corresponding -Source of your version by providing access to the Corresponding Source -from a network server at no charge, through some standard or customary -means of facilitating copying of software. This Corresponding Source -shall include the Corresponding Source for any work covered by version 3 -of the GNU General Public License that is incorporated pursuant to the -following paragraph. + 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed -under version 3 of the GNU General Public License into a single +under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, -but the work with which it is combined will remain governed by version -3 of the GNU General Public License. +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of -the GNU Affero General Public License from time to time. Such new versions -will be similar in spirit to the present version, but may differ in detail to +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. 
If the -Program specifies that a certain numbered version of the GNU Affero General +Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the -GNU Affero General Public License, you may choose any version ever published +GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future -versions of the GNU Affero General Public License can be used, that proxy's +versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. @@ -634,29 +635,40 @@ the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify - it under the terms of the GNU Affero General Public License as published - by the Free Software Foundation, either version 3 of the License, or + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Affero General Public License for more details. + GNU General Public License for more details. - You should have received a copy of the GNU Affero General Public License - along with this program. If not, see . + You should have received a copy of the GNU General Public License + along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. - If your software can interact with users remotely through a computer -network, you should also make sure that it provides a way for users to -get its source. For example, if your program is a web application, its -interface could display a "Source" link that leads users to an archive -of the code. There are many ways you could offer source, and different -solutions will be better for different programs; see section 13 for the -specific requirements. + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU AGPL, see -. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. 
If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/vendor/github.com/mattn/go-isatty/isatty_bsd.go b/vendor/github.com/mattn/go-isatty/isatty_bsd.go index 39bbcf00..d569c0c9 100644 --- a/vendor/github.com/mattn/go-isatty/isatty_bsd.go +++ b/vendor/github.com/mattn/go-isatty/isatty_bsd.go @@ -1,5 +1,5 @@ -//go:build (darwin || freebsd || openbsd || netbsd || dragonfly) && !appengine -// +build darwin freebsd openbsd netbsd dragonfly +//go:build (darwin || freebsd || openbsd || netbsd || dragonfly || hurd) && !appengine +// +build darwin freebsd openbsd netbsd dragonfly hurd // +build !appengine package isatty diff --git a/vendor/github.com/mgechev/revive/config/config.go b/vendor/github.com/mgechev/revive/config/config.go index 5c63f35b..d6b4f410 100644 --- a/vendor/github.com/mgechev/revive/config/config.go +++ b/vendor/github.com/mgechev/revive/config/config.go @@ -86,6 +86,7 @@ var allRules = append([]lint.Rule{ &rule.OptimizeOperandsOrderRule{}, &rule.UseAnyRule{}, &rule.DataRaceRule{}, + &rule.CommentSpacingsRule{}, }, defaultRules...) var allFormatters = []lint.Formatter{ diff --git a/vendor/github.com/mgechev/revive/formatter/sarif.go b/vendor/github.com/mgechev/revive/formatter/sarif.go index ee62adcc..c6288db7 100644 --- a/vendor/github.com/mgechev/revive/formatter/sarif.go +++ b/vendor/github.com/mgechev/revive/formatter/sarif.go @@ -81,8 +81,8 @@ func (l *reviveRunLog) AddResult(failure lint.Failure) { } position := failure.Position filename := position.Start.Filename - line := positiveOrZero(position.Start.Line - 1) // https://docs.oasis-open.org/sarif/sarif/v2.1.0/csprd01/sarif-v2.1.0-csprd01.html#def_line - column := positiveOrZero(position.Start.Column - 1) // https://docs.oasis-open.org/sarif/sarif/v2.1.0/csprd01/sarif-v2.1.0-csprd01.html#def_column + line := positiveOrZero(position.Start.Line) // https://docs.oasis-open.org/sarif/sarif/v2.1.0/csprd01/sarif-v2.1.0-csprd01.html#def_line + column := positiveOrZero(position.Start.Column) // https://docs.oasis-open.org/sarif/sarif/v2.1.0/csprd01/sarif-v2.1.0-csprd01.html#def_column result := garif.NewResult(garif.NewMessageFromText(failure.Failure)) location := garif.NewLocation().WithURI(filename).WithLineColumn(line, column) diff --git a/vendor/github.com/mgechev/revive/rule/add-constant.go b/vendor/github.com/mgechev/revive/rule/add-constant.go index 414be38c..36a7003d 100644 --- a/vendor/github.com/mgechev/revive/rule/add-constant.go +++ b/vendor/github.com/mgechev/revive/rule/add-constant.go @@ -3,6 +3,7 @@ package rule import ( "fmt" "go/ast" + "regexp" "strconv" "strings" "sync" @@ -32,8 +33,9 @@ func (wl whiteList) add(kind, list string) { // AddConstantRule lints unused params in functions. 
type AddConstantRule struct { - whiteList whiteList - strLitLimit int + whiteList whiteList + ignoreFunctions []*regexp.Regexp + strLitLimit int sync.Mutex } @@ -47,7 +49,13 @@ func (r *AddConstantRule) Apply(file *lint.File, arguments lint.Arguments) []lin failures = append(failures, failure) } - w := lintAddConstantRule{onFailure: onFailure, strLits: make(map[string]int), strLitLimit: r.strLitLimit, whiteLst: r.whiteList} + w := lintAddConstantRule{ + onFailure: onFailure, + strLits: make(map[string]int), + strLitLimit: r.strLitLimit, + whiteLst: r.whiteList, + ignoreFunctions: r.ignoreFunctions, + } ast.Walk(w, file.AST) @@ -60,28 +68,76 @@ func (*AddConstantRule) Name() string { } type lintAddConstantRule struct { - onFailure func(lint.Failure) - strLits map[string]int - strLitLimit int - whiteLst whiteList + onFailure func(lint.Failure) + strLits map[string]int + strLitLimit int + whiteLst whiteList + ignoreFunctions []*regexp.Regexp } func (w lintAddConstantRule) Visit(node ast.Node) ast.Visitor { switch n := node.(type) { + case *ast.CallExpr: + w.checkFunc(n) + return nil case *ast.GenDecl: return nil // skip declarations case *ast.BasicLit: - switch kind := n.Kind.String(); kind { - case kindFLOAT, kindINT: - w.checkNumLit(kind, n) - case kindSTRING: - w.checkStrLit(n) - } + w.checkLit(n) } return w } +func (w lintAddConstantRule) checkFunc(expr *ast.CallExpr) { + fName := w.getFuncName(expr) + + for _, arg := range expr.Args { + switch t := arg.(type) { + case *ast.CallExpr: + w.checkFunc(t) + case *ast.BasicLit: + if w.isIgnoredFunc(fName) { + continue + } + w.checkLit(t) + } + } +} + +func (w lintAddConstantRule) getFuncName(expr *ast.CallExpr) string { + switch f := expr.Fun.(type) { + case *ast.SelectorExpr: + switch prefix := f.X.(type) { + case *ast.Ident: + return prefix.Name + "." + f.Sel.Name + } + case *ast.Ident: + return f.Name + } + + return "" +} + +func (w lintAddConstantRule) checkLit(n *ast.BasicLit) { + switch kind := n.Kind.String(); kind { + case kindFLOAT, kindINT: + w.checkNumLit(kind, n) + case kindSTRING: + w.checkStrLit(n) + } +} + +func (w lintAddConstantRule) isIgnoredFunc(fName string) bool { + for _, pattern := range w.ignoreFunctions { + if pattern.MatchString(fName) { + return true + } + } + + return false +} + func (w lintAddConstantRule) checkStrLit(n *ast.BasicLit) { if w.whiteLst[kindSTRING][n.Value] { return @@ -158,6 +214,25 @@ func (r *AddConstantRule) configure(arguments lint.Arguments) { panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v'", v)) } r.strLitLimit = limit + case "ignoreFuncs": + excludes, ok := v.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the ignoreFuncs parameter of add-constant rule, string expected. 
Got '%v' (%T)", v, v)) + } + + for _, exclude := range strings.Split(excludes, ",") { + exclude = strings.Trim(exclude, " ") + if exclude == "" { + panic("Invalid argument to the ignoreFuncs parameter of add-constant rule, expected regular expression must not be empty.") + } + + exp, err := regexp.Compile(exclude) + if err != nil { + panic(fmt.Sprintf("Invalid argument to the ignoreFuncs parameter of add-constant rule: regexp %q does not compile: %v", exclude, err)) + } + + r.ignoreFunctions = append(r.ignoreFunctions, exp) + } } } } diff --git a/vendor/github.com/mgechev/revive/rule/comment-spacings.go b/vendor/github.com/mgechev/revive/rule/comment-spacings.go new file mode 100644 index 00000000..abe2ad76 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/comment-spacings.go @@ -0,0 +1,82 @@ +package rule + +import ( + "fmt" + "strings" + "sync" + + "github.com/mgechev/revive/lint" +) + +// CommentSpacings Rule check the whether there is a space between +// the comment symbol( // ) and the start of the comment text +type CommentSpacingsRule struct { + allowList []string + sync.Mutex +} + +func (r *CommentSpacingsRule) configure(arguments lint.Arguments) { + r.Lock() + defer r.Unlock() + + if r.allowList == nil { + r.allowList = []string{ + "//go:", + "//revive:", + } + + for _, arg := range arguments { + allow, ok := arg.(string) // Alt. non panicking version + if !ok { + panic(fmt.Sprintf("invalid argument %v for %s; expected string but got %T", arg, r.Name(), arg)) + } + r.allowList = append(r.allowList, `//`+allow+`:`) + } + } +} + +func (r *CommentSpacingsRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { + r.configure(args) + + var failures []lint.Failure + + for _, cg := range file.AST.Comments { + for _, comment := range cg.List { + commentLine := comment.Text + if len(commentLine) < 3 { + continue // nothing to do + } + + isOK := commentLine[2] == ' ' + if isOK { + continue + } + + if r.isAllowed(commentLine) { + continue + } + + failures = append(failures, lint.Failure{ + Node: comment, + Confidence: 1, + Category: "style", + Failure: "no space between comment delimiter and comment text", + }) + } + } + return failures +} + +func (*CommentSpacingsRule) Name() string { + return "comment-spacings" +} + +func (r *CommentSpacingsRule) isAllowed(line string) bool { + for _, allow := range r.allowList { + if strings.HasPrefix(line, allow) { + return true + } + } + + return false +} diff --git a/vendor/github.com/mgechev/revive/rule/early-return.go b/vendor/github.com/mgechev/revive/rule/early-return.go index bfbf6717..ed0fcfae 100644 --- a/vendor/github.com/mgechev/revive/rule/early-return.go +++ b/vendor/github.com/mgechev/revive/rule/early-return.go @@ -1,12 +1,15 @@ package rule import ( + "fmt" "go/ast" + "go/token" "github.com/mgechev/revive/lint" ) -// EarlyReturnRule lints given else constructs. +// EarlyReturnRule finds opportunities to reduce nesting by inverting +// the condition of an "if" block. type EarlyReturnRule struct{} // Apply applies the rule to given file. 
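As a rough illustration of the comment-spacings rule added in the new file above: based on its Apply method, a comment is reported when the character right after "//" is not a space and the comment does not begin with an allow-listed prefix ("//go:" and "//revive:" by default, plus any prefixes passed as rule arguments). The sketch below is not vendored code; the "nolint" entry is only an assumed extra rule argument.

```go
// Sketch only: how the comment-spacings rule would treat these comments.
package demo

//go:generate stringer -type=Kind
// Not reported: "//go:" is in the built-in allow list.

//revive:disable-next-line
// Not reported: "//revive:" is also allowed by default.

//nolint:errcheck
// Reported, unless "nolint" is configured as a rule argument
// (which appends "//nolint:" to the allow list).

//no space after the delimiter, so this line is reported.

// A comment with a space after the delimiter is never reported.
```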
@@ -32,47 +35,142 @@ type lintEarlyReturnRule struct { } func (w lintEarlyReturnRule) Visit(node ast.Node) ast.Visitor { - switch n := node.(type) { + ifStmt, ok := node.(*ast.IfStmt) + if !ok { + return w + } + + w.visitIf(ifStmt, false, false) + return nil +} + +func (w lintEarlyReturnRule) visitIf(ifStmt *ast.IfStmt, hasNonReturnBranch, hasIfInitializer bool) { + // look for other if-else chains nested inside this if { } block + ast.Walk(w, ifStmt.Body) + + if ifStmt.Else == nil { + // no else branch + return + } + + if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE { + hasIfInitializer = true + } + bodyFlow := w.branchFlow(ifStmt.Body) + + switch elseBlock := ifStmt.Else.(type) { case *ast.IfStmt: - if n.Else == nil { - // no else branch - return w + if bodyFlow.canFlowIntoNext() { + hasNonReturnBranch = true } + w.visitIf(elseBlock, hasNonReturnBranch, hasIfInitializer) + + case *ast.BlockStmt: + // look for other if-else chains nested inside this else { } block + ast.Walk(w, elseBlock) - elseBlock, ok := n.Else.(*ast.BlockStmt) - if !ok { - // is if-else-if - return w + if hasNonReturnBranch && bodyFlow != branchFlowEmpty { + // if we de-indent this block then a previous branch + // might flow into it, affecting program behaviour + return } - lenElseBlock := len(elseBlock.List) - if lenElseBlock < 1 { - // empty else block, continue (there is another rule that warns on empty blocks) - return w + if !bodyFlow.canFlowIntoNext() { + // avoid overlapping with superfluous-else + return } - lenThenBlock := len(n.Body.List) - if lenThenBlock < 1 { - // then block is empty thus the stmt can be simplified - w.onFailure(lint.Failure{ - Confidence: 1, - Node: n, - Failure: "if c { } else {... return} can be simplified to if !c { ... return }", - }) + elseFlow := w.branchFlow(elseBlock) + if !elseFlow.canFlowIntoNext() { + failMsg := fmt.Sprintf("if c {%[1]s } else {%[2]s } can be simplified to if !c {%[2]s }%[1]s", + bodyFlow, elseFlow) - return w - } + if hasIfInitializer { + // if statement has a := initializer, so we might need to move the assignment + // onto its own line in case the body references it + failMsg += " (move short variable declaration to its own line if necessary)" + } - _, lastThenStmtIsReturn := n.Body.List[lenThenBlock-1].(*ast.ReturnStmt) - _, lastElseStmtIsReturn := elseBlock.List[lenElseBlock-1].(*ast.ReturnStmt) - if lastElseStmtIsReturn && !lastThenStmtIsReturn { w.onFailure(lint.Failure{ Confidence: 1, - Node: n, - Failure: "if c {...} else {... return } can be simplified to if !c { ... 
return } ...", + Node: ifStmt, + Failure: failMsg, }) } + + default: + panic("invalid node type for else") } +} - return w +type branchFlowKind int + +const ( + branchFlowEmpty branchFlowKind = iota + branchFlowReturn + branchFlowPanic + branchFlowContinue + branchFlowBreak + branchFlowGoto + branchFlowRegular +) + +func (w lintEarlyReturnRule) branchFlow(block *ast.BlockStmt) branchFlowKind { + blockLen := len(block.List) + if blockLen == 0 { + return branchFlowEmpty + } + + switch stmt := block.List[blockLen-1].(type) { + case *ast.ReturnStmt: + return branchFlowReturn + case *ast.BlockStmt: + return w.branchFlow(stmt) + case *ast.BranchStmt: + switch stmt.Tok { + case token.BREAK: + return branchFlowBreak + case token.CONTINUE: + return branchFlowContinue + case token.GOTO: + return branchFlowGoto + } + case *ast.ExprStmt: + if call, ok := stmt.X.(*ast.CallExpr); ok && isIdent(call.Fun, "panic") { + return branchFlowPanic + } + } + + return branchFlowRegular +} + +// Whether this branch's control can flow into the next statement following the if-else chain +func (k branchFlowKind) canFlowIntoNext() bool { + switch k { + case branchFlowReturn, branchFlowPanic, branchFlowContinue, branchFlowBreak, branchFlowGoto: + return false + default: + return true + } +} + +func (k branchFlowKind) String() string { + switch k { + case branchFlowEmpty: + return "" + case branchFlowReturn: + return " ... return" + case branchFlowPanic: + return " ... panic()" + case branchFlowContinue: + return " ... continue" + case branchFlowBreak: + return " ... break" + case branchFlowGoto: + return " ... goto" + case branchFlowRegular: + return " ..." + default: + panic("invalid kind") + } } diff --git a/vendor/github.com/mgechev/revive/rule/identical-branches.go b/vendor/github.com/mgechev/revive/rule/identical-branches.go index b1a69097..9222c8a9 100644 --- a/vendor/github.com/mgechev/revive/rule/identical-branches.go +++ b/vendor/github.com/mgechev/revive/rule/identical-branches.go @@ -63,8 +63,10 @@ func (lintIdenticalBranches) identicalBranches(branches []*ast.BlockStmt) bool { } ref := gofmt(branches[0]) + refSize := len(branches[0].List) for i := 1; i < len(branches); i++ { - if gofmt(branches[i]) != ref { + currentSize := len(branches[i].List) + if currentSize != refSize || gofmt(branches[i]) != ref { return false } } diff --git a/vendor/github.com/mgechev/revive/rule/nested-structs.go b/vendor/github.com/mgechev/revive/rule/nested-structs.go index 968511f2..fd122699 100644 --- a/vendor/github.com/mgechev/revive/rule/nested-structs.go +++ b/vendor/github.com/mgechev/revive/rule/nested-structs.go @@ -37,6 +37,11 @@ type lintNestedStructs struct { func (l *lintNestedStructs) Visit(n ast.Node) ast.Visitor { switch v := n.(type) { + case *ast.TypeSpec: + _, isInterface := v.Type.(*ast.InterfaceType) + if isInterface { + return nil // do not analyze interface declarations + } case *ast.FuncDecl: if v.Body != nil { ast.Walk(l, v.Body) diff --git a/vendor/github.com/mgechev/revive/rule/string-format.go b/vendor/github.com/mgechev/revive/rule/string-format.go index e7841e8c..0e30ebf8 100644 --- a/vendor/github.com/mgechev/revive/rule/string-format.go +++ b/vendor/github.com/mgechev/revive/rule/string-format.go @@ -68,6 +68,7 @@ type stringFormatSubrule struct { parent *lintStringFormatRule scope stringFormatSubruleScope regexp *regexp.Regexp + negated bool errorMessage string } @@ -89,17 +90,18 @@ var 
parseStringFormatScope = regexp.MustCompile( func (w *lintStringFormatRule) parseArguments(arguments lint.Arguments) { for i, argument := range arguments { - scope, regex, errorMessage := w.parseArgument(argument, i) + scope, regex, negated, errorMessage := w.parseArgument(argument, i) w.rules = append(w.rules, stringFormatSubrule{ parent: w, scope: scope, regexp: regex, + negated: negated, errorMessage: errorMessage, }) } } -func (w lintStringFormatRule) parseArgument(argument interface{}, ruleNum int) (scope stringFormatSubruleScope, regex *regexp.Regexp, errorMessage string) { +func (w lintStringFormatRule) parseArgument(argument interface{}, ruleNum int) (scope stringFormatSubruleScope, regex *regexp.Regexp, negated bool, errorMessage string) { g, ok := argument.([]interface{}) // Cast to generic slice first if !ok { w.configError("argument is not a slice", ruleNum, 0) @@ -146,7 +148,12 @@ func (w lintStringFormatRule) parseArgument(argument interface{}, ruleNum int) ( } // Strip / characters from the beginning and end of rule[1] before compiling - regex, err := regexp.Compile(rule[1][1 : len(rule[1])-1]) + negated = rule[1][0] == '!' + offset := 1 + if negated { + offset++ + } + regex, err := regexp.Compile(rule[1][offset : len(rule[1])-1]) if err != nil { w.parseError(fmt.Sprintf("unable to compile %s as regexp", rule[1]), ruleNum, 1) } @@ -155,7 +162,7 @@ func (w lintStringFormatRule) parseArgument(argument interface{}, ruleNum int) ( if len(rule) == 3 { errorMessage = rule[2] } - return scope, regex, errorMessage + return scope, regex, negated, errorMessage } // Report an invalid config, this is specifically the user's fault @@ -261,7 +268,26 @@ func (r *stringFormatSubrule) Apply(call *ast.CallExpr) { } func (r *stringFormatSubrule) lintMessage(s string, node ast.Node) { - // Fail if the string doesn't match the user's regex + if r.negated { + if !r.regexp.MatchString(s) { + return + } + // Fail if the string does match the user's regex + var failure string + if len(r.errorMessage) > 0 { + failure = r.errorMessage + } else { + failure = fmt.Sprintf("string literal matches user defined regex /%s/", r.regexp.String()) + } + r.parent.onFailure(lint.Failure{ + Confidence: 1, + Failure: failure, + Node: node, + }) + return + } + + // Fail if the string does NOT match the user's regex if r.regexp.MatchString(s) { return } diff --git a/vendor/github.com/mgechev/revive/rule/unhandled-error.go b/vendor/github.com/mgechev/revive/rule/unhandled-error.go index 6cde24b7..32a5fe48 100644 --- a/vendor/github.com/mgechev/revive/rule/unhandled-error.go +++ b/vendor/github.com/mgechev/revive/rule/unhandled-error.go @@ -4,6 +4,8 @@ import ( "fmt" "go/ast" "go/types" + "regexp" + "strings" "sync" "github.com/mgechev/revive/lint" @@ -11,24 +13,30 @@ import ( // UnhandledErrorRule lints given else constructs. type UnhandledErrorRule struct { - ignoreList ignoreListType + ignoreList []*regexp.Regexp sync.Mutex } -type ignoreListType map[string]struct{} - func (r *UnhandledErrorRule) configure(arguments lint.Arguments) { r.Lock() if r.ignoreList == nil { - r.ignoreList = make(ignoreListType, len(arguments)) - for _, arg := range arguments { argStr, ok := arg.(string) if !ok { panic(fmt.Sprintf("Invalid argument to the unhandled-error rule. 
Expecting a string, got %T", arg)) } - r.ignoreList[argStr] = struct{}{} + argStr = strings.Trim(argStr, " ") + if argStr == "" { + panic("Invalid argument to the unhandled-error rule, expected regular expression must not be empty.") + } + + exp, err := regexp.Compile(argStr) + if err != nil { + panic(fmt.Sprintf("Invalid argument to the unhandled-error rule: regexp %q does not compile: %v", argStr, err)) + } + + r.ignoreList = append(r.ignoreList, exp) } } r.Unlock() @@ -60,7 +68,7 @@ func (*UnhandledErrorRule) Name() string { } type lintUnhandledErrors struct { - ignoreList ignoreListType + ignoreList []*regexp.Regexp pkg *lint.Package onFailure func(lint.Failure) } @@ -102,8 +110,8 @@ func (w *lintUnhandledErrors) Visit(node ast.Node) ast.Visitor { } func (w *lintUnhandledErrors) addFailure(n *ast.CallExpr) { - funcName := gofmt(n.Fun) - if _, mustIgnore := w.ignoreList[funcName]; mustIgnore { + name := w.funcName(n) + if w.isIgnoredFunc(name) { return } @@ -111,10 +119,34 @@ func (w *lintUnhandledErrors) addFailure(n *ast.CallExpr) { Category: "bad practice", Confidence: 1, Node: n, - Failure: fmt.Sprintf("Unhandled error in call to function %v", funcName), + Failure: fmt.Sprintf("Unhandled error in call to function %v", gofmt(n.Fun)), }) } +func (w *lintUnhandledErrors) funcName(call *ast.CallExpr) string { + fn, ok := w.getFunc(call) + if !ok { + return gofmt(call.Fun) + } + + name := fn.FullName() + name = strings.Replace(name, "(", "", -1) + name = strings.Replace(name, ")", "", -1) + name = strings.Replace(name, "*", "", -1) + + return name +} + +func (w *lintUnhandledErrors) isIgnoredFunc(funcName string) bool { + for _, pattern := range w.ignoreList { + if len(pattern.FindString(funcName)) == len(funcName) { + return true + } + } + + return false +} + func (*lintUnhandledErrors) isTypeError(t *types.Named) bool { const errorTypeName = "_.error" @@ -130,3 +162,17 @@ func (w *lintUnhandledErrors) returnsAnError(tt *types.Tuple) bool { } return false } + +func (w *lintUnhandledErrors) getFunc(call *ast.CallExpr) (*types.Func, bool) { + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return nil, false + } + + fn, ok := w.pkg.TypesInfo().ObjectOf(sel.Sel).(*types.Func) + if !ok { + return nil, false + } + + return fn, true +} diff --git a/vendor/github.com/nishanths/exhaustive/.gitignore b/vendor/github.com/nishanths/exhaustive/.gitignore index 10acec6e..94c13223 100644 --- a/vendor/github.com/nishanths/exhaustive/.gitignore +++ b/vendor/github.com/nishanths/exhaustive/.gitignore @@ -3,8 +3,10 @@ tags # binary -cmd/exhaustive/exhaustive -exhaustive +/cmd/exhaustive/exhaustive +/exhaustive # testing artifacts -coverage.out +/coverage.out + +/CHANGELOG.md diff --git a/vendor/github.com/nishanths/exhaustive/Makefile b/vendor/github.com/nishanths/exhaustive/Makefile index 981a7ebe..868f7fce 100644 --- a/vendor/github.com/nishanths/exhaustive/Makefile +++ b/vendor/github.com/nishanths/exhaustive/Makefile @@ -1,19 +1,33 @@ .PHONY: default default: build +.PHONY: all +all: build vet test + .PHONY: build build: go build ./... + go build ./cmd/exhaustive .PHONY: test test: - go test -cover ./... + go test -count=1 ./... + +.PHONY: testshort +testshort: + go test -short -count=1 ./... + +.PHONY: cover +cover: + go test -cover -coverprofile=coverage.out ./... 
+ go tool cover -html=coverage.out .PHONY: install-vet install-vet: go install github.com/nishanths/exhaustive/cmd/exhaustive@latest go install github.com/gordonklaus/ineffassign@latest go install github.com/kisielk/errcheck@latest + go install honnef.co/go/tools/cmd/staticcheck@latest .PHONY: vet vet: @@ -21,6 +35,7 @@ vet: exhaustive ./... ineffassign ./... errcheck ./... + staticcheck -checks="inherit,-S1034" ./... .PHONY: upgrade-deps upgrade-deps: diff --git a/vendor/github.com/nishanths/exhaustive/README.md b/vendor/github.com/nishanths/exhaustive/README.md index a65d9de2..0f16d81e 100644 --- a/vendor/github.com/nishanths/exhaustive/README.md +++ b/vendor/github.com/nishanths/exhaustive/README.md @@ -1,32 +1,41 @@ -## exhaustive [![Godoc][2]][1] +# exhaustive [![Godoc][godoc-svg]][godoc] -Check exhaustiveness of enum switch statements and map literals in Go source code. +Package exhaustive defines an analyzer that checks exhaustiveness of switch +statements of enum-like constants in Go source code. + +For supported flags, the definition of enum, and the definition of +exhaustiveness used by this package, see [pkg.go.dev][godoc-doc]. For a +changelog, see [CHANGELOG][changelog] in the GitHub wiki. + +The analyzer can be configured to additionally check exhaustiveness of map +literals whose key type is enum-like. + +## Usage + +Command line program: ``` go install github.com/nishanths/exhaustive/cmd/exhaustive@latest -``` - -For docs on the flags, the definition of enum, and the definition of -exhaustiveness, see [godocs.io][4]. -For the changelog, see [CHANGELOG][changelog] in the wiki. +exhaustive [flags] [packages] +``` -The package provides an `Analyzer` that follows the guidelines in the -[`go/analysis`][3] package; this should make it possible to integrate -exhaustive with your own analysis driver program. +Package: -## Bugs +``` +go get github.com/nishanths/exhaustive +``` -`exhaustive` does not report missing cases if the switch statement -switches on a type parameterized type. See [this -issue](https://github.com/nishanths/exhaustive/issues/31) for details. +The `exhaustive.Analyzer` variable follows the guidelines of the +[`golang.org/x/tools/go/analysis`][xanalysis] package. This should make it +possible to integrate `exhaustive` in your own analysis driver program. 
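For a rough sketch of such an integration (not part of this diff): a standalone driver can pass `exhaustive.Analyzer` to the standard `singlechecker` helper from `golang.org/x/tools/go/analysis/singlechecker`. The import path below mirrors the vendored path used in this repository; the upstream module path may differ.

```go
// Minimal sketch of a driver program for the exhaustive analyzer.
package main

import (
	"github.com/nishanths/exhaustive"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// singlechecker supplies flag parsing, package loading, and diagnostic
	// reporting for a single analyzer.
	singlechecker.Main(exhaustive.Analyzer)
}
```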
## Example -Given the enum +Given an enum: ```go -package token +package token // import "example.org/token" type Token int @@ -39,12 +48,12 @@ const ( ) ``` -and the code +And code that switches on the enum: ```go -package calc +package calc // import "example.org/calc" -import "token" +import "example.org/token" func f(t token.Token) { switch t { @@ -55,29 +64,38 @@ func f(t token.Token) { } } -func g(t token.Token) string { - return map[token.Token]string{ - token.Add: "add", - token.Subtract: "subtract", - token.Multiply: "multiply", - }[t] +var m = map[token.Token]string{ + token.Add: "add", + token.Subtract: "subtract", + token.Multiply: "multiply", } ``` -running exhaustive will print +Running `exhaustive` with default options will report: + +``` +% exhaustive example.org/calc +calc.go:6:2: missing cases in switch of type token.Token: token.Quotient, token.Remainder +``` + +Specify the flag `-check=switch,map` to additionally check exhaustiveness of +map literal keys: ``` -calc.go:6:2: missing cases in switch of type token.Token: Quotient, Remainder -calc.go:15:9: missing map keys of type token.Token: Quotient, Remainder +% exhaustive -check=switch,map example.org/calc +calc.go:6:2: missing cases in switch of type token.Token: token.Quotient, token.Remainder +calc.go:14:9: missing keys in map of key type token.Token: token.Quotient, token.Remainder ``` ## Contributing -Issues and pull requests are welcome. Before making a substantial -change, please discuss it in an issue. +Issues and changes are welcome. Please discuss substantial changes +in an issue first. -[1]: https://godocs.io/github.com/nishanths/exhaustive -[2]: https://godocs.io/github.com/nishanths/exhaustive?status.svg -[3]: https://pkg.go.dev/golang.org/x/tools/go/analysis -[4]: https://godocs.io/github.com/nishanths/exhaustive +[godoc]: https://pkg.go.dev/github.com/nishanths/exhaustive +[godoc-svg]: https://pkg.go.dev/badge/github.com/nishanths/exhaustive.svg +[godoc-doc]: https://pkg.go.dev/github.com/nishanths/exhaustive#section-documentation +[godoc-flags]: https://pkg.go.dev/github.com/nishanths/exhaustive#hdr-Flags +[xanalysis]: https://pkg.go.dev/golang.org/x/tools/go/analysis [changelog]: https://github.com/nishanths/exhaustive/wiki/CHANGELOG +[issue-typeparam]: https://github.com/nishanths/exhaustive/issues/31 diff --git a/vendor/github.com/nishanths/exhaustive/comment.go b/vendor/github.com/nishanths/exhaustive/comment.go index 1232df11..69b6e543 100644 --- a/vendor/github.com/nishanths/exhaustive/comment.go +++ b/vendor/github.com/nishanths/exhaustive/comment.go @@ -7,36 +7,30 @@ import ( "strings" ) -// Generated file definition +// For definition of generated file see: // http://golang.org/s/generatedcode -// -// To convey to humans and machine tools that code is generated, generated -// source should have a line that matches the following regular expression (in -// Go syntax): -// -// ^// Code generated .* DO NOT EDIT\.$ -// -// This line must appear before the first non-comment, non-blank -// text in the file. + +var generatedCodeRe = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`) func isGeneratedFile(file *ast.File) bool { // NOTE: file.Comments includes file.Doc as well, so no need // to separately check file.Doc. - for _, c := range file.Comments { for _, cc := range c.List { - // This check is intended to handle "must appear before the - // first non-comment, non-blank text in the file". 
- // TODO: Is this check fully correct? Seems correct based - // on https://golang.org/ref/spec#Source_file_organization. + // This check handles the "must appear before the first + // non-comment, non-blank text in the file" requirement. + // + // According to https://golang.org/ref/spec#Source_file_organization + // the package clause is the first element in a file, which + // should make it the first non-comment, non-blank text. if c.Pos() >= file.Package { return false } // According to the docs: - // '\r' has been removed. - // '\n' has been removed for //-style comments, which is what we care about. - // Also manually verified. - if isGeneratedFileComment(cc.Text) { + // '\r' has been removed. + // '\n' has been removed for //-style comments + // This has also been manually verified. + if generatedCodeRe.MatchString(cc.Text) { return true } } @@ -45,38 +39,15 @@ func isGeneratedFile(file *ast.File) bool { return false } -var generatedCodeRe = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`) - -func isGeneratedFileComment(s string) bool { - return generatedCodeRe.MatchString(s) -} - -type generatedCache map[*ast.File]bool - -func (c generatedCache) IsGenerated(file *ast.File) bool { - if _, ok := c[file]; !ok { - c[file] = isGeneratedFile(file) - } - return c[file] -} - -// ignoreDirective is used to exclude checking of specific switch statements. -const ignoreDirective = "//exhaustive:ignore" -const enforceDirective = "//exhaustive:enforce" - -type commentsCache map[*ast.File]ast.CommentMap - -func (c commentsCache) GetComments(file *ast.File, set *token.FileSet) ast.CommentMap { - if _, ok := c[file]; !ok { - c[file] = ast.NewCommentMap(set, file, file.Comments) - } - return c[file] -} +const ( + ignoreComment = "//exhaustive:ignore" + enforceComment = "//exhaustive:enforce" +) -func containsDirective(comments []*ast.CommentGroup, directive string) bool { +func hasComment(comments []*ast.CommentGroup, comment string) bool { for _, c := range comments { for _, cc := range c.List { - if strings.HasPrefix(cc.Text, directive) { + if strings.HasPrefix(cc.Text, comment) { return true } } @@ -84,10 +55,6 @@ func containsDirective(comments []*ast.CommentGroup, directive string) bool { return false } -func containsEnforceDirective(comments []*ast.CommentGroup) bool { - return containsDirective(comments, enforceDirective) -} - -func containsIgnoreDirective(comments []*ast.CommentGroup) bool { - return containsDirective(comments, ignoreDirective) +func fileCommentMap(fset *token.FileSet, file *ast.File) ast.CommentMap { + return ast.NewCommentMap(fset, file, file.Comments) } diff --git a/vendor/github.com/nishanths/exhaustive/common.go b/vendor/github.com/nishanths/exhaustive/common.go new file mode 100644 index 00000000..20fcf040 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/common.go @@ -0,0 +1,368 @@ +package exhaustive + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "regexp" + "sort" + "strings" + + "golang.org/x/tools/go/ast/astutil" +) + +func denotesPackage(ident *ast.Ident, info *types.Info) bool { + obj := info.ObjectOf(ident) + if obj == nil { + return false + } + _, ok := obj.(*types.PkgName) + return ok +} + +// exprConstVal returns the constantValue for an expression if the +// expression is a constant value and if the expression is considered +// valid to satisfy exhaustiveness as defined by this program. +// Otherwise it returns (_, false). 
+func exprConstVal(e ast.Expr, info *types.Info) (constantValue, bool) { + handleIdent := func(ident *ast.Ident) (constantValue, bool) { + obj := info.Uses[ident] + if obj == nil { + return "", false + } + if _, ok := obj.(*types.Const); !ok { + return "", false + } + // There are two scenarios. + // See related test cases in typealias/quux/quux.go. + // + // Scenario 1 + // + // Tag package and constant package are the same. This is + // simple; we just use fs.ModeDir's value. + // + // Example: + // + // var mode fs.FileMode + // switch mode { + // case fs.ModeDir: + // } + // + // Scenario 2 + // + // Tag package and constant package are different. In this + // scenario, too, we accept the case clause expr constant value, + // as is. If the Go type checker is okay with the name being + // listed in the case clause, we don't care much further. + // + // Example: + // + // var mode fs.FileMode + // switch mode { + // case os.ModeDir: + // } + // + // Or equivalently: + // + // // The type of mode is effectively fs.FileMode, + // // due to type alias. + // var mode os.FileMode + // switch mode { + // case os.ModeDir: + // } + return determineConstVal(ident, info), true + } + + e = stripTypeConversions(astutil.Unparen(e), info) + + switch e := e.(type) { + case *ast.Ident: + return handleIdent(e) + + case *ast.SelectorExpr: + x := astutil.Unparen(e.X) + // Ensure we only see the form pkg.Const, and not e.g. + // structVal.f or structVal.inner.f. + // + // For this purpose, first we check that X, which is everything + // except the rightmost field selector *ast.Ident (the Sel + // field), is also an *ast.Ident. + xIdent, ok := x.(*ast.Ident) + if !ok { + return "", false + } + // Second, check that it's a package. It doesn't matter which + // package, just that it denotes some package. + if !denotesPackage(xIdent, info) { + return "", false + } + return handleIdent(e.Sel) + + default: + // e.g. literal + // these aren't considered towards satisfying exhaustiveness. + return "", false + } +} + +// stripTypeConversions removing type conversions from the experession. +func stripTypeConversions(e ast.Expr, info *types.Info) ast.Expr { + c, ok := e.(*ast.CallExpr) + if !ok { + return e + } + typ := info.TypeOf(c.Fun) + if typ == nil { + // can happen for built-ins. + return e + } + // do not allow function calls. + if _, ok := typ.Underlying().(*types.Signature); ok { + return e + } + // type conversions have exactly one arg. + if len(c.Args) != 1 { + return e + } + return stripTypeConversions(astutil.Unparen(c.Args[0]), info) +} + +// member is a single member of an enum type. +type member struct { + pos token.Pos + typ enumType + name string + val constantValue +} + +// typeAndMembers combines an enumType and its members set. +type typeAndMembers struct { + et enumType + em enumMembers +} + +type checklist struct { + info map[enumType]enumMembers + checkl map[member]struct{} + ignoreConstantRe *regexp.Regexp + ignoreTypeRe *regexp.Regexp +} + +func (c *checklist) ignoreConstant(pattern *regexp.Regexp) { + c.ignoreConstantRe = pattern +} + +func (c *checklist) ignoreType(pattern *regexp.Regexp) { + c.ignoreTypeRe = pattern +} + +func (*checklist) reMatch(re *regexp.Regexp, s string) bool { + if re == nil { + return false + } + return re.MatchString(s) +} + +func (c *checklist) add(et enumType, em enumMembers, includeUnexported bool) { + addOne := func(name string) { + if isBlankIdentifier(name) { + // Blank identifier is often used to skip entries in iota + // lists. 
Also, it can't be referenced anywhere (e.g. can't + // be referenced in switch statement cases) It doesn't make + // sense to include it as required member to satisfy + // exhaustiveness. + return + } + if !ast.IsExported(name) && !includeUnexported { + return + } + if c.reMatch(c.ignoreConstantRe, fmt.Sprintf("%s.%s", et.Pkg().Path(), name)) { + return + } + if c.reMatch(c.ignoreTypeRe, fmt.Sprintf("%s.%s", et.Pkg().Path(), et.TypeName.Name())) { + return + } + mem := member{ + em.NameToPos[name], + et, + name, + em.NameToValue[name], + } + if c.checkl == nil { + c.checkl = make(map[member]struct{}) + } + c.checkl[mem] = struct{}{} + } + + if c.info == nil { + c.info = make(map[enumType]enumMembers) + } + c.info[et] = em + + for _, name := range em.Names { + addOne(name) + } +} + +func (c *checklist) found(val constantValue) { + // delete all same-valued items. + for et, em := range c.info { + for _, name := range em.ValueToNames[val] { + delete(c.checkl, member{ + em.NameToPos[name], + et, + name, + em.NameToValue[name], + }) + } + } +} + +func (c *checklist) remaining() map[member]struct{} { + return c.checkl +} + +// group is a collection of same-valued members, possibly from +// different enum types. +type group []member + +func groupMissing(missing map[member]struct{}, types []enumType) []group { + // indices maps each element in the input slice to its index. + indices := func(vs []enumType) map[enumType]int { + ret := make(map[enumType]int, len(vs)) + for i, v := range vs { + ret[v] = i + } + return ret + } + + typesOrder := indices(types) // for quick lookup + astBefore := func(x, y member) bool { + if typesOrder[x.typ] < typesOrder[y.typ] { + return true + } + if typesOrder[x.typ] > typesOrder[y.typ] { + return false + } + return x.pos < y.pos + } + + // byConstVal groups member names by constant value. + byConstVal := func(members map[member]struct{}) map[constantValue][]member { + ret := make(map[constantValue][]member) + for m := range members { + ret[m.val] = append(ret[m.val], m) + } + return ret + } + + var groups []group + for _, members := range byConstVal(missing) { + groups = append(groups, group(members)) + } + + // sort members within each group in AST order. + for i := range groups { + g := groups[i] + sort.Slice(g, func(i, j int) bool { return astBefore(g[i], g[j]) }) + groups[i] = g + } + // sort groups themselves in AST order. + // the index [0] access is safe, because there will be at least one + // element per group. + sort.Slice(groups, func(i, j int) bool { return astBefore(groups[i][0], groups[j][0]) }) + + return groups +} + +func diagnosticEnumType(enumType *types.TypeName) string { + return enumType.Pkg().Name() + "." + enumType.Name() +} + +func diagnosticEnumTypes(types []enumType) string { + var buf strings.Builder + for i := range types { + buf.WriteString(diagnosticEnumType(types[i].TypeName)) + if i != len(types)-1 { + buf.WriteByte('|') + } + } + return buf.String() +} + +func diagnosticMember(m member) string { + return m.typ.Pkg().Name() + "." 
+ m.name +} + +func diagnosticGroups(gs []group) string { + out := make([]string, len(gs)) + for i := range gs { + var buf strings.Builder + for j := range gs[i] { + buf.WriteString(diagnosticMember(gs[i][j])) + if j != len(gs[i])-1 { + buf.WriteByte('|') + } + } + out[i] = buf.String() + } + return strings.Join(out, ", ") +} + +func toEnumTypes(es []typeAndMembers) []enumType { + out := make([]enumType, len(es)) + for i := range es { + out[i] = es[i].et + } + return out +} + +func dedupEnumTypes(types []enumType) []enumType { + // TODO(nishanths) this function is a candidate to use generics. + + m := make(map[enumType]struct{}) + var ret []enumType + for _, t := range types { + _, ok := m[t] + if ok { + continue + } + m[t] = struct{}{} + ret = append(ret, t) + } + return ret +} + +// TODO(nishanths) If dropping pre-go1.18 support, the following +// types and functions are candidates to use generics. + +type boolCache struct { + m map[*ast.File]bool + value func(*ast.File) bool +} + +func (c boolCache) get(file *ast.File) bool { + if c.m == nil { + c.m = make(map[*ast.File]bool) + } + if _, ok := c.m[file]; !ok { + c.m[file] = c.value(file) + } + return c.m[file] +} + +type commentCache struct { + m map[*ast.File]ast.CommentMap + value func(*token.FileSet, *ast.File) ast.CommentMap +} + +func (c commentCache) get(fset *token.FileSet, file *ast.File) ast.CommentMap { + if c.m == nil { + c.m = make(map[*ast.File]ast.CommentMap) + } + if _, ok := c.m[file]; !ok { + c.m[file] = c.value(fset, file) + } + return c.m[file] +} diff --git a/vendor/github.com/nishanths/exhaustive/common_go118.go b/vendor/github.com/nishanths/exhaustive/common_go118.go new file mode 100644 index 00000000..aebdd806 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/common_go118.go @@ -0,0 +1,122 @@ +//go:build go1.18 +// +build go1.18 + +package exhaustive + +import ( + "go/types" + + "golang.org/x/tools/go/analysis" +) + +func fromNamed(pass *analysis.Pass, t *types.Named, typeparam bool) (result []typeAndMembers, ok bool) { + if tpkg := t.Obj().Pkg(); tpkg == nil { + // go/types documentation says: nil for labels and + // objects in the Universe scope. This happens for the built-in + // error type for example. + return nil, false // not a valid enum type, so ok == false + } + + et := enumType{t.Obj()} + if em, ok := importFact(pass, et); ok { + return []typeAndMembers{{et, em}}, true + } + + if typeparam { + if intf, ok := t.Underlying().(*types.Interface); ok { + return fromInterface(pass, intf, typeparam) + } + } + + return nil, false // not a valid enum type, so ok == false +} + +func fromInterface(pass *analysis.Pass, intf *types.Interface, typeparam bool) (result []typeAndMembers, all bool) { + all = true + + for i := 0; i < intf.NumEmbeddeds(); i++ { + embed := intf.EmbeddedType(i) + + switch embed.(type) { + case *types.Union: + u := embed.(*types.Union) + // gather from each term in the union. + for i := 0; i < u.Len(); i++ { + r, a := fromType(pass, u.Term(i).Type(), typeparam) + result = append(result, r...) + all = all && a + } + + case *types.Named: + r, a := fromNamed(pass, embed.(*types.Named), typeparam) + result = append(result, r...) + all = all && a + + default: + // don't care about these. + // e.g. 
basic type + } + } + + return +} + +func fromType(pass *analysis.Pass, t types.Type, typeparam bool) (result []typeAndMembers, ok bool) { + switch t := t.(type) { + case *types.Named: + return fromNamed(pass, t, typeparam) + + case *types.TypeParam: + // does not appear to be explicitly documented, but based on + // spec (see section Type constraints) and source code, we can + // expect constraints to have underlying type *types.Interface. + intf := t.Constraint().Underlying().(*types.Interface) + return fromInterface(pass, intf, typeparam) + + case *types.Interface: + // anonymous interface. + // e.g. func foo[T interface { M } | interface { N }](v T) {} + if !typeparam { + return nil, true + } + return fromInterface(pass, t, typeparam) + + default: + // ignore these. + return nil, true + } +} + +func composingEnumTypes(pass *analysis.Pass, t types.Type) (result []typeAndMembers, ok bool) { + _, typeparam := t.(*types.TypeParam) + result, ok = fromType(pass, t, typeparam) + + if typeparam { + var kind types.BasicKind + var kindSet bool + + // sameKind reports whether each type t that the function is called + // with has the same underlying basic kind. + sameBasicKind := func(t types.Type) (ok bool) { + basic, ok := t.Underlying().(*types.Basic) + if !ok { + return false + } + if kindSet && kind != basic.Kind() { + return false + } + kind = basic.Kind() + kindSet = true + return true + } + + for _, rr := range result { + if !sameBasicKind(rr.et.TypeName.Type()) { + ok = false + break + } + } + } + + return result, ok +} diff --git a/vendor/github.com/nishanths/exhaustive/common_pre_go118.go b/vendor/github.com/nishanths/exhaustive/common_pre_go118.go new file mode 100644 index 00000000..f916c17f --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/common_pre_go118.go @@ -0,0 +1,37 @@ +//go:build !go1.18 +// +build !go1.18 + +package exhaustive + +import ( + "go/types" + + "golang.org/x/tools/go/analysis" +) + +func fromNamed(pass *analysis.Pass, t *types.Named) (result typeAndMembers, ok bool) { + if tpkg := t.Obj().Pkg(); tpkg == nil { + return typeAndMembers{}, false + } + + et := enumType{t.Obj()} + em, ok := importFact(pass, et) + if !ok { + return typeAndMembers{}, false + } + + return typeAndMembers{et, em}, true +} + +func composingEnumTypes(pass *analysis.Pass, t types.Type) (result []typeAndMembers, ok bool) { + switch t := t.(type) { + case *types.Named: + e, ok := fromNamed(pass, t) + if !ok { + return nil, false + } + return []typeAndMembers{e}, true + default: + return nil, false + } +} diff --git a/vendor/github.com/nishanths/exhaustive/enum.go b/vendor/github.com/nishanths/exhaustive/enum.go index 2b287e39..fa46eb92 100644 --- a/vendor/github.com/nishanths/exhaustive/enum.go +++ b/vendor/github.com/nishanths/exhaustive/enum.go @@ -10,39 +10,46 @@ import ( "golang.org/x/tools/go/ast/inspector" ) -// constantValue is a constant.Value.ExactString(). +// constantValue is a (constant.Value).ExactString value. type constantValue string -// Represents an enum type (or a potential enum type). -// It is a defined (named) type's name. +// enumType represents an enum type as defined by this program, which +// effectively is a defined (named) type. 
type enumType struct{ *types.TypeName } func (et enumType) String() string { return et.TypeName.String() } // for debugging func (et enumType) scope() *types.Scope { return et.TypeName.Parent() } // scope that the type is declared in func (et enumType) factObject() types.Object { return et.TypeName } // types.Object for fact export -// enumMembers is the members for a single enum type. +// enumMembers is set of enum members for a single enum type. // The zero value is ready to use. type enumMembers struct { - Names []string // enum member names, AST order + Names []string // enum member names + NameToPos map[string]token.Pos // member name -> AST position NameToValue map[string]constantValue // enum member name -> constant value ValueToNames map[constantValue][]string // constant value -> enum member names } -func (em *enumMembers) add(name string, val constantValue) { +// add adds an enum member to the set. +func (em *enumMembers) add(name string, val constantValue, pos token.Pos) { + if em.NameToPos == nil { + em.NameToPos = make(map[string]token.Pos) + } if em.NameToValue == nil { em.NameToValue = make(map[string]constantValue) } if em.ValueToNames == nil { em.ValueToNames = make(map[constantValue][]string) } - em.Names = append(em.Names, name) + em.NameToPos[name] = pos em.NameToValue[name] = val em.ValueToNames[val] = append(em.ValueToNames[val], name) } -func (em enumMembers) String() string { return em.factString() } // for debugging +func (em enumMembers) String() string { + return em.factString() +} func (em enumMembers) factString() string { var buf strings.Builder @@ -74,7 +81,7 @@ func findEnums(pkgScopeOnly bool, pkg *types.Package, inspect *inspector.Inspect continue } v := result[enumTyp] - v.add(memberName, val) + v.add(memberName, val, name.Pos()) result[enumTyp] = v } } @@ -84,6 +91,25 @@ func findEnums(pkgScopeOnly bool, pkg *types.Package, inspect *inspector.Inspect } func possibleEnumMember(constName *ast.Ident, info *types.Info) (et enumType, name string, val constantValue, ok bool) { + // Notes + // + // type T int + // const A T = iota // obj.Type() is T + // + // type R T + // const B R = iota // obj.Type() is R + // + // type T2 int + // type T1 = T2 + // const C T1 = iota // obj.Type() is T2 + // + // type T3 = T4 + // type T4 int + // type T5 = T3 + // const D T5 = iota // obj.Type() is T4 + // + // In all these cases, validNamedBasic(obj.Type()) == true. + obj := info.Defs[constName] if obj == nil { panic(fmt.Sprintf("info.Defs[%s] == nil", constName)) @@ -91,44 +117,22 @@ func possibleEnumMember(constName *ast.Ident, info *types.Info) (et enumType, na if _, ok = obj.(*types.Const); !ok { panic(fmt.Sprintf("obj must be *types.Const, got %T", obj)) } - if isBlankIdentifier(obj) { + if isBlankIdentifier(obj.Name()) { // These objects have a nil parent scope. // Also, we have no real purpose to record them. return enumType{}, "", "", false } - - /* - NOTE: - - type T int - const A T = iota // obj.Type() is T - - type R T - const B R = iota // obj.Type() is R - - type T2 int - type T1 = T2 - const C T1 = iota // obj.Type() is T2 - - type T3 = T4 - type T4 int - type T5 = T3 - const D T5 = iota // obj.Type() is T4 - - // And, in all these cases, validNamedBasic(obj.Type()) == true. 
- */ - if !validNamedBasic(obj.Type()) { return enumType{}, "", "", false } - named := obj.Type().(*types.Named) // guaranteed by validNamedBasic() + named := obj.Type().(*types.Named) // guaranteed by validNamedBasic tn := named.Obj() - // Enum type's scope and enum member's scope must be the same. If they're - // not, don't consider the const a member. Additionally, the enum type and - // the enum member must be in the same package (the scope check accounts for - // this, too). + // By definition, enum type's scope and enum member's scope must be the + // same. If they're not, don't consider the const a member. Additionally, + // the enum type and the enum member must be in the same package (the + // scope check accounts for this, too). if tn.Parent() != obj.Parent() { return enumType{}, "", "", false } @@ -141,8 +145,8 @@ func determineConstVal(name *ast.Ident, info *types.Info) constantValue { return constantValue(c.Val().ExactString()) } -func isBlankIdentifier(obj types.Object) bool { - return obj.Name() == "_" // NOTE: go/types/decl.go does a direct comparison like this +func isBlankIdentifier(name string) bool { + return name == "_" // NOTE: go/types/decl.go does a direct comparison like this } func validBasic(basic *types.Basic) bool { @@ -154,10 +158,12 @@ func validBasic(basic *types.Basic) bool { } // validNamedBasic returns whether the type t is a named type whose underlying -// type is a valid basic type to form an enum. -// A type that passes this check meets the definition of an enum type. -// Note that -// validNamedBasic(t) == true => t.(*types.Named) +// type is a valid basic type to form an enum. A type that passes this check +// meets the definition of an enum type. +// +// The following is guaranteed: +// +// validNamedBasic(t) == true => t.(*types.Named) func validNamedBasic(t types.Type) bool { named, ok := t.(*types.Named) if !ok { diff --git a/vendor/github.com/nishanths/exhaustive/exhaustive.go b/vendor/github.com/nishanths/exhaustive/exhaustive.go index 8ec80e06..ddb9ee0a 100644 --- a/vendor/github.com/nishanths/exhaustive/exhaustive.go +++ b/vendor/github.com/nishanths/exhaustive/exhaustive.go @@ -1,247 +1,252 @@ /* -Package exhaustive provides an analyzer that checks exhaustiveness of enum -switch statements and map literals in Go source code. +Package exhaustive defines an analyzer that checks exhaustiveness of switch +statements of enum-like constants in Go source code. The analyzer can be +configured to additionally check exhaustiveness of map literals whose key type +is enum-like. # Definition of enum -The Go language spec does not provide an explicit definition for an enum. For -the purpose of this analyzer, an enum type is any named type (a.k.a. defined -type) whose underlying type is an integer (includes byte and rune), a float, -or a string type. An enum type has associated with it constants of this named -type; these constants constitute the enum members. +The Go [language spec] does not provide an explicit definition for enums. For +the purpose of this analyzer, and by convention, an enum type is any named +type that has: -In the example below, Biome is an enum type with 3 members. + - underlying type float, string, or integer (includes byte and + rune, which are aliases for uint8 and int32, respectively); and + - at least one constant of the type defined in the same scope. + +In the example below, Biome is an enum type. The three constants are its +enum members. 
+ + package eco type Biome int const ( - Tundra Biome = 1 - Savanna Biome = 2 - Desert Biome = 3 + Tundra Biome = 1 + Savanna Biome = 2 + Desert Biome = 3 ) -For a constant to be an enum member for an enum type, the constant must be -declared in the same scope as the enum type. Note that the scope requirement -implies that only constants declared in the same package as the enum type's -package can constitute the enum members for the enum type. - -Enum member constants for a given enum type don't necessarily have to all be -declared in the same const block. Constant values may be specified using iota, -using explicit values, or by any means of declaring a valid Go const. It is -allowed for multiple enum member constants for a given enum type to have the -same constant value. +Enum member constants for a particular enum type do not necessarily all +have to be declared in the same const block. The constant values may be +specified using iota, using literal values, or using any valid means for +declaring a Go constant. It is allowed for multiple enum member +constants for a particular enum type to have the same constant value. # Definition of exhaustiveness A switch statement that switches on a value of an enum type is exhaustive if -all of the enum type's members are listed in the switch statement's cases. If -multiple enum member constants have the same constant value, it is sufficient -for any one of these same-valued members to be listed. +all enum members, by constant value, are listed in the switch +statement's cases. If multiple members have the same constant value, it is +sufficient for any one of these same-valued members to be listed. For an enum type defined in the same package as the switch statement, both exported and unexported enum members must be listed to satisfy exhaustiveness. For an enum type defined in an external package, it is sufficient that only -exported enum members are listed. +exported enum members are listed. In a switch statement's cases, only +identifiers (e.g. Tundra) and qualified identifiers (e.g. somepkg.Grassland) +that name constants may contribute towards satisfying exhaustiveness; other +expressions such as literal values and function calls will not. + +By default, the existence of a default case in a switch statement does not +unconditionally make a switch statement exhaustive. Use the +-default-signifies-exhaustive flag to adjust this behavior. + +A similar definition of exhaustiveness applies to a map literal whose key type +is an enum type. For the map literal to be considered exhaustive, all enum +members, by constant value, must be listed as keys. Empty map literals are not +checked. For the analyzer to check map literals, the -check flag must include +the value "map". + +# Type parameters + +A switch statement that switches on a value whose type is a type parameter is +checked for exhaustiveness if each type element in the type constraint is an +enum type and shares the same underlying basic type kind. + +In the following example, the switch statement on the value of type parameter +T will be checked, because each type element of T—namely M, N, and O—is an +enum type and shares the same underlying basic type kind (i.e. int8). To +satisfy exhaustiveness, all enum members, by constant value, for each of the +enum types M, N, and O—namely A, B, C, and D—must be listed in the switch +statement's cases. + + func bar[T M | I](v T) { + switch v { + case T(A): + case T(B): + case T(C): + case T(D): + } + } -Only identifiers denoting constants (e.g. 
Tundra) and qualified identifiers -denoting constants (e.g. somepkg.Grassland) listed in a switch statement's -cases can contribute towards satisfying exhaustiveness. Literal values, struct -fields, re-assignable variables, etc. will not. + type I interface{ N | J } + type J interface{ O } -The analyzer will produce a diagnostic about unhandled enum members if the -required memebers are not listed in a switch statement's cases (this applies -even if the switch statement has a 'default' case). + type M int8 + const A M = 1 -# Map literals + type N int8 + const B N = 2 + const C N = 3 -All of the above also applies to map literals in which the key type is an enum -type. Empty map literals are never checked. The -check flag must include -"map" for map literals to be checked. + type O int8 + const D O = 4 # Type aliases -The analyzer handles type aliases for an enum type in the following manner. -Consider the example below. T2 is a enum type, and T1 is an alias for T2. Note -that we don't term T1 itself an enum type; it is only an alias for an enum -type. +The analyzer handles type aliases as shown in the example below. Here T2 is a +enum type. T1 is an alias for T2. Note that T1 itself isn't considered an enum +type; T1 is only an alias for an enum type. package pkg type T1 = newpkg.T2 const ( - A = newpkg.A - B = newpkg.B + A = newpkg.A + B = newpkg.B ) package newpkg type T2 int const ( - A T2 = 1 - B T2 = 2 + A T2 = 1 + B T2 = 2 ) -Then a switch statement that switches on a value of type T1 (which, in -reality, is just an alternate spelling for type T2) is exhaustive if all of -T2's enum members are listed in the switch statement's cases. The same -conditions described in the previous section for same-valued enum members and -for exported/unexported enum members apply here too. +A switch statement that switches on a value of type T1 (which, in reality, is +just an alternate spelling for type T2) is exhaustive if all of T2's enum +members, by constant value, are listed in the switch statement's cases. +(Recall that only constants declared in the same scope as type T2's scope can +be T2's enum members.) -It is worth noting that, though T1 and T2 are identical types, only constants -declared in the same scope as type T2's scope can be T2's enum members. In the -example, newpkg.A and newpkg.B are T2's enum members. +The following switch statements are exhaustive. -The analyzer guarantees that introducing a type alias (such as type T1 = -newpkg.T2) will never result in new diagnostics from the analyzer, as long as -the set of enum member constant values of the new RHS type (newpkg.T2) is a -subset of the set of enum member constant values of the old LHS type (T1). - -# Advanced notes - -Non-enum member constants in a switch statement's cases: Recall from an -earlier section that a constant must be declared in the same scope as the enum -type to be an enum member. It is valid, however, both to the Go type checker -and to this analyzer, for any constant of the right type to be listed in the -cases of an enum switch statement (it does not necessarily have to be an enum -member constant declared in the same scope/package as the enum type's -scope/package). This is particularly useful when a type alias is involved: A -forwarding constant declaration (such as pkg.A, in type T1's package) can take -the place of the actual enum member constant (newpkg.A, in type T2's package) -in the switch statement's cases to satisfy exhaustiveness. 
- - var v pkg.T1 = pkg.ReturnsT1() // v is effectively of type newpkg.T2 due to alias - switch v { - case pkg.A: // valid substitute for newpkg.A (same constant value) - case pkg.B: // valid substitute for newpkg.B (same constant value) + // Note: the type of v is effectively newpkg.T2, due to type aliasing. + func f(v pkg.T1) { + switch v { + case newpkg.A: + case newpkg.B: + } } + func g(v pkg.T1) { + switch v { + case pkg.A: + case pkg.B: + } + } + +The analyzer guarantees that introducing a type alias (such as type T1 = +newpkg.T2) will not result in new diagnostics from the analyzer, as long as +the set of enum member constant values of the alias RHS type is a subset of +the set of enum member constant values of the LHS type. + # Flags -Notable flags supported by the analyzer are described below. -All of these flags are optional. - - flag type default value - - -check string switch - -explicit-exhaustive-switch bool false - -explicit-exhaustive-map bool false - -check-generated bool false - -default-signifies-exhaustive bool false - -ignore-enum-members string (none) - -package-scope-only bool false - -The -check flag specifies the program elements that should be checked for -exhaustiveness. By default, only switch statements are checked. Specify --check=switch,map to also check map literals. - -If the -explicit-exhaustive-switch flag is enabled, the analyzer only runs on -switch statements explicitly marked with the comment text -("exhaustive:enforce"). Otherwise, it runs on every enum switch statement not -marked with the comment text ("exhaustive:ignore"). - -If the -explicit-exhaustive-map flag is enabled, the analyzer only runs on -map literals explicitly marked with the comment text -("exhaustive:enforce"). Otherwise, it runs on every enum map literal not -marked with the comment text ("exhaustive:ignore"). - -If the -check-generated flag is enabled, switch statements in generated Go -source files are also checked. Otherwise, by default, switch statements in -generated files are not checked. See https://golang.org/s/generatedcode for the -definition of generated file. - -If the -default-signifies-exhaustive flag is enabled, the presence of a -'default' case in a switch statement always satisfies exhaustiveness, even if -all enum members are not listed. It is not recommended that you enable this -flag; enabling it generally defeats the purpose of exhaustiveness checking. - -The -ignore-enum-members flag specifies a regular expression in Go syntax. Enum -members matching the regular expression don't have to be listed in switch -statement cases to satisfy exhaustiveness. The specified regular expression is -matched against an enum member name inclusive of the enum package import path: -for example, if the enum package import path is "example.com/pkg" and the member -name is "Tundra", the specified regular expression will be matched against the -string "example.com/pkg.Tundra". - -If the -package-scope-only flag is enabled, the analyzer only finds enums -defined in package scopes, and consequently only switch statements that switch -on package-scoped enums will be checked for exhaustiveness. By default, the -analyzer finds enums defined in all scopes, and checks switch statements that -switch on all these enums. 
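The -ignore-enum-members and -ignore-enum-types flags described below take patterns in Go regexp syntax that are matched against the fully qualified name, that is, the import path, a dot, and the identifier. A minimal sketch of that matching, reusing the hypothetical example.org/eco import path from the flag description below:

	package main

	import (
		"fmt"
		"regexp"
	)

	func main() {
		// The analyzer matches the pattern against "<import path>.<name>",
		// for example "example.org/eco.Tundra" for the constant Tundra.
		ignore := regexp.MustCompile(`^example\.org/eco\.Tundra$`)

		fmt.Println(ignore.MatchString("example.org/eco.Tundra"))  // true: Tundra may be omitted from cases
		fmt.Println(ignore.MatchString("example.org/eco.Savanna")) // false: Savanna must still be listed
	}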
+Summary: + + flag type default value + ---- ---- ------------- + -check comma-separated string switch + -explicit-exhaustive-switch bool false + -explicit-exhaustive-map bool false + -check-generated bool false + -default-signifies-exhaustive bool false + -ignore-enum-members regexp pattern (none) + -ignore-enum-types regexp pattern (none) + -package-scope-only bool false + +Flag descriptions: + + - The -check flag specifies a comma-separated list of program elements + that should be checked for exhaustiveness; supported program elements + are "switch" and "map". The default flag value is "switch", which means + that only switch statements are checked. Specify the flag value + "switch,map" to check both switch statements and map literals. + + - If -explicit-exhaustive-switch is enabled, the analyzer checks a switch + statement only if it is associated with a comment beginning with + "//exhaustive:enforce". Otherwise, the analyzer checks every enum switch + statement not associated with a comment beginning with + "//exhaustive:ignore". + + - The -explicit-exhaustive-map flag is the map literal counterpart for the + -explicit-exhaustive-switch flag. + + - If -check-generated is enabled, switch statements and map literals in + generated Go source files are checked. By default, the analyzer does not + check generated files. Refer to https://golang.org/s/generatedcode for + the definition of generated files. + + - If -default-signifies-exhaustive is enabled, the presence of a default + case in a switch statement unconditionally satisfies exhaustiveness (all + enum members do not have to be listed). Enabling this flag usually tends + to counter the purpose of exhaustiveness checking, so it is not + recommended that you enable this flag. + + - The -ignore-enum-members flag specifies a regular expression in Go + package regexp syntax. Constants matching the regular expression do not + have to be listed in switch statement cases or map literals in order to + satisfy exhaustiveness. The specified regular expression is matched + against the constant name inclusive of the enum package import path. For + example, if the package import path of the constant is "example.org/eco" + and the constant name is "Tundra", the specified regular expression will + be matched against the string "example.org/eco.Tundra". + + - The -ignore-enum-types flag is similar to the -ignore-enum-members flag, + except that it applies to types. + + - If -package-scope-only is enabled, the analyzer only finds enums defined + in package scope but not in inner scopes such as functions; consequently + only switch statements and map literals that use such enums are checked + for exhaustiveness. By default, the analyzer finds enums defined in all + scopes, including in inner scopes such as functions. # Skip analysis -In implicitly exhaustive switch mode (-explicit-exhaustive-switch=false), skip -checking of a specific switch statement by associating the comment shown in -the example below with the switch statement. Note the lack of whitespace -between the comment marker ("//") and the comment text ("exhaustive:ignore"). +To skip analysis of a switch statement or a map literal, associate it with a +comment that begins with "//exhaustive:ignore". For example: //exhaustive:ignore - switch v { ... 
} + switch v { + case A: + case B: + } + +To ignore specific constants in exhaustiveness checks, use the +-ignore-enum-members flag: -In explicitly exhaustive switch mode (-explicit-exhaustive-switch=true), run -exhaustiveness checks on a specific switch statement by associating the -comment shown in the example below with the switch statement. + exhaustive -ignore-enum-members '^example\.org/eco\.Tundra$' - //exhaustive:enforce - switch v { ... } +To ignore specific types, use the -ignore-enum-types flag: -To ignore specific enum members, see the -ignore-enum-members flag. + exhaustive -ignore-enum-types '^time\.Duration$|^example\.org/measure\.Unit$' -Switch statements in generated Go source files are not checked by default. -Use the -check-generated flag to change this behavior. +[language spec]: https://golang.org/ref/spec */ package exhaustive import ( - "flag" + "fmt" "go/ast" - "regexp" - "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" ) -var _ flag.Value = (*regexpFlag)(nil) - -// regexpFlag implements the flag.Value interface for parsing -// regular expression flag values. -type regexpFlag struct{ r *regexp.Regexp } - -func (v *regexpFlag) String() string { - if v == nil || v.r == nil { - return "" - } - return v.r.String() -} - -func (v *regexpFlag) Set(expr string) error { - if expr == "" { - v.r = nil - return nil - } - - r, err := regexp.Compile(expr) - if err != nil { - return err - } - - v.r = r - return nil -} - -func (v *regexpFlag) value() *regexp.Regexp { return v.r } - func init() { - Analyzer.Flags.StringVar(&fCheck, CheckFlag, checkSwitch, "program elements to check for exhaustiveness") - Analyzer.Flags.BoolVar(&fExplicitExhaustiveSwitch, ExplicitExhaustiveSwitchFlag, false, "only run exhaustive check on switches with \"//exhaustive:enforce\" comment") - Analyzer.Flags.BoolVar(&fExplicitExhaustiveMap, ExplicitExhaustiveMapFlag, false, "only run exhaustive check on map literals with \"//exhaustive:enforce\" comment") - Analyzer.Flags.BoolVar(&fCheckGenerated, CheckGeneratedFlag, false, "check switch statements in generated files") - Analyzer.Flags.BoolVar(&fDefaultSignifiesExhaustive, DefaultSignifiesExhaustiveFlag, false, "presence of \"default\" case in switch statements satisfies exhaustiveness, even if all enum members are not listed") - Analyzer.Flags.Var(&fIgnoreEnumMembers, IgnoreEnumMembersFlag, "enum members matching `regex` do not have to be listed in switch statements to satisfy exhaustiveness") - Analyzer.Flags.BoolVar(&fPackageScopeOnly, PackageScopeOnlyFlag, false, "consider enums only in package scopes, not in inner scopes") + Analyzer.Flags.Var(&fCheck, CheckFlag, "comma-separated list of program `elements` that should be checked for exhaustiveness; supported elements are: switch, map") + Analyzer.Flags.BoolVar(&fExplicitExhaustiveSwitch, ExplicitExhaustiveSwitchFlag, false, `check switch statement only if associated with "//exhaustive:enforce" comment`) + Analyzer.Flags.BoolVar(&fExplicitExhaustiveMap, ExplicitExhaustiveMapFlag, false, `check map literal only if associated with "//exhaustive:enforce" comment`) + Analyzer.Flags.BoolVar(&fCheckGenerated, CheckGeneratedFlag, false, "check generated files") + Analyzer.Flags.BoolVar(&fDefaultSignifiesExhaustive, DefaultSignifiesExhaustiveFlag, false, "presence of default case in switch statement unconditionally satisfies exhaustiveness") + Analyzer.Flags.Var(&fIgnoreEnumMembers, IgnoreEnumMembersFlag, "constants matching 
`regexp` are ignored for exhaustiveness checks") + Analyzer.Flags.Var(&fIgnoreEnumTypes, IgnoreEnumTypesFlag, "types matching `regexp` are ignored for exhaustiveness checks") + Analyzer.Flags.BoolVar(&fPackageScopeOnly, PackageScopeOnlyFlag, false, "find enums only in package scopes, not inner scopes") var unused string - Analyzer.Flags.StringVar(&unused, IgnorePatternFlag, "", "no effect (deprecated); see -"+IgnoreEnumMembersFlag+" instead") + Analyzer.Flags.StringVar(&unused, IgnorePatternFlag, "", "no effect (deprecated); use -"+IgnoreEnumMembersFlag) Analyzer.Flags.StringVar(&unused, CheckingStrategyFlag, "", "no effect (deprecated)") } @@ -254,59 +259,70 @@ const ( CheckGeneratedFlag = "check-generated" DefaultSignifiesExhaustiveFlag = "default-signifies-exhaustive" IgnoreEnumMembersFlag = "ignore-enum-members" + IgnoreEnumTypesFlag = "ignore-enum-types" PackageScopeOnlyFlag = "package-scope-only" - IgnorePatternFlag = "ignore-pattern" // Deprecated: see IgnoreEnumMembersFlag instead. + IgnorePatternFlag = "ignore-pattern" // Deprecated: use IgnoreEnumMembersFlag. CheckingStrategyFlag = "checking-strategy" // Deprecated. ) +// checkElement is a program element supported by the -check flag. +type checkElement string + +const ( + elementSwitch checkElement = "switch" + elementMap checkElement = "map" +) + +func validCheckElement(s string) error { + switch checkElement(s) { + case elementSwitch: + return nil + case elementMap: + return nil + default: + return fmt.Errorf("invalid program element %q", s) + } +} + +var defaultCheckElements = []string{ + string(elementSwitch), +} + +// Flag values. var ( - fCheck string + fCheck = stringsFlag{elements: defaultCheckElements, filter: validCheckElement} fExplicitExhaustiveSwitch bool fExplicitExhaustiveMap bool fCheckGenerated bool fDefaultSignifiesExhaustive bool fIgnoreEnumMembers regexpFlag + fIgnoreEnumTypes regexpFlag fPackageScopeOnly bool ) -const ( - checkSwitch = "switch" - checkMap = "map" -) - // resetFlags resets the flag variables to their default values. // Useful in tests. 
func resetFlags() { - fCheck = checkSwitch + fCheck = stringsFlag{elements: defaultCheckElements, filter: validCheckElement} fExplicitExhaustiveSwitch = false fExplicitExhaustiveMap = false fCheckGenerated = false fDefaultSignifiesExhaustive = false fIgnoreEnumMembers = regexpFlag{} + fIgnoreEnumTypes = regexpFlag{} fPackageScopeOnly = false } var Analyzer = &analysis.Analyzer{ Name: "exhaustive", - Doc: "check exhaustiveness of enum switch statements and map literals", + Doc: "check exhaustiveness of enum switch statements", Run: run, Requires: []*analysis.Analyzer{inspect.Analyzer}, FactTypes: []analysis.Fact{&enumMembersFact{}}, } func run(pass *analysis.Pass) (interface{}, error) { - checks := make(map[string]bool) - for _, v := range strings.Split(fCheck, ",") { - v = strings.TrimSpace(v) - switch v { - case checkSwitch: - checks[checkSwitch] = true - case checkMap: - checks[checkMap] = true - } - } - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) for typ, members := range findEnums( @@ -318,50 +334,37 @@ func run(pass *analysis.Pass) (interface{}, error) { exportFact(pass, typ, members) } - generated := make(generatedCache) - comments := make(commentsCache) - - swChecker := switchChecker( - pass, - switchConfig{ - explicitExhaustiveSwitch: fExplicitExhaustiveSwitch, - defaultSignifiesExhaustive: fDefaultSignifiesExhaustive, - checkGeneratedFiles: fCheckGenerated, - ignoreEnumMembers: fIgnoreEnumMembers.value(), - }, - generated, - comments, - ) - - mapChecker := mapChecker( - pass, - mapConfig{ - explicitExhaustiveMap: fExplicitExhaustiveMap, - checkGeneratedFiles: fCheckGenerated, - ignoreEnumMembers: fIgnoreEnumMembers.value(), - }, - generated, - comments, - ) - - var types []ast.Node - if checks[checkSwitch] { - types = append(types, &ast.SwitchStmt{}) + generated := boolCache{value: isGeneratedFile} + comments := commentCache{value: fileCommentMap} + swConf := switchConfig{ + explicit: fExplicitExhaustiveSwitch, + defaultSignifiesExhaustive: fDefaultSignifiesExhaustive, + checkGenerated: fCheckGenerated, + ignoreConstant: fIgnoreEnumMembers.re, + ignoreType: fIgnoreEnumTypes.re, } - if checks[checkMap] { - types = append(types, &ast.CompositeLit{}) + mapConf := mapConfig{ + explicit: fExplicitExhaustiveMap, + checkGenerated: fCheckGenerated, + ignoreConstant: fIgnoreEnumMembers.re, + ignoreType: fIgnoreEnumTypes.re, } - - inspect.WithStack(types, func(n ast.Node, push bool, stack []ast.Node) bool { - var proceed bool - switch n.(type) { - case *ast.SwitchStmt: - proceed, _ = swChecker(n, push, stack) - case *ast.CompositeLit: - proceed, _ = mapChecker(n, push, stack) + swChecker := switchChecker(pass, swConf, generated, comments) + mapChecker := mapChecker(pass, mapConf, generated, comments) + + // NOTE: should not share the same inspect.WithStack call for different + // program elements: the visitor function for a program element may + // exit traversal early, but this shouldn't affect traversal for + // other program elements. 
+ for _, e := range fCheck.elements { + switch checkElement(e) { + case elementSwitch: + inspect.WithStack([]ast.Node{&ast.SwitchStmt{}}, toVisitor(swChecker)) + case elementMap: + inspect.WithStack([]ast.Node{&ast.CompositeLit{}}, toVisitor(mapChecker)) + default: + panic(fmt.Sprintf("unknown checkElement %v", e)) } - return proceed - }) - + } return nil, nil } diff --git a/vendor/github.com/nishanths/exhaustive/flag.go b/vendor/github.com/nishanths/exhaustive/flag.go new file mode 100644 index 00000000..49d3d3c6 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/flag.go @@ -0,0 +1,75 @@ +package exhaustive + +import ( + "flag" + "regexp" + "strings" +) + +var _ flag.Value = (*regexpFlag)(nil) +var _ flag.Value = (*stringsFlag)(nil) + +// regexpFlag implements flag.Value for parsing +// regular expression flag inputs. +type regexpFlag struct{ re *regexp.Regexp } + +func (f *regexpFlag) String() string { + if f == nil || f.re == nil { + return "" + } + return f.re.String() +} + +func (f *regexpFlag) Set(expr string) error { + if expr == "" { + f.re = nil + return nil + } + + re, err := regexp.Compile(expr) + if err != nil { + return err + } + + f.re = re + return nil +} + +// stringsFlag implements flag.Value for parsing a comma-separated string +// list. Surrounding whitespace is stripped from the input and from each +// element. If filter is non-nil it is called for each element in the input. +type stringsFlag struct { + elements []string + filter func(string) error +} + +func (f *stringsFlag) String() string { + if f == nil { + return "" + } + return strings.Join(f.elements, ",") +} + +func (f *stringsFlag) filterFunc() func(string) error { + if f.filter != nil { + return f.filter + } + return func(_ string) error { return nil } +} + +func (f *stringsFlag) Set(input string) error { + input = strings.TrimSpace(input) + if input == "" { + f.elements = nil + return nil + } + + for _, el := range strings.Split(input, ",") { + el = strings.TrimSpace(el) + if err := f.filterFunc()(el); err != nil { + return err + } + f.elements = append(f.elements, el) + } + return nil +} diff --git a/vendor/github.com/nishanths/exhaustive/map.go b/vendor/github.com/nishanths/exhaustive/map.go index 1b86fe5b..06923338 100644 --- a/vendor/github.com/nishanths/exhaustive/map.go +++ b/vendor/github.com/nishanths/exhaustive/map.go @@ -5,35 +5,30 @@ import ( "go/ast" "go/types" "regexp" - "strings" "golang.org/x/tools/go/analysis" ) // mapConfig is configuration for mapChecker. type mapConfig struct { - explicitExhaustiveMap bool - checkGeneratedFiles bool - ignoreEnumMembers *regexp.Regexp // can be nil + explicit bool + checkGenerated bool + ignoreConstant *regexp.Regexp // can be nil + ignoreType *regexp.Regexp // can be nil } -// mapChecker returns a node visitor that checks exhaustiveness -// of enum keys in map literal for the supplied pass, and reports diagnostics if non-exhaustive. -// It expects to only see *ast.CompositeLit nodes. -func mapChecker(pass *analysis.Pass, cfg mapConfig, generated generatedCache, comments commentsCache) nodeVisitor { +// mapChecker returns a node visitor that checks for exhaustiveness of +// map literals for the supplied pass, and reports diagnostics. The +// node visitor expects only *ast.CompositeLit nodes. 
+func mapChecker(pass *analysis.Pass, cfg mapConfig, generated boolCache, comments commentCache) nodeVisitor { return func(n ast.Node, push bool, stack []ast.Node) (bool, string) { if !push { - // The proceed return value should not matter; it is ignored by - // inspector package for pop calls. - // Nevertheless, return true to be on the safe side for the future. return true, resultNotPush } file := stack[0].(*ast.File) - if !cfg.checkGeneratedFiles && generated.IsGenerated(file) { - // Don't check this file. - // Return false because the children nodes of node `n` don't have to be checked. + if !cfg.checkGenerated && generated.get(file) { return false, resultGeneratedFile } @@ -45,7 +40,6 @@ func mapChecker(pass *analysis.Pass, cfg mapConfig, generated generatedCache, co if !ok2 { return true, resultNotMapLiteral } - mapType, ok = namedType.Underlying().(*types.Map) if !ok { return true, resultNotMapLiteral @@ -53,23 +47,19 @@ func mapChecker(pass *analysis.Pass, cfg mapConfig, generated generatedCache, co } if len(lit.Elts) == 0 { - // because it may be used as an alternative for make(map[...]...) return false, resultEmptyMapLiteral } - keyType, ok := mapType.Key().(*types.Named) - if !ok { - return true, resultMapKeyIsNotNamedType - } - - fileComments := comments.GetComments(file, pass.Fset) + fileComments := comments.get(pass.Fset, file) var relatedComments []*ast.CommentGroup for i := range stack { - // iterate over stack in the reverse order (from bottom to top) + // iterate over stack in the reverse order (from inner + // node to outer node) node := stack[len(stack)-1-i] switch node.(type) { // need to check comments associated with following nodes, - // because logic of ast package doesn't allow to associate comment with *ast.CompositeLit + // because logic of ast package doesn't associate comment + // with *ast.CompositeLit as required. case *ast.CompositeLit, // stack[len(stack)-1] *ast.ReturnStmt, // return ... *ast.IndexExpr, // map[enum]...{...}[key] @@ -81,68 +71,64 @@ func mapChecker(pass *analysis.Pass, cfg mapConfig, generated generatedCache, co *ast.ValueSpec: // var declaration relatedComments = append(relatedComments, fileComments[node]...) continue + default: + // stop iteration on the first inappropriate node + break } - // stop iteration on the first inappropriate node - break } - if !cfg.explicitExhaustiveMap && containsIgnoreDirective(relatedComments) { - // Skip checking of this map literal due to ignore directive comment. - // Still return true because there may be nested map literals - // that are not to be ignored. - return true, resultMapIgnoreComment + if !cfg.explicit && hasComment(relatedComments, ignoreComment) { + // Skip checking of this map literal due to ignore + // comment. Still return true because there may be nested + // map literals that are not to be ignored. + return true, resultIgnoreComment } - if cfg.explicitExhaustiveMap && !containsEnforceDirective(relatedComments) { - // Skip checking of this map literal due to missing enforce directive comment. - return true, resultMapNoEnforceComment + if cfg.explicit && !hasComment(relatedComments, enforceComment) { + return true, resultNoEnforceComment } - keyPkg := keyType.Obj().Pkg() - if keyPkg == nil { - // The Go documentation says: nil for labels and objects in the Universe scope. - // This happens for the `error` type, for example. 
- return true, resultNilMapKeyTypePkg + es, ok := composingEnumTypes(pass, mapType.Key()) + if !ok || len(es) == 0 { + return true, resultEnumTypes } - enumTyp := enumType{keyType.Obj()} - members, ok := importFact(pass, enumTyp) - if !ok { - return true, resultMapKeyNotEnum - } + var checkl checklist + checkl.ignoreConstant(cfg.ignoreConstant) + checkl.ignoreType(cfg.ignoreType) - samePkg := keyPkg == pass.Pkg // do the map literal and the map key type (i.e. enum type) live in the same package? - checkUnexported := samePkg // we want to include unexported members in the exhaustiveness check only if we're in the same package - checklist := makeChecklist(members, keyPkg, checkUnexported, cfg.ignoreEnumMembers) - - for _, e := range lit.Elts { - expr, ok := e.(*ast.KeyValueExpr) - if !ok { - continue // is it possible for valid map literal? - } - analyzeCaseClauseExpr(expr.Key, pass.TypesInfo, checklist.found) + for _, e := range es { + checkl.add(e.et, e.em, pass.Pkg == e.et.Pkg()) } - if len(checklist.remaining()) == 0 { - // All enum members accounted for. - // Nothing to report. + analyzeMapLiteral(lit, pass.TypesInfo, checkl.found) + if len(checkl.remaining()) == 0 { return true, resultEnumMembersAccounted } - - pass.Report(makeMapDiagnostic(lit, samePkg, enumTyp, members, checklist.remaining())) + pass.Report(makeMapDiagnostic(lit, dedupEnumTypes(toEnumTypes(es)), checkl.remaining())) return true, resultReportedDiagnostic } } -// Makes a "missing map keys" diagnostic. -// samePkg should be true if the enum type and the map literal are defined in the same package. -func makeMapDiagnostic(lit *ast.CompositeLit, samePkg bool, enumTyp enumType, allMembers enumMembers, missingMembers map[string]struct{}) analysis.Diagnostic { - message := fmt.Sprintf("missing map keys of type %s: %s", - diagnosticEnumTypeName(enumTyp.TypeName, samePkg), - strings.Join(diagnosticMissingMembers(missingMembers, allMembers), ", ")) +func analyzeMapLiteral(lit *ast.CompositeLit, info *types.Info, each func(constantValue)) { + for _, e := range lit.Elts { + expr, ok := e.(*ast.KeyValueExpr) + if !ok { + continue + } + if val, ok := exprConstVal(expr.Key, info); ok { + each(val) + } + } +} +func makeMapDiagnostic(lit *ast.CompositeLit, enumTypes []enumType, missing map[member]struct{}) analysis.Diagnostic { return analysis.Diagnostic{ - Pos: lit.Pos(), - End: lit.End(), - Message: message, + Pos: lit.Pos(), + End: lit.End(), + Message: fmt.Sprintf( + "missing keys in map of key type %s: %s", + diagnosticEnumTypes(enumTypes), + diagnosticGroups(groupMissing(missing, enumTypes)), + ), } } diff --git a/vendor/github.com/nishanths/exhaustive/switch.go b/vendor/github.com/nishanths/exhaustive/switch.go index 115c317e..2e99d283 100644 --- a/vendor/github.com/nishanths/exhaustive/switch.go +++ b/vendor/github.com/nishanths/exhaustive/switch.go @@ -5,48 +5,62 @@ import ( "go/ast" "go/types" "regexp" - "strings" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" ) -// nodeVisitor is like the visitor function used by Inspector.WithStack, +// nodeVisitor is like the visitor function used by inspector.WithStack, // except that it returns an additional value: a short description of // the result of this node visit. // -// The result is typically useful in debugging or in unit tests to check +// The result value is typically useful in debugging or in unit tests to check // that the nodeVisitor function took the expected code path. 
type nodeVisitor func(n ast.Node, push bool, stack []ast.Node) (proceed bool, result string) -// Result values returned by a node visitor constructed via switchChecker. +// toVisitor converts a nodeVisitor to a function suitable for use +// with inspector.WithStack. +func toVisitor(v nodeVisitor) func(ast.Node, bool, []ast.Node) bool { + return func(node ast.Node, push bool, stack []ast.Node) bool { + proceed, _ := v(node, push, stack) + return proceed + } +} + +// Result values returned by node visitors. const ( - resultNotPush = "not push" - resultGeneratedFile = "generated file" - resultNoSwitchTag = "no switch tag" - resultEmptyMapLiteral = "empty map literal" - resultNotMapLiteral = "not map literal" - resultMapKeyIsNotNamedType = "map key is not named type" - resultNilMapKeyTypePkg = "nil map key type package" - resultMapKeyNotEnum = "map key not known enum type" - resultMapIgnoreComment = "map literal has ignore comment" - resultMapNoEnforceComment = "map literal has no enforce comment" - resultTagNotValue = "switch tag not value type" - resultTagNotNamed = "switch tag not named type" - resultTagNoPkg = "switch tag does not belong to regular package" - resultTagNotEnum = "switch tag not known enum type" - resultSwitchIgnoreComment = "switch statement has ignore comment" - resultSwitchNoEnforceComment = "switch statement has no enforce comment" - resultEnumMembersAccounted = "requisite enum members accounted for" - resultDefaultCaseSuffices = "default case presence satisfies exhaustiveness" - resultReportedDiagnostic = "reported diagnostic" + resultEmptyMapLiteral = "empty map literal" + resultNotMapLiteral = "not map literal" + resultKeyNilPkg = "nil map key package" + resultKeyNotEnum = "not all map key type terms are known enum types" + + resultNoSwitchTag = "no switch tag" + resultTagNotValue = "switch tag not value type" + resultTagNilPkg = "nil switch tag package" + resultTagNotEnum = "not all switch tag terms are known enum types" + + resultNotPush = "not push" + resultGeneratedFile = "generated file" + resultIgnoreComment = "has ignore comment" + resultNoEnforceComment = "has no enforce comment" + resultEnumMembersAccounted = "required enum members accounted for" + resultDefaultCaseSuffices = "default case satisfies exhaustiveness" + resultReportedDiagnostic = "reported diagnostic" + resultEnumTypes = "invalid or empty composing enum types" ) -// switchChecker returns a node visitor that checks exhaustiveness -// of enum switch statements for the supplied pass, and reports diagnostics for -// switch statements that are non-exhaustive. -// It expects to only see *ast.SwitchStmt nodes. -func switchChecker(pass *analysis.Pass, cfg switchConfig, generated generatedCache, comments commentsCache) nodeVisitor { +// switchConfig is configuration for switchChecker. +type switchConfig struct { + explicit bool + defaultSignifiesExhaustive bool + checkGenerated bool + ignoreConstant *regexp.Regexp // can be nil + ignoreType *regexp.Regexp // can be nil +} + +// switchChecker returns a node visitor that checks exhaustiveness of +// enum switch statements for the supplied pass, and reports +// diagnostics. The node visitor expects only *ast.SwitchStmt nodes. 
+func switchChecker(pass *analysis.Pass, cfg switchConfig, generated boolCache, comments commentCache) nodeVisitor { return func(n ast.Node, push bool, stack []ast.Node) (bool, string) { if !push { // The proceed return value should not matter; it is ignored by @@ -57,7 +71,7 @@ func switchChecker(pass *analysis.Pass, cfg switchConfig, generated generatedCac file := stack[0].(*ast.File) - if !cfg.checkGeneratedFiles && generated.IsGenerated(file) { + if !cfg.checkGenerated && generated.get(file) { // Don't check this file. // Return false because the children nodes of node `n` don't have to be checked. return false, resultGeneratedFile @@ -65,20 +79,21 @@ func switchChecker(pass *analysis.Pass, cfg switchConfig, generated generatedCac sw := n.(*ast.SwitchStmt) - switchComments := comments.GetComments(file, pass.Fset)[sw] - if !cfg.explicitExhaustiveSwitch && containsIgnoreDirective(switchComments) { - // Skip checking of this switch statement due to ignore directive comment. - // Still return true because there may be nested switch statements - // that are not to be ignored. - return true, resultSwitchIgnoreComment + switchComments := comments.get(pass.Fset, file)[sw] + if !cfg.explicit && hasComment(switchComments, ignoreComment) { + // Skip checking of this switch statement due to ignore + // comment. Still return true because there may be nested + // switch statements that are not to be ignored. + return true, resultIgnoreComment } - if cfg.explicitExhaustiveSwitch && !containsEnforceDirective(switchComments) { - // Skip checking of this switch statement due to missing enforce directive comment. - return true, resultSwitchNoEnforceComment + if cfg.explicit && !hasComment(switchComments, enforceComment) { + // Skip checking of this switch statement due to missing + // enforce comment. + return true, resultNoEnforceComment } if sw.Tag == nil { - return true, resultNoSwitchTag + return true, resultNoSwitchTag // never possible for valid Go program? } t := pass.TypesInfo.Types[sw.Tag] @@ -86,264 +101,69 @@ func switchChecker(pass *analysis.Pass, cfg switchConfig, generated generatedCac return true, resultTagNotValue } - tagType, ok := t.Type.(*types.Named) - if !ok { - return true, resultTagNotNamed + es, ok := composingEnumTypes(pass, t.Type) + if !ok || len(es) == 0 { + return true, resultEnumTypes } - tagPkg := tagType.Obj().Pkg() - if tagPkg == nil { - // The Go documentation says: nil for labels and objects in the Universe scope. - // This happens for the `error` type, for example. - return true, resultTagNoPkg - } + var checkl checklist + checkl.ignoreConstant(cfg.ignoreConstant) + checkl.ignoreType(cfg.ignoreType) - enumTyp := enumType{tagType.Obj()} - members, ok := importFact(pass, enumTyp) - if !ok { - // switch tag's type is not a known enum type. - return true, resultTagNotEnum + for _, e := range es { + checkl.add(e.et, e.em, pass.Pkg == e.et.Pkg()) } - samePkg := tagPkg == pass.Pkg // do the switch statement and the switch tag type (i.e. enum type) live in the same package? - checkUnexported := samePkg // we want to include unexported members in the exhaustiveness check only if we're in the same package - checklist := makeChecklist(members, tagPkg, checkUnexported, cfg.ignoreEnumMembers) - - hasDefaultCase := analyzeSwitchClauses(sw, pass.TypesInfo, checklist.found) - - if len(checklist.remaining()) == 0 { + def := analyzeSwitchClauses(sw, pass.TypesInfo, checkl.found) + if len(checkl.remaining()) == 0 { // All enum members accounted for. // Nothing to report. 
return true, resultEnumMembersAccounted } - if hasDefaultCase && cfg.defaultSignifiesExhaustive { - // Though enum members are not accounted for, - // the existence of the default case signifies exhaustiveness. - // So don't report. + if def && cfg.defaultSignifiesExhaustive { + // Though enum members are not accounted for, the + // existence of the default case signifies + // exhaustiveness. So don't report. return true, resultDefaultCaseSuffices } - pass.Report(makeSwitchDiagnostic(sw, samePkg, enumTyp, members, checklist.remaining())) + pass.Report(makeSwitchDiagnostic(sw, dedupEnumTypes(toEnumTypes(es)), checkl.remaining())) return true, resultReportedDiagnostic } } -// switchConfig is configuration for switchChecker. -type switchConfig struct { - explicitExhaustiveSwitch bool - defaultSignifiesExhaustive bool - checkGeneratedFiles bool - ignoreEnumMembers *regexp.Regexp // can be nil -} - func isDefaultCase(c *ast.CaseClause) bool { return c.List == nil // see doc comment on List field } -func denotesPackage(ident *ast.Ident, info *types.Info) (*types.Package, bool) { - obj := info.ObjectOf(ident) - if obj == nil { - return nil, false - } - n, ok := obj.(*types.PkgName) - if !ok { - return nil, false - } - return n.Imported(), true -} - -// analyzeSwitchClauses analyzes the clauses in the supplied switch statement. -// The info param should typically be pass.TypesInfo. The found function is -// called for each enum member name found in the switch statement. -// The hasDefaultCase return value indicates whether the switch statement has a -// default clause. -func analyzeSwitchClauses(sw *ast.SwitchStmt, info *types.Info, found func(val constantValue)) (hasDefaultCase bool) { +// analyzeSwitchClauses analyzes the clauses in the supplied switch +// statement. The info param typically is pass.TypesInfo. The each +// function is called for each enum member name found in the switch +// statement. The hasDefaultCase return value indicates whether the +// switch statement has a default clause. +func analyzeSwitchClauses(sw *ast.SwitchStmt, info *types.Info, each func(val constantValue)) (hasDefaultCase bool) { for _, stmt := range sw.Body.List { caseCl := stmt.(*ast.CaseClause) if isDefaultCase(caseCl) { hasDefaultCase = true - continue // nothing more to do if it's the default case + continue } for _, expr := range caseCl.List { - analyzeCaseClauseExpr(expr, info, found) + if val, ok := exprConstVal(expr, info); ok { + each(val) + } } } return hasDefaultCase } -func analyzeCaseClauseExpr(e ast.Expr, info *types.Info, found func(val constantValue)) { - handleIdent := func(ident *ast.Ident) { - obj := info.Uses[ident] - if obj == nil { - return - } - if _, ok := obj.(*types.Const); !ok { - return - } - - // There are two scenarios. - // See related test cases in typealias/quux/quux.go. - // - // ### Scenario 1 - // - // Tag package and constant package are the same. - // - // For example: - // var mode fs.FileMode - // switch mode { - // case fs.ModeDir: - // } - // - // This is simple: we just use fs.ModeDir's value. - // - // ### Scenario 2 - // - // Tag package and constant package are different. - // - // For example: - // var mode fs.FileMode - // switch mode { - // case os.ModeDir: - // } - // - // Or equivalently: - // var mode os.FileMode // in effect, fs.FileMode because of type alias in package os - // switch mode { - // case os.ModeDir: - // } - // - // In this scenario, too, we accept the case clause expr constant - // value, as is. 
If the Go type checker is okay with the - // name being listed in the case clause, we don't care much further. - // - found(determineConstVal(ident, info)) - } - - e = astutil.Unparen(e) - switch e := e.(type) { - case *ast.Ident: - handleIdent(e) - - case *ast.SelectorExpr: - x := astutil.Unparen(e.X) - // Ensure we only see the form `pkg.Const`, and not e.g. `structVal.f` - // or `structVal.inner.f`. - // Check that X, which is everything except the rightmost *ast.Ident (or - // Sel), is also an *ast.Ident. - xIdent, ok := x.(*ast.Ident) - if !ok { - return - } - // Doesn't matter which package, just that it denotes a package. - if _, ok := denotesPackage(xIdent, info); !ok { - return - } - handleIdent(e.Sel) - } -} - -// diagnosticMissingMembers constructs the list of missing enum members, -// suitable for use in a reported diagnostic message. -// Order is the same as in enumMembers.Names. -func diagnosticMissingMembers(missingMembers map[string]struct{}, em enumMembers) []string { - missingNamesGroupedByValue := make([][]string, len(em.Names)) // empty groups will be filtered out later - firstIndex := make(map[constantValue]int, len(em.ValueToNames)) - for i, name := range em.Names { - value := em.NameToValue[name] - j, ok := firstIndex[value] - if !ok { - firstIndex[value] = i - j = i - } - - if _, missing := missingMembers[name]; missing { - missingNamesGroupedByValue[j] = append(missingNamesGroupedByValue[j], name) - } - } - - out := make([]string, 0, len(missingMembers)) - for _, names := range missingNamesGroupedByValue { - if len(names) == 0 { - continue - } - out = append(out, strings.Join(names, "|")) - } - return out -} - -// diagnosticEnumTypeName returns a string representation of an enum type for -// use in reported diagnostics. -func diagnosticEnumTypeName(enumType *types.TypeName, samePkg bool) string { - if samePkg { - return enumType.Name() - } - return enumType.Pkg().Name() + "." + enumType.Name() -} - -// Makes a "missing cases in switch" diagnostic. -// samePkg should be true if the enum type and the switch statement are defined -// in the same package. -func makeSwitchDiagnostic(sw *ast.SwitchStmt, samePkg bool, enumTyp enumType, allMembers enumMembers, missingMembers map[string]struct{}) analysis.Diagnostic { - message := fmt.Sprintf("missing cases in switch of type %s: %s", - diagnosticEnumTypeName(enumTyp.TypeName, samePkg), - strings.Join(diagnosticMissingMembers(missingMembers, allMembers), ", ")) - +func makeSwitchDiagnostic(sw *ast.SwitchStmt, enumTypes []enumType, missing map[member]struct{}) analysis.Diagnostic { return analysis.Diagnostic{ - Pos: sw.Pos(), - End: sw.End(), - Message: message, + Pos: sw.Pos(), + End: sw.End(), + Message: fmt.Sprintf( + "missing cases in switch of type %s: %s", + diagnosticEnumTypes(enumTypes), + diagnosticGroups(groupMissing(missing, enumTypes)), + ), } } - -// A checklist holds a set of enum member names that have to be -// accounted for to satisfy exhaustiveness in an enum switch statement. -// -// The found method checks off member names from the set, based on -// constant value, when a constant value is encoutered in the switch -// statement's cases. -// -// The remaining method returns the member names not accounted for. 
-type checklist struct { - em enumMembers - checkl map[string]struct{} -} - -func makeChecklist(em enumMembers, enumPkg *types.Package, includeUnexported bool, ignore *regexp.Regexp) *checklist { - checkl := make(map[string]struct{}) - - add := func(memberName string) { - if memberName == "_" { - // Blank identifier is often used to skip entries in iota lists. - // Also, it can't be referenced anywhere (including in a switch - // statement's cases), so it doesn't make sense to include it - // as required member to satisfy exhaustiveness. - return - } - if !ast.IsExported(memberName) && !includeUnexported { - return - } - if ignore != nil && ignore.MatchString(enumPkg.Path()+"."+memberName) { - return - } - checkl[memberName] = struct{}{} - } - - for _, name := range em.Names { - add(name) - } - - return &checklist{ - em: em, - checkl: checkl, - } -} - -func (c *checklist) found(val constantValue) { - // Delete all of the same-valued names. - for _, name := range c.em.ValueToNames[val] { - delete(c.checkl, name) - } -} - -func (c *checklist) remaining() map[string]struct{} { - return c.checkl -} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/.gitignore b/vendor/github.com/nunnatsa/ginkgolinter/.gitignore new file mode 100644 index 00000000..7d7f8b10 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/.gitignore @@ -0,0 +1,2 @@ +ginkgolinter +bin/ diff --git a/vendor/github.com/nunnatsa/ginkgolinter/LICENSE b/vendor/github.com/nunnatsa/ginkgolinter/LICENSE new file mode 100644 index 00000000..11096c5c --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Nahshon Unna Tsameret + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
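The checker code above replaces the old name-keyed checklist with a member-based one, driven through the checkl.add, checkl.found, and checkl.remaining calls in switch.go and map.go. The following is a rough, simplified sketch of that idea (recording required members per constant value, checking values off as they are seen, and reporting whatever remains), not the package's actual implementation; the type and helper names are made up for illustration.

```go
package main

import "fmt"

// constantValue and member are stand-ins for the analyzer's internal
// representations of a constant's value and an enum member's name.
type constantValue string
type member string

// sketchChecklist is a simplified illustration: required members are
// recorded per constant value, found values are checked off, and
// remaining reports the members still unaccounted for.
type sketchChecklist struct {
	required map[constantValue][]member
}

func (c *sketchChecklist) add(v constantValue, m member) {
	if c.required == nil {
		c.required = make(map[constantValue][]member)
	}
	c.required[v] = append(c.required[v], m)
}

// found checks off every member sharing the given constant value,
// mirroring the rule that listing any one same-valued member suffices.
func (c *sketchChecklist) found(v constantValue) { delete(c.required, v) }

func (c *sketchChecklist) remaining() []member {
	var out []member
	for _, ms := range c.required {
		out = append(out, ms...)
	}
	return out
}

func main() {
	var c sketchChecklist
	c.add("1", "Tundra")
	c.add("2", "Savanna")
	c.add("3", "Desert")
	c.found("1")               // a "case Tundra:" clause was seen
	fmt.Println(c.remaining()) // Savanna and Desert remain unaccounted for
}
```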
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/Makefile b/vendor/github.com/nunnatsa/ginkgolinter/Makefile new file mode 100644 index 00000000..f3707983 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/Makefile @@ -0,0 +1,14 @@ +build: + go build -o ginkgolinter ./cmd/ginkgolinter + +build-for-windows: + GOOS=windows GOARCH=amd64 go build -o bin/ginkgolinter-amd64.exe ./cmd/ginkgolinter + +build-for-mac: + GOOS=darwin GOARCH=amd64 go build -o bin/ginkgolinter-amd64-darwin ./cmd/ginkgolinter + +build-for-linux: + GOOS=linux GOARCH=amd64 go build -o bin/ginkgolinter-amd64-linux ./cmd/ginkgolinter + GOOS=linux GOARCH=386 go build -o bin/ginkgolinter-386-linux ./cmd/ginkgolinter + +build-all: build build-for-linux build-for-mac build-for-windows diff --git a/vendor/github.com/nunnatsa/ginkgolinter/README.md b/vendor/github.com/nunnatsa/ginkgolinter/README.md new file mode 100644 index 00000000..fa31117a --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/README.md @@ -0,0 +1,181 @@ +[![Go Report Card](https://goreportcard.com/badge/github.com/nunnatsa/ginkgolinter)](https://goreportcard.com/report/github.com/nunnatsa/ginkgolinter) +[![Coverage Status](https://coveralls.io/repos/github/nunnatsa/ginkgolinter/badge.svg?branch=main)](https://coveralls.io/github/nunnatsa/ginkgolinter?branch=main) +# ginkgo-linter +[ginkgo](https://onsi.github.io/ginkgo/) is a popular testing framework and [gomega](https://onsi.github.io/gomega/) is its assertion package. + +This is a golang linter to enforce some standards while using the ginkgo and gomega packages. + +## Install the CLI +Download the right executable from the latest release, according to your OS. + +Another option is to use go: +```shell +go install github.com/nunnatsa/ginkgolinter/cmd/ginkgolinter@latest +``` +Then add the new executable to your PATH. + +## usage +```shell +ginkgolinter [-fix] ./... +``` +Use the `-fix` flag to apply the fix suggestions to the source code. + +## Linter Checks +The linter checks the gomega assertions in golang test code. Gomega may be used together with ginkgo tests, For example: +```go +It("should test something", func() { // It is ginkgo test case function + Expect("abcd").To(HaveLen(4), "the string should have a length of 4") // Expect is the gomega assertion +}) +``` +or within a classic golang test code, like this: +```go +func TestWithGomega(t *testing.T) { + g := NewWithT(t) + g.Expect("abcd").To(HaveLen(4), "the string should have a length of 4") +} +``` + +In some cases, the gomega will be passed as a variable to function by ginkgo, for example: +```go +Eventually(func(g Gomega) error { + g.Expect("abcd").To(HaveLen(4), "the string should have a length of 4") + return nil +}).Should(Succeed()) +``` + +The linter checks the `Expect`, `ExpectWithOffset` and the `Ω` "actual" functions, with the `Should`, `ShouldNot`, `To`, `ToNot` and `NotTo` assertion functions. + +It also supports the embedded `Not()` matcher + +### Wrong Length Assertion +The linter finds assertion of the golang built-in `len` function, with all kind of matchers, while there are already gomega matchers for these usecases; We want to assert the item, rather than its length. 
+ +There are several wrong patterns: +```go +Expect(len(x)).To(Equal(0)) // should be: Expect(x).To(BeEmpty()) +Expect(len(x)).To(BeZero()) // should be: Expect(x).To(BeEmpty()) +Expect(len(x)).To(BeNumeric(">", 0)) // should be: Expect(x).ToNot(BeEmpty()) +Expect(len(x)).To(BeNumeric(">=", 1)) // should be: Expect(x).ToNot(BeEmpty()) +Expect(len(x)).To(BeNumeric("==", 0)) // should be: Expect(x).To(BeEmpty()) +Expect(len(x)).To(BeNumeric("!=", 0)) // should be: Expect(x).ToNot(BeEmpty()) + +Expect(len(x)).To(Equal(1)) // should be: Expect(x).To(HaveLen(1)) +Expect(len(x)).To(BeNumeric("==", 2)) // should be: Expect(x).To(HaveLen(2)) +Expect(len(x)).To(BeNumeric("!=", 3)) // should be: Expect(x).ToNot(HaveLen(3)) +``` + +It also supports the embedded `Not()` matcher; e.g. + +`Ω(len(x)).Should(Not(Equal(4)))` => `Ω(x).ShouldNot(HaveLen(4))` + +Or even (double negative): + +`Ω(len(x)).To(Not(BeNumeric(">", 0)))` => `Ω(x).To(BeEmpty())` + +The output of the linter,when finding issues, looks like this: +``` +./testdata/src/a/a.go:14:5: ginkgo-linter: wrong length assertion; consider using `Expect("abcd").Should(HaveLen(4))` instead +./testdata/src/a/a.go:18:5: ginkgo-linter: wrong length assertion; consider using `Expect("").Should(BeEmpty())` instead +./testdata/src/a/a.go:22:5: ginkgo-linter: wrong length assertion; consider using `Expect("").Should(BeEmpty())` instead +``` + +### Wrong `nil` Assertion +The linter finds assertion of the comparison to nil, with all kind of matchers, instead of using the existing `BeNil()` matcher; We want to assert the item, rather than a comparison result. + +There are several wrong patterns: + +```go +Expect(x == nil).To(Equal(true)) // should be: Expect(x).To(BeNil()) +Expect(nil == x).To(Equal(true)) // should be: Expect(x).To(BeNil()) +Expect(x != nil).To(Equal(true)) // should be: Expect(x).ToNot(BeNil()) +Expect(nil != nil).To(Equal(true)) // should be: Expect(x).ToNot(BeNil()) + +Expect(x == nil).To(BeTrue()) // should be: Expect(x).To(BeNil()) +Expect(x == nil).To(BeFalse()) // should be: Expect(x).ToNot(BeNil()) +``` +It also supports the embedded `Not()` matcher; e.g. + +`Ω(x == nil).Should(Not(BeTrue()))` => `Ω(x).ShouldNot(BeNil())` + +Or even (double negative): + +`Ω(x != nil).Should(Not(BeTrue()))` => `Ω(x).Should(BeNil())` + +### Wrong boolean Assertion +The linter finds assertion using the `Equal` method, with the values of to `true` or `false`, instead +of using the existing `BeTrue()` or `BeFalse()` matcher. + +There are several wrong patterns: + +```go +Expect(x).To(Equal(true)) // should be: Expect(x).To(BeTrue()) +Expect(x).To(Equal(false)) // should be: Expect(x).To(BeFalse()) +``` +It also supports the embedded `Not()` matcher; e.g. + +`Ω(x).Should(Not(Equal(True)))` => `Ω(x).ShouldNot(BeBeTrue())` + +### Wrong Error Assertion +The linter finds assertion of errors compared with nil, or to be equal nil, or to be nil. The linter suggests to use `Succeed` for functions or `HaveOccurred` for error values.. + +There are several wrong patterns: + +```go +Expect(err).To(BeNil()) // should be: Expect(err).ToNot(HaveOccurred()) +Expect(err == nil).To(Equal(true)) // should be: Expect(err).ToNot(HaveOccurred()) +Expect(err == nil).To(BeFalse()) // should be: Expect(err).To(HaveOccurred()) +Expect(err != nil).To(BeTrue()) // should be: Expect(err).To(HaveOccurred()) +Expect(funcReturnsError()).To(BeNil()) // should be: Expect(funcReturnsError()).To(Succeed()) + +and so on +``` +It also supports the embedded `Not()` matcher; e.g. 
+ +`Ω(err == nil).Should(Not(BeTrue()))` => `Ω(x).Should(HaveOccurred())` + +## Suppress the linter +### Suppress warning from command line +* Use the `--suppress-len-assertion=true` flag to suppress the wrong length assertion warning +* Use the `--suppress-nil-assertion=true` flag to suppress the wrong nil assertion warning +* Use the `--suppress-err-assertion=true` flag to suppress the wrong error assertion warning + +### Suppress warning from the code +To suppress the wrong length assertion warning, add a comment with (only) + +`ginkgo-linter:ignore-len-assert-warning`. + +To suppress the wrong nil assertion warning, add a comment with (only) + +`ginkgo-linter:ignore-nil-assert-warning`. + +To suppress the wrong error assertion warning, add a comment with (only) + +`ginkgo-linter:ignore-err-assert-warning`. + +There are two options to use these comments: +1. If the comment is at the top of the file, supress the warning for the whole file; e.g.: + ```go + package mypackage + + // ginkgo-linter:ignore-len-assert-warning + + import ( + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" + ) + + var _ = Describe("my test", func() { + It("should do something", func() { + Expect(len("abc")).Should(Equal(3)) // nothing in this file will trigger the warning + }) + }) + ``` + +2. If the comment is before a wrong length check expression, the warning is suppressed for this expression only; for example: + ```golang + It("should test something", func() { + // ginkgo-linter:ignore-nil-assert-warning + Expect(x == nil).Should(BeTrue()) // this line will not trigger the warning + Expect(x == nil).Should(BeTrue()) // this line will trigger the warning + } + ``` diff --git a/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go b/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go new file mode 100644 index 00000000..fc089be1 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go @@ -0,0 +1,662 @@ +package ginkgolinter + +import ( + "bytes" + "flag" + "fmt" + "go/ast" + "go/printer" + "go/token" + gotypes "go/types" + + "github.com/go-toolsmith/astcopy" + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/gomegahandler" + "github.com/nunnatsa/ginkgolinter/reverseassertion" + "github.com/nunnatsa/ginkgolinter/types" +) + +// The ginkgolinter enforces standards of using ginkgo and gomega. 
+// +// The current checks are: +// * enforce right length assertion - warn for assertion of len(something): +// +// This check finds the following patterns and suggests an alternative +// * Expect(len(something)).To(Equal(number)) ===> Expect(x).To(HaveLen(number)) +// * ExpectWithOffset(1, len(something)).ShouldNot(Equal(0)) ===> ExpectWithOffset(1, something).ShouldNot(BeEmpty()) +// * Ω(len(something)).NotTo(BeZero()) ===> Ω(something).NotTo(BeEmpty()) +// * Expect(len(something)).To(BeNumerically(">", 0)) ===> Expect(something).ToNot(BeEmpty()) +// * Expect(len(something)).To(BeNumerically(">=", 1)) ===> Expect(something).ToNot(BeEmpty()) +// * Expect(len(something)).To(BeNumerically("==", number)) ===> Expect(something).To(HaveLen(number)) +// +// * enforce right nil assertion - warn for assertion of x == nil: +// This check finds the following patterns and suggests an alternative +// * Expect(x == nil).Should(Equal(true)) ===> Expect(x).Should(BeNil()) +// * Expect(nil == x).Should(BeTrue()) ===> Expect(x).Should(BeNil()) +// * Expect(x != nil).Should(Equal(false)) ===> Expect(x).Should(BeNil()) +// * Expect(nil == x).Should(BeFalse()) ===> Expect(x).Should(BeNil()) +// * Expect(x).Should(Equal(nil) // ===> Expect(x).Should(BeNil()) + +const ( + linterName = "ginkgo-linter" + wrongLengthWarningTemplate = linterName + ": wrong length assertion; consider using `%s` instead" + wrongNilWarningTemplate = linterName + ": wrong nil assertion; consider using `%s` instead" + wrongBoolWarningTemplate = linterName + ": wrong boolean assertion; consider using `%s` instead" + wrongErrWarningTemplate = linterName + ": wrong error assertion; consider using `%s` instead" + beEmpty = "BeEmpty" + beNil = "BeNil" + beTrue = "BeTrue" + beFalse = "BeFalse" + equal = "Equal" + not = "Not" + haveLen = "HaveLen" + succeed = "Succeed" + haveOccurred = "HaveOccurred" + expect = "Expect" + omega = "Ω" + expectWithOffset = "ExpectWithOffset" +) + +// Analyzer is the interface to go_vet +var Analyzer = NewAnalyzer() + +type ginkgoLinter struct { + suppress *types.Suppress +} + +// NewAnalyzer returns an Analyzer - the package interface with nogo +func NewAnalyzer() *analysis.Analyzer { + linter := ginkgoLinter{ + suppress: &types.Suppress{ + Len: false, + Nil: false, + Err: false, + }, + } + + a := &analysis.Analyzer{ + Name: "ginkgolinter", + Doc: `enforces standards of using ginkgo and gomega +currently, the linter searches for following: +* wrong length assertions. We want to assert the item rather than its length. +For example: + Expect(len(x)).Should(Equal(1)) +This should be replaced with: + Expect(x)).Should(HavelLen(1)) + +* wrong nil assertions. We want to assert the item rather than a comparison result. 
+For example: + Expect(x == nil).Should(BeTrue()) +This should be replaced with: + Expect(x).Should(BeNil()) + `, + Run: linter.run, + RunDespiteErrors: true, + } + + a.Flags.Init("ginkgolinter", flag.ExitOnError) + a.Flags.Var(&linter.suppress.Len, "suppress-len-assertion", "Suppress warning for wrong length assertions") + a.Flags.Var(&linter.suppress.Nil, "suppress-nil-assertion", "Suppress warning for wrong nil assertions") + a.Flags.Var(&linter.suppress.Err, "suppress-err-assertion", "Suppress warning for wrong error assertions") + + return a +} + +// main assertion function +func (l *ginkgoLinter) run(pass *analysis.Pass) (interface{}, error) { + if l.suppress.AllTrue() { + return nil, nil + } + + for _, file := range pass.Files { + fileSuppress := l.suppress.Clone() + + cm := ast.NewCommentMap(pass.Fset, file, file.Comments) + + fileSuppress.UpdateFromFile(cm) + if fileSuppress.AllTrue() { + continue + } + + handler := gomegahandler.GetGomegaHandler(file) + if handler == nil { // no gomega import => no use in gomega in this file; nothing to do here + continue + } + + ast.Inspect(file, func(n ast.Node) bool { + + stmt, ok := n.(*ast.ExprStmt) + if !ok { + return true + } + + exprSuppress := fileSuppress.Clone() + + if comments, ok := cm[stmt]; ok { + exprSuppress.UpdateFromComment(comments) + } + + // search for function calls + assertionExp, ok := stmt.X.(*ast.CallExpr) + if !ok { + return true + } + + assertionFunc, ok := assertionExp.Fun.(*ast.SelectorExpr) + if !ok || !isAssertionFunc(assertionFunc.Sel.Name) { + return true + } + + actualArg := getActualArg(assertionFunc, handler) + if actualArg == nil { + return true + } + + return checkExpression(pass, exprSuppress, actualArg, assertionExp, handler) + + }) + } + return nil, nil +} + +func checkExpression(pass *analysis.Pass, exprSuppress types.Suppress, actualArg ast.Expr, assertionExp *ast.CallExpr, handler gomegahandler.Handler) bool { + assertionExp = astcopy.CallExpr(assertionExp) + oldExpr := goFmt(pass.Fset, assertionExp) + if !bool(exprSuppress.Len) && isActualIsLenFunc(actualArg) { + + return checkLengthMatcher(assertionExp, pass, handler, oldExpr) + } else { + if nilable, compOp := getNilableFromComparison(actualArg); nilable != nil { + if isExprError(pass, nilable) { + if exprSuppress.Err { + return true + } + } else if exprSuppress.Nil { + return true + } + + return checkNilMatcher(assertionExp, pass, nilable, handler, compOp == token.NEQ, oldExpr) + + } else if isExprError(pass, actualArg) { + return bool(exprSuppress.Err) || checkNilError(pass, assertionExp, handler, actualArg, oldExpr) + + } else { + return simplifyEqual(pass, exprSuppress, assertionExp, handler, actualArg, oldExpr) + } + } +} + +// Check if the "actual" argument is a call to the golang built-in len() function +func isActualIsLenFunc(actualArg ast.Expr) bool { + lenArgExp, ok := actualArg.(*ast.CallExpr) + if !ok { + return false + } + + lenFunc, ok := lenArgExp.Fun.(*ast.Ident) + return ok && lenFunc.Name == "len" +} + +// Check if matcher function is in one of the patterns we want to avoid +func checkLengthMatcher(exp *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, oldExp string) bool { + matcher, ok := exp.Args[0].(*ast.CallExpr) + if !ok { + return true + } + + matcherFuncName, ok := handler.GetActualFuncName(matcher) + if !ok { + return true + } + + switch matcherFuncName { + case equal: + handleEqualMatcher(matcher, pass, exp, handler, oldExp) + return false + + case "BeZero": + handleBeZero(pass, exp, handler, oldExp) + 
return false + + case "BeNumerically": + return handleBeNumerically(matcher, pass, exp, handler, oldExp) + + case not: + reverseAssertionFuncLogic(exp) + exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0] + return checkLengthMatcher(exp, pass, handler, oldExp) + + default: + return true + } +} + +// Check if matcher function is in one of the patterns we want to avoid +func checkNilMatcher(exp *ast.CallExpr, pass *analysis.Pass, nilable ast.Expr, handler gomegahandler.Handler, notEqual bool, oldExp string) bool { + matcher, ok := exp.Args[0].(*ast.CallExpr) + if !ok { + return true + } + + matcherFuncName, ok := handler.GetActualFuncName(matcher) + if !ok { + return true + } + + switch matcherFuncName { + case equal: + handleEqualNilMatcher(matcher, pass, exp, handler, nilable, notEqual, oldExp) + + case beTrue: + handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, oldExp) + + case beFalse: + reverseAssertionFuncLogic(exp) + handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, oldExp) + + case not: + reverseAssertionFuncLogic(exp) + exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0] + return checkNilMatcher(exp, pass, nilable, handler, notEqual, oldExp) + + default: + return true + } + return false +} + +func checkNilError(pass *analysis.Pass, assertionExp *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, oldExpr string) bool { + if len(assertionExp.Args) == 0 { + return true + } + + equalFuncExpr, ok := assertionExp.Args[0].(*ast.CallExpr) + if !ok { + return true + } + + funcName, ok := handler.GetActualFuncName(equalFuncExpr) + if !ok { + return true + } + + switch funcName { + case beNil: // no additional processing needed. + case equal: + + if len(equalFuncExpr.Args) == 0 { + return true + } + + nilable, ok := equalFuncExpr.Args[0].(*ast.Ident) + if !ok || nilable.Name != "nil" { + return true + } + + case not: + reverseAssertionFuncLogic(assertionExp) + assertionExp.Args[0] = assertionExp.Args[0].(*ast.CallExpr).Args[0] + return checkNilError(pass, assertionExp, handler, actualArg, oldExpr) + default: + return true + } + + var newFuncName string + if _, ok := actualArg.(*ast.CallExpr); ok { + newFuncName = succeed + } else { + reverseAssertionFuncLogic(assertionExp) + newFuncName = haveOccurred + } + + handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(newFuncName)) + equalFuncExpr.Args = nil + + report(pass, assertionExp, wrongErrWarningTemplate, oldExpr) + return false +} + +// handle Equal(nil), Equal(true) and Equal(false) +func simplifyEqual(pass *analysis.Pass, exprSuppress types.Suppress, assertionExp *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, oldExpr string) bool { + if len(assertionExp.Args) == 0 { + return true + } + + equalFuncExpr, ok := assertionExp.Args[0].(*ast.CallExpr) + if !ok { + return true + } + + funcName, ok := handler.GetActualFuncName(equalFuncExpr) + if !ok { + return true + } + + switch funcName { + case equal: + if len(equalFuncExpr.Args) == 0 { + return true + } + + token, ok := equalFuncExpr.Args[0].(*ast.Ident) + if !ok { + return true + } + + var replacement string + var template string + switch token.Name { + case "nil": + if exprSuppress.Nil { + return true + } + replacement = beNil + template = wrongNilWarningTemplate + case "true": + replacement = beTrue + template = wrongBoolWarningTemplate + case "false": + replacement = beFalse + template = wrongBoolWarningTemplate + default: + return true + } + + handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(replacement)) + equalFuncExpr.Args = nil + 
+ report(pass, assertionExp, template, oldExpr) + + return false + + case not: + reverseAssertionFuncLogic(assertionExp) + assertionExp.Args[0] = assertionExp.Args[0].(*ast.CallExpr).Args[0] + return simplifyEqual(pass, exprSuppress, assertionExp, handler, actualArg, oldExpr) + default: + return true + } +} + +// checks that the function is an assertion's actual function and return the "actual" parameter. If the function +// is not assertion's actual function, return nil. +func getActualArg(assertionFunc *ast.SelectorExpr, handler gomegahandler.Handler) ast.Expr { + actualExpr, ok := assertionFunc.X.(*ast.CallExpr) + if !ok { + return nil + } + + funcName, ok := handler.GetActualFuncName(actualExpr) + if !ok { + return nil + } + + switch funcName { + case expect, omega: + return actualExpr.Args[0] + case expectWithOffset: + return actualExpr.Args[1] + default: + return nil + } +} + +// Replace the len function call by its parameter, to create a fix suggestion +func replaceLenActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler) { + name, ok := handler.GetActualFuncName(actualExpr) + if !ok { + return + } + + switch name { + case expect, omega: + arg := actualExpr.Args[0] + if isActualIsLenFunc(arg) { + // replace the len function call by its parameter, to create a fix suggestion + actualExpr.Args[0] = arg.(*ast.CallExpr).Args[0] + } + case expectWithOffset: + arg := actualExpr.Args[1] + if isActualIsLenFunc(arg) { + // replace the len function call by its parameter, to create a fix suggestion + actualExpr.Args[1] = arg.(*ast.CallExpr).Args[0] + } + } +} + +// Replace the nil comparison with the compared object, to create a fix suggestion +func replaceNilActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr) bool { + actualFuncName, ok := handler.GetActualFuncName(actualExpr) + if !ok { + return false + } + + switch actualFuncName { + case expect, omega: + actualExpr.Args[0] = nilable + return true + + case expectWithOffset: + actualExpr.Args[1] = nilable + return true + + default: + return false + } +} + +// For the BeNumerically matcher, we want to avoid the assertion of length to be > 0 or >= 1, or just == number +func handleBeNumerically(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, oldExp string) bool { + opExp, ok1 := matcher.Args[0].(*ast.BasicLit) + valExp, ok2 := matcher.Args[1].(*ast.BasicLit) + + if ok1 && ok2 { + op := opExp.Value + val := valExp.Value + + if (op == `">"` && val == "0") || (op == `">="` && val == "1") { + reverseAssertionFuncLogic(exp) + handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty)) + exp.Args[0].(*ast.CallExpr).Args = nil + reportLengthAssertion(pass, exp, handler, oldExp) + return false + } else if op == `"=="` { + chooseNumericMatcher(exp, handler, valExp) + reportLengthAssertion(pass, exp, handler, oldExp) + + return false + } else if op == `"!="` { + reverseAssertionFuncLogic(exp) + chooseNumericMatcher(exp, handler, valExp) + reportLengthAssertion(pass, exp, handler, oldExp) + + return false + } + } + return true +} + +func chooseNumericMatcher(exp *ast.CallExpr, handler gomegahandler.Handler, valExp *ast.BasicLit) { + caller := exp.Args[0].(*ast.CallExpr) + if valExp.Value == "0" { + handler.ReplaceFunction(caller, ast.NewIdent(beEmpty)) + exp.Args[0].(*ast.CallExpr).Args = nil + } else { + handler.ReplaceFunction(caller, ast.NewIdent(haveLen)) + exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{valExp} + } +} + +func 
reverseAssertionFuncLogic(exp *ast.CallExpr) { + assertionFunc := exp.Fun.(*ast.SelectorExpr).Sel + assertionFunc.Name = reverseassertion.ChangeAssertionLogic(assertionFunc.Name) +} + +func handleEqualMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, oldExp string) { + equalTo, ok := matcher.Args[0].(*ast.BasicLit) + if ok { + chooseNumericMatcher(exp, handler, equalTo) + } else { + handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveLen)) + exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{matcher.Args[0]} + } + reportLengthAssertion(pass, exp, handler, oldExp) +} + +func handleBeZero(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, oldExp string) { + exp.Args[0].(*ast.CallExpr).Args = nil + handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty)) + reportLengthAssertion(pass, exp, handler, oldExp) +} + +func handleEqualNilMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, oldExp string) { + equalTo, ok := matcher.Args[0].(*ast.Ident) + if !ok { + return + } + + if equalTo.Name == "false" { + reverseAssertionFuncLogic(exp) + } else if equalTo.Name != "true" { + return + } + + newFuncName, isItError := handleNilComparisonErr(pass, exp, nilable) + + handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName)) + exp.Args[0].(*ast.CallExpr).Args = nil + + reportNilAssertion(pass, exp, handler, nilable, notEqual, oldExp, isItError) +} + +func handleNilBeBoolMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, oldExp string) { + newFuncName, isItError := handleNilComparisonErr(pass, exp, nilable) + handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName)) + exp.Args[0].(*ast.CallExpr).Args = nil + + reportNilAssertion(pass, exp, handler, nilable, notEqual, oldExp, isItError) +} + +func handleNilComparisonErr(pass *analysis.Pass, exp *ast.CallExpr, nilable ast.Expr) (string, bool) { + newFuncName := beNil + isItError := isExprError(pass, nilable) + if isItError { + if _, ok := nilable.(*ast.CallExpr); ok { + newFuncName = succeed + } else { + reverseAssertionFuncLogic(exp) + newFuncName = haveOccurred + } + } + + return newFuncName, isItError +} +func isAssertionFunc(name string) bool { + switch name { + case "To", "ToNot", "NotTo", "Should", "ShouldNot": + return true + } + return false +} + +func reportLengthAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler, oldExpr string) { + replaceLenActualArg(expr.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr), handler) + + report(pass, expr, wrongLengthWarningTemplate, oldExpr) +} + +func reportNilAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, oldExpr string, isItError bool) { + changed := replaceNilActualArg(expr.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr), handler, nilable) + if !changed { + return + } + + if notEqual { + reverseAssertionFuncLogic(expr) + } + template := wrongNilWarningTemplate + if isItError { + template = wrongErrWarningTemplate + } + + report(pass, expr, template, oldExpr) +} + +func report(pass *analysis.Pass, expr *ast.CallExpr, messageTemplate, oldExpr string) { + newExp := goFmt(pass.Fset, expr) + pass.Report(analysis.Diagnostic{ + Pos: expr.Pos(), + Message: fmt.Sprintf(messageTemplate, newExp), + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: 
fmt.Sprintf("should replace %s with %s", oldExpr, newExp), + TextEdits: []analysis.TextEdit{ + { + Pos: expr.Pos(), + End: expr.End(), + NewText: []byte(newExp), + }, + }, + }, + }, + }) +} + +func getNilableFromComparison(actualArg ast.Expr) (ast.Expr, token.Token) { + bin, ok := actualArg.(*ast.BinaryExpr) + if !ok { + return nil, token.ILLEGAL + } + + if bin.Op == token.EQL || bin.Op == token.NEQ { + if isNil(bin.Y) { + return bin.X, bin.Op + } else if isNil(bin.X) { + return bin.Y, bin.Op + } + } + + return nil, token.ILLEGAL +} + +func isNil(expr ast.Expr) bool { + nilObject, ok := expr.(*ast.Ident) + return ok && nilObject.Name == "nil" && nilObject.Obj == nil +} + +func goFmt(fset *token.FileSet, x ast.Expr) string { + var b bytes.Buffer + _ = printer.Fprint(&b, fset, x) + return b.String() +} + +var errorType *gotypes.Interface + +func init() { + errorType = gotypes.Universe.Lookup("error").Type().Underlying().(*gotypes.Interface) +} + +func isError(t gotypes.Type) bool { + return gotypes.Implements(t, errorType) +} + +func isExprError(pass *analysis.Pass, expr ast.Expr) bool { + actualArgType := pass.TypesInfo.TypeOf(expr) + switch t := actualArgType.(type) { + case *gotypes.Named: + if isError(actualArgType) { + return true + } + case *gotypes.Tuple: + if t.Len() > 0 { + switch t0 := t.At(0).Type().(type) { + case *gotypes.Named, *gotypes.Pointer: + if isError(t0) { + return true + } + } + } + } + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go b/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go new file mode 100644 index 00000000..d57ba291 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go @@ -0,0 +1,167 @@ +package gomegahandler + +import ( + "go/ast" + "go/token" +) + +// Handler provide different handling, depend on the way gomega was imported, whether +// in imported with "." name, custom name or without any name. +type Handler interface { + // GetActualFuncName returns the name of the gomega function, e.g. `Expect` + GetActualFuncName(*ast.CallExpr) (string, bool) + // ReplaceFunction replaces the function with another one, for fix suggestions + ReplaceFunction(*ast.CallExpr, *ast.Ident) + + getDefFuncName(expr *ast.CallExpr) string + + getFieldType(field *ast.Field) string +} + +// GetGomegaHandler returns a gomegar handler according to the way gomega was imported in the specific file +func GetGomegaHandler(file *ast.File) Handler { + for _, imp := range file.Imports { + if imp.Path.Value != `"github.com/onsi/gomega"` { + continue + } + + switch name := imp.Name.String(); { + case name == ".": + return dotHandler{} + case name == "": // import with no local name + return nameHandler("gomega") + default: + return nameHandler(name) + } + } + + return nil // no gomega import; this file does not use gomega +} + +// dotHandler is used when importing gomega with dot; i.e. +// import . "github.com/onsi/gomega" +type dotHandler struct{} + +// GetActualFuncName returns the name of the gomega function, e.g. 
`Expect` +func (h dotHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) { + switch actualFunc := expr.Fun.(type) { + case *ast.Ident: + return actualFunc.Name, true + case *ast.SelectorExpr: + if isGomegaVar(actualFunc.X, h) { + return actualFunc.Sel.Name, true + } + } + return "", false +} + +// ReplaceFunction replaces the function with another one, for fix suggestions +func (dotHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) { + caller.Fun = newExpr +} + +func (dotHandler) getDefFuncName(expr *ast.CallExpr) string { + if f, ok := expr.Fun.(*ast.Ident); ok { + return f.Name + } + return "" +} + +func (dotHandler) getFieldType(field *ast.Field) string { + switch t := field.Type.(type) { + case *ast.Ident: + return t.Name + case *ast.StarExpr: + if name, ok := t.X.(*ast.Ident); ok { + return name.Name + } + } + return "" +} + +// nameHandler is used when importing gomega without name; i.e. +// import "github.com/onsi/gomega" +// +// or with a custom name; e.g. +// import customname "github.com/onsi/gomega" +type nameHandler string + +// GetActualFuncName returns the name of the gomega function, e.g. `Expect` +func (g nameHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) { + selector, ok := expr.Fun.(*ast.SelectorExpr) + if !ok { + return "", false + } + + x, ok := selector.X.(*ast.Ident) + if !ok { + return "", false + } + + if x.Name != string(g) { + if !isGomegaVar(x, g) { + return "", false + } + } + + return selector.Sel.Name, true +} + +// ReplaceFunction replaces the function with another one, for fix suggestions +func (nameHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) { + caller.Fun.(*ast.SelectorExpr).Sel = newExpr +} + +func (g nameHandler) getDefFuncName(expr *ast.CallExpr) string { + if sel, ok := expr.Fun.(*ast.SelectorExpr); ok { + if f, ok := sel.X.(*ast.Ident); ok && f.Name == string(g) { + return sel.Sel.Name + } + } + return "" +} + +func (g nameHandler) getFieldType(field *ast.Field) string { + switch t := field.Type.(type) { + case *ast.SelectorExpr: + if id, ok := t.X.(*ast.Ident); ok { + if id.Name == string(g) { + return t.Sel.Name + } + } + case *ast.StarExpr: + if sel, ok := t.X.(*ast.SelectorExpr); ok { + if x, ok := sel.X.(*ast.Ident); ok && x.Name == string(g) { + return sel.Sel.Name + } + } + + } + return "" +} + +func isGomegaVar(x ast.Expr, handler Handler) bool { + if i, ok := x.(*ast.Ident); ok { + if i.Obj != nil && i.Obj.Kind == ast.Var { + switch decl := i.Obj.Decl.(type) { + case *ast.AssignStmt: + if decl.Tok == token.DEFINE { + if defFunc, ok := decl.Rhs[0].(*ast.CallExpr); ok { + fName := handler.getDefFuncName(defFunc) + switch fName { + case "NewGomega", "NewWithT", "NewGomegaWithT": + return true + } + } + } + case *ast.Field: + name := handler.getFieldType(decl) + switch name { + case "Gomega", "WithT", "GomegaWithT": + return true + } + } + } + } + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go b/vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go new file mode 100644 index 00000000..42c914e5 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go @@ -0,0 +1,17 @@ +package reverseassertion + +var reverseLogicAssertions = map[string]string{ + "To": "ToNot", + "ToNot": "To", + "NotTo": "To", + "Should": "ShouldNot", + "ShouldNot": "Should", +} + +// ChangeAssertionLogic get gomega assertion function name, and returns the 
reverse logic function name +func ChangeAssertionLogic(funcName string) string { + if revFunc, ok := reverseLogicAssertions[funcName]; ok { + return revFunc + } + return funcName +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/types/boolean.go b/vendor/github.com/nunnatsa/ginkgolinter/types/boolean.go new file mode 100644 index 00000000..be510c4e --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/types/boolean.go @@ -0,0 +1,32 @@ +package types + +import ( + "errors" + "strings" +) + +// Boolean is a bool, implementing the flag.Value interface, to be used as a flag var. +type Boolean bool + +func (b *Boolean) Set(value string) error { + if b == nil { + return errors.New("trying to set nil parameter") + } + switch strings.ToLower(value) { + case "true": + *b = true + case "false": + *b = false + default: + return errors.New(value + " is not a Boolean value") + + } + return nil +} + +func (b Boolean) String() string { + if b { + return "true" + } + return "false" +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/types/suppress.go b/vendor/github.com/nunnatsa/ginkgolinter/types/suppress.go new file mode 100644 index 00000000..a703eb2d --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/types/suppress.go @@ -0,0 +1,67 @@ +package types + +import ( + "strings" + + "go/ast" +) + +const ( + suppressPrefix = "ginkgo-linter:" + suppressLengthAssertionWarning = suppressPrefix + "ignore-len-assert-warning" + suppressNilAssertionWarning = suppressPrefix + "ignore-nil-assert-warning" + suppressErrAssertionWarning = suppressPrefix + "ignore-err-assert-warning" +) + +type Suppress struct { + Len Boolean + Nil Boolean + Err Boolean +} + +func (s Suppress) AllTrue() bool { + return bool(s.Len && s.Nil && s.Err) +} + +func (s Suppress) Clone() Suppress { + return Suppress{ + Len: s.Len, + Nil: s.Nil, + Err: s.Err, + } +} + +func (s *Suppress) UpdateFromComment(commentGroup []*ast.CommentGroup) { + for _, cmntList := range commentGroup { + if s.AllTrue() { + break + } + + for _, cmnt := range cmntList.List { + commentLines := strings.Split(cmnt.Text, "\n") + for _, comment := range commentLines { + comment = strings.TrimPrefix(comment, "//") + comment = strings.TrimPrefix(comment, "/*") + comment = strings.TrimSuffix(comment, "*/") + comment = strings.TrimSpace(comment) + + s.Len = s.Len || (comment == suppressLengthAssertionWarning) + s.Nil = s.Nil || (comment == suppressNilAssertionWarning) + s.Err = s.Err || (comment == suppressErrAssertionWarning) + } + } + } +} + +func (s *Suppress) UpdateFromFile(cm ast.CommentMap) { + + for key, commentGroup := range cm { + if s.AllTrue() { + break + } + + if _, ok := key.(*ast.GenDecl); ok { + s.UpdateFromComment(commentGroup) + } + } +} diff --git a/vendor/github.com/phayes/checkstyle/.scrutinizer.yml b/vendor/github.com/phayes/checkstyle/.scrutinizer.yml deleted file mode 100644 index d9284b6b..00000000 --- a/vendor/github.com/phayes/checkstyle/.scrutinizer.yml +++ /dev/null @@ -1,15 +0,0 @@ -build: - dependencies: - before: - - 'source <(curl -fsSL https://raw.githubusercontent.com/phayes/go-scrutinize/master/install-golang)' - - tests: - override: - - - command: 'cd $PROJECTPATH && go-scrutinize' - coverage: - file: 'coverage.xml' - format: 'clover' - analysis: - file: 'checkstyle_report.xml' - format: 'general-checkstyle' \ No newline at end of file diff --git a/vendor/github.com/phayes/checkstyle/LICENSE 
b/vendor/github.com/phayes/checkstyle/LICENSE deleted file mode 100644 index 6dc912f3..00000000 --- a/vendor/github.com/phayes/checkstyle/LICENSE +++ /dev/null @@ -1,29 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2017, Patrick D Hayes -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/phayes/checkstyle/README.md b/vendor/github.com/phayes/checkstyle/README.md deleted file mode 100644 index 358cf675..00000000 --- a/vendor/github.com/phayes/checkstyle/README.md +++ /dev/null @@ -1,44 +0,0 @@ -# checkstyle -[![GoDoc](https://godoc.org/github.com/phayes/checkstyle?status.svg)](https://godoc.org/github.com/phayes/checkstyle) -[![Go Report Card](https://goreportcard.com/badge/github.com/phayes/checkstyle)](https://goreportcard.com/report/github.com/phayes/checkstyle) -[![Build Status](https://scrutinizer-ci.com/g/phayes/checkstyle/badges/build.png?b=master)](https://scrutinizer-ci.com/g/phayes/checkstyle/build-status/master) - -Read and write checksyle_report.xml files with golang - -Checkstyle XML files are a standard file format for reporting errors in source code, and is often generated by static analysis tools. 
- -Example usage: - -```go - -import "github.com/phayes/checkstyle" - -// Print XML into human readable format -checkSyle, err := checkstyle.ReadFile("checkstyle_report.xml") -if err != nil { - log.Fatal(err) -} -for _, file := range checkStyle.File { - fmt.Println(File.Name) - for _, codingError := range file.Error { - fmt.Println("\t", codingError.Line, codingError.Message) - } -} - -// Create a new XML file from scratch -check := checkstyle.New() - -// Ensure that a file has been added -file := check.EnsureFile("/path/to/file") - -// Create an error on line 10 -codingError := checkstyle.NewError(10, "format", "line must end with a full stop") - -// Add the error to the file -file.AddError(codingError) - -// Output XML -fmt.Print(check) -``` - -For more information on checkstyle XML see: http://checkstyle.sourceforge.net/checks.html diff --git a/vendor/github.com/phayes/checkstyle/checkstyle.go b/vendor/github.com/phayes/checkstyle/checkstyle.go deleted file mode 100644 index cabbd4b4..00000000 --- a/vendor/github.com/phayes/checkstyle/checkstyle.go +++ /dev/null @@ -1,112 +0,0 @@ -package checkstyle - -import "encoding/xml" -import "io/ioutil" - -// DefaultCheckStyleVersion defines the default "version" attribute on "" lememnt -var DefaultCheckStyleVersion = "1.0.0" - -// Severity defines a checkstyle severity code -type Severity string - -var ( - SeverityError Severity = "error" - SeverityInfo Severity = "info" - SeverityWarning Severity = "warning" - SeverityIgnore Severity = "ignore" - SeverityNone Severity -) - -// CheckStyle represents a xml element found in a checkstyle_report.xml file. -type CheckStyle struct { - XMLName xml.Name `xml:"checkstyle"` - Version string `xml:"version,attr"` - File []*File `xml:"file"` -} - -// AddFile adds a checkstyle.File with the given filename. -func (cs *CheckStyle) AddFile(csf *File) { - cs.File = append(cs.File, csf) -} - -// GetFile gets a CheckStyleFile with the given filename. -func (cs *CheckStyle) GetFile(filename string) (csf *File, ok bool) { - for _, file := range cs.File { - if file.Name == filename { - csf = file - ok = true - return - } - } - return -} - -// EnsureFile ensures that a CheckStyleFile with the given name exists -// Returns either an exiting CheckStyleFile (if a file with that name exists) -// or a new CheckStyleFile (if a file with that name does not exists) -func (cs *CheckStyle) EnsureFile(filename string) (csf *File) { - csf, ok := cs.GetFile(filename) - if !ok { - csf = NewFile(filename) - cs.AddFile(csf) - } - return csf -} - -// String implements Stringer. Returns as xml. -func (cs *CheckStyle) String() string { - checkStyleXML, err := xml.Marshal(cs) - if err != nil { - panic(err) - } - return string(checkStyleXML) -} - -// New returns a new CheckStyle -func New() *CheckStyle { - return &CheckStyle{Version: DefaultCheckStyleVersion, File: []*File{}} -} - -// File represents a xml element. -type File struct { - XMLName xml.Name `xml:"file"` - Name string `xml:"name,attr"` - Error []*Error `xml:"error"` -} - -// AddError adds a checkstyle.Error to the file. 
-func (csf *File) AddError(cse *Error) { - csf.Error = append(csf.Error, cse) -} - -// NewFile creates a new checkstyle.File -func NewFile(filename string) *File { - return &File{Name: filename, Error: []*Error{}} -} - -// Error represents a xml element -type Error struct { - XMLName xml.Name `xml:"error"` - Line int `xml:"line,attr"` - Column int `xml:"column,attr,omitempty"` - Severity Severity `xml:"severity,attr,omitempty"` - Message string `xml:"message,attr"` - Source string `xml:"source,attr"` -} - -// NewError creates a new checkstyle.Error -// Note that line starts at 0, and column starts at 1 -func NewError(line int, column int, severity Severity, message string, source string) *Error { - return &Error{Line: line, Column: column, Severity: severity, Message: message, Source: source} -} - -// ReadFile reads a checkfile.xml file and returns a CheckStyle object. -func ReadFile(filename string) (*CheckStyle, error) { - checkStyleXML, err := ioutil.ReadFile(filename) - if err != nil { - return nil, err - } - checkStyle := New() - err = xml.Unmarshal(checkStyleXML, checkStyle) - return checkStyle, err -} diff --git a/vendor/github.com/phayes/checkstyle/godoc.go b/vendor/github.com/phayes/checkstyle/godoc.go deleted file mode 100644 index c9662fe9..00000000 --- a/vendor/github.com/phayes/checkstyle/godoc.go +++ /dev/null @@ -1,36 +0,0 @@ -/* -Package checkstyle allows the parsing of generation of checkstyle XML files. - -Checkstyle XML files are a standard file format for reporting errors in source code, and is often generated by static analysis tools. - -Example usage: - // Print XML into human readable format - checkSyle, err := checkstyle.ReadFile("checkstyle_report.xml") - if err != nil { - log.Fatal(err) - } - for _, file := range checkStyle.File { - fmt.Println(File.Name) - for _, codingError := range file.Error { - fmt.Println("\t", codingError.Line, codingError.Message) - } - } - - // Create a new XML file from scratch - check := checkstyle.New() - - // Ensure that a file has been added - file := check.EnsureFile("/path/to/file") - - // Create an error on line 10, column 5 - codingError := checkstyle.NewError(10, 5, checkstyle.SeverityWarning, "format", "line must end with a full stop") - - // Add the error to the file - file.AddError(codingError) - - // Output XML - fmt.Print(check) - -For more information on checkstyle XML see: http://checkstyle.sourceforge.net/checks.html -*/ -package checkstyle diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go index 7fe4c38c..c9dcf5e5 100644 --- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go @@ -40,6 +40,7 @@ var allowedErrors = []struct { {err: "io.ErrClosedPipe", fun: "(*io.PipeWriter).Write"}, {err: "io.ErrShortBuffer", fun: "io.ReadAtLeast"}, {err: "io.ErrUnexpectedEOF", fun: "io.ReadAtLeast"}, + {err: "io.EOF", fun: "io.ReadFull"}, {err: "io.ErrUnexpectedEOF", fun: "io.ReadFull"}, // pkg/net/http {err: "http.ErrServerClosed", fun: "(*net/http.Server).ListenAndServe"}, diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go index fb065ced..b9ebe6ef 100644 --- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go @@ -41,9 +41,9 @@ func 
LintFmtErrorfCalls(fset *token.FileSet, info types.Info) []Lint { continue } - // For any arguments that are errors, check whether the wrapping verb - // is used. Only one %w verb may be used in a single format string at a - // time, so we stop after finding a correct %w. + // For any arguments that are errors, check whether the wrapping verb is used. %w may occur + // for multiple errors in one Errorf invocation. We raise an issue if at least one error + // does not have a corresponding wrapping verb. var lintArg ast.Expr args := call.Args[1:] for i := 0; i < len(args) && i < len(formatVerbs); i++ { @@ -52,12 +52,12 @@ func LintFmtErrorfCalls(fset *token.FileSet, info types.Info) []Lint { } if formatVerbs[i] == "w" { - lintArg = nil - break + continue } if lintArg == nil { lintArg = args[i] + break } } if lintArg != nil { @@ -85,8 +85,8 @@ func isErrorStringCall(info types.Info, expr ast.Expr) bool { } // printfFormatStringVerbs returns a normalized list of all the verbs that are used per argument to -// the printf function. The index of each returned element corresponds to index of the respective -// argument. +// the printf function. The index of each returned element corresponds to the index of the +// respective argument. func printfFormatStringVerbs(info types.Info, call *ast.CallExpr) ([]string, bool) { if len(call.Args) <= 1 { return nil, false diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go index a5e6ca4d..88feef92 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go @@ -131,6 +131,7 @@ func (e *engine) Run(ctx *RunContext, buildContext *build.Context, f *ast.File) } // engineState is a shared state inside the engine. +// Its access is synchronized, unlike the RunnerState which should be thread-local. 
type engineState struct { env *quasigo.Env diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go index 604ae4a1..7320ab7f 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go @@ -23,9 +23,16 @@ func filterFailure(reason string) matchFilterResult { return matchFilterResult(reason) } -func exprListFilterApply(src string, list gogrep.ExprSlice, fn func(ast.Expr) bool) matchFilterResult { - for i := 0; i < list.Len(); i++ { - if !fn(list.At(i).(ast.Expr)) { +func asExprSlice(x ast.Node) *gogrep.NodeSlice { + if x, ok := x.(*gogrep.NodeSlice); ok && x.Kind == gogrep.ExprNodeSlice { + return x + } + return nil +} + +func exprListFilterApply(src string, list []ast.Expr, fn func(ast.Expr) bool) matchFilterResult { + for _, e := range list { + if !fn(e) { return filterFailure(src) } } @@ -99,12 +106,11 @@ func makeFileNameMatchesFilter(src string, re textmatch.Pattern) filterFunc { func makePureFilter(src, varname string) filterFunc { return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { return isPure(params.ctx.Types, x) }) } - n := params.subExpr(varname) if isPure(params.ctx.Types, n) { return filterSuccess @@ -115,8 +121,8 @@ func makePureFilter(src, varname string) filterFunc { func makeConstFilter(src, varname string) filterFunc { return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { return isConstant(params.ctx.Types, x) }) } @@ -131,8 +137,8 @@ func makeConstFilter(src, varname string) filterFunc { func makeConstSliceFilter(src, varname string) filterFunc { return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { return isConstantSlice(params.ctx.Types, x) }) } @@ -147,8 +153,8 @@ func makeConstSliceFilter(src, varname string) filterFunc { func makeAddressableFilter(src, varname string) filterFunc { return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { return isAddressable(params.ctx.Types, x) }) } @@ -163,8 +169,8 @@ func makeAddressableFilter(src, varname string) filterFunc { func makeComparableFilter(src, varname string) filterFunc { return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, 
list.GetExprSlice(), func(x ast.Expr) bool { return types.Comparable(params.typeofNode(x)) }) } @@ -212,8 +218,8 @@ func makeCustomVarFilter(src, varname string, fn *quasigo.Func) filterFunc { func makeTypeImplementsFilter(src, varname string, iface *types.Interface) filterFunc { return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { return xtypes.Implements(params.typeofNode(x), iface) }) } @@ -322,8 +328,8 @@ func makeRootSinkTypeIsFilter(src string, pat *typematch.Pattern) filterFunc { func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Pattern) filterFunc { if underlying { return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { return pat.MatchIdentical(params.typematchState, params.typeofNode(x).Underlying()) }) } @@ -336,8 +342,8 @@ func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Patte } return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { return pat.MatchIdentical(params.typematchState, params.typeofNode(x)) }) } @@ -351,8 +357,8 @@ func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Patte func makeTypeConvertibleToFilter(src, varname string, dstType types.Type) filterFunc { return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { return types.ConvertibleTo(params.typeofNode(x), dstType) }) } @@ -367,8 +373,8 @@ func makeTypeConvertibleToFilter(src, varname string, dstType types.Type) filter func makeTypeAssignableToFilter(src, varname string, dstType types.Type) filterFunc { return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { return types.AssignableTo(params.typeofNode(x), dstType) }) } @@ -395,6 +401,28 @@ func makeLineFilter(src, varname string, op token.Token, rhsVarname string) filt } } +func makeObjectIsVariadicParamFilter(src, varname string) filterFunc { + return func(params *filterParams) matchFilterResult { + if params.currentFunc == nil { + return filterFailure(src) + } + funcObj, ok := params.ctx.Types.ObjectOf(params.currentFunc.Name).(*types.Func) + if !ok { + return filterFailure(src) + } + funcSig := funcObj.Type().(*types.Signature) + if !funcSig.Variadic() { + return filterFailure(src) + } + paramObj := funcSig.Params().At(funcSig.Params().Len() - 1) + 
obj := params.ctx.Types.ObjectOf(identOf(params.subExpr(varname))) + if paramObj != obj { + return filterFailure(src) + } + return filterSuccess + } +} + func makeObjectIsGlobalFilter(src, varname string) filterFunc { return func(params *filterParams) matchFilterResult { obj := params.ctx.Types.ObjectOf(identOf(params.subExpr(varname))) @@ -433,15 +461,21 @@ func makeLineConstFilter(src, varname string, op token.Token, rhsValue constant. func makeTypeSizeConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc { return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { typ := params.typeofNode(x) + if isTypeParam(typ) { + return false + } lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(typ)) return constant.Compare(lhsValue, op, rhsValue) }) } typ := params.typeofNode(params.subExpr(varname)) + if isTypeParam(typ) { + return filterFailure(src) + } lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(typ)) if constant.Compare(lhsValue, op, rhsValue) { return filterSuccess @@ -453,8 +487,11 @@ func makeTypeSizeConstFilter(src, varname string, op token.Token, rhsValue const func makeTypeSizeFilter(src, varname string, op token.Token, rhsVarname string) filterFunc { return func(params *filterParams) matchFilterResult { lhsTyp := params.typeofNode(params.subExpr(varname)) - lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(lhsTyp)) rhsTyp := params.typeofNode(params.subExpr(rhsVarname)) + if isTypeParam(lhsTyp) || isTypeParam(rhsTyp) { + return filterFailure(src) + } + lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(lhsTyp)) rhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(rhsTyp)) if constant.Compare(lhsValue, op, rhsValue) { return filterSuccess @@ -465,8 +502,8 @@ func makeTypeSizeFilter(src, varname string, op token.Token, rhsVarname string) func makeValueIntConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc { return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { lhsValue := intValueOf(params.ctx.Types, x) return lhsValue != nil && constant.Compare(lhsValue, op, rhsValue) }) @@ -606,8 +643,8 @@ func makeObjectIsFilter(src, varname, objectName string) filterFunc { } return func(params *filterParams) matchFilterResult { - if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { - return exprListFilterApply(src, list, func(x ast.Expr) bool { + if list := asExprSlice(params.subNode(varname)); list != nil { + return exprListFilterApply(src, list.GetExprSlice(), func(x ast.Expr) bool { ident := identOf(x) return ident != nil && predicate(params.ctx.Types.ObjectOf(ident)) }) diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go index c9401c02..bc2a5ee5 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go @@ -88,189 +88,195 @@ const 
( // $Value type: string FilterVarObjectIsGlobalOp FilterOp = 23 + // m[$Value].Object.IsVariadicParam() + // $Value type: string + FilterVarObjectIsVariadicParamOp FilterOp = 24 + // m[$Value].Type.Is($Args[0]) // $Value type: string - FilterVarTypeIsOp FilterOp = 24 + FilterVarTypeIsOp FilterOp = 25 // m[$Value].Type.IdenticalTo($Args[0]) // $Value type: string - FilterVarTypeIdenticalToOp FilterOp = 25 + FilterVarTypeIdenticalToOp FilterOp = 26 // m[$Value].Type.Underlying().Is($Args[0]) // $Value type: string - FilterVarTypeUnderlyingIsOp FilterOp = 26 + FilterVarTypeUnderlyingIsOp FilterOp = 27 // m[$Value].Type.OfKind($Args[0]) // $Value type: string - FilterVarTypeOfKindOp FilterOp = 27 + FilterVarTypeOfKindOp FilterOp = 28 // m[$Value].Type.Underlying().OfKind($Args[0]) // $Value type: string - FilterVarTypeUnderlyingOfKindOp FilterOp = 28 + FilterVarTypeUnderlyingOfKindOp FilterOp = 29 // m[$Value].Type.ConvertibleTo($Args[0]) // $Value type: string - FilterVarTypeConvertibleToOp FilterOp = 29 + FilterVarTypeConvertibleToOp FilterOp = 30 // m[$Value].Type.AssignableTo($Args[0]) // $Value type: string - FilterVarTypeAssignableToOp FilterOp = 30 + FilterVarTypeAssignableToOp FilterOp = 31 // m[$Value].Type.Implements($Args[0]) // $Value type: string - FilterVarTypeImplementsOp FilterOp = 31 + FilterVarTypeImplementsOp FilterOp = 32 // m[$Value].Type.HasMethod($Args[0]) // $Value type: string - FilterVarTypeHasMethodOp FilterOp = 32 + FilterVarTypeHasMethodOp FilterOp = 33 // m[$Value].Text.Matches($Args[0]) // $Value type: string - FilterVarTextMatchesOp FilterOp = 33 + FilterVarTextMatchesOp FilterOp = 34 // m[$Value].Contains($Args[0]) // $Value type: string - FilterVarContainsOp FilterOp = 34 + FilterVarContainsOp FilterOp = 35 // m.Deadcode() - FilterDeadcodeOp FilterOp = 35 + FilterDeadcodeOp FilterOp = 36 // m.GoVersion().Eq($Value) // $Value type: string - FilterGoVersionEqOp FilterOp = 36 + FilterGoVersionEqOp FilterOp = 37 // m.GoVersion().LessThan($Value) // $Value type: string - FilterGoVersionLessThanOp FilterOp = 37 + FilterGoVersionLessThanOp FilterOp = 38 // m.GoVersion().GreaterThan($Value) // $Value type: string - FilterGoVersionGreaterThanOp FilterOp = 38 + FilterGoVersionGreaterThanOp FilterOp = 39 // m.GoVersion().LessEqThan($Value) // $Value type: string - FilterGoVersionLessEqThanOp FilterOp = 39 + FilterGoVersionLessEqThanOp FilterOp = 40 // m.GoVersion().GreaterEqThan($Value) // $Value type: string - FilterGoVersionGreaterEqThanOp FilterOp = 40 + FilterGoVersionGreaterEqThanOp FilterOp = 41 // m.File.Imports($Value) // $Value type: string - FilterFileImportsOp FilterOp = 41 + FilterFileImportsOp FilterOp = 42 // m.File.PkgPath.Matches($Value) // $Value type: string - FilterFilePkgPathMatchesOp FilterOp = 42 + FilterFilePkgPathMatchesOp FilterOp = 43 // m.File.Name.Matches($Value) // $Value type: string - FilterFileNameMatchesOp FilterOp = 43 + FilterFileNameMatchesOp FilterOp = 44 // $Value holds a function name // $Value type: string - FilterFilterFuncRefOp FilterOp = 44 + FilterFilterFuncRefOp FilterOp = 45 // $Value holds a string constant // $Value type: string - FilterStringOp FilterOp = 45 + FilterStringOp FilterOp = 46 // $Value holds an int64 constant // $Value type: int64 - FilterIntOp FilterOp = 46 + FilterIntOp FilterOp = 47 // m[`$$`].Node.Parent().Is($Args[0]) - FilterRootNodeParentIsOp FilterOp = 47 + FilterRootNodeParentIsOp FilterOp = 48 // m[`$$`].SinkType.Is($Args[0]) - FilterRootSinkTypeIsOp FilterOp = 48 + FilterRootSinkTypeIsOp FilterOp = 
49 ) var filterOpNames = map[FilterOp]string{ - FilterInvalidOp: `Invalid`, - FilterNotOp: `Not`, - FilterAndOp: `And`, - FilterOrOp: `Or`, - FilterEqOp: `Eq`, - FilterNeqOp: `Neq`, - FilterGtOp: `Gt`, - FilterLtOp: `Lt`, - FilterGtEqOp: `GtEq`, - FilterLtEqOp: `LtEq`, - FilterVarAddressableOp: `VarAddressable`, - FilterVarComparableOp: `VarComparable`, - FilterVarPureOp: `VarPure`, - FilterVarConstOp: `VarConst`, - FilterVarConstSliceOp: `VarConstSlice`, - FilterVarTextOp: `VarText`, - FilterVarLineOp: `VarLine`, - FilterVarValueIntOp: `VarValueInt`, - FilterVarTypeSizeOp: `VarTypeSize`, - FilterVarTypeHasPointersOp: `VarTypeHasPointers`, - FilterVarFilterOp: `VarFilter`, - FilterVarNodeIsOp: `VarNodeIs`, - FilterVarObjectIsOp: `VarObjectIs`, - FilterVarObjectIsGlobalOp: `VarObjectIsGlobal`, - FilterVarTypeIsOp: `VarTypeIs`, - FilterVarTypeIdenticalToOp: `VarTypeIdenticalTo`, - FilterVarTypeUnderlyingIsOp: `VarTypeUnderlyingIs`, - FilterVarTypeOfKindOp: `VarTypeOfKind`, - FilterVarTypeUnderlyingOfKindOp: `VarTypeUnderlyingOfKind`, - FilterVarTypeConvertibleToOp: `VarTypeConvertibleTo`, - FilterVarTypeAssignableToOp: `VarTypeAssignableTo`, - FilterVarTypeImplementsOp: `VarTypeImplements`, - FilterVarTypeHasMethodOp: `VarTypeHasMethod`, - FilterVarTextMatchesOp: `VarTextMatches`, - FilterVarContainsOp: `VarContains`, - FilterDeadcodeOp: `Deadcode`, - FilterGoVersionEqOp: `GoVersionEq`, - FilterGoVersionLessThanOp: `GoVersionLessThan`, - FilterGoVersionGreaterThanOp: `GoVersionGreaterThan`, - FilterGoVersionLessEqThanOp: `GoVersionLessEqThan`, - FilterGoVersionGreaterEqThanOp: `GoVersionGreaterEqThan`, - FilterFileImportsOp: `FileImports`, - FilterFilePkgPathMatchesOp: `FilePkgPathMatches`, - FilterFileNameMatchesOp: `FileNameMatches`, - FilterFilterFuncRefOp: `FilterFuncRef`, - FilterStringOp: `String`, - FilterIntOp: `Int`, - FilterRootNodeParentIsOp: `RootNodeParentIs`, - FilterRootSinkTypeIsOp: `RootSinkTypeIs`, + FilterInvalidOp: `Invalid`, + FilterNotOp: `Not`, + FilterAndOp: `And`, + FilterOrOp: `Or`, + FilterEqOp: `Eq`, + FilterNeqOp: `Neq`, + FilterGtOp: `Gt`, + FilterLtOp: `Lt`, + FilterGtEqOp: `GtEq`, + FilterLtEqOp: `LtEq`, + FilterVarAddressableOp: `VarAddressable`, + FilterVarComparableOp: `VarComparable`, + FilterVarPureOp: `VarPure`, + FilterVarConstOp: `VarConst`, + FilterVarConstSliceOp: `VarConstSlice`, + FilterVarTextOp: `VarText`, + FilterVarLineOp: `VarLine`, + FilterVarValueIntOp: `VarValueInt`, + FilterVarTypeSizeOp: `VarTypeSize`, + FilterVarTypeHasPointersOp: `VarTypeHasPointers`, + FilterVarFilterOp: `VarFilter`, + FilterVarNodeIsOp: `VarNodeIs`, + FilterVarObjectIsOp: `VarObjectIs`, + FilterVarObjectIsGlobalOp: `VarObjectIsGlobal`, + FilterVarObjectIsVariadicParamOp: `VarObjectIsVariadicParam`, + FilterVarTypeIsOp: `VarTypeIs`, + FilterVarTypeIdenticalToOp: `VarTypeIdenticalTo`, + FilterVarTypeUnderlyingIsOp: `VarTypeUnderlyingIs`, + FilterVarTypeOfKindOp: `VarTypeOfKind`, + FilterVarTypeUnderlyingOfKindOp: `VarTypeUnderlyingOfKind`, + FilterVarTypeConvertibleToOp: `VarTypeConvertibleTo`, + FilterVarTypeAssignableToOp: `VarTypeAssignableTo`, + FilterVarTypeImplementsOp: `VarTypeImplements`, + FilterVarTypeHasMethodOp: `VarTypeHasMethod`, + FilterVarTextMatchesOp: `VarTextMatches`, + FilterVarContainsOp: `VarContains`, + FilterDeadcodeOp: `Deadcode`, + FilterGoVersionEqOp: `GoVersionEq`, + FilterGoVersionLessThanOp: `GoVersionLessThan`, + FilterGoVersionGreaterThanOp: `GoVersionGreaterThan`, + FilterGoVersionLessEqThanOp: `GoVersionLessEqThan`, + 
FilterGoVersionGreaterEqThanOp: `GoVersionGreaterEqThan`, + FilterFileImportsOp: `FileImports`, + FilterFilePkgPathMatchesOp: `FilePkgPathMatches`, + FilterFileNameMatchesOp: `FileNameMatches`, + FilterFilterFuncRefOp: `FilterFuncRef`, + FilterStringOp: `String`, + FilterIntOp: `Int`, + FilterRootNodeParentIsOp: `RootNodeParentIs`, + FilterRootSinkTypeIsOp: `RootSinkTypeIs`, } var filterOpFlags = map[FilterOp]uint64{ - FilterAndOp: flagIsBinaryExpr, - FilterOrOp: flagIsBinaryExpr, - FilterEqOp: flagIsBinaryExpr, - FilterNeqOp: flagIsBinaryExpr, - FilterGtOp: flagIsBinaryExpr, - FilterLtOp: flagIsBinaryExpr, - FilterGtEqOp: flagIsBinaryExpr, - FilterLtEqOp: flagIsBinaryExpr, - FilterVarAddressableOp: flagHasVar, - FilterVarComparableOp: flagHasVar, - FilterVarPureOp: flagHasVar, - FilterVarConstOp: flagHasVar, - FilterVarConstSliceOp: flagHasVar, - FilterVarTextOp: flagHasVar, - FilterVarLineOp: flagHasVar, - FilterVarValueIntOp: flagHasVar, - FilterVarTypeSizeOp: flagHasVar, - FilterVarTypeHasPointersOp: flagHasVar, - FilterVarFilterOp: flagHasVar, - FilterVarNodeIsOp: flagHasVar, - FilterVarObjectIsOp: flagHasVar, - FilterVarObjectIsGlobalOp: flagHasVar, - FilterVarTypeIsOp: flagHasVar, - FilterVarTypeIdenticalToOp: flagHasVar, - FilterVarTypeUnderlyingIsOp: flagHasVar, - FilterVarTypeOfKindOp: flagHasVar, - FilterVarTypeUnderlyingOfKindOp: flagHasVar, - FilterVarTypeConvertibleToOp: flagHasVar, - FilterVarTypeAssignableToOp: flagHasVar, - FilterVarTypeImplementsOp: flagHasVar, - FilterVarTypeHasMethodOp: flagHasVar, - FilterVarTextMatchesOp: flagHasVar, - FilterVarContainsOp: flagHasVar, - FilterStringOp: flagIsBasicLit, - FilterIntOp: flagIsBasicLit, + FilterAndOp: flagIsBinaryExpr, + FilterOrOp: flagIsBinaryExpr, + FilterEqOp: flagIsBinaryExpr, + FilterNeqOp: flagIsBinaryExpr, + FilterGtOp: flagIsBinaryExpr, + FilterLtOp: flagIsBinaryExpr, + FilterGtEqOp: flagIsBinaryExpr, + FilterLtEqOp: flagIsBinaryExpr, + FilterVarAddressableOp: flagHasVar, + FilterVarComparableOp: flagHasVar, + FilterVarPureOp: flagHasVar, + FilterVarConstOp: flagHasVar, + FilterVarConstSliceOp: flagHasVar, + FilterVarTextOp: flagHasVar, + FilterVarLineOp: flagHasVar, + FilterVarValueIntOp: flagHasVar, + FilterVarTypeSizeOp: flagHasVar, + FilterVarTypeHasPointersOp: flagHasVar, + FilterVarFilterOp: flagHasVar, + FilterVarNodeIsOp: flagHasVar, + FilterVarObjectIsOp: flagHasVar, + FilterVarObjectIsGlobalOp: flagHasVar, + FilterVarObjectIsVariadicParamOp: flagHasVar, + FilterVarTypeIsOp: flagHasVar, + FilterVarTypeIdenticalToOp: flagHasVar, + FilterVarTypeUnderlyingIsOp: flagHasVar, + FilterVarTypeOfKindOp: flagHasVar, + FilterVarTypeUnderlyingOfKindOp: flagHasVar, + FilterVarTypeConvertibleToOp: flagHasVar, + FilterVarTypeAssignableToOp: flagHasVar, + FilterVarTypeImplementsOp: flagHasVar, + FilterVarTypeHasMethodOp: flagHasVar, + FilterVarTextMatchesOp: flagHasVar, + FilterVarContainsOp: flagHasVar, + FilterStringOp: flagIsBasicLit, + FilterIntOp: flagIsBasicLit, } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go index d3b74090..aecb975d 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go @@ -55,6 +55,7 @@ func main() { {name: "VarNodeIs", comment: "m[$Value].Node.Is($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarObjectIs", comment: 
"m[$Value].Object.Is($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarObjectIsGlobal", comment: "m[$Value].Object.IsGlobal()", valueType: "string", flags: flagHasVar}, + {name: "VarObjectIsVariadicParam", comment: "m[$Value].Object.IsVariadicParam()", valueType: "string", flags: flagHasVar}, {name: "VarTypeIs", comment: "m[$Value].Type.Is($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeIdenticalTo", comment: "m[$Value].Type.IdenticalTo($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeUnderlyingIs", comment: "m[$Value].Type.Underlying().Is($Args[0])", valueType: "string", flags: flagHasVar}, diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go index c07a19f5..d7166891 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go @@ -699,6 +699,8 @@ func (l *irLoader) newFilter(filter ir.FilterExpr, info *filterInfo) (matchFilte result.fn = makeConstFilter(result.src, filter.Value.(string)) case ir.FilterVarObjectIsGlobalOp: result.fn = makeObjectIsGlobalFilter(result.src, filter.Value.(string)) + case ir.FilterVarObjectIsVariadicParamOp: + result.fn = makeObjectIsVariadicParamFilter(result.src, filter.Value.(string)) case ir.FilterVarConstSliceOp: result.fn = makeConstSliceFilter(result.src, filter.Value.(string)) case ir.FilterVarAddressableOp: diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go index 646091fe..4eb90d51 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go @@ -746,6 +746,8 @@ func (conv *converter) convertFilterExprImpl(e ast.Expr) ir.FilterExpr { return ir.FilterExpr{Op: ir.FilterVarObjectIsOp, Value: op.varName, Args: args} case "Object.IsGlobal": return ir.FilterExpr{Op: ir.FilterVarObjectIsGlobalOp, Value: op.varName} + case "Object.IsVariadicParam": + return ir.FilterExpr{Op: ir.FilterVarObjectIsVariadicParamOp, Value: op.varName} case "SinkType.Is": if op.varName != "$$" { // TODO: remove this restriction. diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go index 3bf3bf5a..b0909f75 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go @@ -6,41 +6,14 @@ import ( "github.com/quasilyte/gogrep" ) -// matchData is used to handle both regexp and AST match sets in the same way. -type matchData interface { - // TODO: don't use gogrep.CapturedNode type here. 
- - Node() ast.Node - CaptureList() []gogrep.CapturedNode - CapturedByName(name string) (ast.Node, bool) -} - -type commentMatchData struct { - node ast.Node - capture []gogrep.CapturedNode -} - -func (m commentMatchData) Node() ast.Node { return m.node } - -func (m commentMatchData) CaptureList() []gogrep.CapturedNode { return m.capture } - -func (m commentMatchData) CapturedByName(name string) (ast.Node, bool) { - for _, c := range m.capture { - if c.Name == name { - return c.Node, true - } - } - return nil, false -} - -type astMatchData struct { +type matchData struct { match gogrep.MatchData } -func (m astMatchData) Node() ast.Node { return m.match.Node } +func (m matchData) Node() ast.Node { return m.match.Node } -func (m astMatchData) CaptureList() []gogrep.CapturedNode { return m.match.Capture } +func (m matchData) CaptureList() []gogrep.CapturedNode { return m.match.Capture } -func (m astMatchData) CapturedByName(name string) (ast.Node, bool) { +func (m matchData) CapturedByName(name string) (ast.Node, bool) { return m.match.CapturedByName(name) } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/nodepath.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/nodepath.go index b0f02f0a..4ba741ee 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/nodepath.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/nodepath.go @@ -10,8 +10,8 @@ type nodePath struct { stack []ast.Node } -func newNodePath() nodePath { - return nodePath{stack: make([]ast.Node, 0, 32)} +func newNodePath() *nodePath { + return &nodePath{stack: make([]ast.Node, 0, 32)} } func (p nodePath) String() string { @@ -22,15 +22,15 @@ func (p nodePath) String() string { return strings.Join(parts, "/") } -func (p nodePath) Parent() ast.Node { +func (p *nodePath) Parent() ast.Node { return p.NthParent(1) } -func (p nodePath) Current() ast.Node { +func (p *nodePath) Current() ast.Node { return p.NthParent(0) } -func (p nodePath) NthParent(n int) ast.Node { +func (p *nodePath) NthParent(n int) ast.Node { index := uint(len(p.stack) - n - 1) if index < uint(len(p.stack)) { return p.stack[index] diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go index 1a2e2f05..41fbc899 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go @@ -8,6 +8,9 @@ import ( "io" "github.com/quasilyte/go-ruleguard/ruleguard/ir" + "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" + "github.com/quasilyte/go-ruleguard/ruleguard/typematch" + "github.com/quasilyte/gogrep" ) // Engine is the main ruleguard package API object. @@ -88,6 +91,21 @@ type LoadContext struct { Fset *token.FileSet } +type RunnerState struct { + gogrepState gogrep.MatcherState + gogrepSubState gogrep.MatcherState + nodePath *nodePath + evalEnv *quasigo.EvalEnv + typematchState *typematch.MatcherState + + object *rulesRunner +} + +// NewRunnerState creates a state object that can be used with RunContext. +func NewRunnerState(e *Engine) *RunnerState { + return newRunnerState(e.impl.state) +} + type RunContext struct { Debug string DebugImports bool @@ -115,6 +133,20 @@ type RunContext struct { // Note that this value is ignored for Suggest templates. // Ruleguard doesn't truncate suggested replacement candidates. 
TruncateLen int + + // State is an object that contains reusable resources needed for the rules to be executed. + // + // If nil, a new state will be allocated. + // + // The State object access is not synchronized. + // State should not be shared between multiple goroutines. + // There are 3 patterns that are safe: + // 1. For single-threaded programs, you can use a single state. + // 2. For controlled concurrency with workers, you can use a per-worker state. + // 3. For uncontrolled concurrency you can use a sync.Pool of states. + // + // Reusing the state properly can increase the performance significantly. + State *RunnerState } type ReportData struct { diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go index 92f6cc34..c76b6db3 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go @@ -56,47 +56,77 @@ type rulesRunner struct { // For named submatches we can't use it as the node can be located // deeper into the tree than the current node. // In those cases we need a more complicated algorithm. - nodePath nodePath + nodePath *nodePath filterParams filterParams } +func newRunnerState(es *engineState) *RunnerState { + gogrepState := gogrep.NewMatcherState() + gogrepSubState := gogrep.NewMatcherState() + state := &RunnerState{ + gogrepState: gogrepState, + gogrepSubState: gogrepSubState, + nodePath: newNodePath(), + evalEnv: es.env.GetEvalEnv(), + typematchState: typematch.NewMatcherState(), + object: &rulesRunner{}, + } + return state +} + +func (state *RunnerState) Reset() { + state.nodePath.stack = state.nodePath.stack[:0] + state.evalEnv.Stack.Reset() +} + func newRulesRunner(ctx *RunContext, buildContext *build.Context, state *engineState, rules *goRuleSet) *rulesRunner { + runnerState := ctx.State + if runnerState == nil { + runnerState = newRunnerState(state) + } else { + runnerState.Reset() + } + importer := newGoImporter(state, goImporterConfig{ fset: ctx.Fset, debugImports: ctx.DebugImports, debugPrint: ctx.DebugPrint, buildContext: buildContext, }) - gogrepState := gogrep.NewMatcherState() + gogrepState := runnerState.gogrepState gogrepState.Types = ctx.Types - gogrepSubState := gogrep.NewMatcherState() + gogrepSubState := runnerState.gogrepSubState gogrepSubState.Types = ctx.Types - evalEnv := state.env.GetEvalEnv() - rr := &rulesRunner{ + evalEnv := runnerState.evalEnv + + rr := runnerState.object + *rr = rulesRunner{ bgContext: context.Background(), ctx: ctx, importer: importer, rules: rules, gogrepState: gogrepState, gogrepSubState: gogrepSubState, - nodePath: newNodePath(), + nodePath: runnerState.nodePath, truncateLen: ctx.TruncateLen, filterParams: filterParams{ - typematchState: typematch.NewMatcherState(), + typematchState: runnerState.typematchState, env: evalEnv, importer: importer, ctx: ctx, }, } + evalEnv.Stack.Push(&rr.filterParams) if ctx.TruncateLen == 0 { rr.truncateLen = 60 } rr.filterParams.nodeText = rr.nodeText rr.filterParams.nodeString = rr.nodeString - rr.filterParams.nodePath = &rr.nodePath + rr.filterParams.nodePath = rr.nodePath rr.filterParams.gogrepSubState = &rr.gogrepSubState + return rr } @@ -160,7 +190,7 @@ func (rr *rulesRunner) run(f *ast.File) error { if rr.rules.universal.categorizedNum != 0 { var inspector astWalker - inspector.nodePath = &rr.nodePath + inspector.nodePath = rr.nodePath inspector.filterParams = &rr.filterParams 
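The `RunnerState`/`NewRunnerState` additions and the new `RunContext.State` field above exist so that the gogrep, quasigo, and typematch resources can be reused across runs. A minimal sketch of the third documented pattern (a `sync.Pool` of states), assuming the upstream import path `github.com/quasilyte/go-ruleguard/ruleguard` and an already-loaded `Engine`; the helper package and function names are illustrative:

```go
package lintutil // hypothetical caller-side helper

import (
	"go/ast"
	"sync"

	"github.com/quasilyte/go-ruleguard/ruleguard"
)

// newStatePool binds a pool of reusable runner states to a loaded engine.
func newStatePool(engine *ruleguard.Engine) *sync.Pool {
	return &sync.Pool{
		New: func() interface{} { return ruleguard.NewRunnerState(engine) },
	}
}

// runFile borrows a state for one file and returns it afterwards, so repeated
// runs avoid re-allocating the matcher state on every file.
func runFile(engine *ruleguard.Engine, pool *sync.Pool, ctx *ruleguard.RunContext, f *ast.File) error {
	state := pool.Get().(*ruleguard.RunnerState)
	defer pool.Put(state)
	ctx.State = state
	return engine.Run(ctx, f)
}
```

Reset() is called internally by newRulesRunner when a caller-provided state is passed in, so the pool only needs to hand states back and forth.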
inspector.Walk(f, func(n ast.Node, tag nodetag.Value) { rr.runRules(n, tag) @@ -183,7 +213,7 @@ func (rr *rulesRunner) runCommentRules(comment *ast.Comment) { file := rr.ctx.Fset.File(comment.Pos()) for _, rule := range rr.rules.universal.commentRules { - var m commentMatchData + var m matchData if rule.captureGroups { result := rule.pat.FindStringSubmatchIndex(comment.Text) if result == nil { @@ -200,13 +230,13 @@ func (rr *rulesRunner) runCommentRules(comment *ast.Comment) { // Consider this pattern: `(?Pfoo)|(bar)`. // If we have `bar` input string, will remain empty. if beginPos < 0 || endPos < 0 { - m.capture = append(m.capture, gogrep.CapturedNode{ + m.match.Capture = append(m.match.Capture, gogrep.CapturedNode{ Name: name, Node: &ast.Comment{Slash: comment.Pos()}, }) continue } - m.capture = append(m.capture, gogrep.CapturedNode{ + m.match.Capture = append(m.match.Capture, gogrep.CapturedNode{ Name: name, Node: &ast.Comment{ Slash: file.Pos(beginPos + file.Offset(comment.Pos())), @@ -214,7 +244,7 @@ func (rr *rulesRunner) runCommentRules(comment *ast.Comment) { }, }) } - m.node = &ast.Comment{ + m.match.Node = &ast.Comment{ Slash: file.Pos(result[0] + file.Offset(comment.Pos())), Text: comment.Text[result[0]:result[1]], } @@ -224,7 +254,7 @@ func (rr *rulesRunner) runCommentRules(comment *ast.Comment) { if result == nil { continue } - m.node = &ast.Comment{ + m.match.Node = &ast.Comment{ Slash: file.Pos(result[0] + file.Offset(comment.Pos())), Text: comment.Text[result[0]:result[1]], } @@ -307,7 +337,7 @@ func (rr *rulesRunner) reject(rule goRule, reason string, m matchData) { } } -func (rr *rulesRunner) handleCommentMatch(rule goCommentRule, m commentMatchData) bool { +func (rr *rulesRunner) handleCommentMatch(rule goCommentRule, m matchData) bool { if rule.base.filter.fn != nil { rr.filterParams.match = m filterResult := rule.base.filter.fn(&rr.filterParams) @@ -345,13 +375,13 @@ func (rr *rulesRunner) handleCommentMatch(rule goCommentRule, m commentMatchData func (rr *rulesRunner) handleMatch(rule goRule, m gogrep.MatchData) bool { if rule.filter.fn != nil || rule.do != nil { - rr.filterParams.match = astMatchData{match: m} + rr.filterParams.match = matchData{match: m} } if rule.filter.fn != nil { filterResult := rule.filter.fn(&rr.filterParams) if !filterResult.Matched() { - rr.reject(rule, filterResult.RejectReason(), astMatchData{match: m}) + rr.reject(rule, filterResult.RejectReason(), matchData{match: m}) return false } } @@ -379,9 +409,9 @@ func (rr *rulesRunner) handleMatch(rule goRule, m gogrep.MatchData) bool { suggestText = rr.filterParams.suggestString } } else { - messageText = rr.renderMessage(rule.msg, astMatchData{match: m}, true) + messageText = rr.renderMessage(rule.msg, matchData{match: m}, true) if rule.suggestion != "" { - suggestText = rr.renderMessage(rule.suggestion, astMatchData{match: m}, false) + suggestText = rr.renderMessage(rule.suggestion, matchData{match: m}, false) } } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go index b7474037..4b740b20 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go @@ -507,9 +507,14 @@ func (p *Pattern) matchIdentical(state *MatcherState, sub *pattern, typ types.Ty } pkgPath := sub.value.([2]string)[0] typeName := sub.value.([2]string)[1] - // obj.Pkg().Path() may 
be in a vendor directory. - path := strings.SplitAfter(obj.Pkg().Path(), "/vendor/") - return path[len(path)-1] == pkgPath && typeName == obj.Name() + if typeName != obj.Name() { + return false + } + objPath := obj.Pkg().Path() + if vendorPos := strings.Index(objPath, "/vendor/"); vendorPos != -1 { + objPath = objPath[vendorPos+len("/vendor/"):] + } + return objPath == pkgPath case opFuncNoSeq: typ, ok := typ.(*types.Signature) diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go index 962e9da2..d3226db2 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go @@ -9,6 +9,8 @@ import ( "regexp/syntax" "strconv" "strings" + + "golang.org/x/exp/typeparams" ) var invalidType = types.Typ[types.Invalid] @@ -295,3 +297,8 @@ func identOf(e ast.Expr) *ast.Ident { return nil } } + +func isTypeParam(typ types.Type) bool { + _, ok := typ.(*typeparams.TypeParam) + return ok +} diff --git a/vendor/github.com/quasilyte/gogrep/README.md b/vendor/github.com/quasilyte/gogrep/README.md index b6c2c47c..ecf0dc4c 100644 --- a/vendor/github.com/quasilyte/gogrep/README.md +++ b/vendor/github.com/quasilyte/gogrep/README.md @@ -24,7 +24,7 @@ $ go get github.com/quasilyte/gogrep To get a gogrep command-line tool, install the `cmd/gogrep` Go submodule. ```bash -$ go install github.com/quasilyte/cmd/gogrep +$ go install github.com/quasilyte/gogrep/cmd/gogrep@latest ``` See [docs/gogrep_cli.md](_docs/gogrep_cli.md) to learn how to use it. diff --git a/vendor/github.com/quasilyte/gogrep/compile.go b/vendor/github.com/quasilyte/gogrep/compile.go index a00a39cc..31b60dfa 100644 --- a/vendor/github.com/quasilyte/gogrep/compile.go +++ b/vendor/github.com/quasilyte/gogrep/compile.go @@ -122,16 +122,19 @@ func (c *compiler) compileNode(n ast.Node) { c.compileStmt(n) case *ast.ValueSpec: c.compileValueSpec(n) - case stmtSlice: - c.compileStmtSlice(n) - case declSlice: - c.compileDeclSlice(n) - case ExprSlice: - c.compileExprSlice(n) case *rangeClause: c.compileRangeClause(n) case *rangeHeader: c.compileRangeHeader(n) + case *NodeSlice: + switch n.Kind { + case StmtNodeSlice: + c.compileStmtSlice(n.stmtSlice) + case DeclNodeSlice: + c.compileDeclSlice(n.declSlice) + case ExprNodeSlice: + c.compileExprSlice(n.exprSlice) + } default: panic(c.errorf(n, "compileNode: unexpected %T", n)) } @@ -1191,7 +1194,7 @@ func (c *compiler) compileSendStmt(n *ast.SendStmt) { c.compileExpr(n.Value) } -func (c *compiler) compileDeclSlice(decls declSlice) { +func (c *compiler) compileDeclSlice(decls []ast.Decl) { c.emitInstOp(opMultiDecl) for _, n := range decls { c.compileDecl(n) @@ -1199,7 +1202,7 @@ func (c *compiler) compileDeclSlice(decls declSlice) { c.emitInstOp(opEnd) } -func (c *compiler) compileStmtSlice(stmts stmtSlice) { +func (c *compiler) compileStmtSlice(stmts []ast.Stmt) { c.emitInstOp(opMultiStmt) insideStmtList := c.insideStmtList c.insideStmtList = true @@ -1210,7 +1213,7 @@ func (c *compiler) compileStmtSlice(stmts stmtSlice) { c.emitInstOp(opEnd) } -func (c *compiler) compileExprSlice(exprs ExprSlice) { +func (c *compiler) compileExprSlice(exprs []ast.Expr) { c.emitInstOp(opMultiExpr) for _, n := range exprs { c.compileExpr(n) diff --git a/vendor/github.com/quasilyte/gogrep/gogrep.go b/vendor/github.com/quasilyte/gogrep/gogrep.go 
index 313a9a25..47a03f9b 100644 --- a/vendor/github.com/quasilyte/gogrep/gogrep.go +++ b/vendor/github.com/quasilyte/gogrep/gogrep.go @@ -11,7 +11,7 @@ import ( ) func IsEmptyNodeSlice(n ast.Node) bool { - if list, ok := n.(NodeSlice); ok { + if list, ok := n.(*NodeSlice); ok { return list.Len() == 0 } return false @@ -62,6 +62,9 @@ type MatcherState struct { // actual matching phase) capture []CapturedNode + nodeSlices []NodeSlice + nodeSlicesUsed int + pc int partial PartialNode @@ -69,7 +72,8 @@ type MatcherState struct { func NewMatcherState() MatcherState { return MatcherState{ - capture: make([]CapturedNode, 0, 8), + capture: make([]CapturedNode, 0, 8), + nodeSlices: make([]NodeSlice, 16), } } @@ -143,34 +147,37 @@ func Compile(config CompileConfig) (*Pattern, PatternInfo, error) { } func Walk(root ast.Node, fn func(n ast.Node) bool) { - switch root := root.(type) { - case ExprSlice: - for _, e := range root { - ast.Inspect(e, fn) - } - case stmtSlice: - for _, e := range root { - ast.Inspect(e, fn) - } - case fieldSlice: - for _, e := range root { - ast.Inspect(e, fn) - } - case identSlice: - for _, e := range root { - ast.Inspect(e, fn) + if root, ok := root.(*NodeSlice); ok { + switch root.Kind { + case ExprNodeSlice: + for _, e := range root.exprSlice { + ast.Inspect(e, fn) + } + case StmtNodeSlice: + for _, e := range root.stmtSlice { + ast.Inspect(e, fn) + } + case FieldNodeSlice: + for _, e := range root.fieldSlice { + ast.Inspect(e, fn) + } + case IdentNodeSlice: + for _, e := range root.identSlice { + ast.Inspect(e, fn) + } + case SpecNodeSlice: + for _, e := range root.specSlice { + ast.Inspect(e, fn) + } + default: + for _, e := range root.declSlice { + ast.Inspect(e, fn) + } } - case specSlice: - for _, e := range root { - ast.Inspect(e, fn) - } - case declSlice: - for _, e := range root { - ast.Inspect(e, fn) - } - default: - ast.Inspect(root, fn) + return } + + ast.Inspect(root, fn) } func newPatternInfo() PatternInfo { diff --git a/vendor/github.com/quasilyte/gogrep/match.go b/vendor/github.com/quasilyte/gogrep/match.go index d4e3243a..d4b317b9 100644 --- a/vendor/github.com/quasilyte/gogrep/match.go +++ b/vendor/github.com/quasilyte/gogrep/match.go @@ -45,8 +45,36 @@ func (m *matcher) resetCapture(state *MatcherState) { } } +func (m *matcher) toStmtSlice(state *MatcherState, nodes ...ast.Node) *NodeSlice { + slice := m.allocNodeSlice(state) + var stmts []ast.Stmt + for _, node := range nodes { + switch x := node.(type) { + case nil: + case ast.Stmt: + stmts = append(stmts, x) + case ast.Expr: + stmts = append(stmts, &ast.ExprStmt{X: x}) + default: + panic(fmt.Sprintf("unexpected node type: %T", x)) + } + } + slice.assignStmtSlice(stmts) + return slice +} + +func (m *matcher) allocNodeSlice(state *MatcherState) *NodeSlice { + if state.nodeSlicesUsed < len(state.nodeSlices) { + i := state.nodeSlicesUsed + state.nodeSlicesUsed++ + return &state.nodeSlices[i] + } + return &NodeSlice{} +} + func (m *matcher) MatchNode(state *MatcherState, n ast.Node, accept func(MatchData)) { state.pc = 0 + state.nodeSlicesUsed = 0 inst := m.nextInst(state) switch inst.op { case opMultiStmt: @@ -91,24 +119,32 @@ func (m *matcher) MatchNode(state *MatcherState, n ast.Node, accept func(MatchDa } func (m *matcher) walkDeclSlice(state *MatcherState, decls []ast.Decl, accept func(MatchData)) { - m.walkNodeSlice(state, declSlice(decls), accept) + slice := m.allocNodeSlice(state) + slice.assignDeclSlice(decls) + m.walkNodeSlice(state, slice, accept) 
} func (m *matcher) walkExprSlice(state *MatcherState, exprs []ast.Expr, accept func(MatchData)) { - m.walkNodeSlice(state, ExprSlice(exprs), accept) + slice := m.allocNodeSlice(state) + slice.assignExprSlice(exprs) + m.walkNodeSlice(state, slice, accept) } func (m *matcher) walkStmtSlice(state *MatcherState, stmts []ast.Stmt, accept func(MatchData)) { - m.walkNodeSlice(state, stmtSlice(stmts), accept) + slice := m.allocNodeSlice(state) + slice.assignStmtSlice(stmts) + m.walkNodeSlice(state, slice, accept) } -func (m *matcher) walkNodeSlice(state *MatcherState, nodes NodeSlice, accept func(MatchData)) { +func (m *matcher) walkNodeSlice(state *MatcherState, nodes *NodeSlice, accept func(MatchData)) { sliceLen := nodes.Len() from := 0 + tmpSlice := m.allocNodeSlice(state) for { state.pc = 1 // FIXME: this is a kludge m.resetCapture(state) - matched, offset := m.matchNodeList(state, nodes.slice(from, sliceLen), true) + nodes.SliceInto(tmpSlice, from, sliceLen) + matched, offset := m.matchNodeList(state, tmpSlice, true) if matched == nil { break } @@ -422,11 +458,11 @@ func (m *matcher) matchNodeWithInst(state *MatcherState, inst instruction, n ast case opIfNamedOptStmt: n, ok := n.(*ast.IfStmt) return ok && n.Else == nil && m.matchNode(state, n.Body) && - m.matchNamed(state, m.stringValue(inst), toStmtSlice(n.Cond, n.Init)) + m.matchNamed(state, m.stringValue(inst), m.toStmtSlice(state, n.Cond, n.Init)) case opIfNamedOptElseStmt: n, ok := n.(*ast.IfStmt) return ok && n.Else != nil && m.matchNode(state, n.Body) && m.matchNode(state, n.Else) && - m.matchNamed(state, m.stringValue(inst), toStmtSlice(n.Cond, n.Init)) + m.matchNamed(state, m.stringValue(inst), m.toStmtSlice(state, n.Cond, n.Init)) case opCaseClause: n, ok := n.(*ast.CaseClause) @@ -641,33 +677,43 @@ func (m *matcher) matchArgList(state *MatcherState, exprs []ast.Expr) bool { } func (m *matcher) matchStmtSlice(state *MatcherState, stmts []ast.Stmt) bool { - matched, _ := m.matchNodeList(state, stmtSlice(stmts), false) + slice := m.allocNodeSlice(state) + slice.assignStmtSlice(stmts) + matched, _ := m.matchNodeList(state, slice, false) return matched != nil } func (m *matcher) matchExprSlice(state *MatcherState, exprs []ast.Expr) bool { - matched, _ := m.matchNodeList(state, ExprSlice(exprs), false) + slice := m.allocNodeSlice(state) + slice.assignExprSlice(exprs) + matched, _ := m.matchNodeList(state, slice, false) return matched != nil } func (m *matcher) matchFieldSlice(state *MatcherState, fields []*ast.Field) bool { - matched, _ := m.matchNodeList(state, fieldSlice(fields), false) + slice := m.allocNodeSlice(state) + slice.assignFieldSlice(fields) + matched, _ := m.matchNodeList(state, slice, false) return matched != nil } func (m *matcher) matchIdentSlice(state *MatcherState, idents []*ast.Ident) bool { - matched, _ := m.matchNodeList(state, identSlice(idents), false) + slice := m.allocNodeSlice(state) + slice.assignIdentSlice(idents) + matched, _ := m.matchNodeList(state, slice, false) return matched != nil } func (m *matcher) matchSpecSlice(state *MatcherState, specs []ast.Spec) bool { - matched, _ := m.matchNodeList(state, specSlice(specs), false) + slice := m.allocNodeSlice(state) + slice.assignSpecSlice(specs) + matched, _ := m.matchNodeList(state, slice, false) return matched != nil } // matchNodeList matches two lists of nodes. It uses a common algorithm to match // wildcard patterns with any number of nodes without recursion. 
-func (m *matcher) matchNodeList(state *MatcherState, nodes NodeSlice, partial bool) (matched ast.Node, offset int) { +func (m *matcher) matchNodeList(state *MatcherState, nodes *NodeSlice, partial bool) (matched ast.Node, offset int) { sliceLen := nodes.Len() inst := m.nextInst(state) if inst.op == opEnd { @@ -727,7 +773,9 @@ func (m *matcher) matchNodeList(state *MatcherState, nodes NodeSlice, partial bo case "", "_": return true } - return m.matchNamed(state, wildName, nodes.slice(wildStart, j)) + slice := m.allocNodeSlice(state) + nodes.SliceInto(slice, wildStart, j) + return m.matchNamed(state, wildName, slice) } for ; inst.op != opEnd || j < sliceLen; inst = m.nextInst(state) { if inst.op != opEnd { @@ -776,7 +824,9 @@ func (m *matcher) matchNodeList(state *MatcherState, nodes NodeSlice, partial bo if !wouldMatch() { return nil, -1 } - return nodes.slice(partialStart, partialEnd), partialEnd + 1 + slice := m.allocNodeSlice(state) + nodes.SliceInto(slice, partialStart, partialEnd) + return slice, partialEnd + 1 } func (m *matcher) matchRangeClause(state *MatcherState, n ast.Node, accept func(MatchData)) { @@ -919,58 +969,56 @@ func equalNodes(x, y ast.Node) bool { if x == nil || y == nil { return x == y } - switch x := x.(type) { - case stmtSlice: - y, ok := y.(stmtSlice) - if !ok || len(x) != len(y) { + if x, ok := x.(*NodeSlice); ok { + y, ok := y.(*NodeSlice) + if !ok || x.Kind != y.Kind || x.Len() != y.Len() { return false } - for i := range x { - if !astequal.Stmt(x[i], y[i]) { - return false + switch x.Kind { + case ExprNodeSlice: + for i, n1 := range x.exprSlice { + n2 := y.exprSlice[i] + if !astequal.Expr(n1, n2) { + return false + } } - } - return true - case ExprSlice: - y, ok := y.(ExprSlice) - if !ok || len(x) != len(y) { - return false - } - for i := range x { - if !astequal.Expr(x[i], y[i]) { - return false + case StmtNodeSlice: + for i, n1 := range x.stmtSlice { + n2 := y.stmtSlice[i] + if !astequal.Stmt(n1, n2) { + return false + } } - } - return true - case declSlice: - y, ok := y.(declSlice) - if !ok || len(x) != len(y) { - return false - } - for i := range x { - if !astequal.Decl(x[i], y[i]) { - return false + case FieldNodeSlice: + for i, n1 := range x.fieldSlice { + n2 := y.fieldSlice[i] + if !astequal.Node(n1, n2) { + return false + } + } + case IdentNodeSlice: + for i, n1 := range x.identSlice { + n2 := y.identSlice[i] + if n1.Name != n2.Name { + return false + } + } + case SpecNodeSlice: + for i, n1 := range x.specSlice { + n2 := y.specSlice[i] + if !astequal.Node(n1, n2) { + return false + } + } + case DeclNodeSlice: + for i, n1 := range x.declSlice { + n2 := y.declSlice[i] + if !astequal.Decl(n1, n2) { + return false + } } } return true - - default: - return astequal.Node(x, y) - } -} - -func toStmtSlice(nodes ...ast.Node) stmtSlice { - var stmts []ast.Stmt - for _, node := range nodes { - switch x := node.(type) { - case nil: - case ast.Stmt: - stmts = append(stmts, x) - case ast.Expr: - stmts = append(stmts, &ast.ExprStmt{X: x}) - default: - panic(fmt.Sprintf("unexpected node type: %T", x)) - } } - return stmtSlice(stmts) + return astequal.Node(x, y) } diff --git a/vendor/github.com/quasilyte/gogrep/parse.go b/vendor/github.com/quasilyte/gogrep/parse.go index aa5ffbf9..3c6854bd 100644 --- a/vendor/github.com/quasilyte/gogrep/parse.go +++ b/vendor/github.com/quasilyte/gogrep/parse.go @@ -174,7 +174,9 @@ func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, error) { if len(cl.Elts) == 1 { return cl.Elts[0], nil } 
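The gogrep hunks above (and the `slices.go` rewrite further below) replace the interface-based `ExprSlice`/`stmtSlice`/... types with a single concrete `*NodeSlice` carrying a `Kind` tag, which lets the matcher reuse slice headers from `MatcherState` instead of allocating new ones. A sketch of how a consumer of captured nodes might adapt; only the exported `Kind`, `ExprNodeSlice`, and `GetExprSlice` from this diff are used, and the surrounding function is illustrative:

```go
package lintutil // hypothetical caller-side helper

import (
	"go/ast"

	"github.com/quasilyte/gogrep"
)

// capturedExprs unpacks a captured node that may be either a single expression
// or an expression slice under the new *NodeSlice representation.
func capturedExprs(n ast.Node) []ast.Expr {
	if list, ok := n.(*gogrep.NodeSlice); ok {
		if list.Kind == gogrep.ExprNodeSlice {
			return list.GetExprSlice()
		}
		return nil // a statement/field/ident/spec/decl slice, not expressions
	}
	if e, ok := n.(ast.Expr); ok {
		return []ast.Expr{e}
	}
	return nil
}
```

The trade-off is a type switch on `Kind` in place of interface dispatch, in exchange for slice values that can live in a preallocated arena (`MatcherState.nodeSlices`).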
- return ExprSlice(cl.Elts), nil + slice := &NodeSlice{} + slice.assignExprSlice(cl.Elts) + return slice, nil } // then try as statements @@ -185,7 +187,9 @@ func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, error) { if len(bl.List) == 1 { return bl.List[0], nil } - return stmtSlice(bl.List), nil + slice := &NodeSlice{} + slice.assignStmtSlice(bl.List) + return slice, nil } // Statements is what covers most cases, so it will give // the best overall error message. Show positions @@ -199,7 +203,9 @@ func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, error) { if len(f.Decls) == 1 { return f.Decls[0], nil } - return declSlice(f.Decls), nil + slice := &NodeSlice{} + slice.assignDeclSlice(f.Decls) + return slice, nil } // try as a whole file diff --git a/vendor/github.com/quasilyte/gogrep/slices.go b/vendor/github.com/quasilyte/gogrep/slices.go index 13775a81..fb969b51 100644 --- a/vendor/github.com/quasilyte/gogrep/slices.go +++ b/vendor/github.com/quasilyte/gogrep/slices.go @@ -5,54 +5,146 @@ import ( "go/token" ) -type NodeSlice interface { - At(i int) ast.Node - Len() int - slice(from, to int) NodeSlice - ast.Node -} +type NodeSliceKind uint32 + +const ( + ExprNodeSlice NodeSliceKind = iota + StmtNodeSlice + FieldNodeSlice + IdentNodeSlice + SpecNodeSlice + DeclNodeSlice +) + +type NodeSlice struct { + Kind NodeSliceKind -type ( - ExprSlice []ast.Expr + exprSlice []ast.Expr stmtSlice []ast.Stmt fieldSlice []*ast.Field identSlice []*ast.Ident specSlice []ast.Spec declSlice []ast.Decl -) +} + +func (s *NodeSlice) GetExprSlice() []ast.Expr { return s.exprSlice } +func (s *NodeSlice) GetStmtSlice() []ast.Stmt { return s.stmtSlice } +func (s *NodeSlice) GetFieldSlice() []*ast.Field { return s.fieldSlice } +func (s *NodeSlice) GetIdentSlice() []*ast.Ident { return s.identSlice } +func (s *NodeSlice) GetSpecSlice() []ast.Spec { return s.specSlice } +func (s *NodeSlice) GetDeclSlice() []ast.Decl { return s.declSlice } + +func (s *NodeSlice) assignExprSlice(xs []ast.Expr) { + s.Kind = ExprNodeSlice + s.exprSlice = xs +} + +func (s *NodeSlice) assignStmtSlice(xs []ast.Stmt) { + s.Kind = StmtNodeSlice + s.stmtSlice = xs +} + +func (s *NodeSlice) assignFieldSlice(xs []*ast.Field) { + s.Kind = FieldNodeSlice + s.fieldSlice = xs +} + +func (s *NodeSlice) assignIdentSlice(xs []*ast.Ident) { + s.Kind = IdentNodeSlice + s.identSlice = xs +} + +func (s *NodeSlice) assignSpecSlice(xs []ast.Spec) { + s.Kind = SpecNodeSlice + s.specSlice = xs +} + +func (s *NodeSlice) assignDeclSlice(xs []ast.Decl) { + s.Kind = DeclNodeSlice + s.declSlice = xs +} + +func (s *NodeSlice) Len() int { + switch s.Kind { + case ExprNodeSlice: + return len(s.exprSlice) + case StmtNodeSlice: + return len(s.stmtSlice) + case FieldNodeSlice: + return len(s.fieldSlice) + case IdentNodeSlice: + return len(s.identSlice) + case SpecNodeSlice: + return len(s.specSlice) + default: + return len(s.declSlice) + } +} -func (l ExprSlice) Len() int { return len(l) } -func (l ExprSlice) At(i int) ast.Node { return l[i] } -func (l ExprSlice) slice(i, j int) NodeSlice { return l[i:j] } -func (l ExprSlice) Pos() token.Pos { return l[0].Pos() } -func (l ExprSlice) End() token.Pos { return l[len(l)-1].End() } - -func (l stmtSlice) Len() int { return len(l) } -func (l stmtSlice) At(i int) ast.Node { return l[i] } -func (l stmtSlice) slice(i, j int) NodeSlice { return l[i:j] } -func (l stmtSlice) Pos() token.Pos { return l[0].Pos() } -func (l stmtSlice) End() token.Pos { return 
l[len(l)-1].End() } - -func (l fieldSlice) Len() int { return len(l) } -func (l fieldSlice) At(i int) ast.Node { return l[i] } -func (l fieldSlice) slice(i, j int) NodeSlice { return l[i:j] } -func (l fieldSlice) Pos() token.Pos { return l[0].Pos() } -func (l fieldSlice) End() token.Pos { return l[len(l)-1].End() } - -func (l identSlice) Len() int { return len(l) } -func (l identSlice) At(i int) ast.Node { return l[i] } -func (l identSlice) slice(i, j int) NodeSlice { return l[i:j] } -func (l identSlice) Pos() token.Pos { return l[0].Pos() } -func (l identSlice) End() token.Pos { return l[len(l)-1].End() } - -func (l specSlice) Len() int { return len(l) } -func (l specSlice) At(i int) ast.Node { return l[i] } -func (l specSlice) slice(i, j int) NodeSlice { return l[i:j] } -func (l specSlice) Pos() token.Pos { return l[0].Pos() } -func (l specSlice) End() token.Pos { return l[len(l)-1].End() } - -func (l declSlice) Len() int { return len(l) } -func (l declSlice) At(i int) ast.Node { return l[i] } -func (l declSlice) slice(i, j int) NodeSlice { return l[i:j] } -func (l declSlice) Pos() token.Pos { return l[0].Pos() } -func (l declSlice) End() token.Pos { return l[len(l)-1].End() } +func (s *NodeSlice) At(i int) ast.Node { + switch s.Kind { + case ExprNodeSlice: + return s.exprSlice[i] + case StmtNodeSlice: + return s.stmtSlice[i] + case FieldNodeSlice: + return s.fieldSlice[i] + case IdentNodeSlice: + return s.identSlice[i] + case SpecNodeSlice: + return s.specSlice[i] + default: + return s.declSlice[i] + } +} + +func (s *NodeSlice) SliceInto(dst *NodeSlice, i, j int) { + switch s.Kind { + case ExprNodeSlice: + dst.assignExprSlice(s.exprSlice[i:j]) + case StmtNodeSlice: + dst.assignStmtSlice(s.stmtSlice[i:j]) + case FieldNodeSlice: + dst.assignFieldSlice(s.fieldSlice[i:j]) + case IdentNodeSlice: + dst.assignIdentSlice(s.identSlice[i:j]) + case SpecNodeSlice: + dst.assignSpecSlice(s.specSlice[i:j]) + default: + dst.assignDeclSlice(s.declSlice[i:j]) + } +} + +func (s *NodeSlice) Pos() token.Pos { + switch s.Kind { + case ExprNodeSlice: + return s.exprSlice[0].Pos() + case StmtNodeSlice: + return s.stmtSlice[0].Pos() + case FieldNodeSlice: + return s.fieldSlice[0].Pos() + case IdentNodeSlice: + return s.identSlice[0].Pos() + case SpecNodeSlice: + return s.specSlice[0].Pos() + default: + return s.declSlice[0].Pos() + } +} + +func (s *NodeSlice) End() token.Pos { + switch s.Kind { + case ExprNodeSlice: + return s.exprSlice[len(s.exprSlice)-1].End() + case StmtNodeSlice: + return s.stmtSlice[len(s.stmtSlice)-1].End() + case FieldNodeSlice: + return s.fieldSlice[len(s.fieldSlice)-1].End() + case IdentNodeSlice: + return s.identSlice[len(s.identSlice)-1].End() + case SpecNodeSlice: + return s.specSlice[len(s.specSlice)-1].End() + default: + return s.declSlice[len(s.declSlice)-1].End() + } +} diff --git a/vendor/github.com/ryancurrah/gomodguard/.golangci.yml b/vendor/github.com/ryancurrah/gomodguard/.golangci.yml index 0fbf6c04..a0e6fd55 100644 --- a/vendor/github.com/ryancurrah/gomodguard/.golangci.yml +++ b/vendor/github.com/ryancurrah/gomodguard/.golangci.yml @@ -60,7 +60,6 @@ linters: enable: - asciicheck - bodyclose - - deadcode - dogsled - dupl - durationcheck @@ -100,7 +99,6 @@ linters: - rowserrcheck - sqlclosecheck - staticcheck - - structcheck - stylecheck - testpackage - thelper @@ -109,6 +107,5 @@ linters: - unconvert - unparam - unused - - varcheck - whitespace - wsl diff --git a/vendor/github.com/ryancurrah/gomodguard/.goreleaser.yml 
b/vendor/github.com/ryancurrah/gomodguard/.goreleaser.yml index 3daecfd7..f3675a9c 100644 --- a/vendor/github.com/ryancurrah/gomodguard/.goreleaser.yml +++ b/vendor/github.com/ryancurrah/gomodguard/.goreleaser.yml @@ -3,12 +3,12 @@ builds: env: - CGO_ENABLED=0 archives: -- replacements: - darwin: Darwin - linux: Linux - windows: Windows - 386: i386 - amd64: x86_64 +- name_template: >- + {{ .ProjectName }}_ + {{- title .Os }}_ + {{- if eq .Arch "amd64" }}x86_64 + {{- else if eq .Arch "386" }}i386 + {{- else }}{{ .Arch }}{{ end }} checksum: name_template: 'checksums.txt' dockers: @@ -21,7 +21,6 @@ dockers: dockerfile: Dockerfile.goreleaser build_flag_templates: - "--pull" - - "--build-arg=gomodguard_VERSION={{.Version}}" - "--label=org.opencontainers.image.created={{.Date}}" - "--label=org.opencontainers.image.name={{.ProjectName}}" - "--label=org.opencontainers.image.revision={{.FullCommit}}" diff --git a/vendor/github.com/ryancurrah/gomodguard/Dockerfile b/vendor/github.com/ryancurrah/gomodguard/Dockerfile index 719a0ebd..2f1d3340 100644 --- a/vendor/github.com/ryancurrah/gomodguard/Dockerfile +++ b/vendor/github.com/ryancurrah/gomodguard/Dockerfile @@ -1,16 +1,12 @@ -ARG GO_VERSION=1.14.2 -ARG ALPINE_VERSION=3.11 -ARG gomodguard_VERSION= - # ---- Build container -FROM golang:${GO_VERSION}-alpine${ALPINE_VERSION} AS builder +FROM golang:alpine AS builder WORKDIR /gomodguard COPY . . RUN apk add --no-cache git RUN go build -o gomodguard cmd/gomodguard/main.go # ---- App container -FROM golang:${GO_VERSION}-alpine${ALPINE_VERSION} +FROM golang:alpine WORKDIR / RUN apk --no-cache add ca-certificates COPY --from=builder gomodguard/gomodguard / diff --git a/vendor/github.com/ryancurrah/gomodguard/Dockerfile.goreleaser b/vendor/github.com/ryancurrah/gomodguard/Dockerfile.goreleaser index 57a042a6..ccaaa895 100644 --- a/vendor/github.com/ryancurrah/gomodguard/Dockerfile.goreleaser +++ b/vendor/github.com/ryancurrah/gomodguard/Dockerfile.goreleaser @@ -1,9 +1,5 @@ -ARG GO_VERSION=1.14.2 -ARG ALPINE_VERSION=3.11 -ARG gomodguard_VERSION= - # ---- App container -FROM golang:${GO_VERSION}-alpine${ALPINE_VERSION} +FROM golang:alpine WORKDIR / RUN apk --no-cache add ca-certificates COPY gomodguard /gomodguard diff --git a/vendor/github.com/ryancurrah/gomodguard/Makefile b/vendor/github.com/ryancurrah/gomodguard/Makefile index 76667579..5235d5aa 100644 --- a/vendor/github.com/ryancurrah/gomodguard/Makefile +++ b/vendor/github.com/ryancurrah/gomodguard/Makefile @@ -24,6 +24,10 @@ cover: dockerrun: dockerbuild docker run -v "${current_dir}/.gomodguard.yaml:/.gomodguard.yaml" ryancurrah/gomodguard:latest +.PHONY: snapshot +snapshot: + goreleaser --rm-dist --snapshot + .PHONY: release release: goreleaser --rm-dist @@ -39,4 +43,4 @@ install-tools-mac: .PHONY: install-go-tools install-go-tools: - go get github.com/t-yuki/gocover-cobertura + go install -v github.com/t-yuki/gocover-cobertura diff --git a/vendor/github.com/ryancurrah/gomodguard/README.md b/vendor/github.com/ryancurrah/gomodguard/README.md index 8e2e4168..4945f010 100644 --- a/vendor/github.com/ryancurrah/gomodguard/README.md +++ b/vendor/github.com/ryancurrah/gomodguard/README.md @@ -115,7 +115,7 @@ Resulting checkstyle file ## Install ``` -go get -u github.com/ryancurrah/gomodguard/cmd/gomodguard +go install github.com/ryancurrah/gomodguard/cmd/gomodguard ``` ## 
Develop diff --git a/vendor/github.com/ryancurrah/gomodguard/allowed.go b/vendor/github.com/ryancurrah/gomodguard/allowed.go new file mode 100644 index 00000000..5b0d26f8 --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/allowed.go @@ -0,0 +1,39 @@ +package gomodguard + +import "strings" + +// Allowed is a list of modules and module +// domains that are allowed to be used. +type Allowed struct { + Modules []string `yaml:"modules"` + Domains []string `yaml:"domains"` +} + +// IsAllowedModule returns true if the given module +// name is in the allowed modules list. +func (a *Allowed) IsAllowedModule(moduleName string) bool { + allowedModules := a.Modules + + for i := range allowedModules { + if strings.TrimSpace(moduleName) == strings.TrimSpace(allowedModules[i]) { + return true + } + } + + return false +} + +// IsAllowedModuleDomain returns true if the given modules domain is +// in the allowed module domains list. +func (a *Allowed) IsAllowedModuleDomain(moduleName string) bool { + allowedDomains := a.Domains + + for i := range allowedDomains { + if strings.HasPrefix(strings.TrimSpace(strings.ToLower(moduleName)), + strings.TrimSpace(strings.ToLower(allowedDomains[i]))) { + return true + } + } + + return false +} diff --git a/vendor/github.com/ryancurrah/gomodguard/blocked.go b/vendor/github.com/ryancurrah/gomodguard/blocked.go new file mode 100644 index 00000000..2a6e5c21 --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/blocked.go @@ -0,0 +1,189 @@ +package gomodguard + +import ( + "fmt" + "strings" + + "github.com/Masterminds/semver" +) + +// Blocked is a list of modules that are +// blocked and not to be used. +type Blocked struct { + Modules BlockedModules `yaml:"modules"` + Versions BlockedVersions `yaml:"versions"` + LocalReplaceDirectives bool `yaml:"local_replace_directives"` +} + +// BlockedVersion has a version constraint a reason why the the module version is blocked. +type BlockedVersion struct { + Version string `yaml:"version"` + Reason string `yaml:"reason"` +} + +// IsLintedModuleVersionBlocked returns true if a version constraint is specified and the +// linted module version matches the constraint. +func (r *BlockedVersion) IsLintedModuleVersionBlocked(lintedModuleVersion string) bool { + if r.Version == "" { + return false + } + + constraint, err := semver.NewConstraint(r.Version) + if err != nil { + return false + } + + version, err := semver.NewVersion(lintedModuleVersion) + if err != nil { + return false + } + + meet := constraint.Check(version) + + return meet +} + +// Message returns the reason why the module version is blocked. +func (r *BlockedVersion) Message(lintedModuleVersion string) string { + var sb strings.Builder + + // Add version contraint to message. + _, _ = fmt.Fprintf(&sb, "version `%s` is blocked because it does not meet the version constraint `%s`.", + lintedModuleVersion, r.Version) + + if r.Reason == "" { + return sb.String() + } + + // Add reason to message. + _, _ = fmt.Fprintf(&sb, " %s.", strings.TrimRight(r.Reason, ".")) + + return sb.String() +} + +// BlockedModule has alternative modules to use and a reason why the module is blocked. +type BlockedModule struct { + Recommendations []string `yaml:"recommendations"` + Reason string `yaml:"reason"` +} + +// IsCurrentModuleARecommendation returns true if the current module is in the Recommendations list. 
+// +// If the current go.mod file being linted is a recommended module of a +// blocked module and it imports that blocked module, do not set as blocked. +// This could mean that the linted module is a wrapper for that blocked module. +func (r *BlockedModule) IsCurrentModuleARecommendation(currentModuleName string) bool { + if r == nil { + return false + } + + for n := range r.Recommendations { + if strings.TrimSpace(currentModuleName) == strings.TrimSpace(r.Recommendations[n]) { + return true + } + } + + return false +} + +// Message returns the reason why the module is blocked and a list of recommended modules if provided. +func (r *BlockedModule) Message() string { + var sb strings.Builder + + // Add recommendations to message + for i := range r.Recommendations { + switch { + case len(r.Recommendations) == 1: + _, _ = fmt.Fprintf(&sb, "`%s` is a recommended module.", r.Recommendations[i]) + case (i+1) != len(r.Recommendations) && (i+1) == (len(r.Recommendations)-1): + _, _ = fmt.Fprintf(&sb, "`%s` ", r.Recommendations[i]) + case (i + 1) != len(r.Recommendations): + _, _ = fmt.Fprintf(&sb, "`%s`, ", r.Recommendations[i]) + default: + _, _ = fmt.Fprintf(&sb, "and `%s` are recommended modules.", r.Recommendations[i]) + } + } + + if r.Reason == "" { + return sb.String() + } + + // Add reason to message + if sb.Len() == 0 { + _, _ = fmt.Fprintf(&sb, "%s.", strings.TrimRight(r.Reason, ".")) + } else { + _, _ = fmt.Fprintf(&sb, " %s.", strings.TrimRight(r.Reason, ".")) + } + + return sb.String() +} + +// HasRecommendations returns true if the blocked package has +// recommended modules. +func (r *BlockedModule) HasRecommendations() bool { + if r == nil { + return false + } + + return len(r.Recommendations) > 0 +} + +// BlockedVersions a list of blocked modules by a version constraint. +type BlockedVersions []map[string]BlockedVersion + +// Get returns the module names that are blocked. +func (b BlockedVersions) Get() []string { + modules := make([]string, len(b)) + + for n := range b { + for module := range b[n] { + modules[n] = module + break + } + } + + return modules +} + +// GetBlockReason returns a block version if one is set for the provided linted module name. +func (b BlockedVersions) GetBlockReason(lintedModuleName string) *BlockedVersion { + for _, blockedModule := range b { + for blockedModuleName, blockedVersion := range blockedModule { + if strings.TrimSpace(lintedModuleName) == strings.TrimSpace(blockedModuleName) { + return &blockedVersion + } + } + } + + return nil +} + +// BlockedModules a list of blocked modules. +type BlockedModules []map[string]BlockedModule + +// Get returns the module names that are blocked. +func (b BlockedModules) Get() []string { + modules := make([]string, len(b)) + + for n := range b { + for module := range b[n] { + modules[n] = module + break + } + } + + return modules +} + +// GetBlockReason returns a block module if one is set for the provided linted module name. 
+func (b BlockedModules) GetBlockReason(lintedModuleName string) *BlockedModule { + for _, blockedModule := range b { + for blockedModuleName, blockedModule := range blockedModule { + if strings.TrimSpace(lintedModuleName) == strings.TrimSpace(blockedModuleName) { + return &blockedModule + } + } + } + + return nil +} diff --git a/vendor/github.com/ryancurrah/gomodguard/cmd.go b/vendor/github.com/ryancurrah/gomodguard/cmd.go deleted file mode 100644 index a26fac89..00000000 --- a/vendor/github.com/ryancurrah/gomodguard/cmd.go +++ /dev/null @@ -1,247 +0,0 @@ -package gomodguard - -import ( - "flag" - "fmt" - "io/ioutil" - "log" - "os" - "path/filepath" - "strings" - - "github.com/go-xmlfmt/xmlfmt" - "github.com/mitchellh/go-homedir" - "github.com/phayes/checkstyle" - "gopkg.in/yaml.v2" -) - -const ( - errFindingHomedir = "unable to find home directory, %w" - errReadingConfigFile = "could not read config file: %w" - errParsingConfigFile = "could not parse config file: %w" -) - -var ( - configFile = ".gomodguard.yaml" - logger = log.New(os.Stderr, "", 0) - errFindingConfigFile = fmt.Errorf("could not find config file") -) - -// Run the gomodguard linter. Returns the exit code to use. -//nolint:funlen -func Run() int { - var ( - args []string - help bool - noTest bool - report string - reportFile string - issuesExitCode int - cwd, _ = os.Getwd() - ) - - flag.BoolVar(&help, "h", false, "Show this help text") - flag.BoolVar(&help, "help", false, "") - flag.BoolVar(&noTest, "n", false, "Don't lint test files") - flag.BoolVar(&noTest, "no-test", false, "") - flag.StringVar(&report, "r", "", "Report results to one of the following formats: checkstyle. "+ - "A report file destination must also be specified") - flag.StringVar(&report, "report", "", "") - flag.StringVar(&reportFile, "f", "", "Report results to the specified file. 
A report type must also be specified") - flag.StringVar(&reportFile, "file", "", "") - flag.IntVar(&issuesExitCode, "i", 2, "Exit code when issues were found") - flag.IntVar(&issuesExitCode, "issues-exit-code", 2, "") - flag.Parse() - - report = strings.TrimSpace(strings.ToLower(report)) - - if help { - showHelp() - return 0 - } - - if report != "" && report != "checkstyle" { - logger.Fatalf("error: invalid report type '%s'", report) - } - - if report != "" && reportFile == "" { - logger.Fatalf("error: a report file must be specified when a report is enabled") - } - - if report == "" && reportFile != "" { - logger.Fatalf("error: a report type must be specified when a report file is enabled") - } - - args = flag.Args() - if len(args) == 0 { - args = []string{"./..."} - } - - config, err := GetConfig(configFile) - if err != nil { - logger.Fatalf("error: %s", err) - } - - filteredFiles := GetFilteredFiles(cwd, noTest, args) - - processor, err := NewProcessor(config) - if err != nil { - logger.Fatalf("error: %s", err) - } - - logger.Printf("info: allowed modules, %+v", config.Allowed.Modules) - logger.Printf("info: allowed module domains, %+v", config.Allowed.Domains) - logger.Printf("info: blocked modules, %+v", config.Blocked.Modules.Get()) - logger.Printf("info: blocked modules with version constraints, %+v", config.Blocked.Versions.Get()) - - results := processor.ProcessFiles(filteredFiles) - - if report == "checkstyle" { - err := WriteCheckstyle(reportFile, results) - if err != nil { - logger.Fatalf("error: %s", err) - } - } - - for _, r := range results { - fmt.Println(r.String()) - } - - if len(results) > 0 { - return issuesExitCode - } - - return 0 -} - -// GetConfig from YAML file. -func GetConfig(configFile string) (*Configuration, error) { - config := Configuration{} - - home, err := homedir.Dir() - if err != nil { - return nil, fmt.Errorf(errFindingHomedir, err) - } - - cfgFile := "" - homeDirCfgFile := filepath.Join(home, configFile) - - switch { - case fileExists(configFile): - cfgFile = configFile - case fileExists(homeDirCfgFile): - cfgFile = homeDirCfgFile - default: - return nil, fmt.Errorf("%w: %s %s", errFindingConfigFile, configFile, homeDirCfgFile) - } - - data, err := ioutil.ReadFile(cfgFile) - if err != nil { - return nil, fmt.Errorf(errReadingConfigFile, err) - } - - err = yaml.Unmarshal(data, &config) - if err != nil { - return nil, fmt.Errorf(errParsingConfigFile, err) - } - - return &config, nil -} - -// GetFilteredFiles returns files based on search string arguments and filters. -func GetFilteredFiles(cwd string, skipTests bool, args []string) []string { - var ( - foundFiles = []string{} - filteredFiles = []string{} - ) - - for _, f := range args { - if strings.HasSuffix(f, "/...") { - dir, _ := filepath.Split(f) - - foundFiles = append(foundFiles, expandGoWildcard(dir)...) - - continue - } - - if _, err := os.Stat(f); err == nil { - foundFiles = append(foundFiles, f) - } - } - - // Use relative path to print shorter names, sort out test foundFiles if chosen. - for _, f := range foundFiles { - if skipTests { - if strings.HasSuffix(f, "_test.go") { - continue - } - } - - if relativePath, err := filepath.Rel(cwd, f); err == nil { - filteredFiles = append(filteredFiles, relativePath) - - continue - } - - filteredFiles = append(filteredFiles, f) - } - - return filteredFiles -} - -// showHelp text for command line. -func showHelp() { - helpText := `Usage: gomodguard [files...] -Also supports package syntax but will use it in relative path, i.e. ./pkg/... 
-Flags:` - fmt.Println(helpText) - flag.PrintDefaults() -} - -// WriteCheckstyle takes the results and writes them to a checkstyle formated file. -func WriteCheckstyle(checkstyleFilePath string, results []Issue) error { - check := checkstyle.New() - - for i := range results { - file := check.EnsureFile(results[i].FileName) - file.AddError(checkstyle.NewError(results[i].LineNumber, 1, checkstyle.SeverityError, results[i].Reason, - "gomodguard")) - } - - checkstyleXML := fmt.Sprintf("\n%s", check.String()) - - err := ioutil.WriteFile(checkstyleFilePath, []byte(xmlfmt.FormatXML(checkstyleXML, "", " ")), 0644) // nolint:gosec - if err != nil { - return err - } - - return nil -} - -// fileExists returns true if the file path provided exists. -func fileExists(filename string) bool { - info, err := os.Stat(filename) - if os.IsNotExist(err) { - return false - } - - return !info.IsDir() -} - -// expandGoWildcard path provided. -func expandGoWildcard(root string) []string { - foundFiles := []string{} - - _ = filepath.Walk(root, func(path string, info os.FileInfo, err error) error { - // Only append go foundFiles. - if !strings.HasSuffix(info.Name(), ".go") { - return nil - } - - foundFiles = append(foundFiles, path) - - return nil - }) - - return foundFiles -} diff --git a/vendor/github.com/ryancurrah/gomodguard/issue.go b/vendor/github.com/ryancurrah/gomodguard/issue.go new file mode 100644 index 00000000..d60fc3a8 --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/issue.go @@ -0,0 +1,20 @@ +package gomodguard + +import ( + "fmt" + "go/token" +) + +// Issue represents the result of one error. +type Issue struct { + FileName string + LineNumber int + Position token.Position + Reason string +} + +// String returns the filename, line +// number and reason of a Issue. +func (r *Issue) String() string { + return fmt.Sprintf("%s:%d:1 %s", r.FileName, r.LineNumber, r.Reason) +} diff --git a/vendor/github.com/ryancurrah/gomodguard/gomodguard.go b/vendor/github.com/ryancurrah/gomodguard/processor.go similarity index 51% rename from vendor/github.com/ryancurrah/gomodguard/gomodguard.go rename to vendor/github.com/ryancurrah/gomodguard/processor.go index efd0d17e..8457e3b0 100644 --- a/vendor/github.com/ryancurrah/gomodguard/gomodguard.go +++ b/vendor/github.com/ryancurrah/gomodguard/processor.go @@ -7,14 +7,11 @@ import ( "fmt" "go/parser" "go/token" - "io/ioutil" "os" "os/exec" "regexp" "strings" - "github.com/Masterminds/semver" - "golang.org/x/mod/modfile" ) @@ -33,248 +30,17 @@ var ( "local replace directive." // startsWithVersion is used to test when a string begins with the version identifier of a module, - // after having stripped the prefix base module name. IE "github.com/foo/bar/v2/baz" => "/v2/baz" + // after having stripped the prefix base module name. IE "github.com/foo/bar/v2/baz" => "v2/baz" // probably indicates that the module is actually github.com/foo/bar/v2, not github.com/foo/bar. - startsWithVersion = regexp.MustCompile(`^\/v[0-9]+`) + startsWithVersion = regexp.MustCompile(`^v[0-9]+`) ) -// BlockedVersion has a version constraint a reason why the the module version is blocked. -type BlockedVersion struct { - Version string `yaml:"version"` - Reason string `yaml:"reason"` -} - -// IsLintedModuleVersionBlocked returns true if a version constraint is specified and the -// linted module version matches the constraint. 
-func (r *BlockedVersion) IsLintedModuleVersionBlocked(lintedModuleVersion string) bool { - if r.Version == "" { - return false - } - - constraint, err := semver.NewConstraint(r.Version) - if err != nil { - return false - } - - version, err := semver.NewVersion(lintedModuleVersion) - if err != nil { - return false - } - - meet := constraint.Check(version) - - return meet -} - -// Message returns the reason why the module version is blocked. -func (r *BlockedVersion) Message(lintedModuleVersion string) string { - var sb strings.Builder - - // Add version contraint to message. - _, _ = fmt.Fprintf(&sb, "version `%s` is blocked because it does not meet the version constraint `%s`.", - lintedModuleVersion, r.Version) - - if r.Reason == "" { - return sb.String() - } - - // Add reason to message. - _, _ = fmt.Fprintf(&sb, " %s.", strings.TrimRight(r.Reason, ".")) - - return sb.String() -} - -// BlockedModule has alternative modules to use and a reason why the module is blocked. -type BlockedModule struct { - Recommendations []string `yaml:"recommendations"` - Reason string `yaml:"reason"` -} - -// IsCurrentModuleARecommendation returns true if the current module is in the Recommendations list. -// -// If the current go.mod file being linted is a recommended module of a -// blocked module and it imports that blocked module, do not set as blocked. -// This could mean that the linted module is a wrapper for that blocked module. -func (r *BlockedModule) IsCurrentModuleARecommendation(currentModuleName string) bool { - if r == nil { - return false - } - - for n := range r.Recommendations { - if strings.TrimSpace(currentModuleName) == strings.TrimSpace(r.Recommendations[n]) { - return true - } - } - - return false -} - -// Message returns the reason why the module is blocked and a list of recommended modules if provided. -func (r *BlockedModule) Message() string { - var sb strings.Builder - - // Add recommendations to message - for i := range r.Recommendations { - switch { - case len(r.Recommendations) == 1: - _, _ = fmt.Fprintf(&sb, "`%s` is a recommended module.", r.Recommendations[i]) - case (i+1) != len(r.Recommendations) && (i+1) == (len(r.Recommendations)-1): - _, _ = fmt.Fprintf(&sb, "`%s` ", r.Recommendations[i]) - case (i + 1) != len(r.Recommendations): - _, _ = fmt.Fprintf(&sb, "`%s`, ", r.Recommendations[i]) - default: - _, _ = fmt.Fprintf(&sb, "and `%s` are recommended modules.", r.Recommendations[i]) - } - } - - if r.Reason == "" { - return sb.String() - } - - // Add reason to message - if sb.Len() == 0 { - _, _ = fmt.Fprintf(&sb, "%s.", strings.TrimRight(r.Reason, ".")) - } else { - _, _ = fmt.Fprintf(&sb, " %s.", strings.TrimRight(r.Reason, ".")) - } - - return sb.String() -} - -// HasRecommendations returns true if the blocked package has -// recommended modules. -func (r *BlockedModule) HasRecommendations() bool { - if r == nil { - return false - } - - return len(r.Recommendations) > 0 -} - -// BlockedVersions a list of blocked modules by a version constraint. -type BlockedVersions []map[string]BlockedVersion - -// Get returns the module names that are blocked. -func (b BlockedVersions) Get() []string { - modules := make([]string, len(b)) - - for n := range b { - for module := range b[n] { - modules[n] = module - break - } - } - - return modules -} - -// GetBlockReason returns a block version if one is set for the provided linted module name. 
-func (b BlockedVersions) GetBlockReason(lintedModuleName string) *BlockedVersion { - for _, blockedModule := range b { - for blockedModuleName, blockedVersion := range blockedModule { - if strings.TrimSpace(lintedModuleName) == strings.TrimSpace(blockedModuleName) { - return &blockedVersion - } - } - } - - return nil -} - -// BlockedModules a list of blocked modules. -type BlockedModules []map[string]BlockedModule - -// Get returns the module names that are blocked. -func (b BlockedModules) Get() []string { - modules := make([]string, len(b)) - - for n := range b { - for module := range b[n] { - modules[n] = module - break - } - } - - return modules -} - -// GetBlockReason returns a block module if one is set for the provided linted module name. -func (b BlockedModules) GetBlockReason(lintedModuleName string) *BlockedModule { - for _, blockedModule := range b { - for blockedModuleName, blockedModule := range blockedModule { - if strings.TrimSpace(lintedModuleName) == strings.TrimSpace(blockedModuleName) { - return &blockedModule - } - } - } - - return nil -} - -// Allowed is a list of modules and module -// domains that are allowed to be used. -type Allowed struct { - Modules []string `yaml:"modules"` - Domains []string `yaml:"domains"` -} - -// IsAllowedModule returns true if the given module -// name is in the allowed modules list. -func (a *Allowed) IsAllowedModule(moduleName string) bool { - allowedModules := a.Modules - - for i := range allowedModules { - if strings.TrimSpace(moduleName) == strings.TrimSpace(allowedModules[i]) { - return true - } - } - - return false -} - -// IsAllowedModuleDomain returns true if the given modules domain is -// in the allowed module domains list. -func (a *Allowed) IsAllowedModuleDomain(moduleName string) bool { - allowedDomains := a.Domains - - for i := range allowedDomains { - if strings.HasPrefix(strings.TrimSpace(strings.ToLower(moduleName)), - strings.TrimSpace(strings.ToLower(allowedDomains[i]))) { - return true - } - } - - return false -} - -// Blocked is a list of modules that are -// blocked and not to be used. -type Blocked struct { - Modules BlockedModules `yaml:"modules"` - Versions BlockedVersions `yaml:"versions"` - LocalReplaceDirectives bool `yaml:"local_replace_directives"` -} - // Configuration of gomodguard allow and block lists. type Configuration struct { Allowed Allowed `yaml:"allowed"` Blocked Blocked `yaml:"blocked"` } -// Issue represents the result of one error. -type Issue struct { - FileName string - LineNumber int - Position token.Position - Reason string -} - -// String returns the filename, line -// number and reason of a Issue. -func (r *Issue) String() string { - return fmt.Sprintf("%s:%d:1 %s", r.FileName, r.LineNumber, r.Reason) -} - // Processor processes Go files. type Processor struct { Config *Configuration @@ -308,7 +74,7 @@ func NewProcessor(config *Configuration) (*Processor, error) { // and lints them. func (p *Processor) ProcessFiles(filenames []string) (issues []Issue) { for _, filename := range filenames { - data, err := ioutil.ReadFile(filename) + data, err := os.ReadFile(filename) if err != nil { issues = append(issues, Issue{ FileName: filename, @@ -443,14 +209,7 @@ func (p *Processor) SetBlockedModules() { //nolint:gocognit,funlen // isBlockedPackageFromModFile returns the block reason if the package is blocked. 
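With `gomodguard.go` split into `allowed.go`, `blocked.go`, `issue.go`, and `processor.go`, and the standalone CLI (`cmd.go`) removed, the library surface used by linters stays the same. A sketch of driving that API directly, assuming the upstream import path `github.com/ryancurrah/gomodguard`; the blocked module and reason are invented for illustration:

```go
package lintutil // hypothetical caller-side helper

import "github.com/ryancurrah/gomodguard"

// lintFiles runs gomodguard over the given Go files with an in-memory config.
func lintFiles(files []string) ([]gomodguard.Issue, error) {
	cfg := &gomodguard.Configuration{
		Allowed: gomodguard.Allowed{
			Domains: []string{"golang.org", "github.com"},
		},
		Blocked: gomodguard.Blocked{
			Modules: gomodguard.BlockedModules{
				{"github.com/pkg/errors": gomodguard.BlockedModule{
					Recommendations: []string{"errors"},
					Reason:          "prefer the standard library errors package",
				}},
			},
		},
	}

	processor, err := gomodguard.NewProcessor(cfg)
	if err != nil {
		return nil, err
	}

	return processor.ProcessFiles(files), nil
}
```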
func (p *Processor) isBlockedPackageFromModFile(packageName string) []string { for blockedModuleName, blockReasons := range p.blockedModulesFromModFile { - if strings.HasPrefix(strings.TrimSpace(packageName), strings.TrimSpace(blockedModuleName)) { - // Test if a versioned module matched its base version - // ie github.com/foo/bar/v2 matched github.com/foo/bar, even though the former may be allowed. - suffix := strings.TrimPrefix(strings.TrimSpace(packageName), strings.TrimSpace(blockedModuleName)) - if startsWithVersion.MatchString(suffix) { - continue - } - + if isPackageInModule(packageName, blockedModuleName) { formattedReasons := make([]string, 0, len(blockReasons)) for _, blockReason := range blockReasons { @@ -470,7 +229,7 @@ func loadGoModFile() ([]byte, error) { _ = cmd.Start() if stdout == nil { - return ioutil.ReadFile(goModFilename) + return os.ReadFile(goModFilename) } buf := new(bytes.Buffer) @@ -480,20 +239,53 @@ func loadGoModFile() ([]byte, error) { err := json.Unmarshal(buf.Bytes(), &goEnv) if err != nil { - return ioutil.ReadFile(goModFilename) + return os.ReadFile(goModFilename) } if _, ok := goEnv["GOMOD"]; !ok { - return ioutil.ReadFile(goModFilename) + return os.ReadFile(goModFilename) } if _, err = os.Stat(goEnv["GOMOD"]); os.IsNotExist(err) { - return ioutil.ReadFile(goModFilename) + return os.ReadFile(goModFilename) } if goEnv["GOMOD"] == "/dev/null" { return nil, errors.New("current working directory must have a go.mod file") } - return ioutil.ReadFile(goEnv["GOMOD"]) + return os.ReadFile(goEnv["GOMOD"]) +} + +// isPackageInModule determines if a package is apart of the specified go module. +func isPackageInModule(pkg, mod string) bool { + // Split pkg and mod paths into parts + pkgPart := strings.Split(pkg, "/") + modPart := strings.Split(mod, "/") + + pkgPartMatches := 0 + + // Count number of times pkg path matches the mod path + for i, m := range modPart { + if len(pkgPart) > i && pkgPart[i] == m { + pkgPartMatches++ + } + } + + // If pkgPartMatches are not the same length as modPart + // than the package is not in this module + if pkgPartMatches != len(modPart) { + return false + } + + if len(pkgPart) > len(modPart) { + // If pkgPart path starts with a major version + // than the package is not in this module as + // major versions are completely different modules + if startsWithVersion.MatchString(pkgPart[len(modPart)]) { + return false + } + } + + return true } diff --git a/vendor/github.com/ryancurrah/gomodguard/tools.go b/vendor/github.com/ryancurrah/gomodguard/tools.go new file mode 100644 index 00000000..d56bcc74 --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/tools.go @@ -0,0 +1,5 @@ +//go:build tools + +package gomodguard + +import _ "github.com/t-yuki/gocover-cobertura" diff --git a/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go b/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go index bc42dfb3..c22817ca 100644 --- a/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go +++ b/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go @@ -14,6 +14,19 @@ const ( closeMethod = "Close" ) +type action uint8 + +const ( + actionUnhandled action = iota + actionHandled + actionReturned + actionPassed + actionClosed + actionUnvaluedCall + actionUnvaluedDefer + actionNoOp +) + var ( sqlPackages = []string{ "database/sql", @@ -33,7 +46,10 @@ func NewAnalyzer() *analysis.Analyzer { } func run(pass *analysis.Pass) 
(interface{}, error) { - pssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + pssa, ok := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + if !ok { + return nil, nil + } // Build list of types we are looking for targetTypes := getTargetTypes(pssa, sqlPackages) @@ -168,16 +184,16 @@ func checkClosed(refs *[]ssa.Instruction, targetTypes []*types.Pointer) bool { action := getAction(ref, targetTypes) switch action { - case "closed": + case actionClosed: return true - case "passed": + case actionPassed: // Passed and not used after if numInstrs == idx+1 { return true } - case "returned": + case actionReturned: return true - case "handled": + case actionHandled: return true default: // log.Printf(action) @@ -187,51 +203,61 @@ func checkClosed(refs *[]ssa.Instruction, targetTypes []*types.Pointer) bool { return false } -func getAction(instr ssa.Instruction, targetTypes []*types.Pointer) string { +func getAction(instr ssa.Instruction, targetTypes []*types.Pointer) action { switch instr := instr.(type) { case *ssa.Defer: if instr.Call.Value == nil { - return "unvalued defer" + return actionUnvaluedDefer } name := instr.Call.Value.Name() if name == closeMethod { - return "closed" + return actionClosed } case *ssa.Call: if instr.Call.Value == nil { - return "unvalued call" + return actionUnvaluedCall } isTarget := false - receiver := instr.Call.StaticCallee().Signature.Recv() - if receiver != nil { - isTarget = isTargetType(receiver.Type(), targetTypes) + staticCallee := instr.Call.StaticCallee() + if staticCallee != nil { + receiver := instr.Call.StaticCallee().Signature.Recv() + if receiver != nil { + isTarget = isTargetType(receiver.Type(), targetTypes) + } } name := instr.Call.Value.Name() if isTarget && name == closeMethod { - return "closed" + return actionClosed } if !isTarget { - return "passed" + return actionPassed } case *ssa.Phi: - return "passed" + return actionPassed case *ssa.MakeInterface: - return "passed" + return actionPassed case *ssa.Store: + // A Row/Stmt is stored in a struct, which may be closed later + // by a different flow. 
+ if _, ok := instr.Addr.(*ssa.FieldAddr); ok { + return actionReturned + } + if len(*instr.Addr.Referrers()) == 0 { - return "noop" + return actionNoOp } for _, aRef := range *instr.Addr.Referrers() { if c, ok := aRef.(*ssa.MakeClosure); ok { - f := c.Fn.(*ssa.Function) - for _, b := range f.Blocks { - if checkClosed(&b.Instrs, targetTypes) { - return "handled" + if f, ok := c.Fn.(*ssa.Function); ok { + for _, b := range f.Blocks { + if checkClosed(&b.Instrs, targetTypes) { + return actionHandled + } } } } @@ -241,21 +267,21 @@ func getAction(instr ssa.Instruction, targetTypes []*types.Pointer) string { for _, targetType := range targetTypes { if types.Identical(instrType, targetType) { if checkClosed(instr.Referrers(), targetTypes) { - return "handled" + return actionHandled } } } case *ssa.FieldAddr: if checkClosed(instr.Referrers(), targetTypes) { - return "handled" + return actionHandled } case *ssa.Return: - return "returned" + return actionReturned default: // log.Printf("%s", instr) } - return "unhandled" + return actionUnhandled } func checkDeferred(pass *analysis.Pass, instrs *[]ssa.Instruction, targetTypes []*types.Pointer, inDefer bool) { @@ -280,10 +306,10 @@ func checkDeferred(pass *analysis.Pass, instrs *[]ssa.Instruction, targetTypes [ for _, aRef := range *instr.Addr.Referrers() { if c, ok := aRef.(*ssa.MakeClosure); ok { - f := c.Fn.(*ssa.Function) - - for _, b := range f.Blocks { - checkDeferred(pass, &b.Instrs, targetTypes, true) + if f, ok := c.Fn.(*ssa.Function); ok { + for _, b := range f.Blocks { + checkDeferred(pass, &b.Instrs, targetTypes, true) + } } } } diff --git a/vendor/github.com/sanposhiho/wastedassign/v2/README.md b/vendor/github.com/sanposhiho/wastedassign/v2/README.md index cd2deeda..6b736f7f 100644 --- a/vendor/github.com/sanposhiho/wastedassign/v2/README.md +++ b/vendor/github.com/sanposhiho/wastedassign/v2/README.md @@ -39,10 +39,19 @@ $ go vet -vettool=`which wastedassign` sample.go ## Installation + +### Go version < 1.16 + ``` go get -u github.com/sanposhiho/wastedassign/v2/cmd/wastedassign ``` +### Go version 1.16+ + +``` +go install github.com/sanposhiho/wastedassign/v2/cmd/wastedassign@latest +``` + ## Usage ``` diff --git a/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/analyzer.go b/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/analyzer.go index b55a10be..4d6ab3cc 100644 --- a/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/analyzer.go +++ b/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/analyzer.go @@ -25,6 +25,7 @@ const ( SQLIsolationLevelFlag = "sql-isolation-level" TLSSignatureSchemeFlag = "tls-signature-scheme" ConstantKindFlag = "constant-kind" + SyslogPriorityFlag = "syslog-priority" ) // New returns new usestdlibvars analyzer. 
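[Editor's note] The usestdlibvars change above adds a SyslogPriorityFlag constant, which the following hunk registers as an opt-in boolean flag. A minimal, self-contained sketch of that opt-in flag pattern is below; the FlagSet name and the wiring are illustrative assumptions, not the analyzer's actual code — only the "syslog-priority" flag name is taken from the diff.

package main

import (
	"flag"
	"fmt"
)

// newFlags mirrors the pattern used by the analyzer: each optional check is
// exposed as a boolean flag that the run function consults before reporting.
func newFlags() *flag.FlagSet {
	fs := flag.NewFlagSet("usestdlibvars-sketch", flag.ContinueOnError)
	fs.Bool("syslog-priority", false, "suggest the use of syslog.Priority")
	return fs
}

func main() {
	fs := newFlags()
	if err := fs.Parse([]string{"-syslog-priority=true"}); err != nil {
		panic(err)
	}
	// Look the flag up the same way an opt-in check would before running.
	if f := fs.Lookup("syslog-priority"); f != nil {
		fmt.Println("syslog-priority enabled:", f.Value.String())
	}
}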
@@ -47,10 +48,11 @@ func flags() flag.FlagSet { flags.Bool(TimeLayoutFlag, false, "suggest the use of time.Layout") flags.Bool(CryptoHashFlag, false, "suggest the use of crypto.Hash.String()") flags.Bool(RPCDefaultPathFlag, false, "suggest the use of rpc.DefaultXXPath") - flags.Bool(OSDevNullFlag, false, "suggest the use of os.DevNull") + flags.Bool(OSDevNullFlag, false, "[DEPRECATED] suggest the use of os.DevNull") flags.Bool(SQLIsolationLevelFlag, false, "suggest the use of sql.LevelXX.String()") flags.Bool(TLSSignatureSchemeFlag, false, "suggest the use of tls.SignatureScheme.String()") flags.Bool(ConstantKindFlag, false, "suggest the use of constant.Kind.String()") + flags.Bool(SyslogPriorityFlag, false, "[DEPRECATED] suggest the use of syslog.Priority") return *flags } @@ -120,6 +122,11 @@ func run(pass *analysis.Pass) (interface{}, error) { return } + switch cond.Op { + case token.LSS, token.GTR, token.LEQ, token.GEQ: + return + } + x, ok := cond.X.(*ast.SelectorExpr) if !ok { return @@ -238,6 +245,27 @@ func funArgs(pass *analysis.Pass, x *ast.Ident, fun *ast.SelectorExpr, args []as checkHTTPMethod(pass, basicLit) } } + case "syslog": + if !lookupFlag(pass, SyslogPriorityFlag) { + return + } + + switch fun.Sel.Name { + case "New": + if basicLit := getBasicLitFromArgs(args, 2, 0, token.INT); basicLit != nil { + checkSyslogPriority(pass, basicLit) + } + + case "Dial": + if basicLit := getBasicLitFromArgs(args, 4, 2, token.INT); basicLit != nil { + checkSyslogPriority(pass, basicLit) + } + + case "NewLogger": + if basicLit := getBasicLitFromArgs(args, 2, 0, token.INT); basicLit != nil { + checkSyslogPriority(pass, basicLit) + } + } default: // w.WriteHeader(http.StatusOk) if fun.Sel.Name == "WriteHeader" { @@ -438,13 +466,7 @@ func checkRPCDefaultPath(pass *analysis.Pass, basicLit *ast.BasicLit) { } } -func checkOSDevNull(pass *analysis.Pass, basicLit *ast.BasicLit) { - currentVal := getBasicLitValue(basicLit) - - if newVal, ok := mapping.OSDevNull[currentVal]; ok { - report(pass, basicLit.Pos(), currentVal, newVal) - } -} +func checkOSDevNull(pass *analysis.Pass, basicLit *ast.BasicLit) {} func checkSQLIsolationLevel(pass *analysis.Pass, basicLit *ast.BasicLit) { currentVal := getBasicLitValue(basicLit) @@ -470,6 +492,8 @@ func checkConstantKind(pass *analysis.Pass, basicLit *ast.BasicLit) { } } +func checkSyslogPriority(pass *analysis.Pass, basicLit *ast.BasicLit) {} + // getBasicLitFromArgs gets the *ast.BasicLit of a function argument. 
// // Arguments: diff --git a/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping/mapping.go b/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping/mapping.go index 506166b5..b081edea 100644 --- a/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping/mapping.go +++ b/vendor/github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping/mapping.go @@ -7,7 +7,6 @@ import ( "go/constant" "net/http" "net/rpc" - "os" "strconv" "time" ) @@ -164,10 +163,6 @@ var TimeLayout = map[string]string{ time.StampNano: "time.StampNano", } -var OSDevNull = map[string]string{ - os.DevNull: "os.DevNull", -} - var SQLIsolationLevel = map[string]string{ // sql.LevelDefault.String(): "sql.LevelDefault.String()", sql.LevelReadUncommitted.String(): "sql.LevelReadUncommitted.String()", diff --git a/vendor/github.com/securego/gosec/v2/.goreleaser.yml b/vendor/github.com/securego/gosec/v2/.goreleaser.yml index 539be565..25a81b52 100644 --- a/vendor/github.com/securego/gosec/v2/.goreleaser.yml +++ b/vendor/github.com/securego/gosec/v2/.goreleaser.yml @@ -18,6 +18,7 @@ builds: goarch: - amd64 - arm64 + - s390x ldflags: -X main.Version={{.Version}} -X main.GitTag={{.Tag}} -X main.BuildDate={{.Date}} env: - CGO_ENABLED=0 diff --git a/vendor/github.com/securego/gosec/v2/Makefile b/vendor/github.com/securego/gosec/v2/Makefile index 5dbfd776..093c8a99 100644 --- a/vendor/github.com/securego/gosec/v2/Makefile +++ b/vendor/github.com/securego/gosec/v2/Makefile @@ -2,17 +2,23 @@ GIT_TAG?= $(shell git describe --always --tags) BIN = gosec FMT_CMD = $(gofmt -s -l -w $(find . -type f -name '*.go' -not -path './vendor/*') | tee /dev/stderr) IMAGE_REPO = securego -BUILD_DATE ?= $(shell date +%Y-%m-%d) +DATE_FMT=+%Y-%m-%d +ifdef SOURCE_DATE_EPOCH + BUILD_DATE ?= $(shell date -u -d "@$(SOURCE_DATE_EPOCH)" "$(DATE_FMT)" 2>/dev/null || date -u -r "$(SOURCE_DATE_EPOCH)" "$(DATE_FMT)" 2>/dev/null || date -u "$(DATE_FMT)") +else + BUILD_DATE ?= $(shell date "$(DATE_FMT)") +endif BUILDFLAGS := "-w -s -X 'main.Version=$(GIT_TAG)' -X 'main.GitTag=$(GIT_TAG)' -X 'main.BuildDate=$(BUILD_DATE)'" CGO_ENABLED = 0 GO := GO111MODULE=on go GO_NOMOD :=GO111MODULE=off go GOPATH ?= $(shell $(GO) env GOPATH) GOBIN ?= $(GOPATH)/bin -GOLINT ?= $(GOBIN)/golint GOSEC ?= $(GOBIN)/gosec GINKGO ?= $(GOBIN)/ginkgo -GO_VERSION = 1.18 +GO_MINOR_VERSION = $(shell $(GO) version | cut -c 14- | cut -d' ' -f1 | cut -d'.' -f2) +GOVULN_MIN_VERSION = 17 +GO_VERSION = 1.20 default: $(MAKE) build @@ -22,7 +28,12 @@ install-test-deps: $(GO_NOMOD) get -u golang.org/x/crypto/ssh $(GO_NOMOD) get -u github.com/lib/pq -test: install-test-deps build fmt lint sec +install-govulncheck: + @if [ $(GO_MINOR_VERSION) -gt $(GOVULN_MIN_VERSION) ]; then \ + go install golang.org/x/vuln/cmd/govulncheck@latest; \ + fi + +test: install-test-deps build fmt vet sec govulncheck $(GINKGO) -v --fail-fast fmt: @@ -30,10 +41,7 @@ fmt: @FORMATTED=`$(GO) fmt ./...` @([ ! -z "$(FORMATTED)" ] && printf "Fixed unformatted files:\n$(FORMATTED)") || true -lint: - @echo "LINTING: golint" - $(GO_NOMOD) get -u golang.org/x/lint/golint - $(GOLINT) -set_exit_status ./... +vet: @echo "VETTING" $(GO) vet ./... @@ -45,6 +53,12 @@ sec: @echo "SECURITY SCANNING" ./$(BIN) ./... 
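[Editor's note] The gosec Makefile hunk above derives BUILD_DATE from SOURCE_DATE_EPOCH when it is set, falling back to the current date, so release builds are reproducible. The Go sketch below shows the same convention for illustration only: the project implements this in make, and the function name here is assumed; the %Y-%m-%d layout mirrors the Makefile's DATE_FMT.

package main

import (
	"fmt"
	"os"
	"strconv"
	"time"
)

// buildDate honors SOURCE_DATE_EPOCH (seconds since the Unix epoch) for
// reproducible builds and otherwise falls back to the current UTC date.
func buildDate() string {
	const layout = "2006-01-02" // equivalent to the Makefile's +%Y-%m-%d
	if v := os.Getenv("SOURCE_DATE_EPOCH"); v != "" {
		if epoch, err := strconv.ParseInt(v, 10, 64); err == nil {
			return time.Unix(epoch, 0).UTC().Format(layout)
		}
	}
	return time.Now().UTC().Format(layout)
}

func main() { fmt.Println(buildDate()) }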
+govulncheck: install-govulncheck + @echo "CHECKING VULNERABILITIES" + @if [ $(GO_MINOR_VERSION) -gt $(GOVULN_MIN_VERSION) ]; then \ + govulncheck ./...; \ + fi + test-coverage: install-test-deps go test -race -v -count=1 -coverprofile=coverage.out ./... diff --git a/vendor/github.com/securego/gosec/v2/README.md b/vendor/github.com/securego/gosec/v2/README.md index cc824393..bf7aa088 100644 --- a/vendor/github.com/securego/gosec/v2/README.md +++ b/vendor/github.com/securego/gosec/v2/README.md @@ -21,7 +21,7 @@ You may obtain a copy of the License [here](http://www.apache.org/licenses/LICEN [![Docs](https://readthedocs.org/projects/docs/badge/?version=latest)](https://securego.io/) [![Downloads](https://img.shields.io/github/downloads/securego/gosec/total.svg)](https://github.com/securego/gosec/releases) [![Docker Pulls](https://img.shields.io/docker/pulls/securego/gosec.svg)](https://hub.docker.com/r/securego/gosec/tags) -[![Slack](http://securego.herokuapp.com/badge.svg)](http://securego.herokuapp.com) +[![Slack](https://img.shields.io/badge/Slack-4A154B?style=for-the-badge&logo=slack&logoColor=white)](http://securego.slack.com) ## Install diff --git a/vendor/github.com/securego/gosec/v2/analyzer.go b/vendor/github.com/securego/gosec/v2/analyzer.go index 0f9fef2d..5f778791 100644 --- a/vendor/github.com/securego/gosec/v2/analyzer.go +++ b/vendor/github.com/securego/gosec/v2/analyzer.go @@ -172,9 +172,9 @@ func (gosec *Analyzer) Process(buildTags []string, packagePaths ...string) error for { select { case s := <-j: - packages, err := gosec.load(s, config) + pkgs, err := gosec.load(s, config) select { - case r <- result{pkgPath: s, pkgs: packages, err: err}: + case r <- result{pkgPath: s, pkgs: pkgs, err: err}: case <-quit: // we've been told to stop, probably an error while // processing a previous result. @@ -296,7 +296,6 @@ func (gosec *Analyzer) Check(pkg *packages.Package) { gosec.context.Pkg = pkg.Types gosec.context.PkgFiles = pkg.Syntax gosec.context.Imports = NewImportTracker() - gosec.context.Imports.TrackFile(file) gosec.context.PassedValues = make(map[string]interface{}) ast.Walk(gosec, file) gosec.stats.NumFiles++ @@ -434,6 +433,12 @@ func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor { } return gosec } + switch i := n.(type) { + case *ast.File: + // Using ast.File instead of ast.ImportSpec, so that we can track + // all imports at once. + gosec.context.Imports.TrackFile(i) + } // Get any new rule exclusions. ignoredRules := gosec.ignore(n) @@ -453,9 +458,6 @@ func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor { // Push the new set onto the stack. gosec.context.Ignores = append([]map[string][]SuppressionInfo{ignores}, gosec.context.Ignores...) - // Track aliased and initialization imports - gosec.context.Imports.TrackImport(n) - for _, rule := range gosec.ruleset.RegisteredFor(n) { // Check if all rules are ignored. 
generalSuppressions, generalIgnored := ignores[aliasOfAllRules] diff --git a/vendor/github.com/securego/gosec/v2/cwe/data.go b/vendor/github.com/securego/gosec/v2/cwe/data.go index 0e377b96..ff1ad3c7 100644 --- a/vendor/github.com/securego/gosec/v2/cwe/data.go +++ b/vendor/github.com/securego/gosec/v2/cwe/data.go @@ -129,6 +129,11 @@ var ( Description: "The software constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component.", Name: "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", }, + { + ID: "676", + Description: "The program invokes a potentially dangerous function that could introduce a vulnerability if it is used incorrectly, but the function can also be used safely.", + Name: "Use of Potentially Dangerous Function", + }, } ) diff --git a/vendor/github.com/securego/gosec/v2/entrypoint.sh b/vendor/github.com/securego/gosec/v2/entrypoint.sh index af2acd4b..bc6ad6a2 100644 --- a/vendor/github.com/securego/gosec/v2/entrypoint.sh +++ b/vendor/github.com/securego/gosec/v2/entrypoint.sh @@ -4,4 +4,8 @@ # provides all arguments concatenated as a single string. ARGS=("$@") +if [[ ! -z "${GITHUB_AUTHENTICATION_TOKEN}" ]]; then + git config --global --add url."https://x-access-token:${GITHUB_AUTHENTICATION_TOKEN}@github.com/".insteadOf "https://github.com/" +fi + /bin/gosec ${ARGS[*]} diff --git a/vendor/github.com/securego/gosec/v2/helpers.go b/vendor/github.com/securego/gosec/v2/helpers.go index 437d0324..62ede054 100644 --- a/vendor/github.com/securego/gosec/v2/helpers.go +++ b/vendor/github.com/securego/gosec/v2/helpers.go @@ -37,12 +37,9 @@ import ( // // node, matched := MatchCallByPackage(n, ctx, "math/rand", "Read") func MatchCallByPackage(n ast.Node, c *Context, pkg string, names ...string) (*ast.CallExpr, bool) { - importedName, found := GetImportedName(pkg, c) + importedNames, found := GetImportedNames(pkg, c) if !found { - importedName, found = GetAliasedName(pkg, c) - if !found { - return nil, false - } + return nil, false } if callExpr, ok := n.(*ast.CallExpr); ok { @@ -50,7 +47,10 @@ func MatchCallByPackage(n ast.Node, c *Context, pkg string, names ...string) (*a if err != nil { return nil, false } - if packageName == importedName { + for _, in := range importedNames { + if packageName != in { + continue + } for _, name := range names { if callName == name { return callExpr, true @@ -247,48 +247,23 @@ func GetBinaryExprOperands(be *ast.BinaryExpr) []ast.Node { return result } -// GetImportedName returns the name used for the package within the -// code. It will ignore initialization only imports. -func GetImportedName(path string, ctx *Context) (string, bool) { - importName, imported := ctx.Imports.Imported[path] - if !imported { - return "", false - } - - if _, initonly := ctx.Imports.InitOnly[path]; initonly { - return "", false - } - - return importName, true -} - -// GetAliasedName returns the aliased name used for the package within the -// code. It will ignore initialization only imports. 
-func GetAliasedName(path string, ctx *Context) (string, bool) { - importName, imported := ctx.Imports.Aliased[path] - if !imported { - return "", false - } - - if _, initonly := ctx.Imports.InitOnly[path]; initonly { - return "", false - } - - return importName, true +// GetImportedNames returns the name(s)/alias(es) used for the package within +// the code. It ignores initialization-only imports. +func GetImportedNames(path string, ctx *Context) (names []string, found bool) { + importNames, imported := ctx.Imports.Imported[path] + return importNames, imported } // GetImportPath resolves the full import path of an identifier based on // the imports in the current context(including aliases). func GetImportPath(name string, ctx *Context) (string, bool) { for path := range ctx.Imports.Imported { - if imported, ok := GetImportedName(path, ctx); ok && imported == name { - return path, true - } - } - - for path := range ctx.Imports.Aliased { - if imported, ok := GetAliasedName(path, ctx); ok && imported == name { - return path, true + if imported, ok := GetImportedNames(path, ctx); ok { + for _, n := range imported { + if n == name { + return path, true + } + } } } diff --git a/vendor/github.com/securego/gosec/v2/import_tracker.go b/vendor/github.com/securego/gosec/v2/import_tracker.go index cbb8c551..30e7c009 100644 --- a/vendor/github.com/securego/gosec/v2/import_tracker.go +++ b/vendor/github.com/securego/gosec/v2/import_tracker.go @@ -22,54 +22,51 @@ import ( // by a source file. It is able to differentiate between plain imports, aliased // imports and init only imports. type ImportTracker struct { - Imported map[string]string - Aliased map[string]string - InitOnly map[string]bool + // Imported is a map of Imported with their associated names/aliases. + Imported map[string][]string } // NewImportTracker creates an empty Import tracker instance func NewImportTracker() *ImportTracker { return &ImportTracker{ - make(map[string]string), - make(map[string]string), - make(map[string]bool), + Imported: make(map[string][]string), } } // TrackFile track all the imports used by the supplied file func (t *ImportTracker) TrackFile(file *ast.File) { for _, imp := range file.Imports { - path := strings.Trim(imp.Path.Value, `"`) - parts := strings.Split(path, "/") - if len(parts) > 0 { - name := parts[len(parts)-1] - t.Imported[path] = name - } + t.TrackImport(imp) } } // TrackPackages tracks all the imports used by the supplied packages func (t *ImportTracker) TrackPackages(pkgs ...*types.Package) { for _, pkg := range pkgs { - t.Imported[pkg.Path()] = pkg.Name() + t.Imported[pkg.Path()] = []string{pkg.Name()} } } -// TrackImport tracks imports and handles the 'unsafe' import -func (t *ImportTracker) TrackImport(n ast.Node) { - if imported, ok := n.(*ast.ImportSpec); ok { - path := strings.Trim(imported.Path.Value, `"`) - if imported.Name != nil { - if imported.Name.Name == "_" { - // Initialization only import - t.InitOnly[path] = true - } else { - // Aliased import - t.Aliased[path] = imported.Name.Name - } - } - if path == "unsafe" { - t.Imported[path] = path +// TrackImport tracks imports. 
+func (t *ImportTracker) TrackImport(imported *ast.ImportSpec) { + importPath := strings.Trim(imported.Path.Value, `"`) + if imported.Name != nil { + if imported.Name.Name == "_" { + // Initialization only import + } else { + // Aliased import + t.Imported[importPath] = append(t.Imported[importPath], imported.Name.String()) } + } else { + t.Imported[importPath] = append(t.Imported[importPath], importName(importPath)) + } +} + +func importName(importPath string) string { + parts := strings.Split(importPath, "/") + name := importPath + if len(parts) > 0 { + name = parts[len(parts)-1] } + return name } diff --git a/vendor/github.com/securego/gosec/v2/install.sh b/vendor/github.com/securego/gosec/v2/install.sh index 0da55d37..2b6403cb 100644 --- a/vendor/github.com/securego/gosec/v2/install.sh +++ b/vendor/github.com/securego/gosec/v2/install.sh @@ -280,11 +280,13 @@ http_copy() { github_release() { owner_repo=$1 version=$2 - test -z "$version" && version="latest" - giturl="https://github.com/${owner_repo}/releases/${version}" + giturl="https://api.github.com/repos/${owner_repo}/releases/tags/${version}" + if [ -z "${version}" ]; then + giturl="https://api.github.com/repos/${owner_repo}/releases/latest" + fi json=$(http_copy "$giturl" "Accept:application/json") test -z "$json" && return 1 - version=$(echo "$json" | tr -s '\n' ' ' | sed 's/.*"tag_name":"//' | sed 's/".*//') + version=$(echo "$json" | tr -s '\n' ' ' | sed 's/.*"tag_name": *"//' | sed 's/".*//') test -z "$version" && return 1 echo "$version" } diff --git a/vendor/github.com/securego/gosec/v2/issue.go b/vendor/github.com/securego/gosec/v2/issue.go index 32b9bc0c..d8faf4bf 100644 --- a/vendor/github.com/securego/gosec/v2/issue.go +++ b/vendor/github.com/securego/gosec/v2/issue.go @@ -66,6 +66,7 @@ var ruleToCWE = map[string]string{ "G111": "22", "G112": "400", "G113": "190", + "G114": "676", "G201": "89", "G202": "89", "G203": "79", diff --git a/vendor/github.com/securego/gosec/v2/rules/bad_defer.go b/vendor/github.com/securego/gosec/v2/rules/bad_defer.go index 13b42070..141a4a93 100644 --- a/vendor/github.com/securego/gosec/v2/rules/bad_defer.go +++ b/vendor/github.com/securego/gosec/v2/rules/bad_defer.go @@ -57,6 +57,34 @@ func NewDeferredClosing(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { typ: "os.File", methods: []string{"Close"}, }, + { + typ: "io.ReadCloser", + methods: []string{"Close"}, + }, + { + typ: "io.WriteCloser", + methods: []string{"Close"}, + }, + { + typ: "io.ReadWriteCloser", + methods: []string{"Close"}, + }, + { + typ: "io.ReadSeekCloser", + methods: []string{"Close"}, + }, + { + typ: "io.Closer", + methods: []string{"Close"}, + }, + { + typ: "net.Conn", + methods: []string{"Close"}, + }, + { + typ: "net.Listener", + methods: []string{"Close"}, + }, }, MetaData: gosec.MetaData{ ID: id, diff --git a/vendor/github.com/securego/gosec/v2/rules/fileperms.go b/vendor/github.com/securego/gosec/v2/rules/fileperms.go index a379a8c0..e89b5636 100644 --- a/vendor/github.com/securego/gosec/v2/rules/fileperms.go +++ b/vendor/github.com/securego/gosec/v2/rules/fileperms.go @@ -50,11 +50,15 @@ func getConfiguredMode(conf map[string]interface{}, configKey string, defaultMod return mode } +func modeIsSubset(subset int64, superset int64) bool { + return (subset | superset) == superset +} + func (r *filePermissions) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { for _, 
pkg := range r.pkgs { if callexpr, matched := gosec.MatchCallByPackage(n, c, pkg, r.calls...); matched { modeArg := callexpr.Args[len(callexpr.Args)-1] - if mode, err := gosec.GetInt(modeArg); err == nil && mode > r.mode { + if mode, err := gosec.GetInt(modeArg); err == nil && !modeIsSubset(mode, r.mode) { return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil } } diff --git a/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go b/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go index cf2e6638..b9e57565 100644 --- a/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go +++ b/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go @@ -101,12 +101,19 @@ func (r *credentials) matchValueSpec(valueSpec *ast.ValueSpec, ctx *gosec.Contex func (r *credentials) matchEqualityCheck(binaryExpr *ast.BinaryExpr, ctx *gosec.Context) (*gosec.Issue, error) { if binaryExpr.Op == token.EQL || binaryExpr.Op == token.NEQ { - if ident, ok := binaryExpr.X.(*ast.Ident); ok { - if r.pattern.MatchString(ident.Name) { - if val, err := gosec.GetString(binaryExpr.Y); err == nil { - if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) { - return gosec.NewIssue(ctx, binaryExpr, r.ID(), r.What, r.Severity, r.Confidence), nil - } + ident, ok := binaryExpr.X.(*ast.Ident) + if !ok { + ident, _ = binaryExpr.Y.(*ast.Ident) + } + + if ident != nil && r.pattern.MatchString(ident.Name) { + valueNode := binaryExpr.Y + if !ok { + valueNode = binaryExpr.X + } + if val, err := gosec.GetString(valueNode); err == nil { + if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) { + return gosec.NewIssue(ctx, binaryExpr, r.ID(), r.What, r.Severity, r.Confidence), nil } } } diff --git a/vendor/github.com/securego/gosec/v2/rules/readfile.go b/vendor/github.com/securego/gosec/v2/rules/readfile.go index 579f2fa4..8dcf0532 100644 --- a/vendor/github.com/securego/gosec/v2/rules/readfile.go +++ b/vendor/github.com/securego/gosec/v2/rules/readfile.go @@ -24,8 +24,9 @@ import ( type readfile struct { gosec.MetaData gosec.CallList - pathJoin gosec.CallList - clean gosec.CallList + pathJoin gosec.CallList + clean gosec.CallList + cleanedVar map[any]ast.Node } // ID returns the identifier for this rule @@ -57,8 +58,11 @@ func (r *readfile) isJoinFunc(n ast.Node, c *gosec.Context) bool { return false } -// isFilepathClean checks if there is a filepath.Clean before assigning to a variable +// isFilepathClean checks if there is a filepath.Clean for given variable func (r *readfile) isFilepathClean(n *ast.Ident, c *gosec.Context) bool { + if _, ok := r.cleanedVar[n.Obj.Decl]; ok { + return true + } if n.Obj.Kind != ast.Var { return false } @@ -72,9 +76,21 @@ func (r *readfile) isFilepathClean(n *ast.Ident, c *gosec.Context) bool { return false } +// trackFilepathClean tracks back the declaration of variable from filepath.Clean argument +func (r *readfile) trackFilepathClean(n ast.Node) { + if clean, ok := n.(*ast.CallExpr); ok && len(clean.Args) > 0 { + if ident, ok := clean.Args[0].(*ast.Ident); ok { + r.cleanedVar[ident.Obj.Decl] = n + } + } +} + // Match inspects AST nodes to determine if the match the methods `os.Open` or `ioutil.ReadFile` func (r *readfile) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { - if node := r.ContainsPkgCallExpr(n, c, false); node != nil { + if node := r.clean.ContainsPkgCallExpr(n, c, false); node != nil { + r.trackFilepathClean(n) + 
return nil, nil + } else if node := r.ContainsPkgCallExpr(n, c, false); node != nil { for _, arg := range node.Args { // handles path joining functions in Arg // eg. os.Open(filepath.Join("/tmp/", file)) @@ -116,6 +132,7 @@ func NewReadFile(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { Severity: gosec.Medium, Confidence: gosec.High, }, + cleanedVar: map[any]ast.Node{}, } rule.pathJoin.Add("path/filepath", "Join") rule.pathJoin.Add("path", "Join") diff --git a/vendor/github.com/securego/gosec/v2/rules/tls.go b/vendor/github.com/securego/gosec/v2/rules/tls.go index 76dfd84f..1cc3a298 100644 --- a/vendor/github.com/securego/gosec/v2/rules/tls.go +++ b/vendor/github.com/securego/gosec/v2/rules/tls.go @@ -63,31 +63,51 @@ func (t *insecureConfigTLS) processTLSCipherSuites(n ast.Node, c *gosec.Context) return nil } -func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gosec.Context) *gosec.Issue { - if ident, ok := n.Key.(*ast.Ident); ok { +func (t *insecureConfigTLS) processTLSConf(n ast.Node, c *gosec.Context) *gosec.Issue { + if kve, ok := n.(*ast.KeyValueExpr); ok { + issue := t.processTLSConfVal(kve.Key, kve.Value, c) + if issue != nil { + return issue + } + } else if assign, ok := n.(*ast.AssignStmt); ok { + if len(assign.Lhs) < 1 || len(assign.Rhs) < 1 { + return nil + } + if selector, ok := assign.Lhs[0].(*ast.SelectorExpr); ok { + issue := t.processTLSConfVal(selector.Sel, assign.Rhs[0], c) + if issue != nil { + return issue + } + } + } + return nil +} + +func (t *insecureConfigTLS) processTLSConfVal(key ast.Expr, value ast.Expr, c *gosec.Context) *gosec.Issue { + if ident, ok := key.(*ast.Ident); ok { switch ident.Name { case "InsecureSkipVerify": - if node, ok := n.Value.(*ast.Ident); ok { + if node, ok := value.(*ast.Ident); ok { if node.Name != "false" { - return gosec.NewIssue(c, n, t.ID(), "TLS InsecureSkipVerify set true.", gosec.High, gosec.High) + return gosec.NewIssue(c, value, t.ID(), "TLS InsecureSkipVerify set true.", gosec.High, gosec.High) } } else { // TODO(tk): symbol tab look up to get the actual value - return gosec.NewIssue(c, n, t.ID(), "TLS InsecureSkipVerify may be true.", gosec.High, gosec.Low) + return gosec.NewIssue(c, value, t.ID(), "TLS InsecureSkipVerify may be true.", gosec.High, gosec.Low) } case "PreferServerCipherSuites": - if node, ok := n.Value.(*ast.Ident); ok { + if node, ok := value.(*ast.Ident); ok { if node.Name == "false" { - return gosec.NewIssue(c, n, t.ID(), "TLS PreferServerCipherSuites set false.", gosec.Medium, gosec.High) + return gosec.NewIssue(c, value, t.ID(), "TLS PreferServerCipherSuites set false.", gosec.Medium, gosec.High) } } else { // TODO(tk): symbol tab look up to get the actual value - return gosec.NewIssue(c, n, t.ID(), "TLS PreferServerCipherSuites may be false.", gosec.Medium, gosec.Low) + return gosec.NewIssue(c, value, t.ID(), "TLS PreferServerCipherSuites may be false.", gosec.Medium, gosec.Low) } case "MinVersion": - if d, ok := n.Value.(*ast.Ident); ok { + if d, ok := value.(*ast.Ident); ok { obj := d.Obj if obj == nil { for _, f := range c.PkgFiles { @@ -107,7 +127,7 @@ func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gosec.Cont tObj := imp.Scope().Lookup(sel) if cst, ok := tObj.(*types.Const); ok { // ..got the value check if this can be translated - if minVersion, err := strconv.ParseInt(cst.Val().String(), 10, 64); err == nil { + if minVersion, err := strconv.ParseInt(cst.Val().String(), 0, 64); err == nil { t.actualMinVersion = 
minVersion } } @@ -118,10 +138,10 @@ func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gosec.Cont t.actualMinVersion = ival } } - } else if ival, ierr := gosec.GetInt(n.Value); ierr == nil { + } else if ival, ierr := gosec.GetInt(value); ierr == nil { t.actualMinVersion = ival } else { - if se, ok := n.Value.(*ast.SelectorExpr); ok { + if se, ok := value.(*ast.SelectorExpr); ok { if pkg, ok := se.X.(*ast.Ident); ok { if ip, ok := gosec.GetImportPath(pkg.Name, c); ok && ip == "crypto/tls" { t.actualMinVersion = t.mapVersion(se.Sel.Name) @@ -131,10 +151,10 @@ func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gosec.Cont } case "MaxVersion": - if ival, ierr := gosec.GetInt(n.Value); ierr == nil { + if ival, ierr := gosec.GetInt(value); ierr == nil { t.actualMaxVersion = ival } else { - if se, ok := n.Value.(*ast.SelectorExpr); ok { + if se, ok := value.(*ast.SelectorExpr); ok { if pkg, ok := se.X.(*ast.Ident); ok { if ip, ok := gosec.GetImportPath(pkg.Name, c); ok && ip == "crypto/tls" { t.actualMaxVersion = t.mapVersion(se.Sel.Name) @@ -144,7 +164,7 @@ func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gosec.Cont } case "CipherSuites": - if ret := t.processTLSCipherSuites(n.Value, c); ret != nil { + if ret := t.processTLSCipherSuites(value, c); ret != nil { return ret } @@ -192,17 +212,27 @@ func (t *insecureConfigTLS) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, e actualType := c.Info.TypeOf(complit.Type) if actualType != nil && actualType.String() == t.requiredType { for _, elt := range complit.Elts { - if kve, ok := elt.(*ast.KeyValueExpr); ok { - issue := t.processTLSConfVal(kve, c) - if issue != nil { - return issue, nil - } + issue := t.processTLSConf(elt, c) + if issue != nil { + return issue, nil } } issue := t.checkVersion(complit, c) t.resetVersion() return issue, nil } + } else { + if assign, ok := n.(*ast.AssignStmt); ok && len(assign.Lhs) > 0 { + if selector, ok := assign.Lhs[0].(*ast.SelectorExpr); ok { + actualType := c.Info.TypeOf(selector.X) + if actualType != nil && actualType.String() == t.requiredType { + issue := t.processTLSConf(assign, c) + if issue != nil { + return issue, nil + } + } + } + } } return nil, nil } diff --git a/vendor/github.com/securego/gosec/v2/rules/tls_config.go b/vendor/github.com/securego/gosec/v2/rules/tls_config.go index 5d68593d..9bb17c24 100644 --- a/vendor/github.com/securego/gosec/v2/rules/tls_config.go +++ b/vendor/github.com/securego/gosec/v2/rules/tls_config.go @@ -19,7 +19,7 @@ func NewModernTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { "TLS_AES_256_GCM_SHA384", "TLS_CHACHA20_POLY1305_SHA256", }, - }, []ast.Node{(*ast.CompositeLit)(nil)} + }, []ast.Node{(*ast.CompositeLit)(nil), (*ast.AssignStmt)(nil)} } // NewIntermediateTLSCheck creates a check for Intermediate TLS ciphers @@ -45,7 +45,7 @@ func NewIntermediateTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.No "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256", "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384", }, - }, []ast.Node{(*ast.CompositeLit)(nil)} + }, []ast.Node{(*ast.CompositeLit)(nil), (*ast.AssignStmt)(nil)} } // NewOldTLSCheck creates a check for Old TLS ciphers @@ -88,5 +88,5 @@ func NewOldTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { "TLS_RSA_WITH_AES_256_CBC_SHA", "TLS_RSA_WITH_3DES_EDE_CBC_SHA", }, - }, []ast.Node{(*ast.CompositeLit)(nil)} + }, []ast.Node{(*ast.CompositeLit)(nil), (*ast.AssignStmt)(nil)} } diff --git 
a/vendor/github.com/sivchari/tenv/tenv.go b/vendor/github.com/sivchari/tenv/tenv.go index 12db04cf..fcff98d0 100644 --- a/vendor/github.com/sivchari/tenv/tenv.go +++ b/vendor/github.com/sivchari/tenv/tenv.go @@ -171,6 +171,9 @@ func targetRunner(params []*ast.Field, fileName string) (string, bool) { } case *ast.SelectorExpr: if checkSelectorExprTarget(typ) { + if len(p.Names) == 0 { + return "", false + } argName := p.Names[0].Name return argName, true } diff --git a/vendor/github.com/sourcegraph/go-diff/LICENSE b/vendor/github.com/sourcegraph/go-diff/LICENSE index 0733b6e5..5ba1c443 100644 --- a/vendor/github.com/sourcegraph/go-diff/LICENSE +++ b/vendor/github.com/sourcegraph/go-diff/LICENSE @@ -33,3 +33,14 @@ in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE +OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/sourcegraph/go-diff/diff/diff.go b/vendor/github.com/sourcegraph/go-diff/diff/diff.go index 0f465b9e..81aa6557 100644 --- a/vendor/github.com/sourcegraph/go-diff/diff/diff.go +++ b/vendor/github.com/sourcegraph/go-diff/diff/diff.go @@ -120,6 +120,10 @@ const onlyInMessage = "Only in %s: %s\n" // See https://www.gnu.org/software/diffutils/manual/html_node/Detailed-Unified.html. const diffTimeParseLayout = "2006-01-02 15:04:05 -0700" +// Apple's diff is based on freebsd diff, which uses a timestamp format that does +// not include the timezone offset. +const diffTimeParseWithoutTZLayout = "2006-01-02 15:04:05" + // diffTimeFormatLayout is the layout used to format (i.e., print) the time in unified diff file // header timestamps. // See https://www.gnu.org/software/diffutils/manual/html_node/Detailed-Unified.html. diff --git a/vendor/github.com/sourcegraph/go-diff/diff/parse.go b/vendor/github.com/sourcegraph/go-diff/diff/parse.go index 8d5cfc23..48eeb967 100644 --- a/vendor/github.com/sourcegraph/go-diff/diff/parse.go +++ b/vendor/github.com/sourcegraph/go-diff/diff/parse.go @@ -23,14 +23,14 @@ func ParseMultiFileDiff(diff []byte) ([]*FileDiff, error) { // NewMultiFileDiffReader returns a new MultiFileDiffReader that reads // a multi-file unified diff from r. func NewMultiFileDiffReader(r io.Reader) *MultiFileDiffReader { - return &MultiFileDiffReader{reader: bufio.NewReader(r)} + return &MultiFileDiffReader{reader: newLineReader(r)} } // MultiFileDiffReader reads a multi-file unified diff. type MultiFileDiffReader struct { line int offset int64 - reader *bufio.Reader + reader *lineReader // TODO(sqs): line and offset tracking in multi-file diffs is broken; add tests and fix @@ -46,6 +46,14 @@ type MultiFileDiffReader struct { // all hunks) from r. 
If there are no more files in the diff, it // returns error io.EOF. func (r *MultiFileDiffReader) ReadFile() (*FileDiff, error) { + fd, _, err := r.ReadFileWithTrailingContent() + return fd, err +} + +// ReadFileWithTrailingContent reads the next file unified diff (including +// headers and all hunks) from r, also returning any trailing content. If there +// are no more files in the diff, it returns error io.EOF. +func (r *MultiFileDiffReader) ReadFileWithTrailingContent() (*FileDiff, string, error) { fr := &FileDiffReader{ line: r.line, offset: r.offset, @@ -59,23 +67,33 @@ func (r *MultiFileDiffReader) ReadFile() (*FileDiff, error) { switch e := err.(type) { case *ParseError: if e.Err == ErrNoFileHeader || e.Err == ErrExtendedHeadersEOF { - return nil, io.EOF + // Any non-diff content preceding a valid diff is included in the + // extended headers of the following diff. In this way, mixed diff / + // non-diff content can be parsed. Trailing non-diff content is + // different: it doesn't make sense to return a FileDiff with only + // extended headers populated. Instead, we return any trailing content + // in case the caller needs it. + trailing := "" + if fd != nil { + trailing = strings.Join(fd.Extended, "\n") + } + return nil, trailing, io.EOF } - return nil, err + return nil, "", err case OverflowError: r.nextFileFirstLine = []byte(e) - return fd, nil + return fd, "", nil default: - return nil, err + return nil, "", err } } // FileDiff is added/deleted file // No further collection of hunks needed if fd.NewName == "" { - return fd, nil + return fd, "", nil } // Before reading hunks, check to see if there are any. If there @@ -85,9 +103,9 @@ func (r *MultiFileDiffReader) ReadFile() (*FileDiff, error) { // caused by the lack of any hunks, or a malformatted hunk, so we // need to perform the check here. hr := fr.HunksReader() - line, err := readLine(r.reader) + line, err := r.reader.readLine() if err != nil && err != io.EOF { - return fd, err + return fd, "", err } line = bytes.TrimSuffix(line, []byte{'\n'}) if bytes.HasPrefix(line, hunkPrefix) { @@ -101,10 +119,10 @@ func (r *MultiFileDiffReader) ReadFile() (*FileDiff, error) { // This just means we finished reading the hunks for the // current file. See the ErrBadHunkLine doc for more info. r.nextFileFirstLine = e.Line - return fd, nil + return fd, "", nil } } - return nil, err + return nil, "", err } } else { // There weren't any hunks, so that line we peeked ahead at @@ -112,7 +130,7 @@ func (r *MultiFileDiffReader) ReadFile() (*FileDiff, error) { r.nextFileFirstLine = line } - return fd, nil + return fd, "", nil } // ReadAllFiles reads all file unified diffs (including headers and all @@ -141,14 +159,14 @@ func ParseFileDiff(diff []byte) (*FileDiff, error) { // NewFileDiffReader returns a new FileDiffReader that reads a file // unified diff. func NewFileDiffReader(r io.Reader) *FileDiffReader { - return &FileDiffReader{reader: bufio.NewReader(r)} + return &FileDiffReader{reader: &lineReader{reader: bufio.NewReader(r)}} } // FileDiffReader reads a unified file diff. 
type FileDiffReader struct { line int offset int64 - reader *bufio.Reader + reader *lineReader // fileHeaderLine is the first file header line, set by: // @@ -236,7 +254,6 @@ func (r *FileDiffReader) ReadFileHeaders() (origName, newName string, origTimest "", nil, nil, nil } } - origName, origTimestamp, err = r.readOneFileHeader([]byte("--- ")) if err != nil { return "", "", nil, nil, err @@ -266,7 +283,7 @@ func (r *FileDiffReader) readOneFileHeader(prefix []byte) (filename string, time if r.fileHeaderLine == nil { var err error - line, err = readLine(r.reader) + line, err = r.reader.readLine() if err == io.EOF { return "", nil, &ParseError{r.line, r.offset, ErrNoFileHeader} } else if err != nil { @@ -289,10 +306,16 @@ func (r *FileDiffReader) readOneFileHeader(prefix []byte) (filename string, time parts := strings.SplitN(trimmedLine, "\t", 2) filename = parts[0] if len(parts) == 2 { + var ts time.Time // Timestamp is optional, but this header has it. - ts, err := time.Parse(diffTimeParseLayout, parts[1]) + ts, err = time.Parse(diffTimeParseLayout, parts[1]) if err != nil { - return "", nil, err + var err1 error + ts, err1 = time.Parse(diffTimeParseWithoutTZLayout, parts[1]) + if err1 != nil { + return "", nil, err + } + err = nil } timestamp = &ts } @@ -318,7 +341,7 @@ func (r *FileDiffReader) ReadExtendedHeaders() ([]string, error) { var line []byte if r.fileHeaderLine == nil { var err error - line, err = readLine(r.reader) + line, err = r.reader.readLine() if err == io.EOF { return xheaders, &ParseError{r.line, r.offset, ErrExtendedHeadersEOF} } else if err != nil { @@ -354,65 +377,192 @@ func (r *FileDiffReader) ReadExtendedHeaders() ([]string, error) { } } +// readQuotedFilename extracts a quoted filename from the beginning of a string, +// returning the unquoted filename and any remaining text after the filename. +func readQuotedFilename(text string) (value string, remainder string, err error) { + if text == "" || text[0] != '"' { + return "", "", fmt.Errorf(`string must start with a '"': %s`, text) + } + + // The end quote is the first quote NOT preceeded by an uneven number of backslashes. + numberOfBackslashes := 0 + for i, c := range text { + if c == '"' && i > 0 && numberOfBackslashes%2 == 0 { + value, err = strconv.Unquote(text[:i+1]) + remainder = text[i+1:] + return + } else if c == '\\' { + numberOfBackslashes++ + } else { + numberOfBackslashes = 0 + } + } + return "", "", fmt.Errorf(`end of string found while searching for '"': %s`, text) +} + +// parseDiffGitArgs extracts the two filenames from a 'diff --git' line. +// Returns false on syntax error, true if syntax is valid. Even with a +// valid syntax, it may be impossible to extract filenames; if so, the +// function returns ("", "", true). +func parseDiffGitArgs(diffArgs string) (string, string, bool) { + length := len(diffArgs) + if length < 3 { + return "", "", false + } + + if diffArgs[0] != '"' && diffArgs[length-1] != '"' { + // Both filenames are unquoted. + firstSpace := strings.IndexByte(diffArgs, ' ') + if firstSpace <= 0 || firstSpace == length-1 { + return "", "", false + } + + secondSpace := strings.IndexByte(diffArgs[firstSpace+1:], ' ') + if secondSpace == -1 { + if diffArgs[firstSpace+1] == '"' { + // The second filename begins with '"', but doesn't end with one. + return "", "", false + } + return diffArgs[:firstSpace], diffArgs[firstSpace+1:], true + } + + // One or both filenames contain a space, but the names are + // unquoted. 
Here, the 'diff --git' syntax is ambiguous, and + // we have to obtain the filenames elsewhere (e.g. from the + // hunk headers or extended headers). HOWEVER, if the file + // is newly created and empty, there IS no other place to + // find the filename. In this case, the two filenames are + // identical (except for the leading 'a/' prefix), and we have + // to handle that case here. + first := diffArgs[:length/2] + second := diffArgs[length/2+1:] + + // If the two strings could be equal, based on length, proceed. + if length%2 == 1 { + // If the name minus the a/ b/ prefixes is equal, proceed. + if len(first) >= 3 && first[1] == '/' && first[1:] == second[1:] { + return first, second, true + } + // If the names don't have the a/ and b/ prefixes and they're equal, proceed. + if !(first[:2] == "a/" && second[:2] == "b/") && first == second { + return first, second, true + } + } + + // The syntax is (unfortunately) valid, but we could not extract + // the filenames. + return "", "", true + } + + if diffArgs[0] == '"' { + first, remainder, err := readQuotedFilename(diffArgs) + if err != nil || len(remainder) < 2 || remainder[0] != ' ' { + return "", "", false + } + if remainder[1] == '"' { + second, remainder, err := readQuotedFilename(remainder[1:]) + if remainder != "" || err != nil { + return "", "", false + } + return first, second, true + } + return first, remainder[1:], true + } + + // In this case, second argument MUST be quoted (or it's a syntax error) + i := strings.IndexByte(diffArgs, '"') + if i == -1 || i+2 >= length || diffArgs[i-1] != ' ' { + return "", "", false + } + + second, remainder, err := readQuotedFilename(diffArgs[i:]) + if remainder != "" || err != nil { + return "", "", false + } + return diffArgs[:i-1], second, true +} + // handleEmpty detects when FileDiff was an empty diff and will not have any hunks // that follow. It updates fd fields from the parsed extended headers. 
func handleEmpty(fd *FileDiff) (wasEmpty bool) { - var err error lineCount := len(fd.Extended) if lineCount > 0 && !strings.HasPrefix(fd.Extended[0], "diff --git ") { return false } - switch { - case (lineCount == 3 || lineCount == 4 && strings.HasPrefix(fd.Extended[3], "Binary files ") || lineCount > 4 && strings.HasPrefix(fd.Extended[3], "GIT binary patch")) && - strings.HasPrefix(fd.Extended[1], "new file mode "): - names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) + lineHasPrefix := func(idx int, prefix string) bool { + return strings.HasPrefix(fd.Extended[idx], prefix) + } + + linesHavePrefixes := func(idx1 int, prefix1 string, idx2 int, prefix2 string) bool { + return lineHasPrefix(idx1, prefix1) && lineHasPrefix(idx2, prefix2) + } + + isCopy := (lineCount == 4 && linesHavePrefixes(2, "copy from ", 3, "copy to ")) || + (lineCount == 6 && linesHavePrefixes(2, "copy from ", 3, "copy to ") && lineHasPrefix(5, "Binary files ")) || + (lineCount == 6 && linesHavePrefixes(1, "old mode ", 2, "new mode ") && linesHavePrefixes(4, "copy from ", 5, "copy to ")) + + isRename := (lineCount == 4 && linesHavePrefixes(2, "rename from ", 3, "rename to ")) || + (lineCount == 5 && linesHavePrefixes(2, "rename from ", 3, "rename to ") && lineHasPrefix(4, "Binary files ")) || + (lineCount == 6 && linesHavePrefixes(2, "rename from ", 3, "rename to ") && lineHasPrefix(5, "Binary files ")) || + (lineCount == 6 && linesHavePrefixes(1, "old mode ", 2, "new mode ") && linesHavePrefixes(4, "rename from ", 5, "rename to ")) + + isDeletedFile := (lineCount == 3 || lineCount == 4 && lineHasPrefix(3, "Binary files ") || lineCount > 4 && lineHasPrefix(3, "GIT binary patch")) && + lineHasPrefix(1, "deleted file mode ") + + isNewFile := (lineCount == 3 || lineCount == 4 && lineHasPrefix(3, "Binary files ") || lineCount > 4 && lineHasPrefix(3, "GIT binary patch")) && + lineHasPrefix(1, "new file mode ") + + isModeChange := lineCount == 3 && linesHavePrefixes(1, "old mode ", 2, "new mode ") + + isBinaryPatch := lineCount == 3 && lineHasPrefix(2, "Binary files ") || lineCount > 3 && lineHasPrefix(2, "GIT binary patch") + + if !isModeChange && !isCopy && !isRename && !isBinaryPatch && !isNewFile && !isDeletedFile { + return false + } + + var success bool + fd.OrigName, fd.NewName, success = parseDiffGitArgs(fd.Extended[0][len("diff --git "):]) + if isNewFile { fd.OrigName = "/dev/null" - fd.NewName, err = strconv.Unquote(names[1]) - if err != nil { - fd.NewName = names[1] - } - return true - case (lineCount == 3 || lineCount == 4 && strings.HasPrefix(fd.Extended[3], "Binary files ") || lineCount > 4 && strings.HasPrefix(fd.Extended[3], "GIT binary patch")) && - strings.HasPrefix(fd.Extended[1], "deleted file mode "): + } - names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) - fd.OrigName, err = strconv.Unquote(names[0]) - if err != nil { - fd.OrigName = names[0] - } + if isDeletedFile { fd.NewName = "/dev/null" - return true - case lineCount == 4 && strings.HasPrefix(fd.Extended[2], "rename from ") && strings.HasPrefix(fd.Extended[3], "rename to "): - names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) - fd.OrigName, err = strconv.Unquote(names[0]) - if err != nil { - fd.OrigName = names[0] - } - fd.NewName, err = strconv.Unquote(names[1]) - if err != nil { - fd.NewName = names[1] - } - return true - case lineCount == 6 && strings.HasPrefix(fd.Extended[5], "Binary files ") && strings.HasPrefix(fd.Extended[2], "rename from ") && strings.HasPrefix(fd.Extended[3], "rename 
to "): - names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) - fd.OrigName = names[0] - fd.NewName = names[1] - return true - case lineCount == 3 && strings.HasPrefix(fd.Extended[2], "Binary files ") || lineCount > 3 && strings.HasPrefix(fd.Extended[2], "GIT binary patch"): - names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) - fd.OrigName, err = strconv.Unquote(names[0]) - if err != nil { - fd.OrigName = names[0] + } + + // For ambiguous 'diff --git' lines, try to reconstruct filenames using extended headers. + if success && (isCopy || isRename) && fd.OrigName == "" && fd.NewName == "" { + diffArgs := fd.Extended[0][len("diff --git "):] + + tryReconstruct := func(header string, prefix string, whichFile int, result *string) { + if !strings.HasPrefix(header, prefix) { + return + } + rawFilename := header[len(prefix):] + + // extract the filename prefix (e.g. "a/") from the 'diff --git' line. + var prefixLetterIndex int + if whichFile == 1 { + prefixLetterIndex = 0 + } else if whichFile == 2 { + prefixLetterIndex = len(diffArgs) - len(rawFilename) - 2 + } + if prefixLetterIndex < 0 || diffArgs[prefixLetterIndex+1] != '/' { + return + } + + *result = diffArgs[prefixLetterIndex:prefixLetterIndex+2] + rawFilename } - fd.NewName, err = strconv.Unquote(names[1]) - if err != nil { - fd.NewName = names[1] + + for _, header := range fd.Extended { + tryReconstruct(header, "copy from ", 1, &fd.OrigName) + tryReconstruct(header, "copy to ", 2, &fd.NewName) + tryReconstruct(header, "rename from ", 1, &fd.OrigName) + tryReconstruct(header, "rename to ", 2, &fd.NewName) } - return true - default: - return false } + return success } var ( @@ -447,7 +597,7 @@ func ParseHunks(diff []byte) ([]*Hunk, error) { // NewHunksReader returns a new HunksReader that reads unified diff hunks // from r. func NewHunksReader(r io.Reader) *HunksReader { - return &HunksReader{reader: bufio.NewReader(r)} + return &HunksReader{reader: &lineReader{reader: bufio.NewReader(r)}} } // A HunksReader reads hunks from a unified diff. @@ -455,7 +605,7 @@ type HunksReader struct { line int offset int64 hunk *Hunk - reader *bufio.Reader + reader *lineReader nextHunkHeaderLine []byte } @@ -474,7 +624,7 @@ func (r *HunksReader) ReadHunk() (*Hunk, error) { line = r.nextHunkHeaderLine r.nextHunkHeaderLine = nil } else { - line, err = readLine(r.reader) + line, err = r.reader.readLine() if err != nil { if err == io.EOF && r.hunk != nil { return r.hunk, nil @@ -518,12 +668,15 @@ func (r *HunksReader) ReadHunk() (*Hunk, error) { // If the line starts with `---` and the next one with `+++` we're // looking at a non-extended file header and need to abort. if bytes.HasPrefix(line, []byte("---")) { - ok, err := peekPrefix(r.reader, "+++") + ok, err := r.reader.nextLineStartsWith("+++") if err != nil { return r.hunk, err } if ok { - return r.hunk, &ParseError{r.line, r.offset, &ErrBadHunkLine{Line: line}} + ok2, _ := r.reader.nextNextLineStartsWith(string(hunkPrefix)) + if ok2 { + return r.hunk, &ParseError{r.line, r.offset, &ErrBadHunkLine{Line: line}} + } } } @@ -593,19 +746,6 @@ func linePrefix(c byte) bool { return false } -// peekPrefix peeks into the given reader to check whether the next -// bytes match the given prefix. 
-func peekPrefix(reader *bufio.Reader, prefix string) (bool, error) { - next, err := reader.Peek(len(prefix)) - if err != nil { - if err == io.EOF { - return false, nil - } - return false, err - } - return bytes.HasPrefix(next, []byte(prefix)), nil -} - // normalizeHeader takes a header of the form: // "@@ -linestart[,chunksize] +linestart[,chunksize] @@ section" // and returns two strings, with the first in the form: diff --git a/vendor/github.com/sourcegraph/go-diff/diff/reader_util.go b/vendor/github.com/sourcegraph/go-diff/diff/reader_util.go index 395fb7ba..45300252 100644 --- a/vendor/github.com/sourcegraph/go-diff/diff/reader_util.go +++ b/vendor/github.com/sourcegraph/go-diff/diff/reader_util.go @@ -2,9 +2,92 @@ package diff import ( "bufio" + "bytes" + "errors" "io" ) +var ErrLineReaderUninitialized = errors.New("line reader not initialized") + +func newLineReader(r io.Reader) *lineReader { + return &lineReader{reader: bufio.NewReader(r)} +} + +// lineReader is a wrapper around a bufio.Reader that caches the next line to +// provide lookahead functionality for the next two lines. +type lineReader struct { + reader *bufio.Reader + + cachedNextLine []byte + cachedNextLineErr error +} + +// readLine returns the next unconsumed line and advances the internal cache of +// the lineReader. +func (l *lineReader) readLine() ([]byte, error) { + if l.cachedNextLine == nil && l.cachedNextLineErr == nil { + l.cachedNextLine, l.cachedNextLineErr = readLine(l.reader) + } + + if l.cachedNextLineErr != nil { + return nil, l.cachedNextLineErr + } + + next := l.cachedNextLine + + l.cachedNextLine, l.cachedNextLineErr = readLine(l.reader) + + return next, nil +} + +// nextLineStartsWith looks at the line that would be returned by the next call +// to readLine to check whether it has the given prefix. +// +// io.EOF and bufio.ErrBufferFull errors are ignored so that the function can +// be used when at the end of the file. +func (l *lineReader) nextLineStartsWith(prefix string) (bool, error) { + if l.cachedNextLine == nil && l.cachedNextLineErr == nil { + l.cachedNextLine, l.cachedNextLineErr = readLine(l.reader) + } + + return l.lineHasPrefix(l.cachedNextLine, prefix, l.cachedNextLineErr) +} + +// nextNextLineStartsWith checks the prefix of the line *after* the line that +// would be returned by the next readLine. +// +// io.EOF and bufio.ErrBufferFull errors are ignored so that the function can +// be used when at the end of the file. +// +// The lineReader MUST be initialized by calling readLine at least once before +// calling nextLineStartsWith. Otherwise ErrLineReaderUninitialized will be +// returned. +func (l *lineReader) nextNextLineStartsWith(prefix string) (bool, error) { + if l.cachedNextLine == nil && l.cachedNextLineErr == nil { + l.cachedNextLine, l.cachedNextLineErr = readLine(l.reader) + } + + next, err := l.reader.Peek(len(prefix)) + return l.lineHasPrefix(next, prefix, err) +} + +// lineHasPrefix checks whether the given line has the given prefix with +// bytes.HasPrefix. +// +// The readErr should be the error that was returned when the line was read. +// lineHasPrefix checks the error to adjust its return value to, e.g., return +// false and ignore the error when readErr is io.EOF. 
+func (l *lineReader) lineHasPrefix(line []byte, prefix string, readErr error) (bool, error) { + if readErr != nil { + if readErr == io.EOF || readErr == bufio.ErrBufferFull { + return false, nil + } + return false, readErr + } + + return bytes.HasPrefix(line, []byte(prefix)), nil +} + // readLine is a helper that mimics the functionality of calling bufio.Scanner.Scan() and // bufio.Scanner.Bytes(), but without the token size limitation. It will read and return // the next line in the Reader with the trailing newline stripped. It will return an diff --git a/vendor/github.com/t-yuki/gocover-cobertura/.travis.yml b/vendor/github.com/t-yuki/gocover-cobertura/.travis.yml new file mode 100644 index 00000000..f17bb616 --- /dev/null +++ b/vendor/github.com/t-yuki/gocover-cobertura/.travis.yml @@ -0,0 +1,12 @@ +language: go +go: + - 1.6 + - 1.7 + - 1.8 + - tip + +sudo: false +before_install: + - go get github.com/mattn/goveralls +script: + - $GOPATH/bin/goveralls -service=travis-ci diff --git a/vendor/github.com/t-yuki/gocover-cobertura/LICENSE b/vendor/github.com/t-yuki/gocover-cobertura/LICENSE new file mode 100644 index 00000000..7ec1b3d8 --- /dev/null +++ b/vendor/github.com/t-yuki/gocover-cobertura/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2013 Yukinari Toyota + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/t-yuki/gocover-cobertura/README.md b/vendor/github.com/t-yuki/gocover-cobertura/README.md new file mode 100644 index 00000000..60ab1dbe --- /dev/null +++ b/vendor/github.com/t-yuki/gocover-cobertura/README.md @@ -0,0 +1,35 @@ +[![Build Status](https://travis-ci.org/t-yuki/gocover-cobertura.svg?branch=master)](https://travis-ci.org/t-yuki/gocover-cobertura) +[![Coverage Status](https://coveralls.io/repos/github/t-yuki/gocover-cobertura/badge.svg?branch=master)](https://coveralls.io/github/t-yuki/gocover-cobertura?branch=master) + +go tool cover XML (Cobertura) export +==================================== + +This is a simple helper tool for generating XML output in [Cobertura](http://cobertura.sourceforge.net/) format +for CIs like [Jenkins](https://wiki.jenkins-ci.org/display/JENKINS/Cobertura+Plugin) and others +from [go tool cover](https://code.google.com/p/go.tools/) output. 
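[Editor's note] The lineReader introduced in the go-diff change above replaces raw bufio peeking with a cached next line, so callers can test a line's prefix without consuming it. A stripped-down sketch of that caching idea follows; the type and method names are illustrative, not go-diff's API, and error handling is reduced to the minimum needed to run.

package main

import (
	"bufio"
	"bytes"
	"fmt"
	"strings"
)

// peekReader caches one line of lookahead on top of a bufio.Reader.
type peekReader struct {
	r      *bufio.Reader
	cached []byte
	err    error
}

// fill populates the one-line cache if it is empty and no error is pending.
func (p *peekReader) fill() {
	if p.cached == nil && p.err == nil {
		p.cached, p.err = p.r.ReadBytes('\n')
	}
}

// nextLineStartsWith reports whether the next unconsumed line begins with prefix.
func (p *peekReader) nextLineStartsWith(prefix string) bool {
	p.fill()
	return p.err == nil && bytes.HasPrefix(p.cached, []byte(prefix))
}

// readLine returns the next line without its trailing newline and clears the cache.
func (p *peekReader) readLine() ([]byte, error) {
	p.fill()
	line, err := p.cached, p.err
	p.cached, p.err = nil, nil
	return bytes.TrimSuffix(line, []byte("\n")), err
}

func main() {
	p := &peekReader{r: bufio.NewReader(strings.NewReader("--- a/file\n+++ b/file\n"))}
	fmt.Println(p.nextLineStartsWith("---")) // true: peeking does not consume the line
	line, _ := p.readLine()
	fmt.Printf("%s\n", line) // --- a/file
}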
+ +Installation +------------ + +Just type the following to install the program and its dependencies: + + $ go get code.google.com/p/go.tools/cmd/cover + $ go get github.com/t-yuki/gocover-cobertura + +Usage +----- + +`gocover-cobertura` reads from the standard input: + + $ go test -coverprofile=coverage.txt -covermode count github.com/gorilla/mux + $ gocover-cobertura < coverage.txt > coverage.xml + +Authors +------- + +* [Yukinari Toyota (t-yuki)](https://github.com/t-yuki) + +Thanks +------ + +This tool is originated from [gocov-xml](https://github.com/AlekSi/gocov-xml) by [Alexey Palazhchenko (AlekSi)](https://github.com/AlekSi) diff --git a/vendor/github.com/t-yuki/gocover-cobertura/cobertura.go b/vendor/github.com/t-yuki/gocover-cobertura/cobertura.go new file mode 100644 index 00000000..8556dc56 --- /dev/null +++ b/vendor/github.com/t-yuki/gocover-cobertura/cobertura.go @@ -0,0 +1,178 @@ +package main + +import ( + "encoding/xml" +) + +type Coverage struct { + XMLName xml.Name `xml:"coverage"` + LineRate float32 `xml:"line-rate,attr"` + BranchRate float32 `xml:"branch-rate,attr"` + Version string `xml:"version,attr"` + Timestamp int64 `xml:"timestamp,attr"` + LinesCovered int64 `xml:"lines-covered,attr"` + LinesValid int64 `xml:"lines-valid,attr"` + BranchesCovered int64 `xml:"branches-covered,attr"` + BranchesValid int64 `xml:"branches-valid,attr"` + Complexity float32 `xml:"complexity,attr"` + Sources []*Source `xml:"sources>source"` + Packages []*Package `xml:"packages>package"` +} + +type Source struct { + Path string `xml:",chardata"` +} + +type Package struct { + Name string `xml:"name,attr"` + LineRate float32 `xml:"line-rate,attr"` + BranchRate float32 `xml:"branch-rate,attr"` + Complexity float32 `xml:"complexity,attr"` + Classes []*Class `xml:"classes>class"` +} + +type Class struct { + Name string `xml:"name,attr"` + Filename string `xml:"filename,attr"` + LineRate float32 `xml:"line-rate,attr"` + BranchRate float32 `xml:"branch-rate,attr"` + Complexity float32 `xml:"complexity,attr"` + Methods []*Method `xml:"methods>method"` + Lines Lines `xml:"lines>line"` +} + +type Method struct { + Name string `xml:"name,attr"` + Signature string `xml:"signature,attr"` + LineRate float32 `xml:"line-rate,attr"` + BranchRate float32 `xml:"branch-rate,attr"` + Complexity float32 `xml:"complexity,attr"` + Lines Lines `xml:"lines>line"` +} + +type Line struct { + Number int `xml:"number,attr"` + Hits int64 `xml:"hits,attr"` +} + +// Lines is a slice of Line pointers, with some convenience methods +type Lines []*Line + +// HitRate returns a float32 from 0.0 to 1.0 representing what fraction of lines +// have hits +func (lines Lines) HitRate() (hitRate float32) { + return float32(lines.NumLinesWithHits()) / float32(len(lines)) +} + +// NumLines returns the number of lines +func (lines Lines) NumLines() int64 { + return int64(len(lines)) +} + +// NumLinesWithHits returns the number of lines with a hit count > 0 +func (lines Lines) NumLinesWithHits() (numLinesWithHits int64) { + for _, line := range lines { + if line.Hits > 0 { + numLinesWithHits++ + } + } + return numLinesWithHits +} + +// AddOrUpdateLine adds a line if it is a different line than the last line recorded. 
+// If it's the same line as the last line recorded then we update the hits down +// if the new hits is less; otherwise just leave it as-is +func (lines *Lines) AddOrUpdateLine(lineNumber int, hits int64) { + if len(*lines) > 0 { + lastLine := (*lines)[len(*lines)-1] + if lineNumber == lastLine.Number { + if hits < lastLine.Hits { + lastLine.Hits = hits + } + return + } + } + *lines = append(*lines, &Line{Number: lineNumber, Hits: hits}) +} + +// HitRate returns a float32 from 0.0 to 1.0 representing what fraction of lines +// have hits +func (method Method) HitRate() float32 { + return method.Lines.HitRate() +} + +// NumLines returns the number of lines +func (method Method) NumLines() int64 { + return method.Lines.NumLines() +} + +// NumLinesWithHits returns the number of lines with a hit count > 0 +func (method Method) NumLinesWithHits() int64 { + return method.Lines.NumLinesWithHits() +} + +// HitRate returns a float32 from 0.0 to 1.0 representing what fraction of lines +// have hits +func (class Class) HitRate() float32 { + return float32(class.NumLinesWithHits()) / float32(class.NumLines()) +} + +// NumLines returns the number of lines +func (class Class) NumLines() (numLines int64) { + for _, method := range class.Methods { + numLines += method.NumLines() + } + return numLines +} + +// NumLinesWithHits returns the number of lines with a hit count > 0 +func (class Class) NumLinesWithHits() (numLinesWithHits int64) { + for _, method := range class.Methods { + numLinesWithHits += method.NumLinesWithHits() + } + return numLinesWithHits +} + +// HitRate returns a float32 from 0.0 to 1.0 representing what fraction of lines +// have hits +func (pkg Package) HitRate() float32 { + return float32(pkg.NumLinesWithHits()) / float32(pkg.NumLines()) +} + +// NumLines returns the number of lines +func (pkg Package) NumLines() (numLines int64) { + for _, class := range pkg.Classes { + numLines += class.NumLines() + } + return numLines +} + +// NumLinesWithHits returns the number of lines with a hit count > 0 +func (pkg Package) NumLinesWithHits() (numLinesWithHits int64) { + for _, class := range pkg.Classes { + numLinesWithHits += class.NumLinesWithHits() + } + return numLinesWithHits +} + +// HitRate returns a float32 from 0.0 to 1.0 representing what fraction of lines +// have hits +func (cov Coverage) HitRate() float32 { + return float32(cov.NumLinesWithHits()) / float32(cov.NumLines()) +} + +// NumLines returns the number of lines +func (cov Coverage) NumLines() (numLines int64) { + for _, pkg := range cov.Packages { + numLines += pkg.NumLines() + } + return numLines +} + +// NumLinesWithHits returns the number of lines with a hit count > 0 +func (cov Coverage) NumLinesWithHits() (numLinesWithHits int64) { + for _, pkg := range cov.Packages { + numLinesWithHits += pkg.NumLinesWithHits() + } + return numLinesWithHits +} diff --git a/vendor/github.com/t-yuki/gocover-cobertura/gocover-cobertura.go b/vendor/github.com/t-yuki/gocover-cobertura/gocover-cobertura.go new file mode 100644 index 00000000..e64b5de0 --- /dev/null +++ b/vendor/github.com/t-yuki/gocover-cobertura/gocover-cobertura.go @@ -0,0 +1,176 @@ +package main + +import ( + "encoding/xml" + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "io" + "io/ioutil" + "os" + "path/filepath" + "strings" + "time" +) + +const coberturaDTDDecl = "\n" + +func main() { + convert(os.Stdin, os.Stdout) +} + +func convert(in io.Reader, out io.Writer) { + profiles, err := ParseProfiles(in) + if err != nil { + panic("Can't 
parse profiles") + } + + srcDirs := build.Default.SrcDirs() + sources := make([]*Source, len(srcDirs)) + for i, dir := range srcDirs { + sources[i] = &Source{dir} + } + + coverage := Coverage{Sources: sources, Packages: nil, Timestamp: time.Now().UnixNano() / int64(time.Millisecond)} + coverage.parseProfiles(profiles) + + fmt.Fprintf(out, xml.Header) + fmt.Fprintf(out, coberturaDTDDecl) + + encoder := xml.NewEncoder(out) + encoder.Indent("", "\t") + err = encoder.Encode(coverage) + if err != nil { + panic(err) + } + + fmt.Fprintln(out) +} + +func (cov *Coverage) parseProfiles(profiles []*Profile) error { + cov.Packages = []*Package{} + for _, profile := range profiles { + cov.parseProfile(profile) + } + cov.LinesValid = cov.NumLines() + cov.LinesCovered = cov.NumLinesWithHits() + cov.LineRate = cov.HitRate() + return nil +} + +func (cov *Coverage) parseProfile(profile *Profile) error { + fileName := profile.FileName + absFilePath, err := findFile(fileName) + if err != nil { + return err + } + fset := token.NewFileSet() + parsed, err := parser.ParseFile(fset, absFilePath, nil, 0) + if err != nil { + return err + } + data, err := ioutil.ReadFile(absFilePath) + if err != nil { + return err + } + + pkgPath, _ := filepath.Split(fileName) + pkgPath = strings.TrimRight(pkgPath, string(os.PathSeparator)) + + var pkg *Package + for _, p := range cov.Packages { + if p.Name == pkgPath { + pkg = p + } + } + if pkg == nil { + pkg = &Package{Name: pkgPath, Classes: []*Class{}} + cov.Packages = append(cov.Packages, pkg) + } + visitor := &fileVisitor{ + fset: fset, + fileName: fileName, + fileData: data, + classes: make(map[string]*Class), + pkg: pkg, + profile: profile, + } + ast.Walk(visitor, parsed) + pkg.LineRate = pkg.HitRate() + return nil +} + +type fileVisitor struct { + fset *token.FileSet + fileName string + fileData []byte + pkg *Package + classes map[string]*Class + profile *Profile +} + +func (v *fileVisitor) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl: + class := v.class(n) + method := v.method(n) + method.LineRate = method.Lines.HitRate() + class.Methods = append(class.Methods, method) + for _, line := range method.Lines { + class.Lines = append(class.Lines, line) + } + class.LineRate = class.Lines.HitRate() + } + return v +} + +func (v *fileVisitor) method(n *ast.FuncDecl) *Method { + method := &Method{Name: n.Name.Name} + method.Lines = []*Line{} + + start := v.fset.Position(n.Pos()) + end := v.fset.Position(n.End()) + startLine := start.Line + startCol := start.Column + endLine := end.Line + endCol := end.Column + // The blocks are sorted, so we can stop counting as soon as we reach the end of the relevant block. + for _, b := range v.profile.Blocks { + if b.StartLine > endLine || (b.StartLine == endLine && b.StartCol >= endCol) { + // Past the end of the function. 
+ break + } + if b.EndLine < startLine || (b.EndLine == startLine && b.EndCol <= startCol) { + // Before the beginning of the function + continue + } + for i := b.StartLine; i <= b.EndLine; i++ { + method.Lines.AddOrUpdateLine(i, int64(b.Count)) + } + } + return method +} + +func (v *fileVisitor) class(n *ast.FuncDecl) *Class { + className := v.recvName(n) + var class *Class = v.classes[className] + if class == nil { + class = &Class{Name: className, Filename: v.fileName, Methods: []*Method{}, Lines: []*Line{}} + v.classes[className] = class + v.pkg.Classes = append(v.pkg.Classes, class) + } + return class +} + +func (v *fileVisitor) recvName(n *ast.FuncDecl) string { + if n.Recv == nil { + return "-" + } + recv := n.Recv.List[0].Type + start := v.fset.Position(recv.Pos()) + end := v.fset.Position(recv.End()) + name := string(v.fileData[start.Offset:end.Offset]) + return strings.TrimSpace(strings.TrimLeft(name, "*")) +} diff --git a/vendor/github.com/t-yuki/gocover-cobertura/profile.go b/vendor/github.com/t-yuki/gocover-cobertura/profile.go new file mode 100644 index 00000000..99cbac23 --- /dev/null +++ b/vendor/github.com/t-yuki/gocover-cobertura/profile.go @@ -0,0 +1,202 @@ +// Imported from https://code.google.com/p/go/source/browse/cmd/cover/profile.go?repo=tools&r=c10a9dd5e0b0a859a8385b6f004584cb083a3934 + +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import ( + "bufio" + "fmt" + "go/build" + "io" + "math" + "os" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" +) + +// Profile represents the profiling data for a specific file. +type Profile struct { + FileName string + Mode string + Blocks []ProfileBlock +} + +// ProfileBlock represents a single block of profiling data. +type ProfileBlock struct { + StartLine, StartCol int + EndLine, EndCol int + NumStmt, Count int +} + +type byFileName []*Profile + +func (p byFileName) Len() int { return len(p) } +func (p byFileName) Less(i, j int) bool { return p[i].FileName < p[j].FileName } +func (p byFileName) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +// ParseProfiles parses profile data from the given Reader and returns a +// Profile for each file. +func ParseProfiles(in io.Reader) ([]*Profile, error) { + files := make(map[string]*Profile) + // First line is "mode: foo", where foo is "set", "count", or "atomic". + // Rest of file is in the format + // encoding/base64/base64.go:34.44,37.40 3 1 + // where the fields are: name.go:line.column,line.column numberOfStatements count + s := bufio.NewScanner(in) + mode := "" + for s.Scan() { + line := s.Text() + if mode == "" { + const p = "mode: " + if !strings.HasPrefix(line, p) || line == p { + return nil, fmt.Errorf("bad mode line: %v", line) + } + mode = line[len(p):] + continue + } + m := lineRe.FindStringSubmatch(line) + if m == nil { + return nil, fmt.Errorf("line %q doesn't match expected format: %v", m, lineRe) + } + fn := m[1] + p := files[fn] + if p == nil { + p = &Profile{ + FileName: fn, + Mode: mode, + } + files[fn] = p + } + p.Blocks = append(p.Blocks, ProfileBlock{ + StartLine: toInt(m[2]), + StartCol: toInt(m[3]), + EndLine: toInt(m[4]), + EndCol: toInt(m[5]), + NumStmt: toInt(m[6]), + Count: toInt(m[7]), + }) + } + if err := s.Err(); err != nil { + return nil, err + } + for _, p := range files { + sort.Sort(blocksByStart(p.Blocks)) + } + // Generate a sorted slice. 
+ profiles := make([]*Profile, 0, len(files)) + for _, profile := range files { + profiles = append(profiles, profile) + } + sort.Sort(byFileName(profiles)) + return profiles, nil +} + +type blocksByStart []ProfileBlock + +func (b blocksByStart) Len() int { return len(b) } +func (b blocksByStart) Swap(i, j int) { b[i], b[j] = b[j], b[i] } +func (b blocksByStart) Less(i, j int) bool { + bi, bj := b[i], b[j] + return bi.StartLine < bj.StartLine || bi.StartLine == bj.StartLine && bi.StartCol < bj.StartCol +} + +var lineRe = regexp.MustCompile(`^(.+):([0-9]+).([0-9]+),([0-9]+).([0-9]+) ([0-9]+) ([0-9]+)$`) + +func toInt(s string) int { + i, err := strconv.Atoi(s) + if err != nil { + panic(err) + } + return i +} + +// Boundary represents the position in a source file of the beginning or end of a +// block as reported by the coverage profile. In HTML mode, it will correspond to +// the opening or closing of a tag and will be used to colorize the source +type Boundary struct { + Offset int // Location as a byte offset in the source file. + Start bool // Is this the start of a block? + Count int // Event count from the cover profile. + Norm float64 // Count normalized to [0..1]. +} + +// Boundaries returns a Profile as a set of Boundary objects within the provided src. +func (p *Profile) Boundaries(src []byte) (boundaries []Boundary) { + // Find maximum count. + max := 0 + for _, b := range p.Blocks { + if b.Count > max { + max = b.Count + } + } + // Divisor for normalization. + divisor := math.Log(float64(max)) + + // boundary returns a Boundary, populating the Norm field with a normalized Count. + boundary := func(offset int, start bool, count int) Boundary { + b := Boundary{Offset: offset, Start: start, Count: count} + if !start || count == 0 { + return b + } + if max <= 1 { + b.Norm = 0.8 // Profile is in"set" mode; we want a heat map. Use cov8 in the CSS. + } else if count > 0 { + b.Norm = math.Log(float64(count)) / divisor + } + return b + } + + line, col := 1, 2 // TODO: Why is this 2? + for si, bi := 0, 0; si < len(src) && bi < len(p.Blocks); { + b := p.Blocks[bi] + if b.StartLine == line && b.StartCol == col { + boundaries = append(boundaries, boundary(si, true, b.Count)) + } + if b.EndLine == line && b.EndCol == col { + boundaries = append(boundaries, boundary(si, false, 0)) + bi++ + continue // Don't advance through src; maybe the next block starts here. + } + if src[si] == '\n' { + line++ + col = 0 + } + col++ + si++ + } + sort.Sort(boundariesByPos(boundaries)) + return +} + +type boundariesByPos []Boundary + +func (b boundariesByPos) Len() int { return len(b) } +func (b boundariesByPos) Swap(i, j int) { b[i], b[j] = b[j], b[i] } +func (b boundariesByPos) Less(i, j int) bool { + if b[i].Offset == b[j].Offset { + return !b[i].Start && b[j].Start + } + return b[i].Offset < b[j].Offset +} + +// findFile finds the location of the named file in GOROOT, GOPATH etc. 
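
The profile format that ParseProfiles expects is easiest to see with a concrete line. Below is a standalone check against the same lineRe pattern, using the sample line quoted in the ParseProfiles comment above; the pattern is copied verbatim, only the small main is added for illustration.

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as lineRe in profile.go above:
// file:startLine.startCol,endLine.endCol numberOfStatements count
var lineRe = regexp.MustCompile(`^(.+):([0-9]+).([0-9]+),([0-9]+).([0-9]+) ([0-9]+) ([0-9]+)$`)

func main() {
	m := lineRe.FindStringSubmatch("encoding/base64/base64.go:34.44,37.40 3 1")
	fmt.Println(m[1])       // encoding/base64/base64.go
	fmt.Println(m[2], m[3]) // 34 44  (start line, start column)
	fmt.Println(m[4], m[5]) // 37 40  (end line, end column)
	fmt.Println(m[6], m[7]) // 3 1    (statement count, hit count)
}
```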
+func findFile(file string) (string, error) { + if strings.HasPrefix(file, "_") { + file = file[1:] + } + if _, err := os.Stat(file); err == nil { + return file, nil + } + dir, file := filepath.Split(file) + pkg, err := build.Import(dir, ".", build.FindOnly) + if err != nil { + return "", fmt.Errorf("can't find %q: %v", file, err) + } + return filepath.Join(pkg.Dir, file), nil +} diff --git a/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go b/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go index a7ff30b4..acf7a039 100644 --- a/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go +++ b/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go @@ -64,6 +64,8 @@ func (r runner) run(pass *analysis.Pass) (interface{}, error) { field := resStruct.Field(i) if field.Id() == "Body" { r.bodyObj = field + + break } } if r.bodyObj == nil { @@ -75,21 +77,20 @@ func (r runner) run(pass *analysis.Pass) (interface{}, error) { bmthd := bodyItrf.Method(i) if bmthd.Id() == closeMethod { r.closeMthd = bmthd + + break } } r.skipFile = map[*ast.File]bool{} +FuncLoop: for _, f := range funcs { // skip if the function is just referenced - var isreffunc bool for i := 0; i < f.Signature.Results().Len(); i++ { if f.Signature.Results().At(i).Type().String() == r.resTyp.String() { - isreffunc = true + continue FuncLoop } } - if isreffunc { - continue - } for _, b := range f.Blocks { for i := range b.Instrs { @@ -144,11 +145,26 @@ func (r *runner) isopen(b *ssa.BasicBlock, i int) bool { } } - case *ssa.Call: // Indirect function call - if f, ok := resRef.Call.Value.(*ssa.Function); ok { + case *ssa.Call, *ssa.Defer: // Indirect function call + // Hacky way to extract CommonCall + var call ssa.CallCommon + switch rr := resRef.(type) { + case *ssa.Call: + call = rr.Call + case *ssa.Defer: + call = rr.Call + } + + if f, ok := call.Value.(*ssa.Function); ok { for _, b := range f.Blocks { - for i := range b.Instrs { - return r.isopen(b, i) + for i, bi := range b.Instrs { + if r.isCloseCall(bi) { + return false + } + + if r.isopen(b, i) { + return true + } } } } diff --git a/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go b/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go index 3d492ee9..6da17bd8 100644 --- a/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go +++ b/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go @@ -16,6 +16,7 @@ var DefaultIgnoreSigs = []string{ ".Errorf(", "errors.New(", "errors.Unwrap(", + "errors.Join(", ".Wrap(", ".Wrapf(", ".WithMessage(", @@ -74,7 +75,7 @@ type WrapcheckConfig struct { // to a underlying interface name, will ignore unwrapped errors returned from a // function whose call is defined on the given interface. // - // For example, an ignoreInterfaceRegexps of `[]string{"Transac(tor|tion)"}`` will ignore errors + // For example, an ignoreInterfaceRegexps of `[]string{"Transac(tor|tion)"}` will ignore errors // returned from any function whose call is defined on a interface named 'Transactor' // or 'Transaction' due to the name matching the regular expression `Transac(tor|tion)`. 
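
For context on the option documented here: wrapcheck reports an error that is returned directly from an interface method call, and ignoreInterfaceRegexps suppresses that report when the interface name matches. A hypothetical example, with the package and interface invented purely for illustration:

```go
package db

// Transactor is an invented interface standing in for whatever the linted
// code defines; only its name matters for ignoreInterfaceRegexps.
type Transactor interface {
	Commit() error
}

// wrapcheck reports the bare return below ("error returned from interface
// method should be wrapped"), because err comes straight from an interface
// method. With ignoreInterfaceRegexps set to ["Transac(tor|tion)"] the
// interface name matches and the report is suppressed.
func save(tx Transactor) error {
	if err := tx.Commit(); err != nil {
		return err
	}
	return nil
}
```

The change in this hunk additionally suppresses the report when the interface's package path matches one of the configured package globs, via the new containsMatchGlob(pkgGlobs, pkgPath) check.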
IgnoreInterfaceRegexps []string `mapstructure:"ignoreInterfaceRegexps" yaml:"ignoreInterfaceRegexps"` @@ -112,7 +113,6 @@ func run(cfg WrapcheckConfig) func(*analysis.Pass) (interface{}, error) { } if err == nil { ignorePackageGlobs, err = compileGlobs(cfg.IgnorePackageGlobs) - } return func(pass *analysis.Pass) (interface{}, error) { @@ -235,7 +235,16 @@ func run(cfg WrapcheckConfig) func(*analysis.Pass) (interface{}, error) { // Report unwrapped takes a call expression and an identifier and reports // if the call is unwrapped. -func reportUnwrapped(pass *analysis.Pass, call *ast.CallExpr, tokenPos token.Pos, cfg WrapcheckConfig, regexpsSig []*regexp.Regexp, regexpsInter []*regexp.Regexp, pkgGlobs []glob.Glob) { +func reportUnwrapped( + pass *analysis.Pass, + call *ast.CallExpr, + tokenPos token.Pos, + cfg WrapcheckConfig, + regexpsSig []*regexp.Regexp, + regexpsInter []*regexp.Regexp, + pkgGlobs []glob.Glob, +) { + sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { return @@ -243,7 +252,6 @@ func reportUnwrapped(pass *analysis.Pass, call *ast.CallExpr, tokenPos token.Pos // Check for ignored signatures fnSig := pass.TypesInfo.ObjectOf(sel.Sel).String() - if contains(cfg.IgnoreSigs, fnSig) { return } else if containsMatch(regexpsSig, fnSig) { @@ -254,9 +262,9 @@ func reportUnwrapped(pass *analysis.Pass, call *ast.CallExpr, tokenPos token.Pos // errors returned from interface types should be wrapped, unless ignored // as per `ignoreInterfaceRegexps` if isInterface(pass, sel) { + pkgPath := pass.TypesInfo.ObjectOf(sel.Sel).Pkg().Path() name := types.TypeString(pass.TypesInfo.TypeOf(sel.X), func(p *types.Package) string { return p.Name() }) - if containsMatch(regexpsInter, name) { - } else { + if !containsMatch(regexpsInter, name) && !containsMatchGlob(pkgGlobs, pkgPath) { pass.Reportf(tokenPos, "error returned from interface method should be wrapped: sig: %s", fnSig) return } @@ -305,7 +313,7 @@ func isFromOtherPkg(pass *analysis.Pass, sel *ast.SelectorExpr, pkgGlobs []glob. // `=`. This does not include `var` statements. This function will return nil if // the only declaration is a `var` (aka ValueSpec) declaration. func prevErrAssign(pass *analysis.Pass, file *ast.File, returnIdent *ast.Ident) *ast.AssignStmt { - // A slice containing all the assignments which contain an identifer + // A slice containing all the assignments which contain an identifier // referring to the source declaration of the error. This is to catch // cases where err is defined once, and then reassigned multiple times // within the same block. In these cases, we should check the method of @@ -319,6 +327,7 @@ func prevErrAssign(pass *analysis.Pass, file *ast.File, returnIdent *ast.Ident) if !isError(pass.TypesInfo.TypeOf(expr)) { continue } + if assIdent, ok := expr.(*ast.Ident); ok { if assIdent.Obj == nil || returnIdent.Obj == nil { // If we can't find the Obj for one of the identifiers, just skip @@ -342,6 +351,7 @@ func prevErrAssign(pass *analysis.Pass, file *ast.File, returnIdent *ast.Ident) if ass.Pos() > returnIdent.Pos() { break } + mostRecentAssign = ass } @@ -374,6 +384,7 @@ func containsMatchGlob(globs []glob.Glob, el string) bool { return true } } + return false } @@ -399,14 +410,14 @@ func isUnresolved(file *ast.File, ident *ast.Ident) bool { // compileRegexps compiles a set of regular expressions returning them for use, // or the first encountered error due to an invalid expression. 
func compileRegexps(regexps []string) ([]*regexp.Regexp, error) { - var compiledRegexps []*regexp.Regexp - for _, reg := range regexps { + compiledRegexps := make([]*regexp.Regexp, len(regexps)) + for idx, reg := range regexps { re, err := regexp.Compile(reg) if err != nil { return nil, fmt.Errorf("unable to compile regexp %s: %v\n", reg, err) } - compiledRegexps = append(compiledRegexps, re) + compiledRegexps[idx] = re } return compiledRegexps, nil @@ -415,14 +426,14 @@ func compileRegexps(regexps []string) ([]*regexp.Regexp, error) { // compileGlobs compiles a set of globs, returning them for use, // or the first encountered error due to an invalid expression. func compileGlobs(globs []string) ([]glob.Glob, error) { - var compiledGlobs []glob.Glob - for _, globString := range globs { + compiledGlobs := make([]glob.Glob, len(globs)) + for idx, globString := range globs { glob, err := glob.Compile(globString) if err != nil { return nil, fmt.Errorf("unable to compile globs %s: %v\n", glob, err) } - compiledGlobs = append(compiledGlobs, glob) + compiledGlobs[idx] = glob } return compiledGlobs, nil } diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/README.md b/vendor/github.com/tommy-muehle/go-mnd/v2/README.md index a29f266b..bca0815d 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/README.md +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/README.md @@ -63,7 +63,7 @@ go:lint:mnd: needs: [] image: golang:latest before_script: - - go get -u github.com/tommy-muehle/go-mnd/cmd/mnd + - go get -u github.com/tommy-muehle/go-mnd/v2/cmd/mnd - go mod tidy - go mod vendor script: diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go b/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go index e186028e..b9fc91e5 100644 --- a/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go @@ -28,6 +28,12 @@ func DefaultConfig() *Config { }, IgnoredFunctions: []*regexp.Regexp{ regexp.MustCompile(`time.Date`), + regexp.MustCompile(`strconv.FormatInt`), + regexp.MustCompile(`strconv.FormatUint`), + regexp.MustCompile(`strconv.FormatFloat`), + regexp.MustCompile(`strconv.ParseInt`), + regexp.MustCompile(`strconv.ParseUint`), + regexp.MustCompile(`strconv.ParseFloat`), }, } } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/timeformat/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/timeformat/doc.go new file mode 100644 index 00000000..5c665b29 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/timeformat/doc.go @@ -0,0 +1,15 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package timeformat defines an Analyzer that checks for the use +// of time.Format or time.Parse calls with a bad format. +// +// # Analyzer timeformat +// +// timeformat: check for calls of (time.Time).Format or time.Parse with 2006-02-01 +// +// The timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm) +// format. Internationally, "yyyy-dd-mm" does not occur in common calendar date +// standards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended. 
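
Concretely, this is the kind of call the new analyzer reports; the snippet below is made up for illustration and is not taken from any package in this diff.

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	now := time.Now()

	// Reported by timeformat as "2006-02-01 should be 2006-01-02":
	// the layout is yyyy-dd-mm, which is almost certainly not what was meant.
	fmt.Println(now.Format("2006-02-01"))

	// Not reported: the canonical yyyy-mm-dd reference layout.
	fmt.Println(now.Format("2006-01-02"))
}
```

For string literals the analyzer also attaches a SuggestedFix that rewrites the bad layout in place, as the timeformat.go added below shows.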
+package timeformat diff --git a/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go b/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go new file mode 100644 index 00000000..c45b9fa5 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go @@ -0,0 +1,133 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package timeformat defines an Analyzer that checks for the use +// of time.Format or time.Parse calls with a bad format. +package timeformat + +import ( + _ "embed" + "go/ast" + "go/constant" + "go/token" + "go/types" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" +) + +const badFormat = "2006-02-01" +const goodFormat = "2006-01-02" + +//go:embed doc.go +var doc string + +var Analyzer = &analysis.Analyzer{ + Name: "timeformat", + Doc: analysisutil.MustExtractDoc(doc, "timeformat"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/timeformat", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + // Note: (time.Time).Format is a method and can be a typeutil.Callee + // without directly importing "time". So we cannot just skip this package + // when !analysisutil.Imports(pass.Pkg, "time"). + // TODO(taking): Consider using a prepass to collect typeutil.Callees. + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + call := n.(*ast.CallExpr) + fn, ok := typeutil.Callee(pass.TypesInfo, call).(*types.Func) + if !ok { + return + } + if !isTimeDotFormat(fn) && !isTimeDotParse(fn) { + return + } + if len(call.Args) > 0 { + arg := call.Args[0] + badAt := badFormatAt(pass.TypesInfo, arg) + + if badAt > -1 { + // Check if it's a literal string, otherwise we can't suggest a fix. + if _, ok := arg.(*ast.BasicLit); ok { + pos := int(arg.Pos()) + badAt + 1 // +1 to skip the " or ` + end := pos + len(badFormat) + + pass.Report(analysis.Diagnostic{ + Pos: token.Pos(pos), + End: token.Pos(end), + Message: badFormat + " should be " + goodFormat, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace " + badFormat + " with " + goodFormat, + TextEdits: []analysis.TextEdit{{ + Pos: token.Pos(pos), + End: token.Pos(end), + NewText: []byte(goodFormat), + }}, + }}, + }) + } else { + pass.Reportf(arg.Pos(), badFormat+" should be "+goodFormat) + } + } + } + }) + return nil, nil +} + +func isTimeDotFormat(f *types.Func) bool { + if f.Name() != "Format" || f.Pkg().Path() != "time" { + return false + } + sig, ok := f.Type().(*types.Signature) + if !ok { + return false + } + // Verify that the receiver is time.Time. + recv := sig.Recv() + if recv == nil { + return false + } + named, ok := recv.Type().(*types.Named) + return ok && named.Obj().Name() == "Time" +} + +func isTimeDotParse(f *types.Func) bool { + if f.Name() != "Parse" || f.Pkg().Path() != "time" { + return false + } + // Verify that there is no receiver. + sig, ok := f.Type().(*types.Signature) + return ok && sig.Recv() == nil +} + +// badFormatAt return the start of a bad format in e or -1 if no bad format is found. 
+func badFormatAt(info *types.Info, e ast.Expr) int { + tv, ok := info.Types[e] + if !ok { // no type info, assume good + return -1 + } + + t, ok := tv.Type.(*types.Basic) + if !ok || t.Info()&types.IsString == 0 { + return -1 + } + + if tv.Value == nil { + return -1 + } + + return strings.Index(constant.StringVal(tv.Value), badFormat) +} diff --git a/vendor/honnef.co/go/tools/analysis/code/code.go b/vendor/honnef.co/go/tools/analysis/code/code.go index db7debc2..f200363d 100644 --- a/vendor/honnef.co/go/tools/analysis/code/code.go +++ b/vendor/honnef.co/go/tools/analysis/code/code.go @@ -17,7 +17,6 @@ import ( "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/pattern" - "golang.org/x/exp/typeparams" "golang.org/x/tools/go/analysis" ) @@ -146,7 +145,7 @@ func CallName(pass *analysis.Pass, call *ast.CallExpr) string { switch idx := fun.(type) { case *ast.IndexExpr: fun = idx.X - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: fun = idx.X } @@ -277,7 +276,7 @@ func MayHaveSideEffects(pass *analysis.Pass, expr ast.Expr, purity purity.Result return false case *ast.IndexExpr: return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Index, purity) - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: // In theory, none of the checks are necessary, as IndexListExpr only involves types. But there is no harm in // being safe. if MayHaveSideEffects(pass, expr.X, purity) { diff --git a/vendor/honnef.co/go/tools/analysis/code/visit.go b/vendor/honnef.co/go/tools/analysis/code/visit.go index f8bf2d16..0f0d644a 100644 --- a/vendor/honnef.co/go/tools/analysis/code/visit.go +++ b/vendor/honnef.co/go/tools/analysis/code/visit.go @@ -30,7 +30,7 @@ func Match(pass *analysis.Pass, q pattern.Pattern, node ast.Node) (*pattern.Matc // AST inspectors that already filter on nodes we're interested // in. m := &pattern.Matcher{TypesInfo: pass.TypesInfo} - ok := m.Match(q.Root, node) + ok := m.Match(q, node) return m, ok } diff --git a/vendor/honnef.co/go/tools/analysis/facts/deprecated/deprecated.go b/vendor/honnef.co/go/tools/analysis/facts/deprecated/deprecated.go index c558fabb..dd6d655c 100644 --- a/vendor/honnef.co/go/tools/analysis/facts/deprecated/deprecated.go +++ b/vendor/honnef.co/go/tools/analysis/facts/deprecated/deprecated.go @@ -48,6 +48,7 @@ func deprecated(pass *analysis.Pass) (interface{}, error) { } return "" } + doDocs := func(names []*ast.Ident, docs []*ast.CommentGroup) { alt := extractDeprecatedMessage(docs) if alt == "" { @@ -86,7 +87,15 @@ func deprecated(pass *analysis.Pass) (interface{}, error) { switch node.Tok { case token.TYPE, token.CONST, token.VAR: docs = append(docs, node.Doc) - return true + for i := range node.Specs { + switch n := node.Specs[i].(type) { + case *ast.ValueSpec: + names = append(names, n.Names...) 
+ case *ast.TypeSpec: + names = append(names, n.Name) + } + } + ret = true default: return false } diff --git a/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go b/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go index eb4c1bab..d296b5b0 100644 --- a/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go +++ b/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go @@ -230,6 +230,11 @@ func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{ return neverNil case *ir.TypeAssert, *ir.ChangeInterface, *ir.Field, *ir.Const, *ir.GenericConst, *ir.Index, *ir.MapLookup, *ir.Parameter, *ir.Recv, *ir.TypeSwitch: return nilly + case *ir.CompositeValue: + // We can get here via composite literals of type parameters, for which typeutil.IsPointerLike doesn't + // currently return false (see https://staticcheck.io/issues/1364). However, we only emit ir.CompositeValue + // for value types, so we know it can't be nil. + return neverNil default: panic(fmt.Sprintf("internal error: unhandled type %T", v)) } diff --git a/vendor/honnef.co/go/tools/analysis/facts/purity/purity.go b/vendor/honnef.co/go/tools/analysis/facts/purity/purity.go index 4afc7a6a..0f6895a8 100644 --- a/vendor/honnef.co/go/tools/analysis/facts/purity/purity.go +++ b/vendor/honnef.co/go/tools/analysis/facts/purity/purity.go @@ -1,5 +1,8 @@ package purity +// TODO(dh): we should split this into two facts, one tracking actual purity, and one tracking side-effects. A function +// that returns a heap allocation isn't pure, but it may be free of side effects. + import ( "go/types" "reflect" @@ -53,6 +56,52 @@ var pureStdlib = map[string]struct{}{ "strings.TrimSpace": {}, "strings.TrimSuffix": {}, "(*net/http.Request).WithContext": {}, + "time.Now": {}, + "time.Parse": {}, + "time.ParseInLocation": {}, + "time.Unix": {}, + "time.UnixMicro": {}, + "time.UnixMilli": {}, + "(time.Time).Add": {}, + "(time.Time).AddDate": {}, + "(time.Time).After": {}, + "(time.Time).Before": {}, + "(time.Time).Clock": {}, + "(time.Time).Compare": {}, + "(time.Time).Date": {}, + "(time.Time).Day": {}, + "(time.Time).Equal": {}, + "(time.Time).Format": {}, + "(time.Time).GoString": {}, + "(time.Time).GobEncode": {}, + "(time.Time).Hour": {}, + "(time.Time).ISOWeek": {}, + "(time.Time).In": {}, + "(time.Time).IsDST": {}, + "(time.Time).IsZero": {}, + "(time.Time).Local": {}, + "(time.Time).Location": {}, + "(time.Time).MarshalBinary": {}, + "(time.Time).MarshalJSON": {}, + "(time.Time).MarshalText": {}, + "(time.Time).Minute": {}, + "(time.Time).Month": {}, + "(time.Time).Nanosecond": {}, + "(time.Time).Round": {}, + "(time.Time).Second": {}, + "(time.Time).String": {}, + "(time.Time).Sub": {}, + "(time.Time).Truncate": {}, + "(time.Time).UTC": {}, + "(time.Time).Unix": {}, + "(time.Time).UnixMicro": {}, + "(time.Time).UnixMilli": {}, + "(time.Time).UnixNano": {}, + "(time.Time).Weekday": {}, + "(time.Time).Year": {}, + "(time.Time).YearDay": {}, + "(time.Time).Zone": {}, + "(time.Time).ZoneBounds": {}, } func purity(pass *analysis.Pass) (interface{}, error) { @@ -99,10 +148,26 @@ func purity(pass *analysis.Pass) (interface{}, error) { return false } + var isBasic func(typ types.Type) bool + isBasic = func(typ types.Type) bool { + switch u := typ.Underlying().(type) { + case *types.Basic: + return true + case *types.Struct: + for i := 0; i < u.NumFields(); i++ { + if !isBasic(u.Field(i).Type()) { + return false + } + } + return true + default: + return false + } + } + for _, param := range fn.Params { - // TODO(dh): 
this may not be strictly correct. pure code - // can, to an extent, operate on non-basic types. - if _, ok := param.Type().Underlying().(*types.Basic); !ok { + // TODO(dh): this may not be strictly correct. pure code can, to an extent, operate on non-basic types. + if !isBasic(param.Type()) { return false } } @@ -134,6 +199,18 @@ func purity(pass *analysis.Pass) (interface{}, error) { } return true } + + var isStackAddr func(ir.Value) bool + isStackAddr = func(v ir.Value) bool { + switch v := v.(type) { + case *ir.Alloc: + return !v.Heap + case *ir.FieldAddr: + return isStackAddr(v.X) + default: + return false + } + } for _, b := range fn.Blocks { for _, ins := range b.Instrs { switch ins := ins.(type) { @@ -154,13 +231,22 @@ func purity(pass *analysis.Pass) (interface{}, error) { case *ir.Panic: return false case *ir.Store: - return false + if !isStackAddr(ins.Addr) { + return false + } case *ir.FieldAddr: - return false + if !isStackAddr(ins.X) { + return false + } case *ir.Alloc: - return false + // TODO(dh): make use of proper escape analysis + if ins.Heap { + return false + } case *ir.Load: - return false + if !isStackAddr(ins.X) { + return false + } } } } diff --git a/vendor/honnef.co/go/tools/config/config.go b/vendor/honnef.co/go/tools/config/config.go index 8d9f084c..a815a8a8 100644 --- a/vendor/honnef.co/go/tools/config/config.go +++ b/vendor/honnef.co/go/tools/config/config.go @@ -206,7 +206,7 @@ func parseConfigs(dir string) ([]Config, error) { return nil, err } var cfg Config - _, err = toml.DecodeReader(f, &cfg) + _, err = toml.NewDecoder(f).Decode(&cfg) f.Close() if err != nil { if err, ok := err.(toml.ParseError); ok { diff --git a/vendor/honnef.co/go/tools/go/ast/astutil/util.go b/vendor/honnef.co/go/tools/go/ast/astutil/util.go index 176bcde1..e04e1fb0 100644 --- a/vendor/honnef.co/go/tools/go/ast/astutil/util.go +++ b/vendor/honnef.co/go/tools/go/ast/astutil/util.go @@ -6,8 +6,6 @@ import ( "go/token" "reflect" "strings" - - "golang.org/x/exp/typeparams" ) func IsIdent(expr ast.Expr, ident string) bool { @@ -132,7 +130,7 @@ func CopyExpr(node ast.Expr) (ast.Expr, bool) { cp.X, ok1 = CopyExpr(cp.X) cp.Index, ok2 = CopyExpr(cp.Index) return &cp, ok1 && ok2 - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: var ok bool cp := *node cp.X, ok = CopyExpr(cp.X) @@ -280,8 +278,8 @@ func Equal(a, b ast.Node) bool { case *ast.IndexExpr: b := b.(*ast.IndexExpr) return Equal(a.X, b.X) && Equal(a.Index, b.Index) - case *typeparams.IndexListExpr: - b := b.(*typeparams.IndexListExpr) + case *ast.IndexListExpr: + b := b.(*ast.IndexListExpr) if len(a.Indices) != len(b.Indices) { return false } diff --git a/vendor/honnef.co/go/tools/go/ir/blockopt.go b/vendor/honnef.co/go/tools/go/ir/blockopt.go index d7a0e356..53788611 100644 --- a/vendor/honnef.co/go/tools/go/ir/blockopt.go +++ b/vendor/honnef.co/go/tools/go/ir/blockopt.go @@ -31,7 +31,6 @@ func markReachable(b *BasicBlock) { // deleteUnreachableBlocks marks all reachable blocks of f and // eliminates (nils) all others, including possibly cyclic subgraphs. -// func deleteUnreachableBlocks(f *Function) { const white, black = 0, -1 // We borrow b.gaps temporarily as the mark bit. @@ -64,7 +63,6 @@ func deleteUnreachableBlocks(f *Function) { // jumpThreading attempts to apply simple jump-threading to block b, // in which a->b->c become a->c if b is just a Jump. // The result is true if the optimization was applied. 
-// func jumpThreading(f *Function, b *BasicBlock) bool { if b.Index == 0 { return false // don't apply to entry block @@ -118,7 +116,6 @@ func jumpThreading(f *Function, b *BasicBlock) bool { // fuseBlocks attempts to apply the block fusion optimization to block // a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1. // The result is true if the optimization was applied. -// func fuseBlocks(f *Function, a *BasicBlock) bool { if len(a.Succs) != 1 { return false @@ -167,7 +164,6 @@ func fuseBlocks(f *Function, a *BasicBlock) bool { // optimizeBlocks() performs some simple block optimizations on a // completed function: dead block elimination, block fusion, jump // threading. -// func optimizeBlocks(f *Function) { if debugBlockOpt { f.WriteTo(os.Stderr) diff --git a/vendor/honnef.co/go/tools/go/ir/builder.go b/vendor/honnef.co/go/tools/go/ir/builder.go index e5566ade..d56975b7 100644 --- a/vendor/honnef.co/go/tools/go/ir/builder.go +++ b/vendor/honnef.co/go/tools/go/ir/builder.go @@ -63,7 +63,6 @@ type builder struct { // to t or f depending on its value, performing various simplifications. // // Postcondition: fn.currentBlock is nil. -// func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) *If { switch e := e.(type) { case *ast.ParenExpr: @@ -102,7 +101,6 @@ func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) *If { // logicalBinop emits code to fn to evaluate e, a &&- or // ||-expression whose reified boolean value is wanted. // The value is returned. -// func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { rhs := fn.newBasicBlock("binop.rhs") done := fn.newBasicBlock("binop.done") @@ -161,7 +159,6 @@ func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { // Multi-result expressions include CallExprs in a multi-value // assignment or return statement, and "value,ok" uses of // TypeAssertExpr, IndexExpr (when X is a map), and Recv. -// func (b *builder) exprN(fn *Function, e ast.Expr) Value { typ := fn.Pkg.typeOf(e).(*types.Tuple) switch e := e.(type) { @@ -203,7 +200,6 @@ func (b *builder) exprN(fn *Function, e ast.Expr) Value { // The result is nil if no special handling was required; in this case // the caller should treat this like an ordinary library function // call. -// func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, source ast.Node) Value { switch obj.Name() { case "make": @@ -303,10 +299,10 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ // addressable expression e as being a potentially escaping pointer // value. For example, in this code: // -// a := A{ -// b: [1]B{B{c: 1}} -// } -// return &a.b[0].c +// a := A{ +// b: [1]B{B{c: 1}} +// } +// return &a.b[0].c // // the application of & causes a.b[0].c to have its address taken, // which means that ultimately the local variable a must be @@ -317,7 +313,6 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ // - &x, including when implicit in method call or composite literals. // - a[:] iff a is an array (not *array) // - references to variables in lexically enclosing functions. -// func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) (RET lvalue) { switch e := e.(type) { case *ast.Ident: @@ -472,7 +467,6 @@ func (sb *storebuf) emit(fn *Function) { // storebuf sb so that they can be executed later. This allows correct // in-place update of existing variables when the RHS is a composite // literal that may reference parts of the LHS. 
-// func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf, source ast.Node) { // Can we initialize it in place? if e, ok := unparen(e).(*ast.CompositeLit); ok { @@ -520,7 +514,7 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb * switch typeutil.CoreType(loc.typ()).Underlying().(type) { case *types.Struct, *types.Array: if sb != nil { - // Make sure we don't emit DebugRefs before the store has actually occured + // Make sure we don't emit DebugRefs before the store has actually occurred if ref := makeDebugRef(fn, e, addr, true); ref != nil { sb.storeDebugRef(ref) } @@ -545,7 +539,6 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb * // expr lowers a single-result expression e to IR form, emitting code // to fn and returning the Value defined by the expression. -// func (b *builder) expr(fn *Function, e ast.Expr) Value { e = unparen(e) @@ -719,8 +712,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { if _, ok := obj.(*types.Var); ok { return emitLoad(fn, v, e) // var (address) } - instances := typeparams.GetInstances(fn.Pkg.info) - if instance, ok := instances[e]; ok { + if instance, ok := fn.Pkg.info.Instances[e]; ok { // Instantiated generic function return makeInstance(fn.Prog, v.(*Function), instance.Type.(*types.Signature), instance.TypeArgs) } @@ -837,7 +829,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { panic("unexpected container type in IndexExpr: " + t.String()) } - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: // Instantiating a generic function return b.expr(fn, e.X) @@ -866,7 +858,6 @@ func (b *builder) stmtList(fn *Function, list []ast.Stmt) { // must thus be addressable. // // escaping is defined as per builder.addr(). -// func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection, source ast.Node) Value { var v Value if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) { @@ -886,7 +877,6 @@ func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, se // setCallFunc populates the function parts of a CallCommon structure // (Func, Method, Recv, Args[0]) based on the kind of invocation // occurring in e. -// func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { // Is this a method call? if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { @@ -953,7 +943,6 @@ func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { // emitCallArgs emits to f code for the actual parameters of call e to // a (possibly built-in) function of effective type sig. // The argument values are appended to args, which is then returned. -// func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { // f(x, y, z...): pass slice z straight through. if e.Ellipsis != 0 { @@ -1024,7 +1013,6 @@ func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallEx // setCall emits to fn code to evaluate all the parameters of a function // call e, and populates *c with those values. -// func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { // First deal with the f(...) part and optional receiver. b.setCallFunc(fn, e, c) @@ -1045,7 +1033,6 @@ func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, // localValueSpec emits to fn code to define all of the vars in the // function-local ValueSpec, spec. 
-// func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { switch { case len(spec.Values) == len(spec.Names): @@ -1088,7 +1075,6 @@ func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { // isDef is true if this is a short variable declaration (:=). // // Note the similarity with localValueSpec. -// func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool, source ast.Node) { // Side effects of all LHSs and RHSs must occur in left-to-right order. lvals := make([]lvalue, len(lhss)) @@ -1154,46 +1140,64 @@ func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { // // Because the elements of a composite literal may refer to the // variables being updated, as in the second line below, +// // x := T{a: 1} // x = T{a: x.a} -// all the reads must occur before all the writes. Thus all stores to -// loc are emitted to the storebuf sb for later execution. +// +// all the reads must occur before all the writes. This is implicitly handled by the write buffering effected by +// compositeElement. // // A CompositeLit may have pointer type only in the recursive (nested) // case when the type name is implicit. e.g. in []*T{{}}, the inner // literal has type *T behaves like &T{}. // In that case, addr must hold a T, not a *T. -// func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { + // Even though we no longer need storebuf for nested composite literals (because compositeElements act as buffers + // themselves), we still need storebuf for things like multiple assignment, e.g. 't.F1, t.F2 = T2{1}, T2{t.F1.X}' + typ := deref(fn.Pkg.typeOf(e)) switch t := typeutil.CoreType(typ).(type) { case *types.Struct: - if !isZero && len(e.Elts) != t.NumFields() { - // memclear - sb.store(&address{addr, nil}, zeroValue(fn, deref(addr.Type()), e), e) - isZero = true - } - for i, e := range e.Elts { - fieldIndex := i - if kv, ok := e.(*ast.KeyValueExpr); ok { - fname := kv.Key.(*ast.Ident).Name - for i, n := 0, t.NumFields(); i < n; i++ { - sf := t.Field(i) - if sf.Name() == fname { - fieldIndex = i - e = kv.Value - break + lvalue := &address{addr: addr, expr: e} + if len(e.Elts) == 0 { + if !isZero { + sb.store(lvalue, zeroValue(fn, deref(addr.Type()), e), e) + } + } else { + v := &CompositeValue{ + Values: make([]Value, t.NumFields()), + } + for i := 0; i < t.NumFields(); i++ { + v.Values[i] = emitConst(fn, zeroConst(t.Field(i).Type())) + } + v.setType(typ) + + for i, e := range e.Elts { + fieldIndex := i + if kv, ok := e.(*ast.KeyValueExpr); ok { + fname := kv.Key.(*ast.Ident).Name + for i, n := 0, t.NumFields(); i < n; i++ { + sf := t.Field(i) + if sf.Name() == fname { + fieldIndex = i + e = kv.Value + break + } } } + + ce := &compositeElement{ + cv: v, + idx: fieldIndex, + t: t.Field(fieldIndex).Type(), + expr: e, + } + b.assign(fn, ce, e, isZero, sb, e) + v.Bitmap.SetBit(&v.Bitmap, fieldIndex, 1) + v.NumSet++ } - sf := t.Field(fieldIndex) - faddr := &FieldAddr{ - X: addr, - Field: fieldIndex, - } - faddr.setType(types.NewPointer(sf.Type())) - fn.emit(faddr, e) - b.assign(fn, &address{addr: faddr, expr: e}, e, isZero, sb, e) + fn.emit(v, e) + sb.store(lvalue, v, e) } case *types.Array, *types.Slice: @@ -1207,43 +1211,60 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero case *types.Array: at = t array = addr - - if !isZero && int64(len(e.Elts)) != at.Len() { - // memclear - sb.store(&address{array, nil}, zeroValue(fn, deref(array.Type()), e), e) - } } - var idx *Const - for _, e 
:= range e.Elts { - if kv, ok := e.(*ast.KeyValueExpr); ok { - idx = b.expr(fn, kv.Key).(*Const) - e = kv.Value - } else { - var idxval int64 - if idx != nil { - idxval = idx.Int64() + 1 - } - idx = emitConst(fn, intConst(idxval)).(*Const) + var final Value + if len(e.Elts) == 0 { + if !isZero { + zc := emitConst(fn, zeroConst(at)) + final = zc } - iaddr := &IndexAddr{ - X: array, - Index: idx, + } else { + v := &CompositeValue{ + Values: make([]Value, at.Len()), } - iaddr.setType(types.NewPointer(at.Elem())) - fn.emit(iaddr, e) - if t != at { // slice - // backing array is unaliased => storebuf not needed. - b.assign(fn, &address{addr: iaddr, expr: e}, e, true, nil, e) - } else { - b.assign(fn, &address{addr: iaddr, expr: e}, e, true, sb, e) + zc := emitConst(fn, zeroConst(at.Elem())) + for i := range v.Values { + v.Values[i] = zc } - } + v.setType(at) + + var idx *Const + for _, e := range e.Elts { + if kv, ok := e.(*ast.KeyValueExpr); ok { + idx = b.expr(fn, kv.Key).(*Const) + e = kv.Value + } else { + var idxval int64 + if idx != nil { + idxval = idx.Int64() + 1 + } + idx = emitConst(fn, intConst(idxval)).(*Const) + } + + iaddr := &compositeElement{ + cv: v, + idx: int(idx.Int64()), + t: at.Elem(), + expr: e, + } + b.assign(fn, iaddr, e, true, sb, e) + v.Bitmap.SetBit(&v.Bitmap, int(idx.Int64()), 1) + v.NumSet++ + } + final = v + fn.emit(v, e) + } if t != at { // slice + if final != nil { + sb.store(&address{addr: array}, final, e) + } s := &Slice{X: array} s.setType(typ) sb.store(&address{addr: addr, expr: e}, fn.emit(s, e), e) + } else if final != nil { + sb.store(&address{addr: array, expr: e}, final, e) } case *types.Map: @@ -1395,7 +1416,6 @@ func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { // switchStmt emits to fn code for the switch statement s, optionally // labelled by label. -// func (b *builder) switchStmtDynamic(fn *Function, s *ast.SwitchStmt, label *lblock) { // We treat SwitchStmt like a sequential if-else chain. // Multiway dispatch can be recovered later by irutil.Switches() @@ -1656,7 +1676,6 @@ func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lbl // selectStmt emits to fn code for the select statement s, optionally // labelled by label. -// func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (noreturn bool) { if len(s.Body.List) == 0 { instr := &Select{Blocking: true} @@ -1843,7 +1862,6 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (no // forStmt emits to fn code for the for statement s, optionally // labelled by label. -// func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { // ...init... // jump loop @@ -1900,7 +1918,6 @@ func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { // over array, *array or slice value x. // The v result is defined only if tv is non-nil. // forPos is the position of the "for" token. -// func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) { // // length = len(x) @@ -1998,7 +2015,6 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast. // Range/Next/Extract to iterate over map or string value x. // tk and tv are the types of the key/value results k and v, or nil // if the respective component is not wanted. 
-// func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) { // // it = range x @@ -2065,7 +2081,6 @@ func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast // tk is the channel's element type, or nil if the k result is // not wanted // pos is the position of the '=' or ':=' token. -// func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, source ast.Node) (k Value, loop, done *BasicBlock) { // // loop: (target of continue) @@ -2124,7 +2139,6 @@ func (v *variable) load() Value { // rangeStmt emits to fn code for the range statement s, optionally // labelled by label. -// func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock, source ast.Node) { var tk, tv types.Type if s.Key != nil && !isBlankIdent(s.Key) { @@ -2331,7 +2345,7 @@ start: block = fn.labelledBlock(s.Label)._goto } j := emitJump(fn, block, s) - j.Comment = s.Tok.String() + j.comment = s.Tok.String() fn.currentBlock = fn.newBasicBlock("unreachable") case *ast.BlockStmt: @@ -2471,7 +2485,6 @@ func (b *builder) buildFunction(fn *Function) { // buildFuncDecl builds IR code for the function or method declared // by decl in package pkg. -// func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) { id := decl.Name if isBlankIdent(id) { @@ -2494,7 +2507,6 @@ func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) { // need only build a single package. // // Build is idempotent and thread-safe. -// func (prog *Program) Build() { for _, p := range prog.packages { p.Build() @@ -2508,7 +2520,6 @@ func (prog *Program) Build() { // error-free). // // Build is idempotent and thread-safe. -// func (p *Package) Build() { p.buildOnce.Do(p.build) } func (p *Package) build() { diff --git a/vendor/honnef.co/go/tools/go/ir/const.go b/vendor/honnef.co/go/tools/go/ir/const.go index 9dd7e83b..0faf3852 100644 --- a/vendor/honnef.co/go/tools/go/ir/const.go +++ b/vendor/honnef.co/go/tools/go/ir/const.go @@ -19,7 +19,6 @@ import ( // NewConst returns a new constant of the specified value and type. // val must be valid according to the specification of Const.Value. -// func NewConst(val constant.Value, typ types.Type) *Const { return &Const{ register: register{ @@ -37,7 +36,6 @@ func intConst(i int64) *Const { // nilConst returns a nil constant of the specified type, which may // be any reference type, including interfaces. 
-// func nilConst(typ types.Type) *Const { return NewConst(nil, typ) } @@ -58,7 +56,7 @@ func zeroConst(t types.Type) Constant { switch typ := tset.CoreType().(type) { case *types.Struct: - values := make([]Constant, typ.NumFields()) + values := make([]Value, typ.NumFields()) for i := 0; i < typ.NumFields(); i++ { values[i] = zeroConst(typ.Field(i).Type()) } @@ -67,7 +65,7 @@ func zeroConst(t types.Type) Constant { Values: values, } case *types.Tuple: - values := make([]Constant, typ.Len()) + values := make([]Value, typ.Len()) for i := 0; i < typ.Len(); i++ { values[i] = zeroConst(typ.At(i).Type()) } @@ -77,7 +75,7 @@ func zeroConst(t types.Type) Constant { } } - isNillable := func(term *typeparams.Term) bool { + isNillable := func(term *types.Term) bool { switch typ := term.Type().Underlying().(type) { case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature, *typeutil.Iterator: return true @@ -93,8 +91,8 @@ func zeroConst(t types.Type) Constant { } } - isInfo := func(info types.BasicInfo) func(*typeparams.Term) bool { - return func(term *typeparams.Term) bool { + isInfo := func(info types.BasicInfo) func(*types.Term) bool { + return func(term *types.Term) bool { basic, ok := term.Type().Underlying().(*types.Basic) if !ok { return false @@ -103,7 +101,7 @@ func zeroConst(t types.Type) Constant { } } - isArray := func(term *typeparams.Term) bool { + isArray := func(term *types.Term) bool { _, ok := term.Type().Underlying().(*types.Array) return ok } @@ -147,6 +145,11 @@ func (c *Const) RelString(from *types.Package) string { } func (c *Const) String() string { + if c.block == nil { + // Constants don't have a block till late in the compilation process. But we want to print consts during + // debugging. + return c.RelString(nil) + } return c.RelString(c.Parent().pkg()) } @@ -162,7 +165,7 @@ func (v *AggregateConst) RelString(pkg *types.Package) string { values := make([]string, len(v.Values)) for i, v := range v.Values { if v != nil { - values[i] = v.RelString(pkg) + values[i] = v.Name() } else { values[i] = "nil" } @@ -170,6 +173,13 @@ func (v *AggregateConst) RelString(pkg *types.Package) string { return fmt.Sprintf("AggregateConst <%s> (%s)", relType(v.Type(), pkg), strings.Join(values, ", ")) } +func (v *AggregateConst) String() string { + if v.block == nil { + return v.RelString(nil) + } + return v.RelString(v.Parent().pkg()) +} + func (v *GenericConst) RelString(pkg *types.Package) string { return fmt.Sprintf("GenericConst <%s>", relType(v.Type(), pkg)) } @@ -178,10 +188,6 @@ func (v *GenericConst) String() string { return v.RelString(v.Parent().pkg()) } -func (v *AggregateConst) String() string { - return v.RelString(v.Parent().pkg()) -} - // IsNil returns true if this constant represents a typed or untyped nil value. func (c *Const) IsNil() bool { return c.Value == nil @@ -189,7 +195,6 @@ func (c *Const) IsNil() bool { // Int64 returns the numeric value of this constant truncated to fit // a signed 64-bit integer. -// func (c *Const) Int64() int64 { switch x := constant.ToInt(c.Value); x.Kind() { case constant.Int: @@ -206,7 +211,6 @@ func (c *Const) Int64() int64 { // Uint64 returns the numeric value of this constant truncated to fit // an unsigned 64-bit integer. -// func (c *Const) Uint64() uint64 { switch x := constant.ToInt(c.Value); x.Kind() { case constant.Int: @@ -223,7 +227,6 @@ func (c *Const) Uint64() uint64 { // Float64 returns the numeric value of this constant truncated to fit // a float64. 
-// func (c *Const) Float64() float64 { f, _ := constant.Float64Val(c.Value) return f @@ -231,7 +234,6 @@ func (c *Const) Float64() float64 { // Complex128 returns the complex value of this constant truncated to // fit a complex128. -// func (c *Const) Complex128() complex128 { re, _ := constant.Float64Val(constant.Real(c.Value)) im, _ := constant.Float64Val(constant.Imag(c.Value)) @@ -253,7 +255,15 @@ func (c *AggregateConst) equal(o Constant) bool { return false } // TODO(dh): don't use == for types, this will miss identical pointer types, among others - return c.typ == oc.typ + if c.typ != oc.typ { + return false + } + for i, v := range c.Values { + if !v.(Constant).equal(oc.Values[i].(Constant)) { + return false + } + } + return true } func (c *ArrayConst) equal(o Constant) bool { diff --git a/vendor/honnef.co/go/tools/go/ir/create.go b/vendor/honnef.co/go/tools/go/ir/create.go index 5e7f6ed9..28e7da7e 100644 --- a/vendor/honnef.co/go/tools/go/ir/create.go +++ b/vendor/honnef.co/go/tools/go/ir/create.go @@ -27,7 +27,6 @@ const avgInstructionsPerBlock = 16 // NewProgram returns a new IR Program. // // mode controls diagnostics and checking during IR construction. -// func NewProgram(fset *token.FileSet, mode BuilderMode) *Program { prog := &Program{ Fset: fset, @@ -51,7 +50,6 @@ func NewProgram(fset *token.FileSet, mode BuilderMode) *Program { // For objects from Go source code, syntax is the associated syntax // tree (for funcs and vars only); it will be used during the build // phase. -// func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) { name := obj.Name() switch obj := obj.(type) { @@ -128,7 +126,6 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) { // membersFromDecl populates package pkg with members for each // typechecker object (var, func, const or type) associated with the // specified decl. -// func membersFromDecl(pkg *Package, decl ast.Decl) { switch decl := decl.(type) { case *ast.GenDecl: // import, const, type or var @@ -177,7 +174,6 @@ func membersFromDecl(pkg *Package, decl ast.Decl) { // // The real work of building IR form for each function is not done // until a subsequent call to Package.Build(). -// func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package { p := &Package{ Prog: prog, @@ -260,7 +256,6 @@ var printMu sync.Mutex // AllPackages returns a new slice containing all packages in the // program prog in unspecified order. -// func (prog *Program) AllPackages() []*Package { pkgs := make([]*Package, 0, len(prog.packages)) for _, pkg := range prog.packages { @@ -282,7 +277,6 @@ func (prog *Program) AllPackages() []*Package { // false---yet this function remains very convenient. // Clients should use (*Program).Package instead where possible. // IR doesn't really need a string-keyed map of packages. -// func (prog *Program) ImportedPackage(path string) *Package { return prog.imported[path] } diff --git a/vendor/honnef.co/go/tools/go/ir/doc.go b/vendor/honnef.co/go/tools/go/ir/doc.go index 7158a0ae..5ee6637d 100644 --- a/vendor/honnef.co/go/tools/go/ir/doc.go +++ b/vendor/honnef.co/go/tools/go/ir/doc.go @@ -39,66 +39,67 @@ // // The primary interfaces of this package are: // -// - Member: a named member of a Go package. -// - Value: an expression that yields a value. -// - Instruction: a statement that consumes values and performs computation. 
-// - Node: a Value or Instruction (emphasizing its membership in the IR value graph) +// - Member: a named member of a Go package. +// - Value: an expression that yields a value. +// - Instruction: a statement that consumes values and performs computation. +// - Node: a Value or Instruction (emphasizing its membership in the IR value graph) // // A computation that yields a result implements both the Value and // Instruction interfaces. The following table shows for each // concrete type which of these interfaces it implements. // -// Value? Instruction? Member? -// *Alloc ✔ ✔ -// *BinOp ✔ ✔ -// *BlankStore ✔ -// *Builtin ✔ -// *Call ✔ ✔ -// *ChangeInterface ✔ ✔ -// *ChangeType ✔ ✔ -// *Const ✔ ✔ -// *Convert ✔ ✔ -// *DebugRef ✔ -// *Defer ✔ ✔ -// *Extract ✔ ✔ -// *Field ✔ ✔ -// *FieldAddr ✔ ✔ -// *FreeVar ✔ -// *Function ✔ ✔ (func) -// *Global ✔ ✔ (var) -// *Go ✔ ✔ -// *If ✔ -// *Index ✔ ✔ -// *IndexAddr ✔ ✔ -// *Jump ✔ -// *Load ✔ ✔ -// *MakeChan ✔ ✔ -// *MakeClosure ✔ ✔ -// *MakeInterface ✔ ✔ -// *MakeMap ✔ ✔ -// *MakeSlice ✔ ✔ -// *MapLookup ✔ ✔ -// *MapUpdate ✔ ✔ -// *NamedConst ✔ (const) -// *Next ✔ ✔ -// *Panic ✔ -// *Parameter ✔ ✔ -// *Phi ✔ ✔ -// *Range ✔ ✔ -// *Recv ✔ ✔ -// *Return ✔ -// *RunDefers ✔ -// *Select ✔ ✔ -// *Send ✔ ✔ -// *Sigma ✔ ✔ -// *Slice ✔ ✔ -// *SliceToArrayPointer ✔ ✔ -// *Store ✔ ✔ -// *StringLookup ✔ ✔ -// *Type ✔ (type) -// *TypeAssert ✔ ✔ -// *UnOp ✔ ✔ -// *Unreachable ✔ +// Value? Instruction? Member? +// *Alloc ✔ ✔ +// *BinOp ✔ ✔ +// *BlankStore ✔ +// *Builtin ✔ +// *Call ✔ ✔ +// *ChangeInterface ✔ ✔ +// *ChangeType ✔ ✔ +// *Const ✔ ✔ +// *Convert ✔ ✔ +// *DebugRef ✔ +// *Defer ✔ ✔ +// *Extract ✔ ✔ +// *Field ✔ ✔ +// *FieldAddr ✔ ✔ +// *FreeVar ✔ +// *Function ✔ ✔ (func) +// *Global ✔ ✔ (var) +// *Go ✔ ✔ +// *If ✔ +// *Index ✔ ✔ +// *IndexAddr ✔ ✔ +// *Jump ✔ +// *Load ✔ ✔ +// *MakeChan ✔ ✔ +// *MakeClosure ✔ ✔ +// *MakeInterface ✔ ✔ +// *MakeMap ✔ ✔ +// *MakeSlice ✔ ✔ +// *MapLookup ✔ ✔ +// *MapUpdate ✔ ✔ +// *NamedConst ✔ (const) +// *Next ✔ ✔ +// *Panic ✔ +// *Parameter ✔ ✔ +// *Phi ✔ ✔ +// *Range ✔ ✔ +// *Recv ✔ ✔ +// *Return ✔ +// *RunDefers ✔ +// *Select ✔ ✔ +// *Send ✔ ✔ +// *Sigma ✔ ✔ +// *Slice ✔ ✔ +// *SliceToArrayPointer ✔ ✔ +// *SliceToArray ✔ ✔ +// *Store ✔ ✔ +// *StringLookup ✔ ✔ +// *Type ✔ (type) +// *TypeAssert ✔ ✔ +// *UnOp ✔ ✔ +// *Unreachable ✔ // // Other key types in this package include: Program, Package, Function // and BasicBlock. @@ -126,5 +127,4 @@ // of trying to determine corresponding elements across the four // domains of source locations, ast.Nodes, types.Objects, // ir.Values/Instructions. -// package ir diff --git a/vendor/honnef.co/go/tools/go/ir/dom.go b/vendor/honnef.co/go/tools/go/ir/dom.go index 13ecd47c..4febd284 100644 --- a/vendor/honnef.co/go/tools/go/ir/dom.go +++ b/vendor/honnef.co/go/tools/go/ir/dom.go @@ -29,12 +29,10 @@ import ( // Idom returns the block that immediately dominates b: // its parent in the dominator tree, if any. // The entry node (b.Index==0) does not have a parent. -// func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom } // Dominees returns the list of blocks that b immediately dominates: // its children in the dominator tree. -// func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children } // Dominates reports whether b dominates c. @@ -50,7 +48,6 @@ func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre // DomPreorder returns a new slice containing the blocks of f in // dominator tree preorder. 
-// func (f *Function) DomPreorder() []*BasicBlock { n := len(f.Blocks) order := make(byDomPreorder, n) @@ -68,7 +65,6 @@ type domInfo struct { // buildDomTree computes the dominator tree of f using the LT algorithm. // Precondition: all blocks are reachable (e.g. optimizeBlocks has been run). -// func buildDomTree(fn *Function) { // The step numbers refer to the original LT paper; the // reordering is due to Georgiadis. @@ -277,7 +273,6 @@ func buildPostDomTree(fn *Function) { // numberDomTree sets the pre- and post-order numbers of a depth-first // traversal of the dominator tree rooted at v. These are used to // answer dominance queries in constant time. -// func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) { v.dom.pre = pre pre++ @@ -292,7 +287,6 @@ func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) { // numberPostDomTree sets the pre- and post-order numbers of a depth-first // traversal of the post-dominator tree rooted at v. These are used to // answer post-dominance queries in constant time. -// func numberPostDomTree(v *BasicBlock, pre, post int32) (int32, int32) { v.pdom.pre = pre pre++ @@ -310,7 +304,6 @@ func numberPostDomTree(v *BasicBlock, pre, post int32) (int32, int32) { // computed by the LT algorithm by comparing against the dominance // relation computed by a naive Kildall-style forward dataflow // analysis (Algorithm 10.16 from the "Dragon" book). -// func sanityCheckDomTree(f *Function) { n := len(f.Blocks) @@ -395,6 +388,7 @@ func sanityCheckDomTree(f *Function) { // Printing functions ---------------------------------------- // printDomTree prints the dominator tree as text, using indentation. +// //lint:ignore U1000 used during debugging func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) @@ -405,6 +399,7 @@ func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { // printDomTreeDot prints the dominator tree of f in AT&T GraphViz // (.dot) format. +// //lint:ignore U1000 used during debugging func printDomTreeDot(buf io.Writer, f *Function) { fmt.Fprintln(buf, "//", f) @@ -432,6 +427,7 @@ func printDomTreeDot(buf io.Writer, f *Function) { } // printDomTree prints the dominator tree as text, using indentation. +// //lint:ignore U1000 used during debugging func printPostDomTreeText(buf io.Writer, v *BasicBlock, indent int) { fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) @@ -442,6 +438,7 @@ func printPostDomTreeText(buf io.Writer, v *BasicBlock, indent int) { // printDomTreeDot prints the dominator tree of f in AT&T GraphViz // (.dot) format. +// //lint:ignore U1000 used during debugging func printPostDomTreeDot(buf io.Writer, f *Function) { fmt.Fprintln(buf, "//", f) diff --git a/vendor/honnef.co/go/tools/go/ir/emit.go b/vendor/honnef.co/go/tools/go/ir/emit.go index 7b23041f..f6a1ef37 100644 --- a/vendor/honnef.co/go/tools/go/ir/emit.go +++ b/vendor/honnef.co/go/tools/go/ir/emit.go @@ -20,7 +20,6 @@ import ( // emitNew emits to f a new (heap Alloc) instruction allocating an // object of type typ. pos is the optional source location. -// func emitNew(f *Function, typ types.Type, source ast.Node) *Alloc { v := &Alloc{Heap: true} v.setType(types.NewPointer(typ)) @@ -30,7 +29,6 @@ func emitNew(f *Function, typ types.Type, source ast.Node) *Alloc { // emitLoad emits to f an instruction to load the address addr into a // new temporary, and returns the value so defined. 
-// func emitLoad(f *Function, addr Value, source ast.Node) *Load { v := &Load{X: addr} v.setType(deref(addr.Type())) @@ -49,7 +47,6 @@ func emitRecv(f *Function, ch Value, commaOk bool, typ types.Type, source ast.No // emitDebugRef emits to f a DebugRef pseudo-instruction associating // expression e with value v. -// func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { ref := makeDebugRef(f, e, v, isAddr) if ref == nil { @@ -89,7 +86,6 @@ func makeDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) *DebugRef { // where op is an eager shift, logical or arithmetic operation. // (Use emitCompare() for comparisons and Builder.logicalBinop() for // non-eager operations.) -// func emitArith(f *Function, op token.Token, x, y Value, t types.Type, source ast.Node) Value { switch op { case token.SHL, token.SHR: @@ -124,7 +120,6 @@ func emitArith(f *Function, op token.Token, x, y Value, t types.Type, source ast // emitCompare emits to f code compute the boolean result of // comparison comparison 'x op y'. -// func emitCompare(f *Function, op token.Token, x, y Value, source ast.Node) Value { xt := x.Type().Underlying() yt := y.Type().Underlying() @@ -169,7 +164,6 @@ func emitCompare(f *Function, op token.Token, x, y Value, source ast.Node) Value // isValuePreserving returns true if a conversion from ut_src to // ut_dst is value-preserving, i.e. just a change of type. // Precondition: neither argument is a named type. -// func isValuePreserving(ut_src, ut_dst types.Type) bool { // Identical underlying types? if types.IdenticalIgnoreTags(ut_dst, ut_src) { @@ -194,7 +188,6 @@ func isValuePreserving(ut_src, ut_dst types.Type) bool { // and returns the converted value. Implicit conversions are required // by language assignability rules in assignments, parameter passing, // etc. -// func emitConv(f *Function, val Value, t_dst types.Type, source ast.Node) Value { t_src := val.Type() @@ -210,8 +203,8 @@ func emitConv(f *Function, val Value, t_dst types.Type, source ast.Node) Value { tset_src := typeutil.NewTypeSet(ut_src) // Just a change of type, but not value or representation? - if tset_src.All(func(termSrc *typeparams.Term) bool { - return tset_dst.All(func(termDst *typeparams.Term) bool { + if tset_src.All(func(termSrc *types.Term) bool { + return tset_dst.All(func(termDst *types.Term) bool { return isValuePreserving(termSrc.Type().Underlying(), termDst.Type().Underlying()) }) }) { @@ -262,8 +255,8 @@ func emitConv(f *Function, val Value, t_dst types.Type, source ast.Node) Value { } // Conversion from slice to array pointer? - if tset_src.All(func(termSrc *typeparams.Term) bool { - return tset_dst.All(func(termDst *typeparams.Term) bool { + if tset_src.All(func(termSrc *types.Term) bool { + return tset_dst.All(func(termDst *types.Term) bool { if slice, ok := termSrc.Type().Underlying().(*types.Slice); ok { if ptr, ok := termDst.Type().Underlying().(*types.Pointer); ok { if arr, ok := ptr.Elem().Underlying().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) { @@ -279,11 +272,30 @@ func emitConv(f *Function, val Value, t_dst types.Type, source ast.Node) Value { return f.emit(c, source) } + // Conversion from slice to array. This is almost the same as converting from slice to array pointer, then + // dereferencing the pointer. Except that a nil slice can be converted to [0]T, whereas converting a nil slice to + // (*[0]T) results in a nil pointer, dereferencing which would panic. To hide the extra branching we use a dedicated + // instruction, SliceToArray. 
+ if tset_src.All(func(termSrc *types.Term) bool { + return tset_dst.All(func(termDst *types.Term) bool { + if slice, ok := termSrc.Type().Underlying().(*types.Slice); ok { + if arr, ok := termDst.Type().Underlying().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) { + return true + } + } + return false + }) + }) { + c := &SliceToArray{X: val} + c.setType(t_dst) + return f.emit(c, source) + } + // A representation-changing conversion? // At least one of {ut_src,ut_dst} must be *Basic. // (The other may be []byte or []rune.) - ok1 := tset_src.Any(func(term *typeparams.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) - ok2 := tset_dst.Any(func(term *typeparams.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) + ok1 := tset_src.Any(func(term *types.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) + ok2 := tset_dst.Any(func(term *types.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) if ok1 || ok2 { c := &Convert{X: val} c.setType(t_dst) @@ -295,21 +307,17 @@ func emitConv(f *Function, val Value, t_dst types.Type, source ast.Node) Value { // emitStore emits to f an instruction to store value val at location // addr, applying implicit conversions as required by assignability rules. -// func emitStore(f *Function, addr, val Value, source ast.Node) *Store { s := &Store{ Addr: addr, Val: emitConv(f, val, deref(addr.Type()), source), } - // make sure we call getMem after the call to emitConv, which may - // itself update the memory state f.emit(s, source) return s } // emitJump emits to f a jump to target, and updates the control-flow graph. // Postcondition: f.currentBlock is nil. -// func emitJump(f *Function, target *BasicBlock, source ast.Node) *Jump { b := f.currentBlock j := new(Jump) @@ -322,7 +330,6 @@ func emitJump(f *Function, target *BasicBlock, source ast.Node) *Jump { // emitIf emits to f a conditional jump to tblock or fblock based on // cond, and updates the control-flow graph. // Postcondition: f.currentBlock is nil. -// func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock, source ast.Node) *If { b := f.currentBlock stmt := &If{Cond: cond} @@ -335,7 +342,6 @@ func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock, source ast.Node // emitExtract emits to f an instruction to extract the index'th // component of tuple. It returns the extracted value. -// func emitExtract(f *Function, tuple Value, index int, source ast.Node) Value { e := &Extract{Tuple: tuple, Index: index} e.setType(tuple.Type().(*types.Tuple).At(index).Type()) @@ -344,7 +350,6 @@ func emitExtract(f *Function, tuple Value, index int, source ast.Node) Value { // emitTypeAssert emits to f a type assertion value := x.(t) and // returns the value. x.Type() must be an interface. -// func emitTypeAssert(f *Function, x Value, t types.Type, source ast.Node) Value { a := &TypeAssert{X: x, AssertedType: t} a.setType(t) @@ -353,7 +358,6 @@ func emitTypeAssert(f *Function, x Value, t types.Type, source ast.Node) Value { // emitTypeTest emits to f a type test value,ok := x.(t) and returns // a (value, ok) tuple. x.Type() must be an interface. -// func emitTypeTest(f *Function, x Value, t types.Type, source ast.Node) Value { a := &TypeAssert{ X: x, @@ -372,7 +376,6 @@ func emitTypeTest(f *Function, x Value, t types.Type, source ast.Node) Value { // Intended for wrapper methods. // Precondition: f does/will not use deferred procedure calls. // Postcondition: f.currentBlock is nil. 
-// func emitTailCall(f *Function, call *Call, source ast.Node) { tresults := f.Signature.Results() nr := tresults.Len() @@ -413,7 +416,6 @@ func emitTailCall(f *Function, call *Call, source ast.Node) { // If v is the address of a struct, the result will be the address of // a field; if it is the value of a struct, the result will be the // value of a field. -// func emitImplicitSelections(f *Function, v Value, indices []int, source ast.Node) Value { for _, index := range indices { // We may have a generic type containing a pointer, or a pointer to a generic type containing a struct. A @@ -450,7 +452,6 @@ func emitImplicitSelections(f *Function, v Value, indices []int, source ast.Node // will be the field's address; otherwise the result will be the // field's value. // Ident id is used for position and debug info. -// func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { // We may have a generic type containing a pointer, or a pointer to a generic type containing a struct. A // pointer to a generic containing a pointer to a struct shouldn't be possible because the outer pointer gets @@ -484,12 +485,64 @@ func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast. // zeroValue emits to f code to produce a zero value of type t, // and returns it. -// func zeroValue(f *Function, t types.Type, source ast.Node) Value { return emitConst(f, zeroConst(t)) } +type constKey struct { + typ types.Type + value constant.Value +} + func emitConst(f *Function, c Constant) Constant { - f.consts = append(f.consts, c) - return c + if f.consts == nil { + f.consts = map[constKey]constValue{} + } + + typ := c.Type() + var val constant.Value + switch c := c.(type) { + case *Const: + val = c.Value + case *ArrayConst, *GenericConst: + // These can only represent zero values, so all we need is the type + case *AggregateConst: + candidates, _ := f.aggregateConsts.At(c.typ) + for _, candidate := range candidates { + if c.equal(candidate) { + return candidate + } + } + + for i := range c.Values { + c.Values[i] = emitConst(f, c.Values[i].(Constant)) + } + + c.setBlock(f.Blocks[0]) + rands := c.Operands(nil) + updateOperandsReferrers(c, rands) + candidates = append(candidates, c) + f.aggregateConsts.Set(c.typ, candidates) + return c + + default: + panic(fmt.Sprintf("unexpected type %T", c)) + } + k := constKey{ + typ: typ, + value: val, + } + dup, ok := f.consts[k] + if ok { + return dup.c + } else { + c.setBlock(f.Blocks[0]) + f.consts[k] = constValue{ + c: c, + idx: len(f.consts), + } + rands := c.Operands(nil) + updateOperandsReferrers(c, rands) + return c + } } diff --git a/vendor/honnef.co/go/tools/go/ir/exits.go b/vendor/honnef.co/go/tools/go/ir/exits.go index 851b62c4..03aa2866 100644 --- a/vendor/honnef.co/go/tools/go/ir/exits.go +++ b/vendor/honnef.co/go/tools/go/ir/exits.go @@ -110,6 +110,19 @@ func (b *builder) buildExits(fn *Function) { // all of these call os.Exit after logging fn.NoReturn = AlwaysExits } + case "k8s.io/klog/v2": + switch obj.(*types.Func).FullName() { + case "k8s.io/klog/v2.Exit", + "k8s.io/klog/v2.ExitDepth", + "k8s.io/klog/v2.Exitf", + "k8s.io/klog/v2.Exitln", + "k8s.io/klog/v2.Fatal", + "k8s.io/klog/v2.FatalDepth", + "k8s.io/klog/v2.Fatalf", + "k8s.io/klog/v2.Fatalln": + // all of these call os.Exit after logging + fn.NoReturn = AlwaysExits + } } } @@ -330,7 +343,7 @@ func (b *builder) addUnreachables(fn *Function) { var c Call c.Call.Value = &Builtin{ name: "ir:noreturnWasPanic", - sig: types.NewSignature(nil, + sig: 
types.NewSignatureType(nil, nil, nil, types.NewTuple(), types.NewTuple(anonVar(types.Typ[types.Bool])), false, diff --git a/vendor/honnef.co/go/tools/go/ir/func.go b/vendor/honnef.co/go/tools/go/ir/func.go index ca42b5c0..4449b405 100644 --- a/vendor/honnef.co/go/tools/go/ir/func.go +++ b/vendor/honnef.co/go/tools/go/ir/func.go @@ -10,13 +10,15 @@ import ( "bytes" "fmt" "go/ast" - "go/constant" "go/format" "go/token" "go/types" "io" "os" + "sort" "strings" + + "honnef.co/go/tools/go/types/typeutil" ) // addEdge adds a control-flow graph edge from from to to. @@ -53,14 +55,12 @@ func (b *BasicBlock) Parent() *Function { return b.parent } // String returns a human-readable label of this block. // It is not guaranteed unique within the function. -// func (b *BasicBlock) String() string { return fmt.Sprintf("%d", b.Index) } // emit appends an instruction to the current basic block. // If the instruction defines a Value, it is returned. -// func (b *BasicBlock) emit(i Instruction, source ast.Node) Value { i.setSource(source) i.setBlock(b) @@ -112,7 +112,6 @@ func (b *BasicBlock) phis() []Instruction { // replacePred replaces all occurrences of p in b's predecessor list with q. // Ordinarily there should be at most one. -// func (b *BasicBlock) replacePred(p, q *BasicBlock) { for i, pred := range b.Preds { if pred == p { @@ -123,7 +122,6 @@ func (b *BasicBlock) replacePred(p, q *BasicBlock) { // replaceSucc replaces all occurrences of p in b's successor list with q. // Ordinarily there should be at most one. -// func (b *BasicBlock) replaceSucc(p, q *BasicBlock) { for i, succ := range b.Succs { if succ == p { @@ -135,7 +133,6 @@ func (b *BasicBlock) replaceSucc(p, q *BasicBlock) { // removePred removes all occurrences of p in b's // predecessor list and φ-nodes. // Ordinarily there should be at most one. -// func (b *BasicBlock) removePred(p *BasicBlock) { phis := b.phis() @@ -169,7 +166,6 @@ func (b *BasicBlock) removePred(p *BasicBlock) { // Destinations associated with unlabelled for/switch/select stmts. // We push/pop one of these as we enter/leave each construct and for // each BranchStmt we scan for the innermost target of the right type. -// type targets struct { tail *targets // rest of stack _break *BasicBlock @@ -180,7 +176,6 @@ type targets struct { // Destinations associated with a labelled block. // We populate these as labels are encountered in forward gotos or // labelled statements. -// type lblock struct { _goto *BasicBlock _break *BasicBlock @@ -189,9 +184,14 @@ type lblock struct { // labelledBlock returns the branch target associated with the // specified label, creating it if needed. -// func (f *Function) labelledBlock(label *ast.Ident) *lblock { - obj := f.Pkg.objectOf(label) + obj := f.Pkg.info.ObjectOf(label) + if obj == nil { + // Blank label, as in '_:' - don't store to f.lblocks, this label can never be referred to; just return a fresh + // lbock. + return &lblock{_goto: f.newBasicBlock(label.Name)} + } + lb := f.lblocks[obj] if lb == nil { lb = &lblock{_goto: f.newBasicBlock(label.Name)} @@ -205,7 +205,6 @@ func (f *Function) labelledBlock(label *ast.Ident) *lblock { // addParam adds a (non-escaping) parameter to f.Params of the // specified name, type and source position. 
-// func (f *Function) addParam(name string, typ types.Type, source ast.Node) *Parameter { var b *BasicBlock if len(f.Blocks) > 0 { @@ -240,7 +239,6 @@ func (f *Function) addParamObj(obj types.Object, source ast.Node) *Parameter { // addSpilledParam declares a parameter that is pre-spilled to the // stack; the function body will load/store the spilled location. // Subsequent lifting will eliminate spills where possible. -// func (f *Function) addSpilledParam(obj types.Object, source ast.Node) { param := f.addParamObj(obj, source) spill := &Alloc{} @@ -255,7 +253,6 @@ func (f *Function) addSpilledParam(obj types.Object, source ast.Node) { // startBody initializes the function prior to generating IR code for its body. // Precondition: f.Type() already set. -// func (f *Function) startBody() { entry := f.newBasicBlock("entry") f.currentBlock = entry @@ -304,7 +301,6 @@ func (f *Function) exitBlock() { // f.startBody() was called. // Postcondition: // len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0) -// func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) { // Receiver (at most one inner iteration). if recv != nil { @@ -366,141 +362,85 @@ func numberNodes(f *Function) { } } +func updateOperandsReferrers(instr Instruction, ops []*Value) { + for _, op := range ops { + if r := *op; r != nil { + if refs := (*op).Referrers(); refs != nil { + if len(*refs) == 0 { + // per median, each value has two referrers, so we can avoid one call into growslice + // + // Note: we experimented with allocating + // sequential scratch space, but we + // couldn't find a value that gave better + // performance than making many individual + // allocations + *refs = make([]Instruction, 1, 2) + (*refs)[0] = instr + } else { + *refs = append(*refs, instr) + } + } + } + } +} + // buildReferrers populates the def/use information in all non-nil // Value.Referrers slice. // Precondition: all such slices are initially empty. func buildReferrers(f *Function) { var rands []*Value + for _, b := range f.Blocks { for _, instr := range b.Instrs { rands = instr.Operands(rands[:0]) // recycle storage - for _, rand := range rands { - if r := *rand; r != nil { - if ref := r.Referrers(); ref != nil { - if len(*ref) == 0 { - // per median, each value has two referrers, so we can avoid one call into growslice - // - // Note: we experimented with allocating - // sequential scratch space, but we - // couldn't find a value that gave better - // performance than making many individual - // allocations - *ref = make([]Instruction, 1, 2) - (*ref)[0] = instr - } else { - *ref = append(*ref, instr) - } - } - } - } + updateOperandsReferrers(instr, rands) } } + + for _, c := range f.consts { + rands = c.c.Operands(rands[:0]) + updateOperandsReferrers(c.c, rands) + } } func (f *Function) emitConsts() { - if len(f.Blocks) == 0 { + defer func() { f.consts = nil + f.aggregateConsts = typeutil.Map[[]*AggregateConst]{} + }() + + if len(f.Blocks) == 0 { return } // TODO(dh): our deduplication only works on booleans and // integers. other constants are represented as pointers to // things. 
- if len(f.consts) == 0 { - return - } else if len(f.consts) <= 32 { - f.emitConstsFew() - } else { - f.emitConstsMany() - } -} - -func (f *Function) emitConstsFew() { - dedup := make([]Constant, 0, 32) + head := make([]constValue, 0, len(f.consts)) for _, c := range f.consts { - if len(*c.Referrers()) == 0 { - continue - } - found := false - for _, d := range dedup { - if c.equal(d) { - replaceAll(c, d) - found = true - break - } - } - if !found { - dedup = append(dedup, c) + if len(*c.c.Referrers()) == 0 { + // TODO(dh): killing a const may make other consts dead, too + killInstruction(c.c) + } else { + head = append(head, c) } } - - instrs := make([]Instruction, len(f.Blocks[0].Instrs)+len(dedup)) - for i, c := range dedup { - instrs[i] = c - c.setBlock(f.Blocks[0]) - } - copy(instrs[len(dedup):], f.Blocks[0].Instrs) - f.Blocks[0].Instrs = instrs - f.consts = nil -} - -func (f *Function) emitConstsMany() { - type constKey struct { - typ types.Type - value constant.Value + sort.Slice(head, func(i, j int) bool { + return head[i].idx < head[j].idx + }) + entry := f.Blocks[0] + instrs := make([]Instruction, 0, len(entry.Instrs)+len(head)) + for _, c := range head { + instrs = append(instrs, c.c) } - - m := make(map[constKey]Value, len(f.consts)) - areNil := 0 - for i, c := range f.consts { - if len(*c.Referrers()) == 0 { - f.consts[i] = nil - areNil++ - continue + f.aggregateConsts.Iterate(func(key types.Type, value []*AggregateConst) { + for _, c := range value { + instrs = append(instrs, c) } + }) - var typ types.Type - var val constant.Value - switch c := c.(type) { - case *Const: - typ = c.typ - val = c.Value - case *ArrayConst: - // ArrayConst can only encode zero constants, so all we need is the type - typ = c.typ - case *AggregateConst: - // ArrayConst can only encode zero constants, so all we need is the type - typ = c.typ - case *GenericConst: - typ = c.typ - default: - panic(fmt.Sprintf("unexpected type %T", c)) - } - k := constKey{ - typ: typ, - value: val, - } - if dup, ok := m[k]; !ok { - m[k] = c - } else { - f.consts[i] = nil - areNil++ - replaceAll(c, dup) - } - } - - instrs := make([]Instruction, len(f.Blocks[0].Instrs)+len(f.consts)-areNil) - i := 0 - for _, c := range f.consts { - if c != nil { - instrs[i] = c - c.setBlock(f.Blocks[0]) - i++ - } - } - copy(instrs[i:], f.Blocks[0].Instrs) - f.Blocks[0].Instrs = instrs - f.consts = nil + instrs = append(instrs, entry.Instrs...) + entry.Instrs = instrs } // buildFakeExits ensures that every block in the function is @@ -593,7 +533,12 @@ func (f *Function) finishBody() { buildPostDomTree(f) if f.Prog.mode&NaiveForm == 0 { - lift(f) + for lift(f) { + } + if doSimplifyConstantCompositeValues { + for simplifyConstantCompositeValues(f) { + } + } } // emit constants after lifting, because lifting may produce new constants, but before other variable splitting, @@ -652,7 +597,6 @@ func (f *Function) RemoveNilBlocks() { // removeNilBlocks eliminates nils from f.Blocks and updates each // BasicBlock.Index. Use this after any pass that may delete blocks. -// func (f *Function) removeNilBlocks() { j := 0 for _, b := range f.Blocks { @@ -673,7 +617,6 @@ func (f *Function) removeNilBlocks() { // functions will include full debug info. This greatly increases the // size of the instruction stream, and causes Functions to depend upon // the ASTs, potentially keeping them live in memory for longer. -// func (pkg *Package) SetDebugMode(debug bool) { // TODO(adonovan): do we want ast.File granularity? 
pkg.debug = debug @@ -687,7 +630,6 @@ func (f *Function) debugInfo() bool { // addNamedLocal creates a local variable, adds it to function f and // returns it. Its name and type are taken from obj. Subsequent // calls to f.lookup(obj) will return the same local. -// func (f *Function) addNamedLocal(obj types.Object, source ast.Node) *Alloc { l := f.addLocal(obj.Type(), source) f.objects[obj] = l @@ -700,7 +642,6 @@ func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc { // addLocal creates an anonymous local variable of type typ, adds it // to function f and returns it. pos is the optional source location. -// func (f *Function) addLocal(typ types.Type, source ast.Node) *Alloc { v := &Alloc{} v.setType(types.NewPointer(typ)) @@ -713,7 +654,6 @@ func (f *Function) addLocal(typ types.Type, source ast.Node) *Alloc { // that is local to function f or one of its enclosing functions. // If escaping, the reference comes from a potentially escaping pointer // expression and the referent must be heap-allocated. -// func (f *Function) lookup(obj types.Object, escaping bool) Value { if v, ok := f.objects[obj]; ok { if alloc, ok := v.(*Alloc); ok && escaping { @@ -750,13 +690,14 @@ func (f *Function) emit(instr Instruction, source ast.Node) Value { // The specific formatting rules are not guaranteed and may change. // // Examples: -// "math.IsNaN" // a package-level function -// "(*bytes.Buffer).Bytes" // a declared method or a wrapper -// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0) -// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure) -// "main.main$1" // an anonymous function in main -// "main.init#1" // a declared init function -// "main.init" // the synthesized package initializer +// +// "math.IsNaN" // a package-level function +// "(*bytes.Buffer).Bytes" // a declared method or a wrapper +// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0) +// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure) +// "main.main$1" // an anonymous function in main +// "main.init#1" // a declared init function +// "main.init" // the synthesized package initializer // // When these functions are referred to from within the same package // (i.e. from == f.Pkg.Object), they are rendered without the package path. @@ -766,7 +707,6 @@ func (f *Function) emit(instr Instruction, source ast.Node) Value { // (But two methods may have the same name "(T).f" if one is a synthetic // wrapper promoting a non-exported method "f" from another package; in // that case, the strings are equal but the identifiers "f" are distinct.) -// func (f *Function) RelString(from *types.Package) string { // Anonymous? if f.parent != nil { @@ -920,6 +860,10 @@ func WriteFunction(buf *bytes.Buffer, f *Function) { default: buf.WriteString(instr.String()) } + if instr != nil && instr.Comment() != "" { + buf.WriteString(" # ") + buf.WriteString(instr.Comment()) + } buf.WriteString("\n") if f.Prog.mode&PrintSource != 0 { @@ -950,7 +894,6 @@ func WriteFunction(buf *bytes.Buffer, f *Function) { // newBasicBlock adds to f a new basic block and returns it. It does // not automatically become the current block for subsequent calls to emit. // comment is an optional string for more readable debugging output. 
-// func (f *Function) newBasicBlock(comment string) *BasicBlock { var instrs []Instruction if len(f.functionBody.scratchInstructions) > 0 { @@ -985,7 +928,6 @@ func (f *Function) newBasicBlock(comment string) *BasicBlock { // "reflect" package, etc. // // TODO(adonovan): think harder about the API here. -// func (prog *Program) NewFunction(name string, sig *types.Signature, provenance Synthetic) *Function { return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance} } @@ -1004,3 +946,12 @@ func (f *Function) initHTML(name string) { f.wr = NewHTMLWriter("ir.html", rel, "") } } + +func killInstruction(instr Instruction) { + ops := instr.Operands(nil) + for _, op := range ops { + if refs := (*op).Referrers(); refs != nil { + *refs = removeInstr(*refs, instr) + } + } +} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/load.go b/vendor/honnef.co/go/tools/go/ir/irutil/load.go index 1e83effa..883447b7 100644 --- a/vendor/honnef.co/go/tools/go/ir/irutil/load.go +++ b/vendor/honnef.co/go/tools/go/ir/irutil/load.go @@ -13,6 +13,7 @@ import ( "honnef.co/go/tools/go/ir" + //lint:ignore SA1019 go/loader is deprecated, but works fine for our tests "golang.org/x/tools/go/loader" "golang.org/x/tools/go/packages" ) @@ -39,7 +40,6 @@ type Options struct { // packages with well-typed syntax trees. // // The mode parameter controls diagnostics and checking during IR construction. -// func Packages(initial []*packages.Package, mode ir.BuilderMode, opts *Options) (*ir.Program, []*ir.Package) { return doPackages(initial, mode, false, opts) } @@ -61,7 +61,6 @@ func Packages(initial []*packages.Package, mode ir.BuilderMode, opts *Options) ( // well-typed syntax trees. // // The mode parameter controls diagnostics and checking during IR construction. -// func AllPackages(initial []*packages.Package, mode ir.BuilderMode, opts *Options) (*ir.Program, []*ir.Package) { return doPackages(initial, mode, true, opts) } @@ -112,7 +111,6 @@ func doPackages(initial []*packages.Package, mode ir.BuilderMode, deps bool, opt // // Deprecated: use golang.org/x/tools/go/packages and the Packages // function instead; see ir.ExampleLoadPackages. -// func CreateProgram(lprog *loader.Program, mode ir.BuilderMode) *ir.Program { prog := ir.NewProgram(lprog.Fset, mode) @@ -139,7 +137,6 @@ func CreateProgram(lprog *loader.Program, mode ir.BuilderMode) *ir.Program { // The operation fails if there were any type-checking or import errors. // // See ../ir/example_test.go for an example. -// func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ir.BuilderMode) (*ir.Package, *types.Info, error) { if fset == nil { panic("no token.FileSet") diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/switch.go b/vendor/honnef.co/go/tools/go/ir/irutil/switch.go index e7654e00..afe899d8 100644 --- a/vendor/honnef.co/go/tools/go/ir/irutil/switch.go +++ b/vendor/honnef.co/go/tools/go/ir/irutil/switch.go @@ -55,7 +55,6 @@ type TypeCase struct { // A type switch may contain duplicate types, or types assignable // to an interface type also in the list. // TODO(adonovan): eliminate such duplicates. -// type Switch struct { Start *ir.BasicBlock // block containing start of if/else chain X ir.Value // the switch operand @@ -103,7 +102,6 @@ func (sw *Switch) String() string { // Switches may even be inferred from if/else- or goto-based control flow. // (In general, the control flow constructs of the source program // cannot be faithfully reproduced from the IR.) 
-// func Switches(fn *ir.Function) []Switch { // Traverse the CFG in dominance order, so we don't // enter an if/else-chain in the middle. @@ -227,7 +225,6 @@ func typeSwitch(sw *Switch, y ir.Value, T types.Type, seen map[*ir.BasicBlock]bo // isComparisonBlock returns the operands (v, k) if a block ends with // a comparison v==k, where k is a compile-time constant. -// func isComparisonBlock(b *ir.BasicBlock) (v ir.Value, k *ir.Const) { if n := len(b.Instrs); n >= 2 { if i, ok := b.Instrs[n-1].(*ir.If); ok { @@ -246,7 +243,6 @@ func isComparisonBlock(b *ir.BasicBlock) (v ir.Value, k *ir.Const) { // isTypeAssertBlock returns the operands (y, x, T) if a block ends with // a type assertion "if y, ok := x.(T); ok {". -// func isTypeAssertBlock(b *ir.BasicBlock) (y, x ir.Value, T types.Type) { if n := len(b.Instrs); n >= 4 { if i, ok := b.Instrs[n-1].(*ir.If); ok { diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/visit.go b/vendor/honnef.co/go/tools/go/ir/irutil/visit.go index f6d0503d..f2135dca 100644 --- a/vendor/honnef.co/go/tools/go/ir/irutil/visit.go +++ b/vendor/honnef.co/go/tools/go/ir/irutil/visit.go @@ -18,7 +18,6 @@ import "honnef.co/go/tools/go/ir" // synthetic wrappers. // // Precondition: all packages are built. -// func AllFunctions(prog *ir.Program) map[*ir.Function]bool { visit := visitor{ prog: prog, diff --git a/vendor/honnef.co/go/tools/go/ir/lift.go b/vendor/honnef.co/go/tools/go/ir/lift.go index 8ab67eb8..8ac8330d 100644 --- a/vendor/honnef.co/go/tools/go/ir/lift.go +++ b/vendor/honnef.co/go/tools/go/ir/lift.go @@ -63,8 +63,7 @@ const debugLifting = false // // domFrontier's methods mutate the slice's elements but not its // length, so their receivers needn't be pointers. -// -type domFrontier [][]*BasicBlock +type domFrontier BlockMap[[]*BasicBlock] func (df domFrontier) add(u, v *BasicBlock) { df[u.Index] = append(df[u.Index], v) @@ -105,7 +104,7 @@ func buildDomFrontier(fn *Function) domFrontier { return df } -type postDomFrontier [][]*BasicBlock +type postDomFrontier BlockMap[[]*BasicBlock] func (rdf postDomFrontier) add(u, v *BasicBlock) { rdf[u.Index] = append(rdf[u.Index], v) @@ -156,6 +155,19 @@ func clearInstrs(instrs []Instruction) { } } +func numberNodesPerBlock(f *Function) { + for _, b := range f.Blocks { + var base ID + for _, instr := range b.Instrs { + if instr == nil { + continue + } + instr.setID(base) + base++ + } + } +} + // lift replaces local and new Allocs accessed only with // load/store by IR registers, inserting φ- and σ-nodes where necessary. // The result is a program in pruned SSI form. @@ -164,8 +176,7 @@ func clearInstrs(instrs []Instruction) { // - fn has no dead blocks (blockopt has run). // - Def/use info (Operands and Referrers) is up-to-date. // - The dominator tree is up-to-date. -// -func lift(fn *Function) { +func lift(fn *Function) bool { // TODO(adonovan): opt: lots of little optimizations may be // worthwhile here, especially if they cause us to avoid // buildDomFrontier. For example: @@ -187,8 +198,8 @@ func lift(fn *Function) { var df domFrontier var rdf postDomFrontier var closure *closure - var newPhis newPhiMap - var newSigmas newSigmaMap + var newPhis BlockMap[[]newPhi] + var newSigmas BlockMap[[]newSigma] // During this pass we will replace some BasicBlock.Instrs // (allocs, loads and stores) with nil, keeping a count in @@ -204,24 +215,35 @@ func lift(fn *Function) { // Determine which allocs we can lift and number them densely. // The renaming phase uses this numbering for compact maps. 
numAllocs := 0 + + instructions := make(BlockMap[liftInstructions], len(fn.Blocks)) + for i := range instructions { + instructions[i].insertInstructions = map[Instruction][]Instruction{} + } + + // Number nodes, for liftable + numberNodesPerBlock(fn) + for _, b := range fn.Blocks { b.gaps = 0 b.rundefers = 0 + for _, instr := range b.Instrs { switch instr := instr.(type) { case *Alloc: - if !liftable(instr) { + if !liftable(instr, instructions) { instr.index = -1 continue } + if numAllocs == 0 { df = buildDomFrontier(fn) rdf = buildPostDomFrontier(fn) if len(fn.Blocks) > 2 { closure = transitiveClosure(fn) } - newPhis = make(newPhiMap, len(fn.Blocks)) - newSigmas = make(newSigmaMap, len(fn.Blocks)) + newPhis = make(BlockMap[[]newPhi], len(fn.Blocks)) + newSigmas = make(BlockMap[[]newSigma], len(fn.Blocks)) if debugLifting { title := false @@ -236,7 +258,6 @@ func lift(fn *Function) { } } } - liftAlloc(closure, df, rdf, instr, newPhis, newSigmas) instr.index = numAllocs numAllocs++ case *Defer: @@ -248,6 +269,39 @@ func lift(fn *Function) { } if numAllocs > 0 { + for _, b := range fn.Blocks { + work := instructions[b.Index] + for _, rename := range work.renameAllocs { + for _, instr_ := range b.Instrs[rename.startingAt:] { + replace(instr_, rename.from, rename.to) + } + } + } + + for _, b := range fn.Blocks { + work := instructions[b.Index] + if len(work.insertInstructions) != 0 { + newInstrs := make([]Instruction, 0, len(fn.Blocks)+len(work.insertInstructions)*3) + for _, instr := range b.Instrs { + if add, ok := work.insertInstructions[instr]; ok { + newInstrs = append(newInstrs, add...) + } + newInstrs = append(newInstrs, instr) + } + b.Instrs = newInstrs + } + } + + // TODO(dh): remove inserted allocs that end up unused after lifting. + + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + if instr, ok := instr.(*Alloc); ok && instr.index >= 0 { + liftAlloc(closure, df, rdf, instr, newPhis, newSigmas) + } + } + } + // renaming maps an alloc (keyed by index) to its replacement // value. Initially the renaming contains nil, signifying the // zero constant of the appropriate type; we construct the @@ -380,6 +434,8 @@ func lift(fn *Function) { fn.Locals[i] = nil } fn.Locals = fn.Locals[:j] + + return numAllocs > 0 } func hasDirectReferrer(instr Instruction) bool { @@ -394,7 +450,7 @@ func hasDirectReferrer(instr Instruction) bool { return false } -func markLiveNodes(blocks []*BasicBlock, newPhis newPhiMap, newSigmas newSigmaMap) { +func markLiveNodes(blocks []*BasicBlock, newPhis BlockMap[[]newPhi], newSigmas BlockMap[[]newSigma]) { // Phis and sigmas may become dead due to optimization passes. We may also insert more nodes than strictly // necessary, e.g. sigma nodes for constants, which will never be used. @@ -461,7 +517,7 @@ func markLiveSigma(sigma *Sigma) { // and replaces trivial phis with non-phi alternatives. Phi // nodes where all edges are identical, or consist of only the phi // itself and one other value, may be replaced with the value. 
-func simplifyPhisAndSigmas(newPhis newPhiMap, newSigmas newSigmaMap) { +func simplifyPhisAndSigmas(newPhis BlockMap[[]newPhi], newSigmas BlockMap[[]newSigma]) { // temporary numbering of values used in phis so that we can build map keys var id ID for _, npList := range newPhis { @@ -652,7 +708,7 @@ func (s *BlockSet) Take() int { type closure struct { span []uint32 - reachables []interval + reachables BlockMap[interval] } type interval uint32 @@ -696,6 +752,8 @@ func (c closure) reachable(id int) []interval { } func (c closure) walk(current *BasicBlock, b *BasicBlock, visited []bool) { + // TODO(dh): the 'current' argument seems to be unused + // TODO(dh): there's no reason for this to be a method visited[b.Index] = true for _, succ := range b.Succs { if visited[succ.Index] { @@ -707,7 +765,7 @@ func (c closure) walk(current *BasicBlock, b *BasicBlock, visited []bool) { } func transitiveClosure(fn *Function) *closure { - reachable := make([]bool, len(fn.Blocks)) + reachable := make(BlockMap[bool], len(fn.Blocks)) c := &closure{} c.span = make([]uint32, len(fn.Blocks)+1) @@ -763,13 +821,65 @@ type newSigma struct { sigmas []*Sigma } -// newPhiMap records for each basic block, the set of newPhis that -// must be prepended to the block. -type newPhiMap [][]newPhi -type newSigmaMap [][]newSigma +type liftInstructions struct { + insertInstructions map[Instruction][]Instruction + renameAllocs []struct { + from *Alloc + to *Alloc + startingAt int + } +} + +// liftable determines if alloc can be lifted, and records instructions to split partially liftable allocs. +// +// In the trivial case, all uses of the alloc can be lifted. This is the case when it is only used for storing into and +// loading from. In that case, no instructions are recorded. +// +// In the more complex case, the alloc is used for storing into and loading from, but it is also used as a value, for +// example because it gets passed to a function, e.g. fn(&x). In this case, uses of the alloc fall into one of two +// categories: those that can be lifted and those that can't. A boundary forms between these two categories in the +// function's control flow: Once an unliftable use is encountered, the alloc is no longer liftable for the remainder of +// the basic block the use is in, nor in any blocks reachable from it. +// +// We record instructions that split the alloc into two allocs: one that is used in liftable uses, and one that is used +// in unliftable uses. Whenever we encounter a boundary between liftable and unliftable uses or blocks, we emit a pair +// of Load and Store that copy the value from the liftable alloc into the unliftable alloc. Taking these instructions +// into account, the normal lifting machinery will completely lift the liftable alloc, store the correct lifted values +// into the unliftable alloc, and will not at all lift the unliftable alloc. 
+// +// In Go syntax, the transformation looks somewhat like this: +// +// func foo() { +// x := 32 +// if cond { +// println(x) +// escape(&x) +// println(x) +// } else { +// println(x) +// } +// println(x) +// } +// +// transforms into +// +// func fooSplitAlloc() { +// x := 32 +// var x_ int +// if cond { +// println(x) +// x_ = x +// escape(&x_) +// println(x_) +// } else { +// println(x) +// x_ = x +// } +// println(x_) +// } +func liftable(alloc *Alloc, instructions BlockMap[liftInstructions]) bool { + fn := alloc.block.parent -func liftable(alloc *Alloc) bool { - fn := alloc.Parent() // Don't lift named return values in functions that defer // calls that may recover from panic. if fn.hasDefer { @@ -780,24 +890,247 @@ func liftable(alloc *Alloc) bool { } } - for _, instr := range *alloc.Referrers() { + type blockDesc struct { + // is the block (partially) unliftable, because it contains unliftable instructions or is reachable by an unliftable block + isUnliftable bool + hasLiftableLoad bool + hasLiftableOther bool + // we need to emit stores in predecessors because the unliftable use is in a phi + storeInPreds bool + + lastLiftable int + firstUnliftable int + } + blocks := make(BlockMap[blockDesc], len(fn.Blocks)) + for _, b := range fn.Blocks { + blocks[b.Index].lastLiftable = -1 + blocks[b.Index].firstUnliftable = len(b.Instrs) + 1 + } + + // Look at all uses of the alloc and deduce which blocks have liftable or unliftable instructions. + for _, instr := range alloc.referrers { + // Find the first unliftable use + + desc := &blocks[instr.Block().Index] + hasUnliftable := false + inHead := false switch instr := instr.(type) { case *Store: if instr.Val == alloc { - return false // address used as value - } - if instr.Addr != alloc { - panic("Alloc.Referrers is inconsistent") + hasUnliftable = true } case *Load: - if instr.X != alloc { - panic("Alloc.Referrers is inconsistent") + case *DebugRef: + case *Phi, *Sigma: + inHead = true + hasUnliftable = true + default: + hasUnliftable = true + } + + if hasUnliftable { + desc.isUnliftable = true + if int(instr.ID()) < desc.firstUnliftable { + desc.firstUnliftable = int(instr.ID()) + } + if inHead { + desc.storeInPreds = true + desc.firstUnliftable = 0 } + } + } + for _, instr := range alloc.referrers { + // Find the last liftable use, taking the previously calculated firstUnliftable into consideration + + desc := &blocks[instr.Block().Index] + if int(instr.ID()) >= desc.firstUnliftable { + continue + } + hasLiftable := false + switch instr := instr.(type) { + case *Store: + if instr.Val != alloc { + desc.hasLiftableOther = true + hasLiftable = true + } + case *Load: + desc.hasLiftableLoad = true + hasLiftable = true case *DebugRef: - // ok - default: - return false + desc.hasLiftableOther = true + } + if hasLiftable { + if int(instr.ID()) > desc.lastLiftable { + desc.lastLiftable = int(instr.ID()) + } + } + } + + for i := range blocks { + // Update firstUnliftable to be one after lastLiftable. We do this to include the unliftable's preceding + // DebugRefs in the renaming. + blocks[i].firstUnliftable = blocks[i].lastLiftable + 1 + } + + // If a block is reachable by a (partially) unliftable block, then the entirety of the block is unliftable. In that + // case, stores have to be inserted in the predecessors. + // + // TODO(dh): this isn't always necessary. If the block is reachable by itself, i.e. 
part of a loop, then if the + // Alloc instruction is itself part of that loop, then there is a subset of instructions in the loop that can be + // lifted. For example: + // + // for { + // x := 42 + // println(x) + // escape(&x) + // } + // + // The x that escapes in one iteration of the loop isn't the same x that we read from on the next iteration. + seen := make(BlockMap[bool], len(fn.Blocks)) + var dfs func(b *BasicBlock) + dfs = func(b *BasicBlock) { + if seen[b.Index] { + return + } + seen[b.Index] = true + desc := &blocks[b.Index] + desc.hasLiftableLoad = false + desc.hasLiftableOther = false + desc.isUnliftable = true + desc.firstUnliftable = 0 + desc.storeInPreds = true + for _, succ := range b.Succs { + dfs(succ) + } + } + for _, b := range fn.Blocks { + if blocks[b.Index].isUnliftable { + for _, succ := range b.Succs { + dfs(succ) + } + } + } + + hasLiftableLoad := false + hasLiftableOther := false + hasUnliftable := false + for _, b := range fn.Blocks { + desc := blocks[b.Index] + hasLiftableLoad = hasLiftableLoad || desc.hasLiftableLoad + hasLiftableOther = hasLiftableOther || desc.hasLiftableOther + if desc.isUnliftable { + hasUnliftable = true + } + } + if !hasLiftableLoad && !hasLiftableOther { + // There are no liftable uses + return false + } else if !hasUnliftable { + // The alloc is entirely liftable without splitting + return true + } else if !hasLiftableLoad { + // The alloc is not entirely liftable, and the only liftable uses are stores. While some of those stores could + // get lifted away, it would also lead to an infinite loop when lifting to a fixpoint, because the newly created + // allocs also get stored into repeatable and that's their only liftable uses. + return false + } + + // We need to insert stores for the new alloc. If a (partially) unliftable block has no unliftable + // predecessors and the use isn't in a phi node, then the store can be inserted right before the unliftable use. + // Otherwise, stores have to be inserted at the end of all liftable predecessors. + + newAlloc := &Alloc{Heap: true} + newAlloc.setBlock(alloc.block) + newAlloc.setType(alloc.typ) + newAlloc.setSource(alloc.source) + newAlloc.index = -1 + newAlloc.comment = "split alloc" + + { + work := instructions[alloc.block.Index] + work.insertInstructions[alloc] = append(work.insertInstructions[alloc], newAlloc) + } + + predHasStore := make(BlockMap[bool], len(fn.Blocks)) + for _, b := range fn.Blocks { + desc := &blocks[b.Index] + bWork := &instructions[b.Index] + + if desc.isUnliftable { + bWork.renameAllocs = append(bWork.renameAllocs, struct { + from *Alloc + to *Alloc + startingAt int + }{ + alloc, newAlloc, int(desc.firstUnliftable), + }) + } + + if !desc.isUnliftable { + continue + } + + propagate := func(in *BasicBlock, before Instruction) { + load := &Load{ + X: alloc, + } + store := &Store{ + Addr: newAlloc, + Val: load, + } + load.setType(deref(alloc.typ)) + load.setBlock(in) + load.comment = "split alloc" + store.setBlock(in) + updateOperandReferrers(load) + updateOperandReferrers(store) + store.comment = "split alloc" + + entry := &instructions[in.Index] + entry.insertInstructions[before] = append(entry.insertInstructions[before], load, store) + } + + if desc.storeInPreds { + // emit stores at the end of liftable preds + for _, pred := range b.Preds { + if blocks[pred.Index].isUnliftable { + continue + } + + if !alloc.block.Dominates(pred) { + // Consider this cfg: + // + // 1 + // /| + // / | + // ↙ ↓ + // 2--→3 + // + // with an Alloc in block 2. 
It doesn't make sense to insert a store in block 1 for the jump to + // block 3, because 1 can never see the Alloc in the first place. + // + // Ignoring phi nodes, an Alloc always dominates all of its uses, and phi nodes don't matter here, + // because for the incoming edges that do matter, we do emit the stores. + + continue + } + + if predHasStore[pred.Index] { + // Don't generate redundant propagations. Not only is it unnecessary, it can lead to infinite loops + // when trying to lift to a fix point, because redundant stores are liftable. + continue + } + + predHasStore[pred.Index] = true + + before := pred.Instrs[len(pred.Instrs)-1] + propagate(pred, before) + } + } else { + // emit store before the first unliftable use + before := b.Instrs[desc.firstUnliftable] + propagate(b, before) } } @@ -805,7 +1138,7 @@ func liftable(alloc *Alloc) bool { } // liftAlloc lifts alloc into registers and populates newPhis and newSigmas with all the φ- and σ-nodes it may require. -func liftAlloc(closure *closure, df domFrontier, rdf postDomFrontier, alloc *Alloc, newPhis newPhiMap, newSigmas newSigmaMap) { +func liftAlloc(closure *closure, df domFrontier, rdf postDomFrontier, alloc *Alloc, newPhis BlockMap[[]newPhi], newSigmas BlockMap[[]newSigma]) { fn := alloc.Parent() defblocks := fn.blockset(0) @@ -950,17 +1283,28 @@ func liftAlloc(closure *closure, df domFrontier, rdf postDomFrontier, alloc *All // replaceAll replaces all intraprocedural uses of x with y, // updating x.Referrers and y.Referrers. // Precondition: x.Referrers() != nil, i.e. x must be local to some function. -// func replaceAll(x, y Value) { var rands []*Value pxrefs := x.Referrers() pyrefs := y.Referrers() for _, instr := range *pxrefs { - rands = instr.Operands(rands[:0]) // recycle storage - for _, rand := range rands { - if *rand != nil { - if *rand == x { - *rand = y + switch instr := instr.(type) { + case *CompositeValue: + // Special case CompositeValue because it might have very large lists of operands + // + // OPT(dh): this loop is still expensive for large composite values + for i, rand := range instr.Values { + if rand == x { + instr.Values[i] = y + } + } + default: + rands = instr.Operands(rands[:0]) // recycle storage + for _, rand := range rands { + if *rand != nil { + if *rand == x { + *rand = y + } } } } @@ -995,7 +1339,6 @@ func replace(instr Instruction, x, y Value) { // renamed returns the value to which alloc is being renamed, // constructing it lazily if it's the implicit zero initialization. -// func renamed(fn *Function, renaming []Value, alloc *Alloc) Value { v := renaming[alloc.index] if v == nil { @@ -1141,6 +1484,10 @@ func splitOnNewInformation(u *BasicBlock, renaming *StackMap) { // A slice to array pointer conversion tells us the minimum length of the slice rename(instr.X, instr, CopyInfoUnspecified, i) i++ + case *SliceToArray: + // A slice to array conversion tells us the minimum length of the slice + rename(instr.X, instr, CopyInfoUnspecified, i) + i++ case *Slice: // Slicing tells us about some of the bounds off := 0 @@ -1203,8 +1550,7 @@ func splitOnNewInformation(u *BasicBlock, renaming *StackMap) { // renaming is a map from *Alloc (keyed by index number) to its // dominating stored value; newPhis[x] is the set of new φ-nodes to be // prepended to block x. 
-// -func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap, newSigmas newSigmaMap) { +func rename(u *BasicBlock, renaming []Value, newPhis BlockMap[[]newPhi], newSigmas BlockMap[[]newSigma]) { // Each φ-node becomes the new name for its associated Alloc. for _, np := range newPhis[u.Index] { phi := np.phi @@ -1340,7 +1686,6 @@ func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap, newSigmas newSig // fresh copy of the renaming map for each subtree. r := make([]Value, len(renaming)) for _, v := range u.dom.children { - // XXX add debugging copy(r, renaming) // on entry to a block, the incoming sigma nodes become the new values for their alloc @@ -1355,3 +1700,54 @@ func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap, newSigmas newSig } } + +func simplifyConstantCompositeValues(fn *Function) bool { + changed := false + + for _, b := range fn.Blocks { + n := 0 + for _, instr := range b.Instrs { + replaced := false + + if cv, ok := instr.(*CompositeValue); ok { + ac := &AggregateConst{} + ac.typ = cv.typ + replaced = true + for _, v := range cv.Values { + if c, ok := v.(Constant); ok { + ac.Values = append(ac.Values, c) + } else { + replaced = false + break + } + } + if replaced { + replaceAll(cv, emitConst(fn, ac)) + killInstruction(cv) + } + + } + + if replaced { + changed = true + } else { + b.Instrs[n] = instr + n++ + } + } + + clearInstrs(b.Instrs[n:]) + b.Instrs = b.Instrs[:n] + } + + return changed +} + +func updateOperandReferrers(instr Instruction) { + for _, op := range instr.Operands(nil) { + refs := (*op).Referrers() + if refs != nil { + *refs = append(*refs, instr) + } + } +} diff --git a/vendor/honnef.co/go/tools/go/ir/lvalue.go b/vendor/honnef.co/go/tools/go/ir/lvalue.go index f676a1f7..119eed6c 100644 --- a/vendor/honnef.co/go/tools/go/ir/lvalue.go +++ b/vendor/honnef.co/go/tools/go/ir/lvalue.go @@ -15,7 +15,6 @@ import ( // An lvalue represents an assignable location that may appear on the // left-hand side of an assignment. This is a generalization of a // pointer to permit updates to elements of maps. -// type lvalue interface { store(fn *Function, v Value, source ast.Node) // stores v into the location load(fn *Function, source ast.Node) Value // loads the contents of the location @@ -52,11 +51,38 @@ func (a *address) typ() types.Type { return deref(a.addr.Type()) } +type compositeElement struct { + cv *CompositeValue + idx int + t types.Type + expr ast.Expr +} + +func (ce *compositeElement) load(fn *Function, source ast.Node) Value { + panic("not implemented") +} + +func (ce *compositeElement) store(fn *Function, v Value, source ast.Node) { + v = emitConv(fn, v, ce.t, source) + ce.cv.Values[ce.idx] = v + if ce.expr != nil { + // store.Val is v, converted for assignability. + emitDebugRef(fn, ce.expr, v, false) + } +} + +func (ce *compositeElement) address(fn *Function) Value { + panic("not implemented") +} + +func (ce *compositeElement) typ() types.Type { + return ce.t +} + // An element is an lvalue represented by m[k], the location of an // element of a map. These locations are not addressable // since pointers cannot be formed from them, but they do support // load() and store(). -// type element struct { m, k Value // map t types.Type // map element type @@ -90,7 +116,6 @@ func (e *element) typ() types.Type { // A blank is a dummy variable whose name is "_". // It is not reified: loads are illegal and stores are ignored. 
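simplifyConstantCompositeValues above folds a CompositeValue into an AggregateConst only when every element is a constant; a single non-constant element aborts the fold and the original instruction is kept. A toy sketch of that all-or-nothing scan, with assumed stand-in types rather than the ir package's own:

```go
package main

import "fmt"

// Toy stand-ins: a Value is either a Const or something computed at run time.
type Value interface{ isValue() }

type Const struct{ v int }
type Register struct{ id int }

func (Const) isValue()    {}
func (Register) isValue() {}

// foldIfConstant mirrors the shape of simplifyConstantCompositeValues: only if
// every element is a constant do we build the aggregate constant; otherwise we
// report failure and leave the composite alone.
func foldIfConstant(values []Value) ([]Const, bool) {
	consts := make([]Const, 0, len(values))
	for _, v := range values {
		c, ok := v.(Const)
		if !ok {
			return nil, false
		}
		consts = append(consts, c)
	}
	return consts, true
}

func main() {
	fmt.Println(foldIfConstant([]Value{Const{1}, Const{2}}))    // [{1} {2}] true
	fmt.Println(foldIfConstant([]Value{Const{1}, Register{7}})) // [] false
}
```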
-// type blank struct{} func (bl blank) load(fn *Function, source ast.Node) Value { diff --git a/vendor/honnef.co/go/tools/go/ir/methods.go b/vendor/honnef.co/go/tools/go/ir/methods.go index fa45d1b9..eb247a93 100644 --- a/vendor/honnef.co/go/tools/go/ir/methods.go +++ b/vendor/honnef.co/go/tools/go/ir/methods.go @@ -24,7 +24,6 @@ import ( // Thread-safe. // // EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) -// func (prog *Program) MethodValue(sel *types.Selection) *Function { if sel.Kind() != types.MethodVal { panic(fmt.Sprintf("MethodValue(%s) kind != MethodVal", sel)) @@ -46,7 +45,6 @@ func (prog *Program) MethodValue(sel *types.Selection) *Function { // LookupMethod returns the implementation of the method of type T // identified by (pkg, name). It returns nil if the method exists but // is abstract, and panics if T has no such method. -// func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function { sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name) if sel == nil { @@ -64,7 +62,7 @@ type methodSet struct { // Precondition: !isInterface(T). // EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) func (prog *Program) createMethodSet(T types.Type) *methodSet { - mset, ok := prog.methodSets.At(T).(*methodSet) + mset, ok := prog.methodSets.At(T) if !ok { mset = &methodSet{mapping: make(map[string]*Function)} prog.methodSets.Set(T, mset) @@ -104,14 +102,13 @@ func (prog *Program) addMethod(mset *methodSet, sel *types.Selection) *Function // Thread-safe. // // EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) -// func (prog *Program) RuntimeTypes() []types.Type { prog.methodsMu.Lock() defer prog.methodsMu.Unlock() var res []types.Type - prog.methodSets.Iterate(func(T types.Type, v interface{}) { - if v.(*methodSet).complete { + prog.methodSets.Iterate(func(T types.Type, v *methodSet) { + if v.complete { res = append(res, T) } }) @@ -148,7 +145,6 @@ func (prog *Program) declaredFunc(obj *types.Func) *Function { // TODO(adonovan): make this faster. It accounts for 20% of SSA build time. // // EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) -// func (prog *Program) needMethodsOf(T types.Type) { prog.methodsMu.Lock() prog.needMethods(T, false) @@ -159,10 +155,9 @@ func (prog *Program) needMethodsOf(T types.Type) { // Recursive case: skip => don't create methods for T. // // EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) -// func (prog *Program) needMethods(T types.Type, skip bool) { // Each package maintains its own set of types it has visited. - if prevSkip, ok := prog.runtimeTypes.At(T).(bool); ok { + if prevSkip, ok := prog.runtimeTypes.At(T); ok { // needMethods(T) was previously called if !prevSkip || skip { return // already seen, with same or false 'skip' value @@ -195,7 +190,7 @@ func (prog *Program) needMethods(T types.Type, skip bool) { case *types.Basic: // nop - case *types.Interface, *typeparams.TypeParam: + case *types.Interface, *types.TypeParam: // nop---handled by recursion over method set. case *types.Pointer: diff --git a/vendor/honnef.co/go/tools/go/ir/mode.go b/vendor/honnef.co/go/tools/go/ir/mode.go index b0b2c929..15b5a33f 100644 --- a/vendor/honnef.co/go/tools/go/ir/mode.go +++ b/vendor/honnef.co/go/tools/go/ir/mode.go @@ -15,9 +15,8 @@ import ( // // *BuilderMode satisfies the flag.Value interface. 
Example: // -// var mode = ir.BuilderMode(0) -// func init() { flag.Var(&mode, "build", ir.BuilderModeDoc) } -// +// var mode = ir.BuilderMode(0) +// func init() { flag.Var(&mode, "build", ir.BuilderModeDoc) } type BuilderMode uint const ( diff --git a/vendor/honnef.co/go/tools/go/ir/print.go b/vendor/honnef.co/go/tools/go/ir/print.go index c5b51ca8..ad23d16d 100644 --- a/vendor/honnef.co/go/tools/go/ir/print.go +++ b/vendor/honnef.co/go/tools/go/ir/print.go @@ -23,7 +23,6 @@ import ( // Functions (including methods) and Globals use RelString and // all types are displayed with relType, so that only cross-package // references are package-qualified. -// func relName(v Value, i Instruction) string { if v == nil { return "" @@ -174,6 +173,7 @@ func (v *ChangeType) String() string { return printConv("ChangeType", v func (v *Convert) String() string { return printConv("Convert", v, v.X) } func (v *ChangeInterface) String() string { return printConv("ChangeInterface", v, v.X) } func (v *SliceToArrayPointer) String() string { return printConv("SliceToArrayPointer", v, v.X) } +func (v *SliceToArray) String() string { return printConv("SliceToArray", v, v.X) } func (v *MakeInterface) String() string { return printConv("MakeInterface", v, v.X) } func (v *MakeClosure) String() string { @@ -288,8 +288,8 @@ func (s *Jump) String() string { block = s.block.Succs[0].Index } str := fmt.Sprintf("Jump → b%d", block) - if s.Comment != "" { - str = fmt.Sprintf("%s # %s", str, s.Comment) + if s.Comment() != "" { + str = fmt.Sprintf("%s # %s", str, s.Comment()) } return str } @@ -326,6 +326,31 @@ func (s *ConstantSwitch) String() string { return b.String() } +func (v *CompositeValue) String() string { + var b bytes.Buffer + from := v.Parent().pkg() + fmt.Fprintf(&b, "CompositeValue <%s>", relType(v.Type(), from)) + if v.NumSet >= len(v.Values) { + // All values provided + fmt.Fprint(&b, " [all]") + } else if v.Bitmap.BitLen() == 0 { + // No values provided + fmt.Fprint(&b, " [none]") + } else { + // Some values provided + bits := []byte(fmt.Sprintf("%0*b", len(v.Values), &v.Bitmap)) + for i := 0; i < len(bits)/2; i++ { + o := len(bits) - 1 - i + bits[i], bits[o] = bits[o], bits[i] + } + fmt.Fprintf(&b, " [%s]", bits) + } + for _, vv := range v.Values { + fmt.Fprintf(&b, " %s", relName(vv, v)) + } + return b.String() +} + func (s *TypeSwitch) String() string { from := s.Parent().pkg() var b bytes.Buffer diff --git a/vendor/honnef.co/go/tools/go/ir/sanity.go b/vendor/honnef.co/go/tools/go/ir/sanity.go index 1788d0f1..b6c59c95 100644 --- a/vendor/honnef.co/go/tools/go/ir/sanity.go +++ b/vendor/honnef.co/go/tools/go/ir/sanity.go @@ -13,6 +13,8 @@ import ( "io" "os" "strings" + + "honnef.co/go/tools/go/types/typeutil" ) type sanity struct { @@ -30,7 +32,6 @@ type sanity struct { // // Sanity-checking is intended to facilitate the debugging of code // transformation passes. -// func sanityCheck(fn *Function, reporter io.Writer) bool { if reporter == nil { reporter = os.Stderr @@ -40,7 +41,6 @@ func sanityCheck(fn *Function, reporter io.Writer) bool { // mustSanityCheck is like sanityCheck but panics instead of returning // a negative result. 
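CompositeValue.String in the print.go hunk above formats the bitmap with %0*b, which puts the most significant bit first, and then reverses the digits so that position i of the output lines up with element i. A small standalone illustration of that reversal, using the [4]byte{2: x} example from the struct's own comment:

```go
package main

import (
	"fmt"
	"math/big"
)

// render mirrors the formatting in CompositeValue.String: %0*b prints the most
// significant bit first, so the digits are reversed to make index i of the
// string correspond to bit i of the bitmap.
func render(bitmap *big.Int, n int) string {
	bits := []byte(fmt.Sprintf("%0*b", n, bitmap))
	for i := 0; i < len(bits)/2; i++ {
		o := len(bits) - 1 - i
		bits[i], bits[o] = bits[o], bits[i]
	}
	return string(bits)
}

func main() {
	// [4]byte{2: x} sets only element 2, i.e. bit 2 of the bitmap.
	var bm big.Int
	bm.SetBit(&bm, 2, 1)
	fmt.Println(render(&bm, 4)) // 0010
}
```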
-// func mustSanityCheck(fn *Function, reporter io.Writer) { if !sanityCheck(fn, reporter) { fn.WriteTo(os.Stderr) @@ -142,11 +142,14 @@ func (s *sanity) checkInstr(idx int, instr Instruction) { case *ChangeInterface: case *ChangeType: case *SliceToArrayPointer: + case *SliceToArray: case *Convert: - if _, ok := instr.X.Type().Underlying().(*types.Basic); !ok { - if _, ok := instr.Type().Underlying().(*types.Basic); !ok { - s.errorf("convert %s -> %s: at least one type must be basic", instr.X.Type(), instr.Type()) - } + tsetInstrX := typeutil.NewTypeSet(instr.X.Type().Underlying()) + tsetInstr := typeutil.NewTypeSet(instr.Type().Underlying()) + ok1 := tsetInstr.Any(func(term *types.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) + ok2 := tsetInstrX.Any(func(term *types.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) + if !ok1 && !ok2 { + s.errorf("convert %s -> %s: at least one type set must contain basic type", instr.X.Type(), instr.Type()) } case *Defer: @@ -194,6 +197,7 @@ func (s *sanity) checkInstr(idx int, instr Instruction) { case *GenericConst: case *Recv: case *TypeSwitch: + case *CompositeValue: default: panic(fmt.Sprintf("Unknown instruction type: %T", instr)) } diff --git a/vendor/honnef.co/go/tools/go/ir/source.go b/vendor/honnef.co/go/tools/go/ir/source.go index 677eefbd..0aa8ef30 100644 --- a/vendor/honnef.co/go/tools/go/ir/source.go +++ b/vendor/honnef.co/go/tools/go/ir/source.go @@ -25,11 +25,10 @@ import ( // enclosed by the package's init() function. // // Returns nil if not found; reasons might include: -// - the node is not enclosed by any function. -// - the node is within an anonymous function (FuncLit) and -// its IR function has not been created yet -// (pkg.Build() has not yet been called). -// +// - the node is not enclosed by any function. +// - the node is within an anonymous function (FuncLit) and +// its IR function has not been created yet +// (pkg.Build() has not yet been called). func EnclosingFunction(pkg *Package, path []ast.Node) *Function { // Start with package-level function... fn := findEnclosingPackageLevelFunction(pkg, path) @@ -67,14 +66,12 @@ outer: // depend on whether IR code for pkg has been built, so it can be // used to quickly reject check inputs that will cause // EnclosingFunction to fail, prior to IR building. -// func HasEnclosingFunction(pkg *Package, path []ast.Node) bool { return findEnclosingPackageLevelFunction(pkg, path) != nil } // findEnclosingPackageLevelFunction returns the Function // corresponding to the package-level function enclosing path. -// func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function { if n := len(path); n >= 2 { // [... {Gen,Func}Decl File] switch decl := path[n-2].(type) { @@ -100,7 +97,6 @@ func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function // findNamedFunc returns the named function whose FuncDecl.Ident is at // position pos. -// func findNamedFunc(pkg *Package, pos token.Pos) *Function { for _, fn := range pkg.Functions { if fn.Pos() == pos { @@ -114,13 +110,13 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function { // expression e. // // It returns nil if no value was found, e.g. -// - the expression is not lexically contained within f; -// - f was not built with debug information; or -// - e is a constant expression. (For efficiency, no debug -// information is stored for constants. Use -// go/types.Info.Types[e].Value instead.) -// - e is a reference to nil or a built-in function. 
-// - the value was optimised away. +// - the expression is not lexically contained within f; +// - f was not built with debug information; or +// - e is a constant expression. (For efficiency, no debug +// information is stored for constants. Use +// go/types.Info.Types[e].Value instead.) +// - e is a reference to nil or a built-in function. +// - the value was optimised away. // // If e is an addressable expression used in an lvalue context, // value is the address denoted by e, and isAddr is true. @@ -132,7 +128,6 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function { // astutil.PathEnclosingInterval to locate the ast.Node, then // EnclosingFunction to locate the Function, then ValueForExpr to find // the ir.Value.) -// func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) { if f.debugInfo() { // (opt) e = unparen(e) @@ -154,7 +149,6 @@ func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) { // Package returns the IR Package corresponding to the specified // type-checker package object. // It returns nil if no such IR package has been created. -// func (prog *Program) Package(obj *types.Package) *Package { return prog.packages[obj] } @@ -163,7 +157,6 @@ func (prog *Program) Package(obj *types.Package) *Package { // the specified named object, which may be a package-level const // (*Const), var (*Global) or func (*Function) of some package in // prog. It returns nil if the object is not found. -// func (prog *Program) packageLevelValue(obj types.Object) Value { if pkg, ok := prog.packages[obj.Pkg()]; ok { return pkg.values[obj] @@ -176,7 +169,6 @@ func (prog *Program) packageLevelValue(obj types.Object) Value { // // TODO(adonovan): check the invariant that obj.Type() matches the // result's Signature, both in the params/results and in the receiver. -// func (prog *Program) FuncValue(obj *types.Func) *Function { obj = typeparams.OriginMethod(obj) fn, _ := prog.packageLevelValue(obj).(*Function) @@ -185,7 +177,6 @@ func (prog *Program) FuncValue(obj *types.Func) *Function { // ConstValue returns the IR Value denoted by the source-level named // constant obj. -// func (prog *Program) ConstValue(obj *types.Const) *Const { // TODO(adonovan): opt: share (don't reallocate) // Consts for const objects and constant ast.Exprs. @@ -217,8 +208,9 @@ func (prog *Program) ConstValue(obj *types.Const) *Const { // If the identifier is a field selector and its base expression is // non-addressable, then VarValue returns the value of that field. // For example: -// func f() struct {x int} -// f().x // VarValue(x) returns a *Field instruction of type int +// +// func f() struct {x int} +// f().x // VarValue(x) returns a *Field instruction of type int // // All other identifiers denote addressable locations (variables). // For them, VarValue may return either the variable's address or its @@ -227,14 +219,14 @@ func (prog *Program) ConstValue(obj *types.Const) *Const { // // If !isAddr, the returned value is the one associated with the // specific identifier. For example, -// var x int // VarValue(x) returns Const 0 here -// x = 1 // VarValue(x) returns Const 1 here +// +// var x int // VarValue(x) returns Const 0 here +// x = 1 // VarValue(x) returns Const 1 here // // It is not specified whether the value or the address is returned in // any particular case, as it may depend upon optimizations performed // during IR code generation, such as registerization, constant // folding, avoidance of materialization of subexpressions, etc. 
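The source.go comments above spell out the intended lookup chain: astutil.PathEnclosingInterval to get the enclosing AST path, EnclosingFunction to find the IR function, then ValueForExpr to map an expression to a value. A hedged sketch of that chain follows; it assumes a *ir.Package already built with debug information, uses only the public honnef.co/go/tools/go/ir and golang.org/x/tools APIs, and the package name "sketch" is arbitrary.

```go
package sketch

import (
	"go/ast"

	"golang.org/x/tools/go/ast/astutil"
	"honnef.co/go/tools/go/ir"
)

// valueFor follows the chain documented in source.go: AST path -> enclosing
// IR function -> IR value for the expression. It returns (nil, false) if the
// package wasn't built with debug info or the expression has no value.
func valueFor(pkg *ir.Package, file *ast.File, e ast.Expr) (ir.Value, bool) {
	path, exact := astutil.PathEnclosingInterval(file, e.Pos(), e.End())
	if !exact {
		// For simplicity, require an exact enclosing node.
		return nil, false
	}
	fn := ir.EnclosingFunction(pkg, path)
	if fn == nil {
		// Not enclosed by any function, or the FuncLit's IR hasn't been built yet.
		return nil, false
	}
	v, isAddr := fn.ValueForExpr(e)
	_ = isAddr // true when v is the address denoted by an lvalue use of e
	return v, v != nil
}
```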
-// func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) { // All references to a var are local to some function, possibly init. fn := EnclosingFunction(pkg, ref) diff --git a/vendor/honnef.co/go/tools/go/ir/ssa.go b/vendor/honnef.co/go/tools/go/ir/ssa.go index c51dd67d..1ef87f9e 100644 --- a/vendor/honnef.co/go/tools/go/ir/ssa.go +++ b/vendor/honnef.co/go/tools/go/ir/ssa.go @@ -13,12 +13,18 @@ import ( "go/constant" "go/token" "go/types" + "math/big" "sync" - "golang.org/x/exp/typeparams" "honnef.co/go/tools/go/types/typeutil" ) +const ( + // Replace CompositeValue with only constant values with AggregateConst. Currently disabled because it breaks field + // tracking in U1000. + doSimplifyConstantCompositeValues = false +) + type ID int // A Program is a partial or complete Go program converted to IR form. @@ -31,9 +37,9 @@ type Program struct { MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets methodsMu sync.Mutex // guards the following maps: - methodSets typeutil.Map // maps type to its concrete methodSet - runtimeTypes typeutil.Map // types for which rtypes are needed - canon typeutil.Map // type canonicalization map + methodSets typeutil.Map[*methodSet] // maps type to its concrete methodSet + runtimeTypes typeutil.Map[bool] // types for which rtypes are needed + canon typeutil.Map[types.Type] // type canonicalization map bounds map[*types.Func]*Function // bounds for curried x.Method closures thunks map[selectionKey]*Function // thunks for T.Method expressions } @@ -46,7 +52,6 @@ type Program struct { // Members also contains entries for "init" (the synthetic package // initializer) and "init#%d", the nth declared init function, // and unspecified other things too. -// type Package struct { Prog *Program // the owning program Pkg *types.Package // the corresponding go/types.Package @@ -68,7 +73,6 @@ type Package struct { // A Member is a member of a Go package, implemented by *NamedConst, // *Global, *Function, or *Type; they are created by package-level // const, var, func and type declarations respectively. -// type Member interface { Name() string // declared name of the package member String() string // package-qualified name of the package member @@ -93,7 +97,6 @@ type Type struct { // // NB: a NamedConst is not a Value; it contains a constant Value, which // it augments with the name and position of its 'const' declaration. -// type NamedConst struct { object *types.Const Value *Const @@ -180,11 +183,12 @@ type Value interface { // An Instruction that defines a value (e.g. BinOp) also implements // the Value interface; an Instruction that only has an effect (e.g. Store) // does not. -// type Instruction interface { setSource(ast.Node) setID(ID) + Comment() string + // String returns the disassembled form of this value. // // Examples of Instructions that are Values: @@ -264,7 +268,6 @@ type Instruction interface { // Node is provided to simplify IR graph algorithms. Clients should // use the more specific and informative Value or Instruction // interfaces where appropriate. -// type Node interface { setID(ID) @@ -344,7 +347,6 @@ func (syn Synthetic) String() string { // Syntax.Pos() always returns the position of the declaring "func" token. // // Type() returns the function's Signature. 
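With the Program fields above switched to the generic typeutil.Map[V] (defined later in this diff, in upstream.go), lookups such as methodSets.At no longer need interface{} assertions: At returns a typed value plus an ok flag. A small usage sketch against that wrapper, assuming the vendored version shown in this diff; the package and function names are illustrative only.

```go
package sketch

import (
	"fmt"
	"go/types"

	"honnef.co/go/tools/go/types/typeutil"
)

// countTypes tallies how often each type occurs; the typed wrapper's At returns
// (zero value, false) for absent keys instead of an untyped nil interface{}.
func countTypes(ts []types.Type) {
	var m typeutil.Map[int]
	for _, t := range ts {
		n, _ := m.At(t) // zero value when the type hasn't been seen yet
		m.Set(t, n+1)
	}
	m.Iterate(func(t types.Type, n int) {
		fmt.Printf("%s: %d\n", t, n)
	})
}
```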
-// type Function struct { node @@ -373,13 +375,13 @@ type Function struct { type instanceWrapperMap struct { h typeutil.Hasher entries map[uint32][]struct { - key *typeparams.TypeList + key *types.TypeList val *Function } len int } -func typeListIdentical(l1, l2 *typeparams.TypeList) bool { +func typeListIdentical(l1, l2 *types.TypeList) bool { if l1.Len() != l2.Len() { return false } @@ -393,10 +395,10 @@ func typeListIdentical(l1, l2 *typeparams.TypeList) bool { return true } -func (m *instanceWrapperMap) At(key *typeparams.TypeList) *Function { +func (m *instanceWrapperMap) At(key *types.TypeList) *Function { if m.entries == nil { m.entries = make(map[uint32][]struct { - key *typeparams.TypeList + key *types.TypeList val *Function }) m.h = typeutil.MakeHasher() @@ -416,10 +418,10 @@ func (m *instanceWrapperMap) At(key *typeparams.TypeList) *Function { return nil } -func (m *instanceWrapperMap) Set(key *typeparams.TypeList, val *Function) { +func (m *instanceWrapperMap) Set(key *types.TypeList, val *Function) { if m.entries == nil { m.entries = make(map[uint32][]struct { - key *typeparams.TypeList + key *types.TypeList val *Function }) m.h = typeutil.MakeHasher() @@ -437,7 +439,7 @@ func (m *instanceWrapperMap) Set(key *typeparams.TypeList, val *Function) { } } m.entries[hash] = append(m.entries[hash], struct { - key *typeparams.TypeList + key *types.TypeList val *Function }{key, val}) m.len++ @@ -456,6 +458,11 @@ const ( NeverReturns ) +type constValue struct { + c Constant + idx int +} + type functionBody struct { // The following fields are set transiently during building, // then cleared. @@ -465,11 +472,14 @@ type functionBody struct { implicitResults []*Alloc // tuple of results targets *targets // linked stack of branch targets lblocks map[types.Object]*lblock // labelled blocks - consts []Constant - wr *HTMLWriter - fakeExits BlockSet - blocksets [5]BlockSet - hasDefer bool + + consts map[constKey]constValue + aggregateConsts typeutil.Map[[]*AggregateConst] + + wr *HTMLWriter + fakeExits BlockSet + blocksets [5]BlockSet + hasDefer bool // a contiguous block of instructions that will be used by blocks, // to avoid making multiple allocations. @@ -502,7 +512,6 @@ func (fn *Function) results() []*Alloc { // // The order of Preds and Succs is significant (to Phi and If // instructions, respectively). -// type BasicBlock struct { Index int // index of this block within Parent().Blocks Comment string // optional label; no semantic significance @@ -534,7 +543,6 @@ type BasicBlock struct { // // Pos() returns the position of the value that was captured, which // belongs to an enclosing function. -// type FreeVar struct { node @@ -548,7 +556,6 @@ type FreeVar struct { } // A Parameter represents an input parameter of a function. -// type Parameter struct { register @@ -573,10 +580,10 @@ type Parameter struct { // Pos() returns token.NoPos. // // Example printed form: -// Const {42} -// Const {"test"} -// Const {(3 + 4i)} // +// Const {42} +// Const {"test"} +// Const {(3 + 4i)} type Const struct { register @@ -586,7 +593,18 @@ type Const struct { type AggregateConst struct { register - Values []Constant + Values []Value +} + +type CompositeValue struct { + register + + // Bitmap records which elements were explicitly provided. For example, [4]byte{2: x} would have a bitmap of 0010. + Bitmap big.Int + // The number of bits set in Bitmap + NumSet int + // Dense list of values in the composite literal. Omitted elements are filled in with zero values. 
+ Values []Value } // TODO add the element's zero constant to ArrayConst @@ -617,7 +635,6 @@ func (*GenericConst) aConstant() {} // // Pos() returns the position of the ast.ValueSpec.Names[*] // identifier. -// type Global struct { node @@ -637,20 +654,19 @@ type Global struct { // Go spec (excluding "make" and "new") or one of these ir-defined // intrinsics: // -// // wrapnilchk returns ptr if non-nil, panics otherwise. -// // (For use in indirection wrappers.) -// func ir:wrapnilchk(ptr *T, recvType, methodName string) *T +// // wrapnilchk returns ptr if non-nil, panics otherwise. +// // (For use in indirection wrappers.) +// func ir:wrapnilchk(ptr *T, recvType, methodName string) *T // -// // noreturnWasPanic returns true if the previously called -// // function panicked, false if it exited the process. -// func ir:noreturnWasPanic() bool +// // noreturnWasPanic returns true if the previously called +// // function panicked, false if it exited the process. +// func ir:noreturnWasPanic() bool // // Object() returns a *types.Builtin for built-ins defined by the spec, // nil for others. // // Type() returns a *types.Signature representing the effective // signature of the built-in for this call. -// type Builtin struct { node @@ -687,9 +703,9 @@ type Builtin struct { // allocates a varargs slice. // // Example printed form: -// t1 = StackAlloc <*int> -// t2 = HeapAlloc <*int> (new) // +// t1 = StackAlloc <*int> +// t2 = HeapAlloc <*int> (new) type Alloc struct { register Heap bool @@ -711,8 +727,8 @@ var _ Value = (*Sigma)(nil) // Within a block, all σ-nodes must appear before all non-σ nodes. // // Example printed form: -// t2 = Sigma [#0] t1 (x) // +// t2 = Sigma [#0] t1 (x) type Sigma struct { register From *BasicBlock @@ -749,8 +765,8 @@ type Copy struct { // during SSA renaming. // // Example printed form: -// t3 = Phi 2:t1 4:t2 (x) // +// t3 = Phi 2:t1 4:t2 (x) type Phi struct { register Edges []Value // Edges[i] is value for Block().Preds[i] @@ -769,10 +785,10 @@ type Phi struct { // Pos() returns the ast.CallExpr.Lparen, if explicit in the source. // // Example printed form: -// t3 = Call <()> println t1 t2 -// t4 = Call <()> foo$1 -// t6 = Invoke t5.String // +// t3 = Call <()> println t1 t2 +// t4 = Call <()> foo$1 +// t6 = Invoke t5.String type Call struct { register Call CallCommon @@ -783,8 +799,8 @@ type Call struct { // Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source. // // Example printed form: -// t3 = BinOp {+} t2 t1 // +// t3 = BinOp {+} t2 t1 type BinOp struct { register // One of: @@ -800,10 +816,9 @@ type BinOp struct { // SUB is negation. // NOT is logical negation. // -// // Example printed form: -// t2 = UnOp {^} t1 // +// t2 = UnOp {^} t1 type UnOp struct { register Op token.Token // One of: NOT SUB XOR ! - ^ @@ -817,8 +832,8 @@ type UnOp struct { // specified. // // Example printed form: -// t2 = Load t1 // +// t2 = Load t1 type Load struct { register X Value @@ -828,11 +843,11 @@ type Load struct { // change to Type(). // // Type changes are permitted: -// - between a named type and its underlying type. -// - between two named types of the same underlying type. -// - between (possibly named) pointers to identical base types. -// - from a bidirectional channel to a read- or write-channel, -// optionally adding/removing a name. +// - between a named type and its underlying type. +// - between two named types of the same underlying type. +// - between (possibly named) pointers to identical base types. 
+// - from a bidirectional channel to a read- or write-channel, +// optionally adding/removing a name. // // This operation cannot fail dynamically. // @@ -840,8 +855,8 @@ type Load struct { // from an explicit conversion in the source. // // Example printed form: -// t2 = ChangeType <*T> t1 // +// t2 = ChangeType <*T> t1 type ChangeType struct { register X Value @@ -852,12 +867,13 @@ type ChangeType struct { // // A conversion may change the value and representation of its operand. // Conversions are permitted: -// - between real numeric types. -// - between complex numeric types. -// - between string and []byte or []rune. -// - between pointers and unsafe.Pointer. -// - between unsafe.Pointer and uintptr. -// - from (Unicode) integer to (UTF-8) string. +// - between real numeric types. +// - between complex numeric types. +// - between string and []byte or []rune. +// - between pointers and unsafe.Pointer. +// - between unsafe.Pointer and uintptr. +// - from (Unicode) integer to (UTF-8) string. +// // A conversion may imply a type name change also. // // This operation cannot fail dynamically. @@ -869,8 +885,8 @@ type ChangeType struct { // from an explicit conversion in the source. // // Example printed form: -// t2 = Convert <[]byte> t1 // +// t2 = Convert <[]byte> t1 type Convert struct { register X Value @@ -886,8 +902,8 @@ type Convert struct { // otherwise. // // Example printed form: -// t2 = ChangeInterface t1 // +// t2 = ChangeInterface t1 type ChangeInterface struct { register X Value @@ -900,13 +916,27 @@ type ChangeInterface struct { // from an explicit conversion in the source. // // Example printed form: -// t1 = SliceToArrayPointer <*[4]byte> t1 // +// t2 = SliceToArrayPointer <*[4]byte> t1 type SliceToArrayPointer struct { register X Value } +// The SliceToArray instruction yields the conversion of slice X to +// array. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// +// t2 = SliceToArray <[4]byte> t1 +type SliceToArray struct { + register + X Value +} + // MakeInterface constructs an instance of an interface type from a // value of a concrete type. // @@ -914,14 +944,15 @@ type SliceToArrayPointer struct { // of X, and Program.MethodValue(m) to find the implementation of a method. // // To construct the zero value of an interface type T, use: -// NewConst(constant.MakeNil(), T, pos) +// +// NewConst(constant.MakeNil(), T, pos) // // Pos() returns the ast.CallExpr.Lparen, if the instruction arose // from an explicit conversion in the source. // // Example printed form: -// t2 = MakeInterface t1 // +// t2 = MakeInterface t1 type MakeInterface struct { register X Value @@ -936,9 +967,9 @@ type MakeInterface struct { // closure or the ast.SelectorExpr.Sel for a bound method closure. // // Example printed form: -// t1 = MakeClosure foo$1 t1 t2 -// t5 = MakeClosure (T).foo$bound t4 // +// t1 = MakeClosure foo$1 t1 t2 +// t5 = MakeClosure (T).foo$bound t4 type MakeClosure struct { register Fn Value // always a *Function @@ -954,9 +985,9 @@ type MakeClosure struct { // the ast.CompositeLit.Lbrack if created by a literal. // // Example printed form: -// t1 = MakeMap -// t2 = MakeMap t1 // +// t1 = MakeMap +// t2 = MakeMap t1 type MakeMap struct { register Reserve Value // initial space reservation; nil => default @@ -971,9 +1002,9 @@ type MakeMap struct { // created it. 
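The new SliceToArray instruction above is the counterpart of SliceToArrayPointer for conversions that yield an array value rather than an array pointer (the form added in Go 1.20, alongside Go 1.17's pointer form). A small standalone program showing the two source-level conversions these instructions describe; both panic at run time if the slice is shorter than the array length.

```go
package main

import "fmt"

func main() {
	s := []byte{1, 2, 3, 4, 5}

	// Conversion to an array pointer (Go 1.17), described by SliceToArrayPointer.
	p := (*[4]byte)(s)

	// Conversion to an array value (Go 1.20), described by SliceToArray.
	a := [4]byte(s)

	fmt.Println(*p, a) // [1 2 3 4] [1 2 3 4]
}
```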
// // Example printed form: -// t3 = MakeChan t1 -// t4 = MakeChan t2 // +// t3 = MakeChan t1 +// t4 = MakeChan t2 type MakeChan struct { register Size Value // int; size of buffer; zero => synchronous. @@ -993,9 +1024,9 @@ type MakeChan struct { // created it. // // Example printed form: -// t3 = MakeSlice <[]string> t1 t2 -// t4 = MakeSlice t1 t2 // +// t3 = MakeSlice <[]string> t1 t2 +// t4 = MakeSlice t1 t2 type MakeSlice struct { register Len Value @@ -1016,8 +1047,8 @@ type MakeSlice struct { // NoPos if not explicit in the source (e.g. a variadic argument slice). // // Example printed form: -// t4 = Slice <[]int> t3 t2 t1 // +// t4 = Slice <[]int> t3 t2 t1 type Slice struct { register X Value // slice, string, or *array @@ -1038,8 +1069,8 @@ type Slice struct { // field, if explicit in the source. // // Example printed form: -// t2 = FieldAddr <*int> [0] (X) t1 // +// t2 = FieldAddr <*int> [0] (X) t1 type FieldAddr struct { register X Value // *struct @@ -1056,8 +1087,8 @@ type FieldAddr struct { // field, if explicit in the source. // // Example printed form: -// t2 = FieldAddr [0] (X) t1 // +// t2 = FieldAddr [0] (X) t1 type Field struct { register X Value // struct @@ -1079,8 +1110,8 @@ type Field struct { // explicit in the source. // // Example printed form: -// t3 = IndexAddr <*int> t2 t1 // +// t3 = IndexAddr <*int> t2 t1 type IndexAddr struct { register X Value // slice or *array, @@ -1093,8 +1124,8 @@ type IndexAddr struct { // explicit in the source. // // Example printed form: -// t3 = Index t2 t1 // +// t3 = Index t2 t1 type Index struct { register X Value // array @@ -1110,9 +1141,9 @@ type Index struct { // Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source. // // Example printed form: -// t4 = MapLookup t3 t1 -// t6 = MapLookup <(string, bool)> t3 t2 // +// t4 = MapLookup t3 t1 +// t6 = MapLookup <(string, bool)> t3 t2 type MapLookup struct { register X Value // map @@ -1126,8 +1157,8 @@ type MapLookup struct { // Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source. // // Example printed form: -// t3 = StringLookup t2 t1 // +// t3 = StringLookup t2 t1 type StringLookup struct { register X Value // string @@ -1136,7 +1167,6 @@ type StringLookup struct { // SelectState is a helper for Select. // It represents one goal state and its corresponding communication. -// type SelectState struct { Dir types.ChanDir // direction of case (SendOnly or RecvOnly) Chan Value // channel to use (for send or receive) @@ -1151,7 +1181,9 @@ type SelectState struct { // Let n be the number of States for which Dir==RECV and Tᵢ (0 ≤ i < n) // be the element type of each such state's Chan. // Select returns an n+2-tuple -// (index int, recvOk bool, r₀ T₀, ... rₙ-1 Tₙ-1) +// +// (index int, recvOk bool, r₀ T₀, ... rₙ-1 Tₙ-1) +// // The tuple's components, described below, must be accessed via the // Extract instruction. // @@ -1177,9 +1209,9 @@ type SelectState struct { // Pos() returns the ast.SelectStmt.Select. // // Example printed form: -// t6 = SelectNonBlocking <(index int, ok bool, int)> [<-t4, t5<-t1] -// t11 = SelectBlocking <(index int, ok bool)> [] // +// t6 = SelectNonBlocking <(index int, ok bool, int)> [<-t4, t5<-t1] +// t11 = SelectBlocking <(index int, ok bool)> [] type Select struct { register States []*SelectState @@ -1196,8 +1228,8 @@ type Select struct { // Pos() returns the ast.RangeStmt.For. 
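Instructions such as MapLookup above can yield a 2-tuple result whose ok component is read with Extract (documented further down); at the source level these correspond to Go's comma-ok forms, shown here as a plain program for orientation.

```go
package main

import "fmt"

func main() {
	m := map[string]int{"a": 1}

	// Map lookup with a 2-tuple result; the ok flag is the second component.
	v, ok := m["a"]

	// Channel receive with a 2-tuple result, consumed the same way.
	ch := make(chan int, 1)
	ch <- 2
	r, open := <-ch

	fmt.Println(v, ok, r, open) // 1 true 2 true
}
```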
// // Example printed form: -// t2 = Range t1 // +// t2 = Range t1 type Range struct { register X Value // string or map @@ -1219,9 +1251,9 @@ type Range struct { // The types of k and/or v may be types.Invalid. // // Example printed form: -// t5 = Next <(ok bool, k int, v rune)> t2 -// t5 = Next <(ok bool, k invalid type, v invalid type)> t2 // +// t5 = Next <(ok bool, k int, v rune)> t2 +// t5 = Next <(ok bool, k invalid type, v invalid type)> t2 type Next struct { register Iter Value @@ -1260,9 +1292,9 @@ type Next struct { // type-switch statement. // // Example printed form: -// t2 = TypeAssert t1 -// t4 = TypeAssert <(value fmt.Stringer, ok bool)> t1 // +// t2 = TypeAssert t1 +// t4 = TypeAssert <(value fmt.Stringer, ok bool)> t1 type TypeAssert struct { register X Value @@ -1277,8 +1309,8 @@ type TypeAssert struct { // MapLookup and others. // // Example printed form: -// t7 = Extract [1] (ok) t4 // +// t7 = Extract [1] (ok) t4 type Extract struct { register Tuple Value @@ -1295,11 +1327,10 @@ type Extract struct { // Pos() returns NoPos. // // Example printed form: -// Jump → b1 // +// Jump → b1 type Jump struct { anInstruction - Comment string } // The Unreachable pseudo-instruction signals that execution cannot @@ -1313,8 +1344,8 @@ type Jump struct { // containing BasicBlock. // // Example printed form: -// Unreachable → b1 // +// Unreachable → b1 type Unreachable struct { anInstruction } @@ -1329,8 +1360,8 @@ type Unreachable struct { // Pos() returns the *ast.IfStmt, if explicit in the source. // // Example printed form: -// If t2 → b1 b2 // +// If t2 → b1 b2 type If struct { anInstruction Cond Value @@ -1369,9 +1400,9 @@ type TypeSwitch struct { // Pos() returns the ast.ReturnStmt.Return, if explicit in the source. // // Example printed form: -// Return -// Return t1 t2 // +// Return +// Return t1 t2 type Return struct { anInstruction Results []Value @@ -1387,8 +1418,8 @@ type Return struct { // Pos() returns NoPos. // // Example printed form: -// RunDefers // +// RunDefers type RunDefers struct { anInstruction } @@ -1405,8 +1436,8 @@ type RunDefers struct { // in the source. // // Example printed form: -// Panic t1 // +// Panic t1 type Panic struct { anInstruction X Value // an interface{} @@ -1420,10 +1451,10 @@ type Panic struct { // Pos() returns the ast.GoStmt.Go. // // Example printed form: -// Go println t1 -// Go t3 -// GoInvoke t4.Bar t2 // +// Go println t1 +// Go t3 +// GoInvoke t4.Bar t2 type Go struct { anInstruction Call CallCommon @@ -1437,10 +1468,10 @@ type Go struct { // Pos() returns the ast.DeferStmt.Defer. // // Example printed form: -// Defer println t1 -// Defer t3 -// DeferInvoke t4.Bar t2 // +// Defer println t1 +// Defer t3 +// DeferInvoke t4.Bar t2 type Defer struct { anInstruction Call CallCommon @@ -1451,8 +1482,8 @@ type Defer struct { // Pos() returns the ast.SendStmt.Arrow, if explicit in the source. // // Example printed form: -// Send t2 t1 // +// Send t2 t1 type Send struct { anInstruction Chan, X Value @@ -1469,8 +1500,9 @@ type Send struct { // Pos() returns the ast.RangeStmt.For. // // Example printed form: -// t2 = Recv t1 -// t3 = Recv <(int, bool)> t1 +// +// t2 = Recv t1 +// t3 = Recv <(int, bool)> t1 type Recv struct { register Chan Value @@ -1486,8 +1518,8 @@ type Recv struct { // implementation choices, the details are not specified. // // Example printed form: -// Store {int} t2 t1 // +// Store {int} t2 t1 type Store struct { anInstruction Addr Value @@ -1502,8 +1534,8 @@ type Store struct { // Pos() returns NoPos. 
// // Example printed form: -// BlankStore t1 // +// BlankStore t1 type BlankStore struct { anInstruction Val Value @@ -1516,8 +1548,8 @@ type BlankStore struct { // if explicit in the source. // // Example printed form: -// MapUpdate t3 t1 t2 // +// MapUpdate t3 t1 t2 type MapUpdate struct { anInstruction Map Value @@ -1549,10 +1581,10 @@ type MapUpdate struct { // ordinary SSA renaming machinery.) // // Example printed form: -// ; *ast.CallExpr @ 102:9 is t5 -// ; var x float64 @ 109:72 is x -// ; address of *ast.CompositeLit @ 216:10 is t0 // +// ; *ast.CallExpr @ 102:9 is t5 +// ; var x float64 @ 109:72 is x +// ; address of *ast.CompositeLit @ 216:10 is t0 type DebugRef struct { anInstruction Expr ast.Expr // the referring expression (never *ast.ParenExpr) @@ -1570,7 +1602,6 @@ type DebugRef struct { // // Temporary names are automatically assigned to each register on // completion of building a function in IR form. -// type register struct { anInstruction typ types.Type // type of virtual register @@ -1599,7 +1630,12 @@ func (n *node) Pos() token.Pos { // It provides the implementations of the Block and setBlock methods. type anInstruction struct { node - block *BasicBlock // the basic block of this instruction + block *BasicBlock // the basic block of this instruction + comment string +} + +func (instr anInstruction) Comment() string { + return instr.comment } // CallCommon is contained by Go, Defer and Call to hold the @@ -1614,15 +1650,17 @@ type anInstruction struct { // 'func'. // // Value may be one of: -// (a) a *Function, indicating a statically dispatched call -// to a package-level function, an anonymous function, or -// a method of a named type. -// (b) a *MakeClosure, indicating an immediately applied -// function literal with free variables. -// (c) a *Builtin, indicating a statically dispatched call -// to a built-in function. -// (d) any other value, indicating a dynamically dispatched -// function call. +// +// (a) a *Function, indicating a statically dispatched call +// to a package-level function, an anonymous function, or +// a method of a named type. +// (b) a *MakeClosure, indicating an immediately applied +// function literal with free variables. +// (c) a *Builtin, indicating a statically dispatched call +// to a built-in function. +// (d) any other value, indicating a dynamically dispatched +// function call. +// // StaticCallee returns the identity of the callee in cases // (a) and (b), nil otherwise. // @@ -1630,9 +1668,10 @@ type anInstruction struct { // Args[0] contains the receiver parameter. // // Example printed form: -// t3 = Call <()> println t1 t2 -// Go t3 -// Defer t3 +// +// t3 = Call <()> println t1 t2 +// Go t3 +// Defer t3 // // 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon // represents a dynamically dispatched call to an interface method. @@ -1646,13 +1685,13 @@ type anInstruction struct { // receiver but the first true argument. // // Example printed form: -// t6 = Invoke t5.String -// GoInvoke t4.Bar t2 -// DeferInvoke t4.Bar t2 +// +// t6 = Invoke t5.String +// GoInvoke t4.Bar t2 +// DeferInvoke t4.Bar t2 // // For all calls to variadic functions (Signature().Variadic()), // the last element of Args is a slice. 
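The CallCommon documentation above distinguishes "call" mode (Value holds the callee) from "invoke" mode (Method holds the abstract interface method). A hedged sketch of classifying a call site with the accessors that documentation names; it assumes StaticCallee and Value.Name behave as described there, and the package name "sketch" is arbitrary.

```go
package sketch

import (
	"fmt"

	"honnef.co/go/tools/go/ir"
)

// describeCall reports how a call site dispatches: an interface method call
// ("invoke" mode), a statically known callee, or a dynamic function value.
func describeCall(call ir.CallInstruction) string {
	cc := call.Common()
	switch {
	case cc.IsInvoke():
		return fmt.Sprintf("invoke %s.%s", cc.Value.Name(), cc.Method.Name())
	case cc.StaticCallee() != nil:
		return fmt.Sprintf("static call to %s", cc.StaticCallee().Name())
	default:
		return fmt.Sprintf("dynamic call through %s of type %s",
			cc.Value.Name(), cc.Signature())
	}
}
```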
-// type CallCommon struct { Value Value // receiver (invoke mode) or func value (call mode) Method *types.Func // abstract method (invoke mode) @@ -1673,7 +1712,6 @@ func (c *CallCommon) IsInvoke() bool { // // In either "call" or "invoke" mode, if the callee is a method, its // receiver is represented by sig.Recv, not sig.Params().At(0). -// func (c *CallCommon) Signature() *types.Signature { if c.Method != nil { return c.Method.Type().(*types.Signature) @@ -1716,7 +1754,6 @@ func (c *CallCommon) Description() string { // The CallInstruction interface, implemented by *Go, *Defer and *Call, // exposes the common parts of function-calling instructions, // yet provides a way back to the Value defined by *Call alone. -// type CallInstruction interface { Instruction Common() *CallCommon // returns the common parts of the call @@ -1802,7 +1839,6 @@ func (c *NamedConst) RelString(from *types.Package) string { return relString(c, // Func returns the package-level function of the specified name, // or nil if not found. -// func (p *Package) Func(name string) (f *Function) { f, _ = p.Members[name].(*Function) return @@ -1810,7 +1846,6 @@ func (p *Package) Func(name string) (f *Function) { // Var returns the package-level variable of the specified name, // or nil if not found. -// func (p *Package) Var(name string) (g *Global) { g, _ = p.Members[name].(*Global) return @@ -1818,7 +1853,6 @@ func (p *Package) Var(name string) (g *Global) { // Const returns the package-level constant of the specified name, // or nil if not found. -// func (p *Package) Const(name string) (c *NamedConst) { c, _ = p.Members[name].(*NamedConst) return @@ -1826,7 +1860,6 @@ func (p *Package) Const(name string) (c *NamedConst) { // Type returns the package-level type of the specified name, // or nil if not found. 
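The Package accessors above (Func, Var, Const, Type) all return nil when no member of that name and kind exists, so lookups reduce to plain nil checks. A short sketch against a built *ir.Package; "init" is the synthetic initializer mentioned in the Package documentation, while "debug" is a hypothetical variable name used only for illustration.

```go
package sketch

import (
	"fmt"
	"os"

	"honnef.co/go/tools/go/ir"
)

// dumpInit writes the synthetic package initializer, if present, and looks up
// a package-level variable by name.
func dumpInit(pkg *ir.Package) {
	if fn := pkg.Func("init"); fn != nil {
		fn.WriteTo(os.Stdout)
	}
	if g := pkg.Var("debug"); g != nil { // hypothetical name
		fmt.Println("found global:", g.Name()) // g is a *ir.Global
	}
}
```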
-// func (p *Package) Type(name string) (t *Type) { t, _ = p.Members[name].(*Type) return @@ -1880,6 +1913,10 @@ func (v *SliceToArrayPointer) Operands(rands []*Value) []*Value { return append(rands, &v.X) } +func (v *SliceToArray) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + func (s *DebugRef) Operands(rands []*Value) []*Value { return append(rands, &s.X) } @@ -2042,13 +2079,26 @@ func (v *Load) Operands(rands []*Value) []*Value { return append(rands, &v.X) } +func (v *AggregateConst) Operands(rands []*Value) []*Value { + for i := range v.Values { + rands = append(rands, &v.Values[i]) + } + return rands +} + +func (v *CompositeValue) Operands(rands []*Value) []*Value { + for i := range v.Values { + rands = append(rands, &v.Values[i]) + } + return rands +} + // Non-Instruction Values: -func (v *Builtin) Operands(rands []*Value) []*Value { return rands } -func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } -func (v *Const) Operands(rands []*Value) []*Value { return rands } -func (v *ArrayConst) Operands(rands []*Value) []*Value { return rands } -func (v *AggregateConst) Operands(rands []*Value) []*Value { return rands } -func (v *GenericConst) Operands(rands []*Value) []*Value { return rands } -func (v *Function) Operands(rands []*Value) []*Value { return rands } -func (v *Global) Operands(rands []*Value) []*Value { return rands } -func (v *Parameter) Operands(rands []*Value) []*Value { return rands } +func (v *Builtin) Operands(rands []*Value) []*Value { return rands } +func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } +func (v *Const) Operands(rands []*Value) []*Value { return rands } +func (v *ArrayConst) Operands(rands []*Value) []*Value { return rands } +func (v *GenericConst) Operands(rands []*Value) []*Value { return rands } +func (v *Function) Operands(rands []*Value) []*Value { return rands } +func (v *Global) Operands(rands []*Value) []*Value { return rands } +func (v *Parameter) Operands(rands []*Value) []*Value { return rands } diff --git a/vendor/honnef.co/go/tools/go/ir/staticcheck.conf b/vendor/honnef.co/go/tools/go/ir/staticcheck.conf deleted file mode 100644 index d7b38bc3..00000000 --- a/vendor/honnef.co/go/tools/go/ir/staticcheck.conf +++ /dev/null @@ -1,3 +0,0 @@ -# ssa/... is mostly imported from upstream and we don't want to -# deviate from it too much, hence disabling SA1019 -checks = ["inherit", "-SA1019"] diff --git a/vendor/honnef.co/go/tools/go/ir/util.go b/vendor/honnef.co/go/tools/go/ir/util.go index 550f6c9d..0a733b65 100644 --- a/vendor/honnef.co/go/tools/go/ir/util.go +++ b/vendor/honnef.co/go/tools/go/ir/util.go @@ -16,8 +16,6 @@ import ( "honnef.co/go/tools/go/ast/astutil" "honnef.co/go/tools/go/types/typeutil" - - "golang.org/x/exp/typeparams" ) //// AST utilities @@ -26,7 +24,6 @@ func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } // isBlankIdent returns true iff e is an Ident with name "_". // They have no associated types.Object, and thus no type. -// func isBlankIdent(e ast.Expr) bool { id, ok := e.(*ast.Ident) return ok && id.Name == "_" @@ -51,7 +48,7 @@ func isInterface(T types.Type) bool { return types.IsInterface(T) } func deref(typ types.Type) types.Type { orig := typ - if t, ok := typ.(*typeparams.TypeParam); ok { + if t, ok := typ.(*types.TypeParam); ok { if ctyp := typeutil.CoreType(t); ctyp != nil { typ = ctyp } @@ -71,7 +68,6 @@ func recvType(obj *types.Func) types.Type { // returns a closure that prints the corresponding "end" message. 
// Call using 'defer logStack(...)()' to show builder stack on panic. // Don't forget trailing parens! -// func logStack(format string, args ...interface{}) func() { msg := fmt.Sprintf(format, args...) io.WriteString(os.Stderr, msg) @@ -99,7 +95,7 @@ func makeLen(T types.Type) *Builtin { lenParams := types.NewTuple(anonVar(T)) return &Builtin{ name: "len", - sig: types.NewSignature(nil, lenParams, lenResults, false), + sig: types.NewSignatureType(nil, nil, nil, lenParams, lenResults, false), } } @@ -147,3 +143,6 @@ func assert(x bool) { panic("failed assertion") } } + +// BlockMap is a mapping from basic blocks (identified by their indices) to values. +type BlockMap[T any] []T diff --git a/vendor/honnef.co/go/tools/go/ir/wrappers.go b/vendor/honnef.co/go/tools/go/ir/wrappers.go index 6082d07e..69537fb7 100644 --- a/vendor/honnef.co/go/tools/go/ir/wrappers.go +++ b/vendor/honnef.co/go/tools/go/ir/wrappers.go @@ -22,8 +22,6 @@ package ir import ( "fmt" "go/types" - - "golang.org/x/exp/typeparams" ) // -- wrappers ----------------------------------------------------------- @@ -42,7 +40,6 @@ import ( // - the result may be a thunk or a wrapper. // // EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) -// func makeWrapper(prog *Program, sel *types.Selection) *Function { obj := sel.Obj().(*types.Func) // the declared function sig := sel.Type().(*types.Signature) // type of this wrapper @@ -90,7 +87,7 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function { var c Call c.Call.Value = &Builtin{ name: "ir:wrapnilchk", - sig: types.NewSignature(nil, + sig: types.NewSignatureType(nil, nil, nil, types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)), types.NewTuple(anonVar(sel.Recv())), false), } @@ -140,7 +137,6 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function { // createParams creates parameters for wrapper method fn based on its // Signature.Params, which do not include the receiver. // start is the index of the first regular parameter to use. -// func createParams(fn *Function, start int) { tparams := fn.Signature.Params() for i, n := start, tparams.Len(); i < n; i++ { @@ -159,22 +155,21 @@ func createParams(fn *Function, start int) { // Use MakeClosure with such a wrapper to construct a bound method // closure. e.g.: // -// type T int or: type T interface { meth() } -// func (t T) meth() -// var t T -// f := t.meth -// f() // calls t.meth() +// type T int or: type T interface { meth() } +// func (t T) meth() +// var t T +// f := t.meth +// f() // calls t.meth() // // f is a closure of a synthetic wrapper defined as if by: // -// f := func() { return t.meth() } +// f := func() { return t.meth() } // // Unlike makeWrapper, makeBound need perform no indirection or field // selections because that can be done before the closure is // constructed. // // EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) -// func makeBound(prog *Program, obj *types.Func) *Function { prog.methodsMu.Lock() defer prog.methodsMu.Unlock() @@ -226,22 +221,21 @@ func makeBound(prog *Program, obj *types.Func) *Function { // // Precondition: sel.Kind() == types.MethodExpr. 
// -// type T int or: type T interface { meth() } -// func (t T) meth() -// f := T.meth -// var t T -// f(t) // calls t.meth() +// type T int or: type T interface { meth() } +// func (t T) meth() +// f := T.meth +// var t T +// f(t) // calls t.meth() // // f is a synthetic wrapper defined as if by: // -// f := func(t T) { return t.meth() } +// f := func(t T) { return t.meth() } // // TODO(adonovan): opt: currently the stub is created even when used // directly in a function call: C.f(i, 0). This is less efficient // than inlining the stub. // // EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) -// func makeThunk(prog *Program, sel *types.Selection) *Function { if sel.Kind() != types.MethodExpr { panic(sel) @@ -259,7 +253,7 @@ func makeThunk(prog *Program, sel *types.Selection) *Function { defer prog.methodsMu.Unlock() // Canonicalize key.recv to avoid constructing duplicate thunks. - canonRecv, ok := prog.canon.At(key.recv).(types.Type) + canonRecv, ok := prog.canon.At(key.recv) if !ok { canonRecv = key.recv prog.canon.Set(key.recv, canonRecv) @@ -278,7 +272,7 @@ func makeThunk(prog *Program, sel *types.Selection) *Function { } func changeRecv(s *types.Signature, recv *types.Var) *types.Signature { - return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic()) + return types.NewSignatureType(recv, nil, nil, s.Params(), s.Results(), s.Variadic()) } // selectionKey is like types.Selection but a usable map key. @@ -293,11 +287,11 @@ type selectionKey struct { // makeInstance creates a wrapper function with signature sig that calls the generic function fn. // If targs is not nil, fn is a function and targs describes the concrete type arguments. // If targs is nil, fn is a method and the type arguments are derived from the receiver. -func makeInstance(prog *Program, fn *Function, sig *types.Signature, targs *typeparams.TypeList) *Function { +func makeInstance(prog *Program, fn *Function, sig *types.Signature, targs *types.TypeList) *Function { if sig.Recv() != nil { assert(targs == nil) // Methods don't have their own type parameters, but the receiver does - targs = typeparams.NamedTypeArgs(deref(sig.Recv().Type()).(*types.Named)) + targs = deref(sig.Recv().Type()).(*types.Named).TypeArgs() } else { assert(targs != nil) } diff --git a/vendor/honnef.co/go/tools/go/ir/write.go b/vendor/honnef.co/go/tools/go/ir/write.go index b936bc98..139c8cf3 100644 --- a/vendor/honnef.co/go/tools/go/ir/write.go +++ b/vendor/honnef.co/go/tools/go/ir/write.go @@ -1,5 +1,5 @@ package ir func NewJump(parent *BasicBlock) *Jump { - return &Jump{anInstruction{block: parent}, ""} + return &Jump{anInstruction{block: parent}} } diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/typeparams.go b/vendor/honnef.co/go/tools/go/types/typeutil/typeparams.go index 9bfe8a38..2bf6ec60 100644 --- a/vendor/honnef.co/go/tools/go/types/typeutil/typeparams.go +++ b/vendor/honnef.co/go/tools/go/types/typeutil/typeparams.go @@ -8,7 +8,7 @@ import ( ) type TypeSet struct { - Terms []*typeparams.Term + Terms []*types.Term empty bool } @@ -71,7 +71,7 @@ func CoreType(typ types.Type) types.Type { // All calls fn for each term in the type set and reports whether all invocations returned true. // If the type set is empty or unconstrained, All immediately returns false. 
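These type-set helpers are what the rewritten Convert check in sanity.go (earlier in this diff) relies on: build a TypeSet for each side of the conversion and ask whether any term has a basic underlying type. A sketch of the same pattern, assuming the vendored typeutil API as shown here:

```go
package sketch

import (
	"go/types"

	"honnef.co/go/tools/go/types/typeutil"
)

// hasBasicTerm reports whether any term in typ's type set has a basic
// underlying type, mirroring the check used for Convert instructions.
func hasBasicTerm(typ types.Type) bool {
	tset := typeutil.NewTypeSet(typ.Underlying())
	return tset.Any(func(term *types.Term) bool {
		_, ok := term.Type().Underlying().(*types.Basic)
		return ok
	})
}
```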
-func (ts TypeSet) All(fn func(*typeparams.Term) bool) bool { +func (ts TypeSet) All(fn func(*types.Term) bool) bool { if len(ts.Terms) == 0 { return false } @@ -85,7 +85,7 @@ func (ts TypeSet) All(fn func(*typeparams.Term) bool) bool { // Any calls fn for each term in the type set and reports whether any invocation returned true. // It stops after the first call that returned true. -func (ts TypeSet) Any(fn func(*typeparams.Term) bool) bool { +func (ts TypeSet) Any(fn func(*types.Term) bool) bool { for _, term := range ts.Terms { if fn(term) { return true @@ -95,16 +95,16 @@ func (ts TypeSet) Any(fn func(*typeparams.Term) bool) bool { } // All is a wrapper for NewTypeSet(typ).All(fn). -func All(typ types.Type, fn func(*typeparams.Term) bool) bool { +func All(typ types.Type, fn func(*types.Term) bool) bool { return NewTypeSet(typ).All(fn) } // Any is a wrapper for NewTypeSet(typ).Any(fn). -func Any(typ types.Type, fn func(*typeparams.Term) bool) bool { +func Any(typ types.Type, fn func(*types.Term) bool) bool { return NewTypeSet(typ).Any(fn) } -func IsSlice(term *typeparams.Term) bool { +func IsSlice(term *types.Term) bool { _, ok := term.Type().Underlying().(*types.Slice) return ok } diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/upstream.go b/vendor/honnef.co/go/tools/go/types/typeutil/upstream.go index d35d08e0..04d8c21b 100644 --- a/vendor/honnef.co/go/tools/go/types/typeutil/upstream.go +++ b/vendor/honnef.co/go/tools/go/types/typeutil/upstream.go @@ -9,7 +9,6 @@ import ( ) type MethodSetCache = typeutil.MethodSetCache -type Map = typeutil.Map type Hasher = typeutil.Hasher func Callee(info *types.Info, call *ast.CallExpr) types.Object { @@ -23,3 +22,31 @@ func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection func MakeHasher() Hasher { return typeutil.MakeHasher() } + +type Map[V any] struct { + m typeutil.Map +} + +func (m *Map[V]) Delete(key types.Type) bool { return m.m.Delete(key) } +func (m *Map[V]) At(key types.Type) (V, bool) { + v := m.m.At(key) + if v == nil { + var zero V + return zero, false + } else { + return v.(V), true + } +} +func (m *Map[V]) Set(key types.Type, value V) { m.m.Set(key, value) } +func (m *Map[V]) Len() int { return m.m.Len() } +func (m *Map[V]) Iterate(f func(key types.Type, value V)) { + ff := func(key types.Type, value interface{}) { + f(key, value.(V)) + } + m.m.Iterate(ff) + +} +func (m *Map[V]) Keys() []types.Type { return m.m.Keys() } +func (m *Map[V]) String() string { return m.m.String() } +func (m *Map[V]) KeysString() string { return m.m.KeysString() } +func (m *Map[V]) SetHasher(h typeutil.Hasher) { m.m.SetHasher(h) } diff --git a/vendor/honnef.co/go/tools/knowledge/deprecated.go b/vendor/honnef.co/go/tools/knowledge/deprecated.go index 7412a86e..343d5deb 100644 --- a/vendor/honnef.co/go/tools/knowledge/deprecated.go +++ b/vendor/honnef.co/go/tools/knowledge/deprecated.go @@ -19,6 +19,9 @@ type Deprecation struct { // go/importer.ForCompiler contains "Deprecated:", but it refers to a single argument, not the whole function. // Luckily, the notice starts in the middle of a paragraph, and as such isn't detected by us. +// TODO(dh): StdlibDeprecations doesn't contain entries for internal packages and unexported API. That's fine for normal +// users, but makes the Deprecated check less useful for people working on Go itself. + // StdlibDeprecations contains a mapping of Go API (such as variables, methods, or fields, among others) // to information about when it has been deprecated. 
var StdlibDeprecations = map[string]Deprecation{ @@ -85,8 +88,19 @@ var StdlibDeprecations = map[string]Deprecation{ "net/http.ErrUnexpectedTrailer": {12, DeprecatedUseNoLonger}, "net/http.CloseNotifier": {11, 7}, // This is hairy. The notice says "Not all errors in the http package related to protocol errors are of type ProtocolError", but doesn't that imply that some errors do? - "net/http.ProtocolError": {8, DeprecatedUseNoLonger}, - "(crypto/x509.CertificateRequest).Attributes": {5, 3}, + "net/http.ProtocolError": {8, DeprecatedUseNoLonger}, + "(crypto/x509.CertificateRequest).Attributes": {5, 3}, + "(*crypto/x509.Certificate).CheckCRLSignature": {19, 19}, + "crypto/x509.ParseCRL": {19, 19}, + "crypto/x509.ParseDERCRL": {19, 19}, + "(*crypto/x509.Certificate).CreateCRL": {19, 19}, + "crypto/x509/pkix.TBSCertificateList": {19, 19}, + "crypto/x509/pkix.RevokedCertificate": {19, 19}, + "go/doc.ToHTML": {20, 20}, + "go/doc.ToText": {20, 20}, + "go/doc.Synopsis": {20, 20}, + "math/rand.Seed": {20, 0}, + "math/rand.Read": {20, DeprecatedNeverUse}, // These functions have no direct alternative, but they are insecure and should no longer be used. "crypto/x509.IsEncryptedPEMBlock": {16, DeprecatedNeverUse}, @@ -150,8 +164,9 @@ var StdlibDeprecations = map[string]Deprecation{ "syscall.GetQueuedCompletionStatus": {17, 0}, "syscall.CreateIoCompletionPort": {17, 0}, - // Not marked as deprecated with a recognizable header, but deprecated nonetheless. - "io/ioutil": {16, 16}, + // We choose to only track the package itself, even though all functions are derecated individually, too. Anyone + // using ioutil directly will have to import it, and this keeps the noise down. + "io/ioutil": {19, 19}, "bytes.Title": {18, 0}, "strings.Title": {18, 0}, @@ -174,15 +189,18 @@ var StdlibDeprecations = map[string]Deprecation{ "syscall.Syscall9": {18, 18}, } -// Last imported from Go at 4aa1efed4853ea067d665a952eee77c52faac774 with the following numbers of deprecations: +// Last imported from Go at 9f0234214473dfb785a5ad84a8fc62a6a395cbc3 with the following numbers of deprecations: // // archive/tar/common.go:2 // archive/zip/struct.go:6 // bytes/bytes.go:1 -// cmd/compile/internal/ir/expr.go:1 -// cmd/compile/internal/ir/type.go:1 +// cmd/api/testdata/src/pkg/p1/p1.go:8 +// cmd/api/testdata/src/pkg/p2/p2.go:2 +// cmd/api/testdata/src/pkg/p4/p4.go:1 +// cmd/compile/internal/noder/quirks.go:1 +// cmd/compile/internal/reflectdata/reflect.go:2 // cmd/compile/internal/syntax/walk.go:1 -// cmd/compile/internal/types/sym.go:2 +// cmd/compile/internal/types/sym.go:3 // cmd/go/internal/modcmd/edit.go:1 // cmd/go/testdata/mod/example.com_deprecated_a_v1.9.0.txt:2 // cmd/go/testdata/mod/example.com_deprecated_b_v1.9.0.txt:2 @@ -197,6 +215,11 @@ var StdlibDeprecations = map[string]Deprecation{ // cmd/vendor/golang.org/x/mod/semver/semver.go:1 // cmd/vendor/golang.org/x/sys/unix/zsysnum_darwin_amd64.go:1 // cmd/vendor/golang.org/x/sys/unix/zsysnum_darwin_arm64.go:1 +// cmd/vendor/golang.org/x/sys/unix/zsysnum_openbsd_386.go:1 +// cmd/vendor/golang.org/x/sys/unix/zsysnum_openbsd_amd64.go:1 +// cmd/vendor/golang.org/x/sys/unix/zsysnum_openbsd_arm.go:1 +// cmd/vendor/golang.org/x/sys/unix/zsysnum_openbsd_arm64.go:1 +// cmd/vendor/golang.org/x/sys/unix/zsysnum_openbsd_riscv64.go:1 // cmd/vendor/golang.org/x/sys/windows/security_windows.go:1 // cmd/vendor/golang.org/x/sys/windows/syscall_windows.go:2 // compress/flate/inflate.go:2 @@ -205,20 +228,27 @@ var StdlibDeprecations = map[string]Deprecation{ // crypto/tls/common.go:7 // 
crypto/x509/cert_pool.go:1 // crypto/x509/pem_decrypt.go:3 -// crypto/x509/x509.go:1 +// crypto/x509/pkix/pkix.go:2 +// crypto/x509/x509.go:5 // database/sql/driver/driver.go:6 // debug/gosym/pclntab.go:2 // encoding/csv/reader.go:2 // encoding/json/decode.go:1 // encoding/json/encode.go:1 +// go/doc/comment.go:2 // go/doc/doc.go:1 +// go/doc/synopsis.go:1 // go/importer/importer.go:2 -// go/types/errorcodes.go:1 // go/types/interface.go:2 // go/types/signature.go:1 // image/geom.go:2 // image/jpeg/reader.go:1 +// internal/types/errors/codes.go:1 +// io/ioutil/ioutil.go:7 +// io/ioutil/tempfile.go:2 +// math/rand/rand.go:2 // net/dial.go:2 +// net/http/h2_bundle.go:1 // net/http/httptest/recorder.go:1 // net/http/httputil/persist.go:8 // net/http/request.go:6 @@ -249,5 +279,4 @@ var StdlibDeprecations = map[string]Deprecation{ // syscall/syscall.go:3 // syscall/syscall_windows.go:6 // text/template/parse/node.go:5 -// vendor/golang.org/x/crypto/curve25519/curve25519.go:1 // vendor/golang.org/x/text/transform/transform.go:1 diff --git a/vendor/honnef.co/go/tools/knowledge/signatures.go b/vendor/honnef.co/go/tools/knowledge/signatures.go index d072e61a..03f4d53e 100644 --- a/vendor/honnef.co/go/tools/knowledge/signatures.go +++ b/vendor/honnef.co/go/tools/knowledge/signatures.go @@ -6,7 +6,7 @@ import ( ) var Signatures = map[string]*types.Signature{ - "(io.Seeker).Seek": types.NewSignature(nil, + "(io.Seeker).Seek": types.NewSignatureType(nil, nil, nil, types.NewTuple( types.NewParam(token.NoPos, nil, "", types.Typ[types.Int64]), types.NewParam(token.NoPos, nil, "", types.Typ[types.Int]), @@ -18,7 +18,7 @@ var Signatures = map[string]*types.Signature{ false, ), - "(io.Writer).Write": types.NewSignature(nil, + "(io.Writer).Write": types.NewSignatureType(nil, nil, nil, types.NewTuple( types.NewParam(token.NoPos, nil, "", types.NewSlice(types.Typ[types.Byte])), ), @@ -29,7 +29,7 @@ var Signatures = map[string]*types.Signature{ false, ), - "(io.StringWriter).WriteString": types.NewSignature(nil, + "(io.StringWriter).WriteString": types.NewSignatureType(nil, nil, nil, types.NewTuple( types.NewParam(token.NoPos, nil, "", types.Typ[types.String]), ), @@ -40,7 +40,7 @@ var Signatures = map[string]*types.Signature{ false, ), - "(encoding.TextMarshaler).MarshalText": types.NewSignature(nil, + "(encoding.TextMarshaler).MarshalText": types.NewSignatureType(nil, nil, nil, types.NewTuple(), types.NewTuple( types.NewParam(token.NoPos, nil, "", types.NewSlice(types.Typ[types.Byte])), @@ -49,7 +49,7 @@ var Signatures = map[string]*types.Signature{ false, ), - "(encoding/json.Marshaler).MarshalJSON": types.NewSignature(nil, + "(encoding/json.Marshaler).MarshalJSON": types.NewSignatureType(nil, nil, nil, types.NewTuple(), types.NewTuple( types.NewParam(token.NoPos, nil, "", types.NewSlice(types.Typ[types.Byte])), @@ -58,7 +58,7 @@ var Signatures = map[string]*types.Signature{ false, ), - "(fmt.Stringer).String": types.NewSignature(nil, + "(fmt.Stringer).String": types.NewSignatureType(nil, nil, nil, types.NewTuple(), types.NewTuple( types.NewParam(token.NoPos, nil, "", types.Typ[types.String]), diff --git a/vendor/honnef.co/go/tools/pattern/convert.go b/vendor/honnef.co/go/tools/pattern/convert.go index 34e2cd45..aed3617c 100644 --- a/vendor/honnef.co/go/tools/pattern/convert.go +++ b/vendor/honnef.co/go/tools/pattern/convert.go @@ -6,8 +6,6 @@ import ( "go/token" "go/types" "reflect" - - "golang.org/x/exp/typeparams" ) var astTypes = map[string]reflect.Type{ @@ -15,7 +13,7 @@ var astTypes = 
map[string]reflect.Type{ "RangeStmt": reflect.TypeOf(ast.RangeStmt{}), "AssignStmt": reflect.TypeOf(ast.AssignStmt{}), "IndexExpr": reflect.TypeOf(ast.IndexExpr{}), - "IndexListExpr": reflect.TypeOf(typeparams.IndexListExpr{}), + "IndexListExpr": reflect.TypeOf(ast.IndexListExpr{}), "Ident": reflect.TypeOf(ast.Ident{}), "ValueSpec": reflect.TypeOf(ast.ValueSpec{}), "GenDecl": reflect.TypeOf(ast.GenDecl{}), diff --git a/vendor/honnef.co/go/tools/pattern/doc.go b/vendor/honnef.co/go/tools/pattern/doc.go index c963bf71..22fe2cf3 100644 --- a/vendor/honnef.co/go/tools/pattern/doc.go +++ b/vendor/honnef.co/go/tools/pattern/doc.go @@ -1,7 +1,7 @@ /* Package pattern implements a simple language for pattern matching Go ASTs. -Design decisions and trade-offs +# Design decisions and trade-offs The language is designed specifically for the task of filtering ASTs to simplify the implementation of analyses in staticcheck. @@ -14,7 +14,7 @@ Furthermore, it is fully expected that the majority of analyses will still requi to further process the filtered AST, to make use of type information and to enforce complex invariants. It is not our goal to design a scripting language for writing entire checks in. -The language +# The language At its core, patterns are a representation of Go ASTs, allowing for the use of placeholders to enable pattern matching. Their syntax is inspired by LISP and Haskell, but unlike LISP, the core unit of patterns isn't the list, but the node. @@ -60,13 +60,13 @@ Thus, the two following forms have identical matching behavior: This section serves as an overview of the language's syntax. More in-depth explanations of the matching behavior as well as an exhaustive list of node types follows in the coming sections. -Pattern matching +# Pattern matching -TODO write about pattern matching +# TODO write about pattern matching - inspired by haskell syntax, but much, much simpler and naive -Node types +# Node types The language contains two kinds of nodes: those that map to nodes in the AST, and those that implement additional logic. @@ -246,7 +246,7 @@ The Not node negates a match. For example, (Not (Ident _)) will match all nodes ChanDir(0) -Automatic unnesting of AST nodes +# Automatic unnesting of AST nodes The Go AST has several types of nodes that wrap other nodes. To simplify matching, we automatically unwrap some of these nodes. @@ -268,6 +268,5 @@ On the flip-side, there is no way to specifically match these wrapper nodes. 
For example, there is no way of searching for unnecessary parentheses, like in the following piece of Go code: ((x)) += 2 - */ package pattern diff --git a/vendor/honnef.co/go/tools/pattern/fuzz.go b/vendor/honnef.co/go/tools/pattern/fuzz.go deleted file mode 100644 index 2afbb524..00000000 --- a/vendor/honnef.co/go/tools/pattern/fuzz.go +++ /dev/null @@ -1,51 +0,0 @@ -//go:build gofuzz -// +build gofuzz - -package pattern - -import ( - "go/ast" - goparser "go/parser" - "go/token" - "os" - "path/filepath" - "strings" -) - -var files []*ast.File - -func init() { - fset := token.NewFileSet() - filepath.Walk("/usr/lib/go/src", func(path string, info os.FileInfo, err error) error { - if err != nil { - // XXX error handling - panic(err) - } - if !strings.HasSuffix(path, ".go") { - return nil - } - f, err := goparser.ParseFile(fset, path, nil, 0) - if err != nil { - return nil - } - files = append(files, f) - return nil - }) -} - -func Fuzz(data []byte) int { - p := &Parser{} - pat, err := p.Parse(string(data)) - if err != nil { - if strings.Contains(err.Error(), "internal error") { - panic(err) - } - return 0 - } - _ = pat.Root.String() - - for _, f := range files { - Match(pat.Root, f) - } - return 1 -} diff --git a/vendor/honnef.co/go/tools/pattern/match.go b/vendor/honnef.co/go/tools/pattern/match.go index b4edac94..32eb9d69 100644 --- a/vendor/honnef.co/go/tools/pattern/match.go +++ b/vendor/honnef.co/go/tools/pattern/match.go @@ -6,8 +6,6 @@ import ( "go/token" "go/types" "reflect" - - "golang.org/x/exp/typeparams" ) var tokensByString = map[string]Token{ @@ -87,35 +85,56 @@ type matcher interface { Match(*Matcher, interface{}) (interface{}, bool) } -type State = map[string]interface{} +type State = map[string]any type Matcher struct { TypesInfo *types.Info State State + + bindingsMapping []string + + setBindings []uint64 } -func (m *Matcher) fork() *Matcher { - state := make(State, len(m.State)) - for k, v := range m.State { - state[k] = v - } - return &Matcher{ - TypesInfo: m.TypesInfo, - State: state, +func (m *Matcher) set(b Binding, value interface{}) { + m.State[b.Name] = value + m.setBindings[len(m.setBindings)-1] |= 1 << b.idx +} + +func (m *Matcher) push() { + m.setBindings = append(m.setBindings, 0) +} + +func (m *Matcher) pop() { + set := m.setBindings[len(m.setBindings)-1] + if set != 0 { + for i := 0; i < len(m.bindingsMapping); i++ { + if (set & (1 << i)) != 0 { + key := m.bindingsMapping[i] + delete(m.State, key) + } + } } + m.setBindings = m.setBindings[:len(m.setBindings)-1] } -func (m *Matcher) merge(mc *Matcher) { - m.State = mc.State +func (m *Matcher) merge() { + m.setBindings = m.setBindings[:len(m.setBindings)-1] } -func (m *Matcher) Match(a Node, b ast.Node) bool { +func (m *Matcher) Match(a Pattern, b ast.Node) bool { + m.bindingsMapping = a.Bindings m.State = State{} - _, ok := match(m, a, b) + m.push() + _, ok := match(m, a.Root, b) + m.merge() + if len(m.setBindings) != 0 { + panic(fmt.Sprintf("%d entries left on the stack, expected none", len(m.setBindings))) + } return ok } -func Match(a Node, b ast.Node) (*Matcher, bool) { +func Match(a Pattern, b ast.Node) (*Matcher, bool) { m := &Matcher{} ret := m.Match(a, b) return m, ret @@ -139,7 +158,11 @@ func match(m *Matcher, l, r interface{}) (interface{}, bool) { case *ast.BlockStmt: return match(m, l.List, r) case *ast.FieldList: - return match(m, l.List, r) + if l == nil { + return match(m, nil, r) + } else { + return match(m, l.List, r) + } } switch r := r.(type) { @@ -202,14 +225,24 @@ func match(m *Matcher, 
l, r interface{}) (interface{}, bool) { } } + // TODO(dh): the three blocks handling slices can be combined into a single block if we use reflection + { ln, ok1 := l.([]ast.Expr) rn, ok2 := r.([]ast.Expr) if ok1 || ok2 { if ok1 && !ok2 { - rn = []ast.Expr{r.(ast.Expr)} + cast, ok := r.(ast.Expr) + if !ok { + return nil, false + } + rn = []ast.Expr{cast} } else if !ok1 && ok2 { - ln = []ast.Expr{l.(ast.Expr)} + cast, ok := l.(ast.Expr) + if !ok { + return nil, false + } + ln = []ast.Expr{cast} } if len(ln) != len(rn) { @@ -229,9 +262,17 @@ func match(m *Matcher, l, r interface{}) (interface{}, bool) { rn, ok2 := r.([]ast.Stmt) if ok1 || ok2 { if ok1 && !ok2 { - rn = []ast.Stmt{r.(ast.Stmt)} + cast, ok := r.(ast.Stmt) + if !ok { + return nil, false + } + rn = []ast.Stmt{cast} } else if !ok1 && ok2 { - ln = []ast.Stmt{l.(ast.Stmt)} + cast, ok := l.(ast.Stmt) + if !ok { + return nil, false + } + ln = []ast.Stmt{cast} } if len(ln) != len(rn) { @@ -251,9 +292,17 @@ func match(m *Matcher, l, r interface{}) (interface{}, bool) { rn, ok2 := r.([]*ast.Field) if ok1 || ok2 { if ok1 && !ok2 { - rn = []*ast.Field{r.(*ast.Field)} + cast, ok := r.(*ast.Field) + if !ok { + return nil, false + } + rn = []*ast.Field{cast} } else if !ok1 && ok2 { - ln = []*ast.Field{l.(*ast.Field)} + cast, ok := l.(*ast.Field) + if !ok { + return nil, false + } + ln = []*ast.Field{cast} } if len(ln) != len(rn) { @@ -268,7 +317,7 @@ func match(m *Matcher, l, r interface{}) (interface{}, bool) { } } - panic(fmt.Sprintf("unsupported comparison: %T and %T", l, r)) + return nil, false } // Match a Node with an AST node @@ -286,6 +335,13 @@ func matchNodeAST(m *Matcher, a Node, b interface{}) (interface{}, bool) { // 'a' is not a List or we'd be using its Match // implementation. + if len(b) != 1 { + return nil, false + } + return match(m, a, b[0]) + case []*ast.Field: + // 'a' is not a List or we'd be using its Match + // implementation if len(b) != 1 { return nil, false } @@ -317,6 +373,9 @@ func matchNodeAST(m *Matcher, a Node, b interface{}) (interface{}, bool) { return b, true case nil: return nil, a == Nil{} + case string, token.Token: + // 'a' can't be a String, Token, or Binding or we'd be using their Match implementations. 
+ return nil, false default: panic(fmt.Sprintf("unhandled type %T", b)) } @@ -393,7 +452,7 @@ func (b Binding) Match(m *Matcher, node interface{}) (interface{}, bool) { } new, ret := match(m, b.Node, node) if ret { - m.State[b.Name] = new + m.set(b, new) } return new, ret } @@ -446,7 +505,16 @@ func (tok Token) Match(m *Matcher, node interface{}) (interface{}, bool) { } func (Nil) Match(m *Matcher, node interface{}) (interface{}, bool) { - return nil, isNil(node) || reflect.ValueOf(node).IsNil() + if isNil(node) { + return nil, true + } + v := reflect.ValueOf(node) + switch v.Kind() { + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Pointer, reflect.Slice: + return nil, v.IsNil() + default: + return nil, false + } } func (builtin Builtin) Match(m *Matcher, node interface{}) (interface{}, bool) { @@ -496,7 +564,7 @@ func (fn Symbol) Match(m *Matcher, node interface{}) (interface{}, bool) { switch idx := fun.(type) { case *ast.IndexExpr: fun = idx.X - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: fun = idx.X } @@ -540,10 +608,12 @@ func (fn Symbol) Match(m *Matcher, node interface{}) (interface{}, bool) { func (or Or) Match(m *Matcher, node interface{}) (interface{}, bool) { for _, opt := range or.Nodes { - mc := m.fork() - if ret, ok := match(mc, opt, node); ok { - m.merge(mc) + m.push() + if ret, ok := match(m, opt, node); ok { + m.merge() return ret, true + } else { + m.pop() } } return nil, false diff --git a/vendor/honnef.co/go/tools/pattern/parser.go b/vendor/honnef.co/go/tools/pattern/parser.go index 2529051d..ba089752 100644 --- a/vendor/honnef.co/go/tools/pattern/parser.go +++ b/vendor/honnef.co/go/tools/pattern/parser.go @@ -1,6 +1,7 @@ package pattern import ( + "errors" "fmt" "go/ast" "go/token" @@ -11,7 +12,10 @@ type Pattern struct { Root Node // Relevant contains instances of ast.Node that could potentially // initiate a successful match of the pattern. - Relevant []reflect.Type + Relevant map[reflect.Type]struct{} + + // Mapping from binding index to binding name + Bindings []string } func MustParse(s string) Pattern { @@ -23,27 +27,29 @@ func MustParse(s string) Pattern { return pat } -func roots(node Node) []reflect.Type { +func roots(node Node, m map[reflect.Type]struct{}) { switch node := node.(type) { case Or: - var out []reflect.Type for _, el := range node.Nodes { - out = append(out, roots(el)...) 
+ roots(el, m) } - return out case Not: - return roots(node.Node) + roots(node.Node, m) case Binding: - return roots(node.Node) + roots(node.Node, m) case Nil, nil: // this branch is reached via bindings - return allTypes + for _, T := range allTypes { + m[T] = struct{}{} + } default: Ts, ok := nodeToASTTypes[reflect.TypeOf(node)] if !ok { panic(fmt.Sprintf("internal error: unhandled type %T", node)) } - return Ts + for _, T := range Ts { + m[T] = struct{}{} + } } } @@ -160,6 +166,20 @@ type Parser struct { cur item last *item items chan item + + bindings map[string]int +} + +func (p *Parser) bindingIndex(name string) int { + if p.bindings == nil { + p.bindings = map[string]int{} + } + if idx, ok := p.bindings[name]; ok { + return idx + } + idx := len(p.bindings) + p.bindings[name] = idx + return idx } func (p *Parser) Parse(s string) (Pattern, error) { @@ -185,9 +205,22 @@ func (p *Parser) Parse(s string) (Pattern, error) { if item := <-p.lex.items; item.typ != itemEOF { return Pattern{}, fmt.Errorf("unexpected token %s after end of pattern", item.typ) } + + if len(p.bindings) > 64 { + return Pattern{}, errors.New("encountered more than 64 bindings") + } + + bindings := make([]string, len(p.bindings)) + for name, idx := range p.bindings { + bindings[idx] = name + } + + relevant := map[reflect.Type]struct{}{} + roots(root, relevant) return Pattern{ Root: root, - Relevant: roots(root), + Relevant: relevant, + Bindings: bindings, }, nil } @@ -263,7 +296,14 @@ func (p *Parser) node() (Node, error) { } } - return p.populateNode(typ.val, objs) + node, err := p.populateNode(typ.val, objs) + if err != nil { + return nil, err + } + if node, ok := node.(Binding); ok { + node.idx = p.bindingIndex(node.Name) + } + return node, nil } func populateNode(typ string, objs []Node, allowTypeInfo bool) (Node, error) { @@ -287,10 +327,23 @@ func populateNode(typ string, objs []Node, allowTypeInfo bool) (Node, error) { return v.Interface().(Node), nil } } - if len(objs) != v.NumField() { - return nil, fmt.Errorf("tried to initialize node %s with %d values, expected %d", typ, len(objs), v.NumField()) + + n := -1 + for i := 0; i < T.NumField(); i++ { + if !T.Field(i).IsExported() { + break + } + n = i + } + + if len(objs) != n+1 { + return nil, fmt.Errorf("tried to initialize node %s with %d values, expected %d", typ, len(objs), n+1) } + for i := 0; i < v.NumField(); i++ { + if !T.Field(i).IsExported() { + break + } f := v.Field(i) if f.Kind() == reflect.String { if obj, ok := objs[i].(String); ok { @@ -399,10 +452,14 @@ func (p *Parser) object() (Node, error) { b = Binding{ Name: v.val, Node: o, + idx: p.bindingIndex(v.val), } } else { p.rewind() - b = Binding{Name: v.val} + b = Binding{ + Name: v.val, + idx: p.bindingIndex(v.val), + } } if p.peek().typ == itemColon { p.next() diff --git a/vendor/honnef.co/go/tools/pattern/pattern.go b/vendor/honnef.co/go/tools/pattern/pattern.go index fbbafdfa..15886b1f 100644 --- a/vendor/honnef.co/go/tools/pattern/pattern.go +++ b/vendor/honnef.co/go/tools/pattern/pattern.go @@ -245,6 +245,8 @@ type SendStmt struct { type Binding struct { Name string Node Node + + idx int } type RangeStmt struct { diff --git a/vendor/honnef.co/go/tools/printf/printf.go b/vendor/honnef.co/go/tools/printf/printf.go index 754db9b1..3ce4dc01 100644 --- a/vendor/honnef.co/go/tools/printf/printf.go +++ b/vendor/honnef.co/go/tools/printf/printf.go @@ -2,25 +2,26 @@ // strings. 
// // It parses verbs according to the following syntax: -// Numeric -> '0'-'9' -// Letter -> 'a'-'z' | 'A'-'Z' -// Index -> '[' Numeric+ ']' -// Star -> '*' -// Star -> Index '*' // -// Precision -> Numeric+ | Star -// Width -> Numeric+ | Star +// Numeric -> '0'-'9' +// Letter -> 'a'-'z' | 'A'-'Z' +// Index -> '[' Numeric+ ']' +// Star -> '*' +// Star -> Index '*' // -// WidthAndPrecision -> Width '.' Precision -// WidthAndPrecision -> Width '.' -// WidthAndPrecision -> Width -// WidthAndPrecision -> '.' Precision -// WidthAndPrecision -> '.' +// Precision -> Numeric+ | Star +// Width -> Numeric+ | Star // -// Flag -> '+' | '-' | '#' | ' ' | '0' -// Verb -> Letter | '%' +// WidthAndPrecision -> Width '.' Precision +// WidthAndPrecision -> Width '.' +// WidthAndPrecision -> Width +// WidthAndPrecision -> '.' Precision +// WidthAndPrecision -> '.' // -// Input -> '%' [ Flag+ ] [ WidthAndPrecision ] [ Index ] Verb +// Flag -> '+' | '-' | '#' | ' ' | '0' +// Verb -> Letter | '%' +// +// Input -> '%' [ Flag+ ] [ WidthAndPrecision ] [ Index ] Verb package printf import ( diff --git a/vendor/honnef.co/go/tools/simple/analysis.go b/vendor/honnef.co/go/tools/simple/analysis.go index a67a3209..04ac2f5e 100644 --- a/vendor/honnef.co/go/tools/simple/analysis.go +++ b/vendor/honnef.co/go/tools/simple/analysis.go @@ -4,6 +4,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "honnef.co/go/tools/analysis/facts/generated" + "honnef.co/go/tools/analysis/facts/purity" "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/internal/passes/buildir" ) @@ -55,7 +56,7 @@ var Analyzers = lint.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ }, "S1011": { Run: CheckLoopAppend, - Requires: []*analysis.Analyzer{inspect.Analyzer, generated.Analyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, generated.Analyzer, purity.Analyzer}, }, "S1012": { Run: CheckTimeSince, diff --git a/vendor/honnef.co/go/tools/simple/doc.go b/vendor/honnef.co/go/tools/simple/doc.go index 11cc70c3..421f8513 100644 --- a/vendor/honnef.co/go/tools/simple/doc.go +++ b/vendor/honnef.co/go/tools/simple/doc.go @@ -137,8 +137,21 @@ making \'s[n:len(s)]\' and \'s[n:]\' equivalent.`, Before: ` for _, e := range y { x = append(x, e) +} + +for i := range y { + x = append(x, y[i]) +} + +for i := range y { + v := y[i] + x = append(x, v) }`, - After: `x = append(x, y...)`, + + After: ` +x = append(x, y...) +x = append(x, y...) +x = append(x, y...)`, Since: "2017.1", // MergeIfAll because y might not be a slice under all build tags. 
MergeIf: lint.MergeIfAll, @@ -267,9 +280,9 @@ Given the following shared definitions type T1 string type T2 int - + func (T2) String() string { return "Hello, world" } - + var x string var y T1 var z T2 diff --git a/vendor/honnef.co/go/tools/simple/lint.go b/vendor/honnef.co/go/tools/simple/lint.go index 0abc5aef..08426436 100644 --- a/vendor/honnef.co/go/tools/simple/lint.go +++ b/vendor/honnef.co/go/tools/simple/lint.go @@ -13,6 +13,7 @@ import ( "honnef.co/go/tools/analysis/code" "honnef.co/go/tools/analysis/edit" + "honnef.co/go/tools/analysis/facts/purity" "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/analysis/report" "honnef.co/go/tools/go/ast/astutil" @@ -175,20 +176,20 @@ func CheckLoopCopy(pass *analysis.Pass) (interface{}, error) { report.ShortRange(), report.Fixes(edit.Fix("replace loop with assignment", edit.ReplaceWithNode(pass.Fset, node, r)))) } else { - opts := []report.Option{ - report.ShortRange(), - report.FilterGenerated(), - } tv, err := types.Eval(pass.Fset, pass.Pkg, node.Pos(), "copy") if err == nil && tv.IsBuiltin() { + to := "to" + from := "from" src := m.State["src"].(ast.Expr) if TsrcArray { + from = "from[:]" src = &ast.SliceExpr{ X: src, } } dst := m.State["dst"].(ast.Expr) if TdstArray { + to = "to[:]" dst = &ast.SliceExpr{ X: dst, } @@ -198,9 +199,13 @@ func CheckLoopCopy(pass *analysis.Pass) (interface{}, error) { Fun: &ast.Ident{Name: "copy"}, Args: []ast.Expr{dst, src}, } - opts = append(opts, report.Fixes(edit.Fix("replace loop with call to copy()", edit.ReplaceWithNode(pass.Fset, node, r)))) + opts := []report.Option{ + report.ShortRange(), + report.FilterGenerated(), + report.Fixes(edit.Fix("replace loop with call to copy()", edit.ReplaceWithNode(pass.Fset, node, r))), + } + report.Report(pass, node, fmt.Sprintf("should use copy(%s, %s) instead of a loop", to, from), opts...) } - report.Report(pass, node, "should use copy() instead of a loop", opts...) 
} } code.Preorder(pass, fn, (*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)) @@ -232,7 +237,7 @@ func CheckIfBoolCmp(pass *analysis.Pass) (interface{}, error) { other = expr.X } - ok := typeutil.All(pass.TypesInfo.TypeOf(other), func(term *typeparams.Term) bool { + ok := typeutil.All(pass.TypesInfo.TypeOf(other), func(term *types.Term) bool { basic, ok := term.Type().Underlying().(*types.Basic) return ok && basic.Kind() == types.Bool }) @@ -597,12 +602,11 @@ func negate(expr ast.Expr) ast.Expr { // CheckRedundantNilCheckWithLen checks for the following redundant nil-checks: // -// if x == nil || len(x) == 0 {} -// if x != nil && len(x) != 0 {} -// if x != nil && len(x) == N {} (where N != 0) -// if x != nil && len(x) > N {} -// if x != nil && len(x) >= N {} (where N != 0) -// +// if x == nil || len(x) == 0 {} +// if x != nil && len(x) != 0 {} +// if x != nil && len(x) == N {} (where N != 0) +// if x != nil && len(x) > N {} +// if x != nil && len(x) >= N {} (where N != 0) func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { isConstZero := func(expr ast.Expr) (isConst bool, isZero bool) { _, ok := expr.(*ast.BasicLit) @@ -705,7 +709,7 @@ func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { // finally check that xx type is one of array, slice, map or chan // this is to prevent false positive in case if xx is a pointer to an array typ := pass.TypesInfo.TypeOf(xx) - ok = typeutil.All(typ, func(term *typeparams.Term) bool { + ok = typeutil.All(typ, func(term *types.Term) bool { switch term.Type().Underlying().(type) { case *types.Slice: return true @@ -715,7 +719,7 @@ func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { return true case *types.Pointer: return false - case *typeparams.TypeParam: + case *types.TypeParam: return false default: lint.ExhaustiveTypeSwitch(term.Type().Underlying()) @@ -766,22 +770,44 @@ func refersTo(pass *analysis.Pass, expr ast.Expr, ident types.Object) bool { } var checkLoopAppendQ = pattern.MustParse(` +(Or (RangeStmt (Ident "_") val@(Object _) _ x - [(AssignStmt [lhs] "=" [(CallExpr (Builtin "append") [lhs val])])]) `) + [(AssignStmt [lhs] "=" [(CallExpr (Builtin "append") [lhs val])])]) + (RangeStmt + idx@(Ident _) + nil + _ + x + [(AssignStmt [lhs] "=" [(CallExpr (Builtin "append") [lhs (IndexExpr x idx)])])]) + (RangeStmt + idx@(Ident _) + nil + _ + x + [(AssignStmt val@(Object _) ":=" (IndexExpr x idx)) + (AssignStmt [lhs] "=" [(CallExpr (Builtin "append") [lhs val])])]))`) func CheckLoopAppend(pass *analysis.Pass) (interface{}, error) { + pure := pass.ResultOf[purity.Analyzer].(purity.Result) + fn := func(node ast.Node) { m, ok := code.Match(pass, checkLoopAppendQ, node) if !ok { return } - val := m.State["val"].(types.Object) - if refersTo(pass, m.State["lhs"].(ast.Expr), val) { + val, ok := m.State["val"].(types.Object) + if ok && refersTo(pass, m.State["lhs"].(ast.Expr), val) { + return + } + + if m.State["idx"] != nil && code.MayHaveSideEffects(pass, m.State["x"].(ast.Expr), pure) { + // When using an index-based loop, x gets evaluated repeatedly and thus should be pure. + // This doesn't matter for value-based loops, because x only gets evaluated once. 
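			// As a hypothetical example of the case being guarded against here:
			//
			//	for i := range f() {
			//		xs = append(xs, f()[i])
			//	}
			//
			// calls f on every iteration, so rewriting it to xs = append(xs, f()...) is only
			// behavior-preserving if f has no side effects.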
return } @@ -985,7 +1011,7 @@ func CheckSimplerStructConversion(pass *analysis.Pass) (interface{}, error) { return } // All fields must be initialized from the same object - if ident != nil && ident.Obj != id.Obj { + if ident != nil && pass.TypesInfo.ObjectOf(ident) != pass.TypesInfo.ObjectOf(id) { return } typ2, _ = t.(*types.Named) @@ -1046,7 +1072,7 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) { switch node1 := node1.(type) { case *ast.Ident: - return node1.Obj == node2.(*ast.Ident).Obj + return pass.TypesInfo.ObjectOf(node1) == pass.TypesInfo.ObjectOf(node2.(*ast.Ident)) case *ast.SelectorExpr, *ast.IndexExpr: return astutil.Equal(node1, node2) case *ast.BasicLit: @@ -1372,7 +1398,7 @@ func CheckDeclareAssign(pass *analysis.Pass) (interface{}, error) { } for _, lhs := range assign.Lhs { if oident, ok := lhs.(*ast.Ident); ok { - if oident.Obj == ident.Obj { + if pass.TypesInfo.ObjectOf(oident) == pass.TypesInfo.ObjectOf(ident) { num++ } } @@ -1413,7 +1439,7 @@ func CheckDeclareAssign(pass *analysis.Pass) (interface{}, error) { if !ok { continue } - if vspec.Names[0].Obj != ident.Obj { + if pass.TypesInfo.ObjectOf(vspec.Names[0]) != pass.TypesInfo.ObjectOf(ident) { continue } @@ -1618,11 +1644,11 @@ func CheckNilCheckAroundRange(pass *analysis.Pass) (interface{}, error) { if !ok { return } - ok = typeutil.All(m.State["x"].(types.Object).Type(), func(term *typeparams.Term) bool { + ok = typeutil.All(m.State["x"].(types.Object).Type(), func(term *types.Term) bool { switch term.Type().Underlying().(type) { case *types.Slice, *types.Map: return true - case *typeparams.TypeParam, *types.Chan, *types.Pointer: + case *types.TypeParam, *types.Chan, *types.Pointer: return false default: lint.ExhaustiveTypeSwitch(term.Type().Underlying()) diff --git a/vendor/honnef.co/go/tools/staticcheck/analysis.go b/vendor/honnef.co/go/tools/staticcheck/analysis.go index 171467a8..210c348c 100644 --- a/vendor/honnef.co/go/tools/staticcheck/analysis.go +++ b/vendor/honnef.co/go/tools/staticcheck/analysis.go @@ -174,7 +174,7 @@ var Analyzers = lint.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ Requires: []*analysis.Analyzer{inspect.Analyzer, tokenfile.Analyzer}, }, "SA4017": { - Run: CheckPureFunctions, + Run: CheckSideEffectFreeCalls, Requires: []*analysis.Analyzer{buildir.Analyzer, purity.Analyzer}, }, "SA4018": { diff --git a/vendor/honnef.co/go/tools/staticcheck/doc.go b/vendor/honnef.co/go/tools/staticcheck/doc.go index 3e991b6c..c28fdbf0 100644 --- a/vendor/honnef.co/go/tools/staticcheck/doc.go +++ b/vendor/honnef.co/go/tools/staticcheck/doc.go @@ -506,7 +506,7 @@ falsify results.`, }, "SA4017": { - Title: `A pure function's return value is discarded, making the call pointless`, + Title: `Discarding the return values of a function without side effects, making the call pointless`, Since: "2017.1", Severity: lint.SeverityWarning, MergeIf: lint.MergeIfAll, @@ -1286,7 +1286,7 @@ the \'else\' branch. This means that in the following example if x, ok := x.(int); ok { // ... 
} else { - fmt.Println("unexpected type %T", x) + fmt.Printf("unexpected type %T", x) } \'x\' in the \'else\' branch will refer to the \'x\' from \'x, ok diff --git a/vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go b/vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go index 15e302de..f65f2ddf 100644 --- a/vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go +++ b/vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go @@ -46,18 +46,18 @@ type UnsupportedTypeError struct { type encoder struct { // TODO we track addressable and non-addressable instances separately out of an abundance of caution. We don't know // if this is actually required for correctness. - seenCanAddr typeutil.Map - seenCantAddr typeutil.Map + seenCanAddr typeutil.Map[struct{}] + seenCantAddr typeutil.Map[struct{}] } func (enc *encoder) newTypeEncoder(t fakereflect.TypeAndCanAddr, stack string) *UnsupportedTypeError { - var m *typeutil.Map + var m *typeutil.Map[struct{}] if t.CanAddr() { m = &enc.seenCanAddr } else { m = &enc.seenCantAddr } - if ok := m.At(t.Type); ok != nil { + if _, ok := m.At(t.Type); ok { return nil } m.Set(t.Type, struct{}{}) diff --git a/vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go b/vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go index e592bfca..64fce5f5 100644 --- a/vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go +++ b/vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go @@ -28,8 +28,8 @@ func Marshal(v types.Type) error { type Encoder struct { // TODO we track addressable and non-addressable instances separately out of an abundance of caution. We don't know // if this is actually required for correctness. - seenCanAddr typeutil.Map - seenCantAddr typeutil.Map + seenCanAddr typeutil.Map[struct{}] + seenCantAddr typeutil.Map[struct{}] } func NewEncoder() *Encoder { @@ -114,13 +114,13 @@ func (err *CyclicTypeError) Error() string { // marshalValue writes one or more XML elements representing val. // If val was obtained from a struct field, finfo must have its details. 
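// The seen-maps above give marshalValue simple cycle detection; with the generic typeutil.Map the
// pattern is, in sketch form:
//
//	if _, ok := m.At(t); ok {
//		return nil // t is already being visited
//	}
//	m.Set(t, struct{}{})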
func (e *Encoder) marshalValue(val fakereflect.TypeAndCanAddr, finfo *fieldInfo, startTemplate *StartElement, stack string) error { - var m *typeutil.Map + var m *typeutil.Map[struct{}] if val.CanAddr() { m = &e.seenCanAddr } else { m = &e.seenCantAddr } - if ok := m.At(val.Type); ok != nil { + if _, ok := m.At(val.Type); ok { return nil } m.Set(val.Type, struct{}{}) diff --git a/vendor/honnef.co/go/tools/staticcheck/lint.go b/vendor/honnef.co/go/tools/staticcheck/lint.go index 3dd0fece..fb911f33 100644 --- a/vendor/honnef.co/go/tools/staticcheck/lint.go +++ b/vendor/honnef.co/go/tools/staticcheck/lint.go @@ -524,13 +524,13 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) { return true } - var seen typeutil.Map + var seen typeutil.Map[struct{}] var checkType func(verb rune, T types.Type, top bool) bool checkType = func(verb rune, T types.Type, top bool) bool { if top { - seen = typeutil.Map{} + seen = typeutil.Map[struct{}]{} } - if ok := seen.At(T); ok != nil { + if _, ok := seen.At(T); ok { return true } seen.Set(T, struct{}{}) @@ -1296,7 +1296,7 @@ func CheckLhsRhsIdentical(pass *analysis.Pass) (interface{}, error) { // no terms, so floats are a possibility return true } - return tset.Any(func(term *typeparams.Term) bool { + return tset.Any(func(term *types.Term) bool { switch typ := term.Type().Underlying().(type) { case *types.Basic: kind := typ.Kind() @@ -1554,7 +1554,7 @@ func CheckEarlyDefer(pass *analysis.Pass) (interface{}, error) { if !ok { continue } - if ident.Obj != lhs.Obj { + if pass.TypesInfo.ObjectOf(ident) != pass.TypesInfo.ObjectOf(lhs) { continue } if sel.Sel.Name != "Close" { @@ -2085,7 +2085,7 @@ func CheckLoopCondition(pass *analysis.Pass) (interface{}, error) { if !ok { return true } - if x.Obj != lhs.Obj { + if pass.TypesInfo.ObjectOf(x) != pass.TypesInfo.ObjectOf(lhs) { return true } if _, ok := loop.Post.(*ast.IncDecStmt); !ok { @@ -2225,13 +2225,13 @@ func CheckIneffectiveLoop(pass *analysis.Pass) (interface{}, error) { if body == nil { return } - labels := map[*ast.Object]ast.Stmt{} + labels := map[types.Object]ast.Stmt{} ast.Inspect(body, func(node ast.Node) bool { label, ok := node.(*ast.LabeledStmt) if !ok { return true } - labels[label.Label.Obj] = label.Stmt + labels[pass.TypesInfo.ObjectOf(label.Label)] = label.Stmt return true }) @@ -2243,7 +2243,7 @@ func CheckIneffectiveLoop(pass *analysis.Pass) (interface{}, error) { body = node.Body loop = node case *ast.RangeStmt: - ok := typeutil.All(pass.TypesInfo.TypeOf(node.X), func(term *typeparams.Term) bool { + ok := typeutil.All(pass.TypesInfo.TypeOf(node.X), func(term *types.Term) bool { switch term.Type().Underlying().(type) { case *types.Slice, *types.Chan, *types.Basic, *types.Pointer, *types.Array: return true @@ -2283,11 +2283,11 @@ func CheckIneffectiveLoop(pass *analysis.Pass) (interface{}, error) { case *ast.BranchStmt: switch stmt.Tok { case token.BREAK: - if stmt.Label == nil || labels[stmt.Label.Obj] == loop { + if stmt.Label == nil || labels[pass.TypesInfo.ObjectOf(stmt.Label)] == loop { unconditionalExit = stmt } case token.CONTINUE: - if stmt.Label == nil || labels[stmt.Label.Obj] == loop { + if stmt.Label == nil || labels[pass.TypesInfo.ObjectOf(stmt.Label)] == loop { unconditionalExit = nil return false } @@ -2309,7 +2309,7 @@ func CheckIneffectiveLoop(pass *analysis.Pass) (interface{}, error) { unconditionalExit = nil return false case token.CONTINUE: - if branch.Label != nil && labels[branch.Label.Obj] != loop { + if branch.Label != nil && 
labels[pass.TypesInfo.ObjectOf(branch.Label)] != loop { return true } unconditionalExit = nil @@ -2899,7 +2899,7 @@ func CheckRepeatedIfElse(pass *analysis.Pass) (interface{}, error) { func CheckSillyBitwiseOps(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { binop := node.(*ast.BinaryExpr) - if !typeutil.All(pass.TypesInfo.TypeOf(binop), func(term *typeparams.Term) bool { + if !typeutil.All(pass.TypesInfo.TypeOf(binop), func(term *types.Term) bool { b, ok := term.Type().Underlying().(*types.Basic) if !ok { return false @@ -3009,7 +3009,7 @@ func CheckNonOctalFileMode(pass *analysis.Pass) (interface{}, error) { return nil, nil } -func CheckPureFunctions(pass *analysis.Pass) (interface{}, error) { +func CheckSideEffectFreeCalls(pass *analysis.Pass) (interface{}, error) { pure := pass.ResultOf[purity.Analyzer].(purity.Result) fnLoop: @@ -3055,7 +3055,7 @@ fnLoop: // special case for benchmarks in the fmt package continue } - report.Report(pass, ins, fmt.Sprintf("%s is a pure function but its return value is ignored", callee.Object().Name())) + report.Report(pass, ins, fmt.Sprintf("%s doesn't have side effects and its return value is ignored", callee.Object().Name())) } } } @@ -3095,32 +3095,25 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { return } if ok { - switch std.AlternativeAvailableSince { - case knowledge.DeprecatedNeverUse: - // This should never be used, regardless of the - // targeted Go version. Examples include insecure - // cryptography or inherently broken APIs. - // - // We always want to flag these. - case knowledge.DeprecatedUseNoLonger: - // This should no longer be used. Using it with - // older Go versions might still make sense. - if !code.IsGoVersion(pass, std.DeprecatedSince) { - return - } - default: - if std.AlternativeAvailableSince < 0 { - panic(fmt.Sprintf("unhandled case %d", std.AlternativeAvailableSince)) - } - // Look for the first available alternative, not the first - // version something was deprecated in. If a function was - // deprecated in Go 1.6, an alternative has been available - // already in 1.0, and we're targeting 1.2, it still - // makes sense to use the alternative from 1.0, to be - // future-proof. - if !code.IsGoVersion(pass, std.AlternativeAvailableSince) { - return - } + // In the past, we made use of the AlternativeAvailableSince field. If a function was deprecated in Go + // 1.6 and an alternative had been available in Go 1.0, then we'd recommend using the alternative even + // if targeting Go 1.2. The idea was to suggest writing future-proof code by using already-existing + // alternatives. This had a major flaw, however: the user would need to use at least Go 1.6 for + // Staticcheck to know that the function had been deprecated. Thus, targeting Go 1.2 and using Go 1.2 + // would behave differently from targeting Go 1.2 and using Go 1.6. This is especially a problem if the + // user tries to ignore the warning. Depending on the Go version in use, the ignore directive may or may + // not match, causing a warning of its own. + // + // To avoid this issue, we no longer try to be smart. We now only compare the targeted version against + // the version that deprecated an object. + // + // Unfortunately, this issue also applies to AlternativeAvailableSince == DeprecatedNeverUse. Even though it + // is only applied to seriously flawed API, such as broken cryptography, users may wish to ignore those + // warnings. + // + // See also https://staticcheck.io/issues/1318. 
+ if !code.IsGoVersion(pass, std.DeprecatedSince) { + return } } @@ -3166,6 +3159,8 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { if fn, ok := node.(*ast.FuncDecl); ok { tfn = pass.TypesInfo.ObjectOf(fn.Name) } + + // FIXME(dh): this misses dot-imported objects sel, ok := node.(*ast.SelectorExpr) if !ok { return true @@ -3178,8 +3173,26 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { if obj.Pkg() == nil { return true } - if pass.Pkg == obj.Pkg() || obj.Pkg().Path()+"_test" == pass.Pkg.Path() { - // Don't flag stuff in our own package + + if obj.Pkg() == pass.Pkg { + // A package is allowed to use its own deprecated objects + return true + } + + // A package "foo" has two related packages "foo_test" and "foo.test", for external tests and the package main + // generated by 'go test' respectively. "foo_test" can import and use "foo", "foo.test" imports and uses "foo" + // and "foo_test". + + if strings.TrimSuffix(pass.Pkg.Path(), "_test") == obj.Pkg().Path() { + // foo_test (the external tests of foo) can use objects from foo. + return true + } + if strings.TrimSuffix(pass.Pkg.Path(), ".test") == obj.Pkg().Path() { + // foo.test (the main package of foo's tests) can use objects from foo. + return true + } + if strings.TrimSuffix(pass.Pkg.Path(), ".test") == strings.TrimSuffix(obj.Pkg().Path(), "_test") { + // foo.test (the main package of foo's tests) can use objects from foo's external tests. return true } @@ -3208,6 +3221,19 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { } } + if strings.TrimSuffix(pass.Pkg.Path(), "_test") == path { + // foo_test can import foo + return + } + if strings.TrimSuffix(pass.Pkg.Path(), ".test") == path { + // foo.test can import foo + return + } + if strings.TrimSuffix(pass.Pkg.Path(), ".test") == strings.TrimSuffix(path, "_test") { + // foo.test can import foo_test + return + } + handleDeprecation(depr, spec.Path, path, path, nil) } } @@ -3421,7 +3447,7 @@ func CheckMapBytesKey(pass *analysis.Pass) (interface{}, error) { } tset := typeutil.NewTypeSet(conv.X.Type()) // If at least one of the types is []byte, then it's more efficient to inline the conversion - if !tset.Any(func(term *typeparams.Term) bool { + if !tset.Any(func(term *types.Term) bool { s, ok := term.Type().Underlying().(*types.Slice) return ok && s.Elem().Underlying() == types.Universe.Lookup("byte").Type() }) { @@ -3996,12 +4022,12 @@ func CheckImpossibleTypeAssertion(pass *analysis.Pass) (interface{}, error) { ms := msc.MethodSet(left) for i := 0; i < righti.NumMethods(); i++ { - mr := righti.Method(i) + mr := righti.Method(i).Origin() sel := ms.Lookup(mr.Pkg(), mr.Name()) if sel == nil { continue } - ml := sel.Obj().(*types.Func) + ml := sel.Obj().(*types.Func).Origin() if types.AssignableTo(ml.Type(), mr.Type()) { continue } @@ -4135,7 +4161,7 @@ func CheckMaybeNil(pass *analysis.Pass) (interface{}, error) { ptr = instr.Addr case *ir.IndexAddr: ptr = instr.X - if typeutil.All(ptr.Type(), func(term *typeparams.Term) bool { + if typeutil.All(ptr.Type(), func(term *types.Term) bool { if _, ok := term.Type().Underlying().(*types.Slice); ok { return true } diff --git a/vendor/honnef.co/go/tools/stylecheck/lint.go b/vendor/honnef.co/go/tools/stylecheck/lint.go index 4203c610..a15e6d97 100644 --- a/vendor/honnef.co/go/tools/stylecheck/lint.go +++ b/vendor/honnef.co/go/tools/stylecheck/lint.go @@ -24,7 +24,6 @@ import ( "honnef.co/go/tools/internal/passes/buildir" "honnef.co/go/tools/pattern" - "golang.org/x/exp/typeparams" 
"golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" @@ -417,9 +416,9 @@ func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) { continue } for _, c := range word[n:] { - if unicode.IsUpper(c) { - // Word is probably an initialism or - // multi-word function name + if unicode.IsUpper(c) || unicode.IsDigit(c) { + // Word is probably an initialism or multi-word function name. Digits cover elliptic curves like + // P384. continue instrLoop } } @@ -846,7 +845,7 @@ func CheckExportedFunctionDocs(pass *analysis.Pass) (interface{}, error) { switch T := T.(type) { case *ast.IndexExpr: ident = T.X.(*ast.Ident) - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: ident = T.X.(*ast.Ident) case *ast.Ident: ident = T diff --git a/vendor/honnef.co/go/tools/unused/edge.go b/vendor/honnef.co/go/tools/unused/edge.go deleted file mode 100644 index 6d32946d..00000000 --- a/vendor/honnef.co/go/tools/unused/edge.go +++ /dev/null @@ -1,59 +0,0 @@ -package unused - -//go:generate go run golang.org/x/tools/cmd/stringer@master -type edgeKind -type edgeKind uint64 - -func (e edgeKind) is(o edgeKind) bool { - return e&o != 0 -} - -const ( - edgeAlias edgeKind = 1 << iota - edgeBlankField - edgeAnonymousStruct - edgeCgoExported - edgeConstGroup - edgeElementType - edgeEmbeddedInterface - edgeExportedConstant - edgeExportedField - edgeExportedFunction - edgeExportedMethod - edgeExportedType - edgeExportedVariable - edgeExtendsExportedFields - edgeExtendsExportedMethodSet - edgeFieldAccess - edgeFunctionArgument - edgeFunctionResult - edgeFunctionSignature - edgeImplements - edgeInstructionOperand - edgeInterfaceCall - edgeInterfaceMethod - edgeKeyType - edgeLinkname - edgeMainFunction - edgeNamedType - edgeNetRPCRegister - edgeNoCopySentinel - edgeProvidesMethod - edgeReceiver - edgeRuntimeFunction - edgeSignature - edgeStructConversion - edgeTestSink - edgeTupleElement - edgeType - edgeTypeName - edgeUnderlyingType - edgePointerType - edgeUnsafeConversion - edgeUsedConstant - edgeVarDecl - edgeIgnored - edgeSamePointer - edgeTypeParam - edgeTypeArg - edgeUnionTerm -) diff --git a/vendor/honnef.co/go/tools/unused/edgekind_string.go b/vendor/honnef.co/go/tools/unused/edgekind_string.go deleted file mode 100644 index ae27b250..00000000 --- a/vendor/honnef.co/go/tools/unused/edgekind_string.go +++ /dev/null @@ -1,119 +0,0 @@ -// Code generated by "stringer -type edgeKind"; DO NOT EDIT. - -package unused - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. 
- var x [1]struct{} - _ = x[edgeAlias-1] - _ = x[edgeBlankField-2] - _ = x[edgeAnonymousStruct-4] - _ = x[edgeCgoExported-8] - _ = x[edgeConstGroup-16] - _ = x[edgeElementType-32] - _ = x[edgeEmbeddedInterface-64] - _ = x[edgeExportedConstant-128] - _ = x[edgeExportedField-256] - _ = x[edgeExportedFunction-512] - _ = x[edgeExportedMethod-1024] - _ = x[edgeExportedType-2048] - _ = x[edgeExportedVariable-4096] - _ = x[edgeExtendsExportedFields-8192] - _ = x[edgeExtendsExportedMethodSet-16384] - _ = x[edgeFieldAccess-32768] - _ = x[edgeFunctionArgument-65536] - _ = x[edgeFunctionResult-131072] - _ = x[edgeFunctionSignature-262144] - _ = x[edgeImplements-524288] - _ = x[edgeInstructionOperand-1048576] - _ = x[edgeInterfaceCall-2097152] - _ = x[edgeInterfaceMethod-4194304] - _ = x[edgeKeyType-8388608] - _ = x[edgeLinkname-16777216] - _ = x[edgeMainFunction-33554432] - _ = x[edgeNamedType-67108864] - _ = x[edgeNetRPCRegister-134217728] - _ = x[edgeNoCopySentinel-268435456] - _ = x[edgeProvidesMethod-536870912] - _ = x[edgeReceiver-1073741824] - _ = x[edgeRuntimeFunction-2147483648] - _ = x[edgeSignature-4294967296] - _ = x[edgeStructConversion-8589934592] - _ = x[edgeTestSink-17179869184] - _ = x[edgeTupleElement-34359738368] - _ = x[edgeType-68719476736] - _ = x[edgeTypeName-137438953472] - _ = x[edgeUnderlyingType-274877906944] - _ = x[edgePointerType-549755813888] - _ = x[edgeUnsafeConversion-1099511627776] - _ = x[edgeUsedConstant-2199023255552] - _ = x[edgeVarDecl-4398046511104] - _ = x[edgeIgnored-8796093022208] - _ = x[edgeSamePointer-17592186044416] - _ = x[edgeTypeParam-35184372088832] - _ = x[edgeTypeArg-70368744177664] - _ = x[edgeUnionTerm-140737488355328] -} - -const _edgeKind_name = "edgeAliasedgeBlankFieldedgeAnonymousStructedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgePointerTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecledgeIgnorededgeSamePointeredgeTypeParamedgeTypeArgedgeUnionTerm" - -var _edgeKind_map = map[edgeKind]string{ - 1: _edgeKind_name[0:9], - 2: _edgeKind_name[9:23], - 4: _edgeKind_name[23:42], - 8: _edgeKind_name[42:57], - 16: _edgeKind_name[57:71], - 32: _edgeKind_name[71:86], - 64: _edgeKind_name[86:107], - 128: _edgeKind_name[107:127], - 256: _edgeKind_name[127:144], - 512: _edgeKind_name[144:164], - 1024: _edgeKind_name[164:182], - 2048: _edgeKind_name[182:198], - 4096: _edgeKind_name[198:218], - 8192: _edgeKind_name[218:243], - 16384: _edgeKind_name[243:271], - 32768: _edgeKind_name[271:286], - 65536: _edgeKind_name[286:306], - 131072: _edgeKind_name[306:324], - 262144: _edgeKind_name[324:345], - 524288: _edgeKind_name[345:359], - 1048576: _edgeKind_name[359:381], - 2097152: _edgeKind_name[381:398], - 4194304: _edgeKind_name[398:417], - 8388608: _edgeKind_name[417:428], - 16777216: _edgeKind_name[428:440], - 33554432: _edgeKind_name[440:456], - 67108864: _edgeKind_name[456:469], - 134217728: _edgeKind_name[469:487], - 268435456: _edgeKind_name[487:505], - 536870912: _edgeKind_name[505:523], - 
1073741824: _edgeKind_name[523:535], - 2147483648: _edgeKind_name[535:554], - 4294967296: _edgeKind_name[554:567], - 8589934592: _edgeKind_name[567:587], - 17179869184: _edgeKind_name[587:599], - 34359738368: _edgeKind_name[599:615], - 68719476736: _edgeKind_name[615:623], - 137438953472: _edgeKind_name[623:635], - 274877906944: _edgeKind_name[635:653], - 549755813888: _edgeKind_name[653:668], - 1099511627776: _edgeKind_name[668:688], - 2199023255552: _edgeKind_name[688:704], - 4398046511104: _edgeKind_name[704:715], - 8796093022208: _edgeKind_name[715:726], - 17592186044416: _edgeKind_name[726:741], - 35184372088832: _edgeKind_name[741:754], - 70368744177664: _edgeKind_name[754:765], - 140737488355328: _edgeKind_name[765:778], -} - -func (i edgeKind) String() string { - if str, ok := _edgeKind_map[i]; ok { - return str - } - return "edgeKind(" + strconv.FormatInt(int64(i), 10) + ")" -} diff --git a/vendor/honnef.co/go/tools/unused/implements.go b/vendor/honnef.co/go/tools/unused/implements.go index f6201857..2a202c6d 100644 --- a/vendor/honnef.co/go/tools/unused/implements.go +++ b/vendor/honnef.co/go/tools/unused/implements.go @@ -37,7 +37,7 @@ func sameId(obj types.Object, pkg *types.Package, name string) bool { return pkg.Path() == obj.Pkg().Path() } -func (g *graph) implements(V types.Type, T *types.Interface, msV *types.MethodSet) ([]*types.Selection, bool) { +func implements(V types.Type, T *types.Interface, msV *types.MethodSet) ([]*types.Selection, bool) { // fast path for common case if T.Empty() { return nil, true diff --git a/vendor/honnef.co/go/tools/unused/runtime.go b/vendor/honnef.co/go/tools/unused/runtime.go new file mode 100644 index 00000000..11be4a34 --- /dev/null +++ b/vendor/honnef.co/go/tools/unused/runtime.go @@ -0,0 +1,331 @@ +package unused + +// Functions defined in the Go runtime that may be called through +// compiler magic or via assembly. 
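// A hypothetical lookup against these tables (the actual call sites live elsewhere in the unused check):
//
//	if runtimeFuncs[obj.Name()] {
//		// keep the function alive even though no Go code appears to call it
//	}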
+var runtimeFuncs = map[string]bool{ + // Copied from cmd/compile/internal/typecheck/builtin.go, var runtimeDecls + "newobject": true, + "panicindex": true, + "panicslice": true, + "panicdivide": true, + "panicmakeslicelen": true, + "throwinit": true, + "panicwrap": true, + "gopanic": true, + "gorecover": true, + "goschedguarded": true, + "printbool": true, + "printfloat": true, + "printint": true, + "printhex": true, + "printuint": true, + "printcomplex": true, + "printstring": true, + "printpointer": true, + "printiface": true, + "printeface": true, + "printslice": true, + "printnl": true, + "printsp": true, + "printlock": true, + "printunlock": true, + "concatstring2": true, + "concatstring3": true, + "concatstring4": true, + "concatstring5": true, + "concatstrings": true, + "cmpstring": true, + "intstring": true, + "slicebytetostring": true, + "slicebytetostringtmp": true, + "slicerunetostring": true, + "stringtoslicebyte": true, + "stringtoslicerune": true, + "slicecopy": true, + "slicestringcopy": true, + "decoderune": true, + "countrunes": true, + "convI2I": true, + "convT16": true, + "convT32": true, + "convT64": true, + "convTstring": true, + "convTslice": true, + "convT2E": true, + "convT2Enoptr": true, + "convT2I": true, + "convT2Inoptr": true, + "assertE2I": true, + "assertE2I2": true, + "assertI2I": true, + "assertI2I2": true, + "panicdottypeE": true, + "panicdottypeI": true, + "panicnildottype": true, + "ifaceeq": true, + "efaceeq": true, + "fastrand": true, + "makemap64": true, + "makemap": true, + "makemap_small": true, + "mapaccess1": true, + "mapaccess1_fast32": true, + "mapaccess1_fast64": true, + "mapaccess1_faststr": true, + "mapaccess1_fat": true, + "mapaccess2": true, + "mapaccess2_fast32": true, + "mapaccess2_fast64": true, + "mapaccess2_faststr": true, + "mapaccess2_fat": true, + "mapassign": true, + "mapassign_fast32": true, + "mapassign_fast32ptr": true, + "mapassign_fast64": true, + "mapassign_fast64ptr": true, + "mapassign_faststr": true, + "mapiterinit": true, + "mapdelete": true, + "mapdelete_fast32": true, + "mapdelete_fast64": true, + "mapdelete_faststr": true, + "mapiternext": true, + "mapclear": true, + "makechan64": true, + "makechan": true, + "chanrecv1": true, + "chanrecv2": true, + "chansend1": true, + "closechan": true, + "writeBarrier": true, + "typedmemmove": true, + "typedmemclr": true, + "typedslicecopy": true, + "selectnbsend": true, + "selectnbrecv": true, + "selectnbrecv2": true, + "selectsetpc": true, + "selectgo": true, + "block": true, + "makeslice": true, + "makeslice64": true, + "growslice": true, + "memmove": true, + "memclrNoHeapPointers": true, + "memclrHasPointers": true, + "memequal": true, + "memequal8": true, + "memequal16": true, + "memequal32": true, + "memequal64": true, + "memequal128": true, + "int64div": true, + "uint64div": true, + "int64mod": true, + "uint64mod": true, + "float64toint64": true, + "float64touint64": true, + "float64touint32": true, + "int64tofloat64": true, + "uint64tofloat64": true, + "uint32tofloat64": true, + "complex128div": true, + "racefuncenter": true, + "racefuncenterfp": true, + "racefuncexit": true, + "raceread": true, + "racewrite": true, + "racereadrange": true, + "racewriterange": true, + "msanread": true, + "msanwrite": true, + "x86HasPOPCNT": true, + "x86HasSSE41": true, + "arm64HasATOMICS": true, + "mallocgc": true, + "panicshift": true, + "panicmakeslicecap": true, + "goPanicIndex": true, + "goPanicIndexU": true, + "goPanicSliceAlen": true, + "goPanicSliceAlenU": true, + "goPanicSliceAcap": 
true, + "goPanicSliceAcapU": true, + "goPanicSliceB": true, + "goPanicSliceBU": true, + "goPanicSlice3Alen": true, + "goPanicSlice3AlenU": true, + "goPanicSlice3Acap": true, + "goPanicSlice3AcapU": true, + "goPanicSlice3B": true, + "goPanicSlice3BU": true, + "goPanicSlice3C": true, + "goPanicSlice3CU": true, + "goPanicSliceConvert": true, + "printuintptr": true, + "convT": true, + "convTnoptr": true, + "makeslicecopy": true, + "unsafeslicecheckptr": true, + "panicunsafeslicelen": true, + "panicunsafeslicenilptr": true, + "unsafestringcheckptr": true, + "panicunsafestringlen": true, + "panicunsafestringnilptr": true, + "mulUintptr": true, + "memequal0": true, + "f32equal": true, + "f64equal": true, + "c64equal": true, + "c128equal": true, + "strequal": true, + "interequal": true, + "nilinterequal": true, + "memhash": true, + "memhash0": true, + "memhash8": true, + "memhash16": true, + "memhash32": true, + "memhash64": true, + "memhash128": true, + "f32hash": true, + "f64hash": true, + "c64hash": true, + "c128hash": true, + "strhash": true, + "interhash": true, + "nilinterhash": true, + "int64tofloat32": true, + "uint64tofloat32": true, + "getcallerpc": true, + "getcallersp": true, + "msanmove": true, + "asanread": true, + "asanwrite": true, + "checkptrAlignment": true, + "checkptrArithmetic": true, + "libfuzzerTraceCmp1": true, + "libfuzzerTraceCmp2": true, + "libfuzzerTraceCmp4": true, + "libfuzzerTraceCmp8": true, + "libfuzzerTraceConstCmp1": true, + "libfuzzerTraceConstCmp2": true, + "libfuzzerTraceConstCmp4": true, + "libfuzzerTraceConstCmp8": true, + "libfuzzerHookStrCmp": true, + "libfuzzerHookEqualFold": true, + "addCovMeta": true, + "x86HasFMA": true, + "armHasVFPv4": true, + + // Extracted from assembly code in the standard library, with the exception of the runtime package itself + "abort": true, + "aeshashbody": true, + "args": true, + "asminit": true, + "badctxt": true, + "badmcall2": true, + "badmcall": true, + "badmorestackg0": true, + "badmorestackgsignal": true, + "badsignal2": true, + "callbackasm1": true, + "callCfunction": true, + "cgocallback_gofunc": true, + "cgocallbackg": true, + "checkgoarm": true, + "check": true, + "debugCallCheck": true, + "debugCallWrap": true, + "emptyfunc": true, + "entersyscall": true, + "exit": true, + "exits": true, + "exitsyscall": true, + "externalthreadhandler": true, + "findnull": true, + "goexit1": true, + "gostring": true, + "i386_set_ldt": true, + "_initcgo": true, + "init_thread_tls": true, + "ldt0setup": true, + "libpreinit": true, + "load_g": true, + "morestack": true, + "mstart": true, + "nacl_sysinfo": true, + "nanotimeQPC": true, + "nanotime": true, + "newosproc0": true, + "newproc": true, + "newstack": true, + "noted": true, + "nowQPC": true, + "osinit": true, + "printf": true, + "racecallback": true, + "reflectcallmove": true, + "reginit": true, + "rt0_go": true, + "save_g": true, + "schedinit": true, + "setldt": true, + "settls": true, + "sighandler": true, + "sigprofNonGo": true, + "sigtrampgo": true, + "_sigtramp": true, + "sigtramp": true, + "stackcheck": true, + "syscall_chdir": true, + "syscall_chroot": true, + "syscall_close": true, + "syscall_dup2": true, + "syscall_execve": true, + "syscall_exit": true, + "syscall_fcntl": true, + "syscall_forkx": true, + "syscall_gethostname": true, + "syscall_getpid": true, + "syscall_ioctl": true, + "syscall_pipe": true, + "syscall_rawsyscall6": true, + "syscall_rawSyscall6": true, + "syscall_rawsyscall": true, + "syscall_RawSyscall": true, + "syscall_rawsysvicall6": true, + 
"syscall_setgid": true, + "syscall_setgroups": true, + "syscall_setpgid": true, + "syscall_setsid": true, + "syscall_setuid": true, + "syscall_syscall6": true, + "syscall_syscall": true, + "syscall_Syscall": true, + "syscall_sysvicall6": true, + "syscall_wait4": true, + "syscall_write": true, + "traceback": true, + "tstart": true, + "usplitR0": true, + "wbBufFlush": true, + "write": true, + + // Other runtime functions that can get called in non-standard ways + "bgsweep": true, + "memhash_varlen": true, + "strhashFallback": true, + "asanregisterglobals": true, + "cgoUse": true, + "cgoCheckPointer": true, + "cgoCheckResult": true, + "_cgo_panic_internal": true, + "addExitHook": true, +} + +var runtimeCoverageFuncs = map[string]bool{ + "initHook": true, + "markProfileEmitted": true, + "processCoverTestDir": true, +} diff --git a/vendor/honnef.co/go/tools/unused/serialize.go b/vendor/honnef.co/go/tools/unused/serialize.go new file mode 100644 index 00000000..126e7400 --- /dev/null +++ b/vendor/honnef.co/go/tools/unused/serialize.go @@ -0,0 +1,99 @@ +package unused + +import ( + "fmt" + "go/token" + "os" + + "golang.org/x/tools/go/types/objectpath" +) + +type ObjectPath struct { + PkgPath string + ObjPath objectpath.Path +} + +// XXX make sure that node 0 always exists and is always the root + +type SerializedGraph struct { + nodes []Node + nodesByPath map[ObjectPath]NodeID + // XXX deduplicating on position is dubious for `switch x := foo.(type)`, where x will be declared many times for + // the different types, but all at the same position. On the other hand, merging these nodes is probably fine. + nodesByPosition map[token.Position]NodeID +} + +func trace(f string, args ...interface{}) { + fmt.Fprintf(os.Stderr, f, args...) + fmt.Fprintln(os.Stderr) +} + +func (g *SerializedGraph) Merge(nodes []Node) { + if g.nodesByPath == nil { + g.nodesByPath = map[ObjectPath]NodeID{} + } + if g.nodesByPosition == nil { + g.nodesByPosition = map[token.Position]NodeID{} + } + if len(g.nodes) == 0 { + // Seed nodes with a root node + g.nodes = append(g.nodes, Node{}) + } + // OPT(dh): reuse storage between calls to Merge + remapping := make([]NodeID, len(nodes)) + + // First pass: compute remapping of IDs of to-be-merged nodes + for _, n := range nodes { + // XXX Column is never 0. it's 1 if there is no column information in the export data. which sucks, because + // objects can also genuinely be in column 1. + if n.id != 0 && n.obj.Path == (ObjectPath{}) && n.obj.Position.Column == 0 { + // If the object has no path, then it couldn't have come from export data, which means it needs to have full + // position information including a column. 
+ panic(fmt.Sprintf("object %q has no path but also no column information", n.obj.Name)) + } + + if orig, ok := g.nodesByPath[n.obj.Path]; ok { + // We already have a node for this object + trace("deduplicating %d -> %d based on path %s", n.id, orig, n.obj.Path) + remapping[n.id] = orig + } else if orig, ok := g.nodesByPosition[n.obj.Position]; ok && n.obj.Position.Column != 0 { + // We already have a node for this object + trace("deduplicating %d -> %d based on position %s", n.id, orig, n.obj.Position) + remapping[n.id] = orig + } else { + // This object is new to us; change ID to avoid collision + newID := NodeID(len(g.nodes)) + trace("new node, remapping %d -> %d", n.id, newID) + remapping[n.id] = newID + g.nodes = append(g.nodes, Node{ + id: newID, + obj: n.obj, + uses: make([]NodeID, 0, len(n.uses)), + owns: make([]NodeID, 0, len(n.owns)), + }) + if n.id == 0 { + // Our root uses all the roots of the subgraphs + g.nodes[0].uses = append(g.nodes[0].uses, newID) + } + if n.obj.Path != (ObjectPath{}) { + g.nodesByPath[n.obj.Path] = newID + } + if n.obj.Position.Column != 0 { + g.nodesByPosition[n.obj.Position] = newID + } + } + } + + // Second step: apply remapping + for _, n := range nodes { + n.id = remapping[n.id] + for i := range n.uses { + n.uses[i] = remapping[n.uses[i]] + } + for i := range n.owns { + n.owns[i] = remapping[n.owns[i]] + } + g.nodes[n.id].uses = append(g.nodes[n.id].uses, n.uses...) + g.nodes[n.id].owns = append(g.nodes[n.id].owns, n.owns...) + } +} diff --git a/vendor/honnef.co/go/tools/unused/unused.go b/vendor/honnef.co/go/tools/unused/unused.go index f703ed3d..614d5675 100644 --- a/vendor/honnef.co/go/tools/unused/unused.go +++ b/vendor/honnef.co/go/tools/unused/unused.go @@ -1,8 +1,6 @@ // Package unused contains code for finding unused code. package unused -// TODO(dh): don't add instantiated types/methods to the graph. add the origin types/methods. - import ( "fmt" "go/ast" @@ -12,66 +10,61 @@ import ( "reflect" "strings" - "honnef.co/go/tools/analysis/code" "honnef.co/go/tools/analysis/facts/directives" "honnef.co/go/tools/analysis/facts/generated" "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/analysis/report" "honnef.co/go/tools/go/ast/astutil" - "honnef.co/go/tools/go/ir" "honnef.co/go/tools/go/types/typeutil" - "honnef.co/go/tools/internal/passes/buildir" "golang.org/x/exp/typeparams" "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/types/objectpath" ) -var Debug io.Writer +// OPT(dh): don't track local variables that can't have any interesting outgoing edges. For example, using a local +// variable of type int is meaningless; we don't care if `int` is used or not. +// +// Note that we do have to track variables with for example array types, because the array type could have involved a +// named constant. +// +// We probably have different culling needs depending on the mode of operation, too. If we analyze multiple packages in +// one graph (unused's "whole program" mode), we could remove further useless edges (e.g. into nodes that themselves +// have no outgoing edges and aren't meaningful objects on their own) after having analyzed a package, to keep the +// in-memory representation small on average. If we only analyze a single package, that step would just waste cycles, as +// we're about to throw the entire graph away, anyway. -// The graph we construct omits nodes along a path that do not -// contribute any new information to the solution. 
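The Merge method added in serialize.go above folds the node lists produced for individual packages into one combined graph: IDs are remapped, and nodes describing the same object (matched by object path, or by position when no path is available) are collapsed into a single node. The following standalone sketch shows the same remapping idea with deliberately simplified, hypothetical types, where a single string key stands in for both ObjectPath and token.Position; it is an illustration, not the vendored implementation.

package main

import "fmt"

// Simplified stand-ins for the vendored Node/NodeID/ObjectPath types; a single
// string key replaces both the object path and the position fallback.
type nodeID int

type node struct {
	id   nodeID
	key  string
	uses []nodeID
}

type graph struct {
	nodes []node
	byKey map[string]nodeID
}

// merge folds one per-package node list into the combined graph, remapping IDs
// so that nodes with the same key are collapsed instead of duplicated.
func (g *graph) merge(nodes []node) {
	if len(g.nodes) == 0 {
		g.nodes = append(g.nodes, node{}) // node 0 is always the root
	}
	remap := make([]nodeID, len(nodes))

	// First pass: assign every incoming node an ID in the combined graph.
	for _, n := range nodes {
		if orig, ok := g.byKey[n.key]; ok && n.id != 0 {
			remap[n.id] = orig // already known: deduplicate
			continue
		}
		newID := nodeID(len(g.nodes))
		remap[n.id] = newID
		g.nodes = append(g.nodes, node{id: newID, key: n.key})
		if n.id == 0 {
			// the combined root uses the root of every merged subgraph
			g.nodes[0].uses = append(g.nodes[0].uses, newID)
		} else {
			g.byKey[n.key] = newID
		}
	}

	// Second pass: rewrite edges through the remapping table.
	for _, n := range nodes {
		for _, u := range n.uses {
			id := remap[n.id]
			g.nodes[id].uses = append(g.nodes[id].uses, remap[u])
		}
	}
}

func main() {
	g := &graph{byKey: map[string]nodeID{}}
	g.merge([]node{{id: 0, uses: []nodeID{1}}, {id: 1, key: "pkg/a.F"}})
	g.merge([]node{{id: 0, uses: []nodeID{1}}, {id: 1, key: "pkg/a.F"}})
	// 4 nodes: the root, one sub-root per merged list, and a single shared
	// node for "pkg/a.F" despite it appearing in both inputs.
	fmt.Println(len(g.nodes))
}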
For example, the -// full graph for a function with a receiver would be Func -> -// Signature -> Var -> Type. However, since signatures cannot be -// unused, and receivers are always considered used, we can compact -// the graph down to Func -> Type. This makes the graph smaller, but -// harder to debug. +// TODO(dh): currently, types use methods that implement interfaces. However, this makes a method used even if the +// relevant interface is never used. What if instead interfaces used those methods? Right now we cannot do that, because +// methods use their receivers, so using a method uses the type. But do we need that edge? Is there a way to refer to a +// method without explicitly mentioning the type somewhere? If not, the edge from method to receiver is superfluous. -// TODO(dh): conversions between structs mark fields as used, but the -// conversion itself isn't part of that subgraph. even if the function -// containing the conversion is unused, the fields will be marked as -// used. +// XXX vet all code for proper use of core types // TODO(dh): we cannot observe function calls in assembly files. /* +This overview is true when using the default options. Different options may change individual behaviors. + - packages use: - (1.1) exported named types - - (1.2) exported functions + - (1.2) exported functions (but not methods!) - (1.3) exported variables - (1.4) exported constants - (1.5) init functions - (1.6) functions exported to cgo - (1.7) the main function iff in the main package - (1.8) symbols linked via go:linkname + - (1.9) objects in generated files - named types use: - (2.1) exported methods - (2.2) the type they're based on - - (2.3) all their aliases. we can't easily track uses of aliases - because go/types turns them into uses of the aliased types. assume - that if a type is used, so are all of its aliases. - - (2.4) the pointer type. this aids with eagerly implementing - interfaces. if a method that implements an interface is defined on - a pointer receiver, and the pointer type is never used, but the - named type is, then we still want to mark the method as used. - (2.5) all their type parameters. Unused type parameters are probably useless, but they're a brand new feature and we don't want to introduce false positives because we couldn't anticipate some novel use-case. - (2.6) all their type arguments -- variables and constants use: - - their types - - functions use: - (4.1) all their arguments, return parameters and receivers - (4.2) anonymous functions defined beneath them @@ -80,11 +73,14 @@ var Debug io.Writer that way we don't have to keep track of closures escaping functions. - (4.4) functions they return. we assume that someone else will call the returned function - (4.5) functions/interface methods they call - - types they instantiate or convert to + - (4.6) types they instantiate or convert to - (4.7) fields they access - - (4.8) types of all instructions - (4.9) package-level variables they assign to iff in tests (sinks for benchmarks) - (4.10) all their type parameters. See 2.5 for reasoning. + - (4.11) local variables + - Note that the majority of this is handled implicitly by seeing idents be used. In particular, unlike the old + IR-based implementation, the AST-based one doesn't care about closures, bound methods or anonymous functions. + They're all just additional nodes in the AST. 
- conversions use: - (5.1) when converting between two equivalent structs, the fields in @@ -107,7 +103,7 @@ var Debug io.Writer - (8.1) We do not technically care about interfaces that only consist of exported methods. Exported methods on concrete types are always marked as used. - - Any concrete type implements all known interfaces. Even if it isn't + - (8.2) Any concrete type implements all known interfaces. Even if it isn't assigned to any interfaces in our code, the user may receive a value of the type and expect to pass it back to us through an interface. @@ -126,20 +122,22 @@ var Debug io.Writer used by 8.3 just because it contributes A's methods to C. - Inherent uses: - - thunks and other generated wrappers call the real function - (9.2) variables use their types - (9.3) types use their underlying and element types - (9.4) conversions use the type they convert to - - (9.5) instructions use their operands - - (9.6) instructions use their operands' types - (9.7) variable _reads_ use variables, writes do not, except in tests - (9.8) runtime functions that may be called from user code via the compiler + - (9.9) objects named the blank identifier are used. They cannot be referred to and are usually used explicitly to + use something that would otherwise be unused. + - The majority of idents get marked as read by virtue of being in the AST. - const groups: - (10.1) if one constant out of a block of constants is used, mark all - of them used. a lot of the time, unused constants exist for the sake - of completeness. See also - https://github.com/dominikh/go-tools/issues/365 + - (10.1) if one constant out of a block of constants is used, mark all + of them used. a lot of the time, unused constants exist for the sake + of completeness. See also + https://github.com/dominikh/go-tools/issues/365 + + Do not, however, include constants named _ in constant groups. - (11.1) anonymous struct types use all their fields. we cannot @@ -155,274 +153,19 @@ var Debug io.Writer */ +var Debug io.Writer + func assert(b bool) { if !b { panic("failed assertion") } } -// /usr/lib/go/src/runtime/proc.go:433:6: func badmorestackg0 is unused (U1000) - -// Functions defined in the Go runtime that may be called through -// compiler magic or via assembly. 
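Read together, the rules listed in the overview comment above describe a reachability walk over declared objects. As a rough illustration of how they classify a small package under the default options, consider the following snippet; all names are invented for the example, and the comments paraphrase the rules rather than quote the checker's output.

package example

type private struct{} // unexported and never referenced: a candidate for U1000

func helper() int { return 42 } // unexported, never called: also a candidate

type Exported struct{} // kept by (1.1): packages use exported named types

func (Exported) Method() {} // kept by (2.1): named types use exported methods

const (
	modeA = iota // referenced below, so used
	modeB        // kept anyway by (10.1): the const group forms a ring
)

var _ = modeA // the blank var is always used (9.9) and its read keeps modeA alive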
-var runtimeFuncs = map[string]bool{ - // The first part of the list is copied from - // cmd/compile/internal/gc/builtin.go, var runtimeDecls - "newobject": true, - "panicindex": true, - "panicslice": true, - "panicdivide": true, - "panicmakeslicelen": true, - "throwinit": true, - "panicwrap": true, - "gopanic": true, - "gorecover": true, - "goschedguarded": true, - "printbool": true, - "printfloat": true, - "printint": true, - "printhex": true, - "printuint": true, - "printcomplex": true, - "printstring": true, - "printpointer": true, - "printiface": true, - "printeface": true, - "printslice": true, - "printnl": true, - "printsp": true, - "printlock": true, - "printunlock": true, - "concatstring2": true, - "concatstring3": true, - "concatstring4": true, - "concatstring5": true, - "concatstrings": true, - "cmpstring": true, - "intstring": true, - "slicebytetostring": true, - "slicebytetostringtmp": true, - "slicerunetostring": true, - "stringtoslicebyte": true, - "stringtoslicerune": true, - "slicecopy": true, - "slicestringcopy": true, - "decoderune": true, - "countrunes": true, - "convI2I": true, - "convT16": true, - "convT32": true, - "convT64": true, - "convTstring": true, - "convTslice": true, - "convT2E": true, - "convT2Enoptr": true, - "convT2I": true, - "convT2Inoptr": true, - "assertE2I": true, - "assertE2I2": true, - "assertI2I": true, - "assertI2I2": true, - "panicdottypeE": true, - "panicdottypeI": true, - "panicnildottype": true, - "ifaceeq": true, - "efaceeq": true, - "fastrand": true, - "makemap64": true, - "makemap": true, - "makemap_small": true, - "mapaccess1": true, - "mapaccess1_fast32": true, - "mapaccess1_fast64": true, - "mapaccess1_faststr": true, - "mapaccess1_fat": true, - "mapaccess2": true, - "mapaccess2_fast32": true, - "mapaccess2_fast64": true, - "mapaccess2_faststr": true, - "mapaccess2_fat": true, - "mapassign": true, - "mapassign_fast32": true, - "mapassign_fast32ptr": true, - "mapassign_fast64": true, - "mapassign_fast64ptr": true, - "mapassign_faststr": true, - "mapiterinit": true, - "mapdelete": true, - "mapdelete_fast32": true, - "mapdelete_fast64": true, - "mapdelete_faststr": true, - "mapiternext": true, - "mapclear": true, - "makechan64": true, - "makechan": true, - "chanrecv1": true, - "chanrecv2": true, - "chansend1": true, - "closechan": true, - "writeBarrier": true, - "typedmemmove": true, - "typedmemclr": true, - "typedslicecopy": true, - "selectnbsend": true, - "selectnbrecv": true, - "selectnbrecv2": true, - "selectsetpc": true, - "selectgo": true, - "block": true, - "makeslice": true, - "makeslice64": true, - "growslice": true, - "memmove": true, - "memclrNoHeapPointers": true, - "memclrHasPointers": true, - "memequal": true, - "memequal8": true, - "memequal16": true, - "memequal32": true, - "memequal64": true, - "memequal128": true, - "int64div": true, - "uint64div": true, - "int64mod": true, - "uint64mod": true, - "float64toint64": true, - "float64touint64": true, - "float64touint32": true, - "int64tofloat64": true, - "uint64tofloat64": true, - "uint32tofloat64": true, - "complex128div": true, - "racefuncenter": true, - "racefuncenterfp": true, - "racefuncexit": true, - "raceread": true, - "racewrite": true, - "racereadrange": true, - "racewriterange": true, - "msanread": true, - "msanwrite": true, - "x86HasPOPCNT": true, - "x86HasSSE41": true, - "arm64HasATOMICS": true, - - // The second part of the list is extracted from assembly code in - // the standard library, with the exception of the runtime package itself - "abort": true, - 
"aeshashbody": true, - "args": true, - "asminit": true, - "badctxt": true, - "badmcall2": true, - "badmcall": true, - "badmorestackg0": true, - "badmorestackgsignal": true, - "badsignal2": true, - "callbackasm1": true, - "callCfunction": true, - "cgocallback_gofunc": true, - "cgocallbackg": true, - "checkgoarm": true, - "check": true, - "debugCallCheck": true, - "debugCallWrap": true, - "emptyfunc": true, - "entersyscall": true, - "exit": true, - "exits": true, - "exitsyscall": true, - "externalthreadhandler": true, - "findnull": true, - "goexit1": true, - "gostring": true, - "i386_set_ldt": true, - "_initcgo": true, - "init_thread_tls": true, - "ldt0setup": true, - "libpreinit": true, - "load_g": true, - "morestack": true, - "mstart": true, - "nacl_sysinfo": true, - "nanotimeQPC": true, - "nanotime": true, - "newosproc0": true, - "newproc": true, - "newstack": true, - "noted": true, - "nowQPC": true, - "osinit": true, - "printf": true, - "racecallback": true, - "reflectcallmove": true, - "reginit": true, - "rt0_go": true, - "save_g": true, - "schedinit": true, - "setldt": true, - "settls": true, - "sighandler": true, - "sigprofNonGo": true, - "sigtrampgo": true, - "_sigtramp": true, - "sigtramp": true, - "stackcheck": true, - "syscall_chdir": true, - "syscall_chroot": true, - "syscall_close": true, - "syscall_dup2": true, - "syscall_execve": true, - "syscall_exit": true, - "syscall_fcntl": true, - "syscall_forkx": true, - "syscall_gethostname": true, - "syscall_getpid": true, - "syscall_ioctl": true, - "syscall_pipe": true, - "syscall_rawsyscall6": true, - "syscall_rawSyscall6": true, - "syscall_rawsyscall": true, - "syscall_RawSyscall": true, - "syscall_rawsysvicall6": true, - "syscall_setgid": true, - "syscall_setgroups": true, - "syscall_setpgid": true, - "syscall_setsid": true, - "syscall_setuid": true, - "syscall_syscall6": true, - "syscall_syscall": true, - "syscall_Syscall": true, - "syscall_sysvicall6": true, - "syscall_wait4": true, - "syscall_write": true, - "traceback": true, - "tstart": true, - "usplitR0": true, - "wbBufFlush": true, - "write": true, -} - -type pkg struct { - Fset *token.FileSet - Files []*ast.File - Pkg *types.Package - TypesInfo *types.Info - TypesSizes types.Sizes - IR *ir.Package - SrcFuncs []*ir.Function - Directives []lint.Directive -} - // TODO(dh): should we return a map instead of two slices? 
type Result struct { - Used []types.Object - Unused []types.Object -} - -type SerializedResult struct { - Used []SerializedObject - Unused []SerializedObject + Used []Object + Unused []Object + Quiet []Object } var Analyzer = &lint.Analyzer{ @@ -433,483 +176,326 @@ var Analyzer = &lint.Analyzer{ Name: "U1000", Doc: "Unused code", Run: run, - Requires: []*analysis.Analyzer{buildir.Analyzer, generated.Analyzer, directives.Analyzer}, + Requires: []*analysis.Analyzer{generated.Analyzer, directives.Analyzer}, ResultType: reflect.TypeOf(Result{}), }, } -type SerializedObject struct { - Name string - Position token.Position - DisplayPosition token.Position - Kind string - InGenerated bool -} - -func typString(obj types.Object) string { - switch obj := obj.(type) { - case *types.Func: - return "func" - case *types.Var: - if obj.IsField() { - return "field" - } - return "var" - case *types.Const: - return "const" - case *types.TypeName: - if _, ok := obj.Type().(*typeparams.TypeParam); ok { - return "type param" - } else { - return "type" - } - default: - return "identifier" - } +func newGraph( + fset *token.FileSet, + files []*ast.File, + pkg *types.Package, + info *types.Info, + directives []lint.Directive, + generated map[string]generated.Generator, + opts Options, +) *graph { + g := graph{ + pkg: pkg, + info: info, + files: files, + directives: directives, + generated: generated, + fset: fset, + nodes: []Node{{}}, + edges: map[edge]struct{}{}, + objects: map[types.Object]NodeID{}, + opts: opts, + } + + return &g } -func Serialize(pass *analysis.Pass, res Result, fset *token.FileSet) SerializedResult { - // OPT(dh): there's no point in serializing Used objects that are - // always used, such as exported names, blank identifiers, or - // anonymous struct fields. Used only exists to overrule Unused of - // a different package. If something can never be unused, then its - // presence in Used is useless. - // - // I'm not sure if this should happen when serializing, or when - // returning Result. - - out := SerializedResult{ - Used: make([]SerializedObject, len(res.Used)), - Unused: make([]SerializedObject, len(res.Unused)), - } - for i, obj := range res.Used { - out.Used[i] = serializeObject(pass, fset, obj) - } - for i, obj := range res.Unused { - out.Unused[i] = serializeObject(pass, fset, obj) - } - return out -} +func run(pass *analysis.Pass) (interface{}, error) { + g := newGraph( + pass.Fset, + pass.Files, + pass.Pkg, + pass.TypesInfo, + pass.ResultOf[directives.Analyzer].([]lint.Directive), + pass.ResultOf[generated.Analyzer].(map[string]generated.Generator), + DefaultOptions, + ) + g.entry() -func serializeObject(pass *analysis.Pass, fset *token.FileSet, obj types.Object) SerializedObject { - name := obj.Name() - if sig, ok := obj.Type().(*types.Signature); ok && sig.Recv() != nil { - switch sig.Recv().Type().(type) { - case *types.Named, *types.Pointer: - typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) - if len(typ) > 0 && typ[0] == '*' { - name = fmt.Sprintf("(%s).%s", typ, obj.Name()) - } else if len(typ) > 0 { - name = fmt.Sprintf("%s.%s", typ, obj.Name()) - } - } - } - return SerializedObject{ - Name: name, - Position: fset.PositionFor(obj.Pos(), false), - DisplayPosition: report.DisplayPosition(fset, obj.Pos()), - Kind: typString(obj), - InGenerated: code.IsGenerated(pass, obj.Pos()), + sg := &SerializedGraph{ + nodes: g.nodes, } -} -func debugf(f string, v ...interface{}) { if Debug != nil { - fmt.Fprintf(Debug, f, v...) 
+ Debug.Write([]byte(sg.Dot())) } -} -func run(pass *analysis.Pass) (interface{}, error) { - irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR) - dirs := pass.ResultOf[directives.Analyzer].([]lint.Directive) - pkg := &pkg{ - Fset: pass.Fset, - Files: pass.Files, - Pkg: pass.Pkg, - TypesInfo: pass.TypesInfo, - TypesSizes: pass.TypesSizes, - IR: irpkg.Pkg, - SrcFuncs: irpkg.SrcFuncs, - Directives: dirs, - } + return sg.Results(), nil +} - g := newGraph() - g.entry(pkg) - used, unused := results(g) +type Options struct { + FieldWritesAreUses bool + PostStatementsAreReads bool + ExportedIsUsed bool + ExportedFieldsAreUsed bool + ParametersAreUsed bool + LocalVariablesAreUsed bool + GeneratedIsUsed bool +} - if Debug != nil { - debugNode := func(n *node) { - if n.obj == nil { - debugf("n%d [label=\"Root\"];\n", n.id) - } else { - color := "red" - if n.seen { - color = "green" - } - debugf("n%d [label=%q, color=%q];\n", n.id, fmt.Sprintf("(%T) %s", n.obj, n.obj), color) - } - for _, e := range n.used { - for i := edgeKind(1); i < 64; i++ { - if e.kind.is(1 << i) { - debugf("n%d -> n%d [label=%q];\n", n.id, e.node.id, edgeKind(1< 0 && typ[0] == '*' { + name = fmt.Sprintf("(%s).%s", typ, obj.Name()) + } else if len(typ) > 0 { + name = fmt.Sprintf("%s.%s", typ, obj.Name()) + } + } + } + return Object{ + Name: name, + ShortName: obj.Name(), + Kind: typString(obj), + Path: path, + Position: g.fset.PositionFor(obj.Pos(), false), + DisplayPosition: report.DisplayPosition(g.fset, obj.Pos()), } - g.Root = g.newNode(nil) - return g } -func (g *graph) newPointer(typ types.Type) *types.Pointer { - if p, ok := g.pointers[typ]; ok { - return p - } else { - p := types.NewPointer(typ) - g.pointers[typ] = p - g.see(p) - return p +func typString(obj types.Object) string { + switch obj := obj.(type) { + case *types.Func: + return "func" + case *types.Var: + if obj.IsField() { + return "field" + } + return "var" + case *types.Const: + return "const" + case *types.TypeName: + if _, ok := obj.Type().(*types.TypeParam); ok { + return "type param" + } else { + return "type" + } + default: + return "identifier" } } -func (g *graph) color(root *node) { - if root.seen { - return +func (g *graph) newNode(obj types.Object) NodeID { + id := NodeID(len(g.nodes)) + n := Node{ + id: id, + obj: g.objectToObject(obj), } - root.seen = true - for _, e := range root.used { - g.color(e.node) + g.nodes = append(g.nodes, n) + if _, ok := g.objects[obj]; ok { + panic(fmt.Sprintf("already had a node for %s", obj)) } + g.objects[obj] = id + return id } -type constGroup struct { - // give the struct a size to get unique pointers - _ byte -} - -func (constGroup) String() string { return "const group" } - -type edge struct { - node *node - kind edgeKind -} - -type node struct { - obj interface{} - id uint64 - - // OPT(dh): evaluate using a map instead of a slice to avoid - // duplicate edges. 
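The Object values constructed above carry an objectpath.Path precisely so that results can be correlated across packages and export data without relying on file positions. For readers unfamiliar with that dependency, here is a small standalone sketch of golang.org/x/tools/go/types/objectpath, independent of the vendored code; the demo source and names are invented.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/types/objectpath"
)

const src = `package demo

type T struct{ F int }

func (T) M() {}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	// No imports in src, so no Importer is needed.
	conf := types.Config{}
	pkg, err := conf.Check("demo", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}

	named := pkg.Scope().Lookup("T").(*types.TypeName)
	m, _, _ := types.LookupFieldOrMethod(named.Type(), true, pkg, "M")

	// A Path is a stable, position-independent encoding of how to reach an
	// exported object from its package, which is why the serialized graph
	// keys nodes on it rather than on token.Position alone.
	path, err := objectpath.For(m)
	if err != nil {
		panic(err)
	}
	fmt.Println(path)

	// Resolving the path against the package yields the object back.
	back, err := objectpath.Object(pkg, path)
	if err != nil {
		panic(err)
	}
	fmt.Println(back == m) // expected: true
}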
- used []edge - - // set during final graph walk if node is reachable - seen bool - quiet bool -} - -func (g *graph) nodeMaybe(obj types.Object) (*node, bool) { - if node, ok := g.Nodes[obj]; ok { - return node, true +func (g *graph) node(obj types.Object) NodeID { + if obj == nil { + return 0 + } + obj = origin(obj) + if n, ok := g.objects[obj]; ok { + return n } - return nil, false + n := g.newNode(obj) + return n } -func (g *graph) node(obj interface{}) (n *node, new bool) { +func origin(obj types.Object) types.Object { switch obj := obj.(type) { - case types.Type: - if v := g.TypeNodes[obj]; v != nil { - return v, false - } - n = g.newNode(obj) - g.TypeNodes[obj] = n - return n, true - case types.Object: - // OPT(dh): the types.Object and default cases are identical - if node, ok := g.Nodes[obj]; ok { - return node, false - } - - n = g.newNode(obj) - g.Nodes[obj] = n - return n, true + case *types.Var: + return obj.Origin() + case *types.Func: + return obj.Origin() default: - if node, ok := g.Nodes[obj]; ok { - return node, false - } - - n = g.newNode(obj) - g.Nodes[obj] = n - return n, true + return obj } } -func (g *graph) newNode(obj interface{}) *node { - g.nodeCounter++ - return &node{ - obj: obj, - id: g.nodeCounter, +func (g *graph) addEdge(e edge) bool { + if _, ok := g.edges[e]; ok { + return false } + g.edges[e] = struct{}{} + return true } -func (n *node) use(n2 *node, kind edgeKind) { - assert(n2 != nil) - n.used = append(n.used, edge{node: n2, kind: kind}) +func (g *graph) addOwned(owner, owned NodeID) { + e := edge{owner, owned, edgeKindOwn} + if !g.addEdge(e) { + return + } + n := &g.nodes[owner] + n.owns = append(n.owns, owned) } -// isIrrelevant reports whether an object's presence in the graph is -// of any relevance. A lot of objects will never have outgoing edges, -// nor meaningful incoming ones. Examples are basic types and empty -// signatures, among many others. -// -// Dropping these objects should have no effect on correctness, but -// may improve performance. It also helps with debugging, as it -// greatly reduces the size of the graph. 
-func isIrrelevant(obj interface{}) bool { - if obj, ok := obj.(types.Object); ok { - switch obj := obj.(type) { - case *types.Var: - if obj.IsField() { - // We need to track package fields - return false - } - if obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope() { - // We need to track package-level variables - return false - } - return isIrrelevant(obj.Type()) - default: - return false - } - } - if T, ok := obj.(types.Type); ok { - switch T := T.(type) { - case *types.Array: - return isIrrelevant(T.Elem()) - case *types.Slice: - return isIrrelevant(T.Elem()) - case *types.Basic: - return true - case *types.Tuple: - for i := 0; i < T.Len(); i++ { - if !isIrrelevant(T.At(i).Type()) { - return false - } - } - return true - case *types.Signature: - if T.Recv() != nil { - return false - } - for i := 0; i < T.Params().Len(); i++ { - if !isIrrelevant(T.Params().At(i)) { - return false - } - } - for i := 0; i < T.Results().Len(); i++ { - if !isIrrelevant(T.Results().At(i)) { - return false - } - } - return true - case *types.Interface: - return T.NumMethods() == 0 && T.NumEmbeddeds() == 0 - case *types.Pointer: - return isIrrelevant(T.Elem()) - case *types.Map: - return isIrrelevant(T.Key()) && isIrrelevant(T.Elem()) - case *types.Struct: - return T.NumFields() == 0 - case *types.Chan: - return isIrrelevant(T.Elem()) - default: - return false - } +func (g *graph) addUse(by, used NodeID) { + e := edge{by, used, edgeKindUse} + if !g.addEdge(e) { + return } - return false + nBy := &g.nodes[by] + nBy.uses = append(nBy.uses, used) } -func (g *graph) see(obj interface{}) *node { - if isIrrelevant(obj) { - return nil +func (g *graph) see(obj, owner types.Object) { + if obj == nil { + panic("saw nil object") } - assert(obj != nil) - - if fn, ok := obj.(*types.Func); ok { - obj = typeparams.OriginMethod(fn) - } - if t, ok := obj.(*types.Named); ok { - obj = typeparams.NamedTypeOrigin(t) + if g.opts.ExportedIsUsed && obj.Pkg() != g.pkg || obj.Pkg() == nil { + return } - // add new node to graph - node, _ := g.node(obj) - - if p, ok := obj.(*types.Pointer); ok { - if pt, ok := g.pointers[p.Elem()]; ok { - // We've used graph.newPointer before we saw this pointer; add an edge that marks the two pointers as being - // identical - if p != pt { - g.use(p, pt, edgeSamePointer) - g.use(pt, p, edgeSamePointer) - } - } else { - g.pointers[p.Elem()] = p - } + nObj := g.node(obj) + if owner != nil { + nOwner := g.node(owner) + g.addOwned(nOwner, nObj) } - - return node } -func (g *graph) use(used, by interface{}, kind edgeKind) { - if isIrrelevant(used) { - return +func isIrrelevant(obj types.Object) bool { + switch obj.(type) { + case *types.PkgName: + return true + default: + return false } +} - assert(used != nil) - if obj, ok := by.(types.Object); ok && obj.Pkg() != nil { - if obj.Pkg() != g.pkg.Pkg { +func (g *graph) use(used, by types.Object) { + if g.opts.ExportedIsUsed { + if used.Pkg() != g.pkg || used.Pkg() == nil { + return + } + if by != nil && by.Pkg() != g.pkg { return } } - if fn, ok := used.(*types.Func); ok { - used = typeparams.OriginMethod(fn) - } - if fn, ok := by.(*types.Func); ok { - by = typeparams.OriginMethod(fn) - } - - if t, ok := used.(*types.Named); ok { - used = typeparams.NamedTypeOrigin(t) - } - if t, ok := by.(*types.Named); ok { - by = typeparams.NamedTypeOrigin(t) - } - - usedNode, new := g.node(used) - assert(!new) - if by == nil { - g.Root.use(usedNode, kind) - } else { - byNode, new := g.node(by) - assert(!new) - byNode.use(usedNode, kind) + if isIrrelevant(used) { + return 
} -} -func (g *graph) seeAndUse(used, by interface{}, kind edgeKind) *node { - n := g.see(used) - g.use(used, by, kind) - return n + nUsed := g.node(used) + nBy := g.node(by) + g.addUse(nBy, nUsed) } -func (g *graph) entry(pkg *pkg) { - g.pkg = pkg - scopes := map[*types.Scope]*ir.Function{} - for _, fn := range pkg.SrcFuncs { - if fn.Object() != nil { - scope := fn.Object().(*types.Func).Scope() - scopes[scope] = fn - } - } - - for _, f := range pkg.Files { +func (g *graph) entry() { + for _, f := range g.files { for _, cg := range f.Comments { for _, c := range cg.List { if strings.HasPrefix(c.Text, "//go:linkname ") { @@ -921,300 +507,69 @@ func (g *graph) entry(pkg *pkg) { // (1.8) packages use symbols linked via go:linkname fields := strings.Fields(c.Text) if len(fields) == 3 { - if m, ok := pkg.IR.Members[fields[1]]; ok { - var obj types.Object - switch m := m.(type) { - case *ir.Global: - obj = m.Object() - case *ir.Function: - obj = m.Object() - default: - panic(fmt.Sprintf("unhandled type: %T", m)) - } - assert(obj != nil) - g.seeAndUse(obj, nil, edgeLinkname) + obj := g.pkg.Scope().Lookup(fields[1]) + if obj == nil { + continue } + g.use(obj, nil) } } } } } - surroundingFunc := func(obj types.Object) *ir.Function { - scope := obj.Parent() - for scope != nil { - if fn := scopes[scope]; fn != nil { - return fn - } - scope = scope.Parent() + for _, f := range g.files { + for _, decl := range f.Decls { + g.decl(decl, nil) } - return nil } - // IR form won't tell us about locally scoped types that aren't - // being used. Walk the list of Defs to get all named types. - // - // IR form also won't tell us about constants; use Defs and Uses - // to determine which constants exist and which are being used. - for _, obj := range pkg.TypesInfo.Defs { - switch obj := obj.(type) { - case *types.TypeName: - // types are being handled by walking the AST - case *types.Const: - g.see(obj) - fn := surroundingFunc(obj) - if fn == nil && obj.Exported() { - // (1.4) packages use exported constants - g.use(obj, nil, edgeExportedConstant) + if g.opts.GeneratedIsUsed { + // OPT(dh): depending on the options used, we do not need to track all objects. For example, if local variables + // are always used, then it is enough to use their surrounding function. 
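Rule (1.8), implemented by the comment scan above that looks up the second field of a //go:linkname directive in the package scope, is what keeps linknamed symbols from being reported even though nothing references them in Go source. A hypothetical sketch of the pattern it recognizes follows; the package, symbol and target names are invented.

package demo

import _ "unsafe" // importing unsafe is required for go:linkname

// pushSecret is never referenced from Go source in this package; it is only
// reachable through the linker. The directive below has the three fields the
// scan expects (directive, local name, remote name), so rule (1.8) marks the
// local symbol as used and U1000 stays quiet about it.
//
//go:linkname pushSecret example.com/other/pkg.secret
func pushSecret() {}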
+ for obj := range g.objects { + path := g.fset.PositionFor(obj.Pos(), false).Filename + if _, ok := g.generated[path]; ok { + g.use(obj, nil) } - g.typ(obj.Type(), nil) - g.seeAndUse(obj.Type(), obj, edgeType) } } - // Find constants being used inside functions, find sinks in tests - for _, fn := range pkg.SrcFuncs { - if fn.Object() != nil { - g.see(fn.Object()) - } - n := fn.Source() - if n == nil { - continue - } - ast.Inspect(n, func(n ast.Node) bool { - switch n := n.(type) { - case *ast.Ident: - obj, ok := pkg.TypesInfo.Uses[n] - if !ok { - return true - } - switch obj := obj.(type) { - case *types.Const: - g.seeAndUse(obj, owningObject(fn), edgeUsedConstant) - } - case *ast.AssignStmt: - for _, expr := range n.Lhs { - ident, ok := expr.(*ast.Ident) - if !ok { - continue - } - obj := pkg.TypesInfo.ObjectOf(ident) - if obj == nil { - continue - } - path := pkg.Fset.File(obj.Pos()).Name() - if strings.HasSuffix(path, "_test.go") { - if obj.Parent() != nil && obj.Parent().Parent() != nil && obj.Parent().Parent().Parent() == nil { - // object's scope is the package, whose - // parent is the file, whose parent is nil - - // (4.9) functions use package-level variables they assign to iff in tests (sinks for benchmarks) - // (9.7) variable _reads_ use variables, writes do not, except in tests - g.seeAndUse(obj, owningObject(fn), edgeTestSink) - } - } + processMethodSet := func(named *types.TypeName, ms *types.MethodSet) { + if g.opts.ExportedIsUsed { + for i := 0; i < ms.Len(); i++ { + m := ms.At(i) + if token.IsExported(m.Obj().Name()) { + // (2.1) named types use exported methods + // (6.4) structs use embedded fields that have exported methods + // + // By reading the selection, we read all embedded fields that are part of the path + g.readSelection(m, named) } } - - return true - }) - } - // Find constants being used in non-function contexts - for _, obj := range pkg.TypesInfo.Uses { - _, ok := obj.(*types.Const) - if !ok { - continue } - g.seeAndUse(obj, nil, edgeUsedConstant) - } - var fns []*types.Func - var fn *types.Func - var stack []ast.Node - for _, f := range pkg.Files { - ast.Inspect(f, func(n ast.Node) bool { - if n == nil { - pop := stack[len(stack)-1] - stack = stack[:len(stack)-1] - if _, ok := pop.(*ast.FuncDecl); ok { - fns = fns[:len(fns)-1] - if len(fns) == 0 { - fn = nil - } else { - fn = fns[len(fns)-1] - } - } - return true - } - stack = append(stack, n) - switch n := n.(type) { - case *ast.FuncDecl: - fn = pkg.TypesInfo.ObjectOf(n.Name).(*types.Func) - fns = append(fns, fn) - g.see(fn) - case *ast.GenDecl: - switch n.Tok { - case token.CONST: - groups := astutil.GroupSpecs(pkg.Fset, n.Specs) - for _, specs := range groups { - if len(specs) > 1 { - cg := &constGroup{} - g.see(cg) - for _, spec := range specs { - for _, name := range spec.(*ast.ValueSpec).Names { - obj := pkg.TypesInfo.ObjectOf(name) - // (10.1) const groups - g.seeAndUse(obj, cg, edgeConstGroup) - g.use(cg, obj, edgeConstGroup) - } - } - } - } - case token.VAR: - for _, spec := range n.Specs { - v := spec.(*ast.ValueSpec) - for _, name := range v.Names { - T := pkg.TypesInfo.TypeOf(name) - if fn != nil { - g.seeAndUse(T, fn, edgeVarDecl) - } else { - // TODO(dh): we likely want to make - // the type used by the variable, not - // the package containing the - // variable. But then we have to take - // special care of blank identifiers. 
- g.seeAndUse(T, nil, edgeVarDecl) - } - g.typ(T, nil) - } - } - case token.TYPE: - for _, spec := range n.Specs { - // go/types doesn't provide a way to go from a - // types.Named to the named type it was based on - // (the t1 in type t2 t1). Therefore we walk the - // AST and process GenDecls. - // - // (2.2) named types use the type they're based on - v := spec.(*ast.TypeSpec) - T := pkg.TypesInfo.TypeOf(v.Type) - obj := pkg.TypesInfo.ObjectOf(v.Name) - g.see(obj) - g.see(T) - g.use(T, obj, edgeType) - g.typ(obj.Type(), nil) - g.typ(T, nil) - - if v.Assign != 0 { - aliasFor := obj.(*types.TypeName).Type() - // (2.3) named types use all their aliases. we can't easily track uses of aliases - if isIrrelevant(aliasFor) { - // We do not track the type this is an - // alias for (for example builtins), so - // just mark the alias used. - // - // FIXME(dh): what about aliases declared inside functions? - g.use(obj, nil, edgeAlias) - } else { - g.see(aliasFor) - g.seeAndUse(obj, aliasFor, edgeAlias) - } - } + if _, ok := named.Type().Underlying().(*types.Interface); !ok { + // (8.0) handle interfaces + // + // We don't care about interfaces implementing interfaces; all their methods are already used, anyway + for _, iface := range g.interfaceTypes { + if sels, ok := implements(named.Type(), iface, ms); ok { + for _, sel := range sels { + // (8.2) any concrete type implements all known interfaces + // (6.3) structs use embedded fields that help implement interfaces + g.readSelection(sel, named) } } } - return true - }) + } } - for _, m := range pkg.IR.Members { - switch m := m.(type) { - case *ir.NamedConst: - // nothing to do, we collect all constants from Defs - case *ir.Global: - if m.Object() != nil { - g.see(m.Object()) - if m.Object().Exported() { - // (1.3) packages use exported variables - g.use(m.Object(), nil, edgeExportedVariable) - } - } - case *ir.Function: - mObj := owningObject(m) - if mObj != nil { - g.see(mObj) - } - //lint:ignore SA9003 handled implicitly - if m.Name() == "init" { - // (1.5) packages use init functions - // - // This is handled implicitly. The generated init - // function has no object, thus everything in it will - // be owned by the package. - } - // This branch catches top-level functions, not methods. - if m.Object() != nil && m.Object().Exported() { - // (1.2) packages use exported functions - g.use(mObj, nil, edgeExportedFunction) - } - if m.Name() == "main" && pkg.Pkg.Name() == "main" { - // (1.7) packages use the main function iff in the main package - g.use(mObj, nil, edgeMainFunction) - } - if pkg.Pkg.Path() == "runtime" && runtimeFuncs[m.Name()] { - // (9.8) runtime functions that may be called from user code via the compiler - g.use(mObj, nil, edgeRuntimeFunction) - } - if m.Source() != nil { - doc := m.Source().(*ast.FuncDecl).Doc - if doc != nil { - for _, cmt := range doc.List { - if strings.HasPrefix(cmt.Text, "//go:cgo_export_") { - // (1.6) packages use functions exported to cgo - g.use(mObj, nil, edgeCgoExported) - } - } - } - } - g.function(m) - case *ir.Type: - g.see(m.Object()) - if m.Object().Exported() { - // (1.1) packages use exported named types - g.use(m.Object(), nil, edgeExportedType) - } - g.typ(m.Type(), nil) - default: - panic(fmt.Sprintf("unreachable: %T", m)) - } - } - - // OPT(dh): can we find meaningful initial capacities for these slices? 
- var ifaces []*types.Interface - var notIfaces []types.Type - - for t := range g.seenTypes { - switch t := t.(type) { - case *types.Interface: - // OPT(dh): (8.1) we only need interfaces that have unexported methods - ifaces = append(ifaces, t) - default: - if _, ok := t.Underlying().(*types.Interface); !ok { - notIfaces = append(notIfaces, t) - } - } - } + for _, named := range g.namedTypes { + // OPT(dh): do we already have the method set available? + processMethodSet(named, types.NewMethodSet(named.Type())) + processMethodSet(named, types.NewMethodSet(types.NewPointer(named.Type()))) - // (8.0) handle interfaces - for _, t := range notIfaces { - ms := pkg.IR.Prog.MethodSets.MethodSet(t) - for _, iface := range ifaces { - if sels, ok := g.implements(t, iface, ms); ok { - for _, sel := range sels { - g.useMethod(t, sel, t, edgeImplements) - } - } - } } type ignoredKey struct { @@ -1222,7 +577,7 @@ func (g *graph) entry(pkg *pkg) { line int } ignores := map[ignoredKey]struct{}{} - for _, dir := range pkg.Directives { + for _, dir := range g.directives { if dir.Command != "ignore" && dir.Command != "file-ignore" { continue } @@ -1231,7 +586,7 @@ func (g *graph) entry(pkg *pkg) { } for _, check := range strings.Split(dir.Arguments[0], ",") { if check == "U1000" { - pos := pkg.Fset.PositionFor(dir.Node.Pos(), false) + pos := g.fset.PositionFor(dir.Node.Pos(), false) var key ignoredKey switch dir.Command { case "ignore": @@ -1254,46 +609,43 @@ func (g *graph) entry(pkg *pkg) { if len(ignores) > 0 { // all objects annotated with a //lint:ignore U1000 are considered used - for obj := range g.Nodes { - if obj, ok := obj.(types.Object); ok { - pos := pkg.Fset.PositionFor(obj.Pos(), false) - key1 := ignoredKey{ - pos.Filename, - pos.Line, - } - key2 := ignoredKey{ - pos.Filename, - -1, - } - _, ok := ignores[key1] - if !ok { - _, ok = ignores[key2] - } - if ok { - g.use(obj, nil, edgeIgnored) - - // use methods and fields of ignored types - if obj, ok := obj.(*types.TypeName); ok { - if obj.IsAlias() { - if typ, ok := obj.Type().(*types.Named); ok && typ.Obj().Pkg() != obj.Pkg() { - // This is an alias of a named type in another package. - // Don't walk its fields or methods; we don't have to, - // and it breaks an assertion in graph.use because we're using an object that we haven't seen before. - // - // For aliases to types in the same package, we do want to ignore the fields and methods, - // because ignoring the alias should ignore the aliased type. - continue - } + for obj := range g.objects { + pos := g.fset.PositionFor(obj.Pos(), false) + key1 := ignoredKey{ + pos.Filename, + pos.Line, + } + key2 := ignoredKey{ + pos.Filename, + -1, + } + _, ok := ignores[key1] + if !ok { + _, ok = ignores[key2] + } + if ok { + g.use(obj, nil) + + // use methods and fields of ignored types + if obj, ok := obj.(*types.TypeName); ok { + if obj.IsAlias() { + if typ, ok := obj.Type().(*types.Named); ok && (g.opts.ExportedIsUsed && typ.Obj().Pkg() != obj.Pkg() || typ.Obj().Pkg() == nil) { + // This is an alias of a named type in another package. + // Don't walk its fields or methods; we don't have to. + // + // For aliases to types in the same package, we do want to ignore the fields and methods, + // because ignoring the alias should ignore the aliased type. 
+ continue } - if typ, ok := obj.Type().(*types.Named); ok { - for i := 0; i < typ.NumMethods(); i++ { - g.use(typ.Method(i), nil, edgeIgnored) - } + } + if typ, ok := obj.Type().(*types.Named); ok { + for i := 0; i < typ.NumMethods(); i++ { + g.use(typ.Method(i), nil) } - if typ, ok := obj.Type().Underlying().(*types.Struct); ok { - for i := 0; i < typ.NumFields(); i++ { - g.use(typ.Field(i), nil, edgeIgnored) - } + } + if typ, ok := obj.Type().Underlying().(*types.Struct); ok { + for i := 0; i < typ.NumFields(); i++ { + g.use(typ.Field(i), nil) } } } @@ -1302,496 +654,782 @@ func (g *graph) entry(pkg *pkg) { } } -func (g *graph) useMethod(t types.Type, sel *types.Selection, by interface{}, kind edgeKind) { - obj := sel.Obj().(*types.Func) - path := sel.Index() - assert(obj != nil) - if len(path) > 1 { - base := typeutil.Dereference(t).Underlying().(*types.Struct) - for _, idx := range path[:len(path)-1] { - next := base.Field(idx) - // (6.3) structs use embedded fields that help implement interfaces - g.see(base) - g.seeAndUse(next, base, edgeProvidesMethod) - base, _ = typeutil.Dereference(next.Type()).Underlying().(*types.Struct) - } - } - g.seeAndUse(obj, by, kind) -} - -func owningObject(fn *ir.Function) types.Object { - if fn.Object() != nil { - return fn.Object() - } - if fn.Parent() != nil { - return owningObject(fn.Parent()) - } - return nil +func isOfType[T any](x any) bool { + _, ok := x.(T) + return ok } -func (g *graph) function(fn *ir.Function) { - assert(fn != nil) - if fn.Package() != nil && fn.Package() != g.pkg.IR { +func (g *graph) read(node ast.Node, by types.Object) { + if node == nil { return } - if _, ok := g.seenFns[fn]; ok { - return - } - g.seenFns[fn] = struct{}{} + switch node := node.(type) { + case *ast.Ident: + // Among many other things, this handles + // (7.1) field accesses use fields - // (4.1) functions use all their arguments, return parameters and receivers - g.signature(fn.Signature, owningObject(fn)) - g.instructions(fn) - for _, anon := range fn.AnonFuncs { - // (4.2) functions use anonymous functions defined beneath them - // - // This fact is expressed implicitly. Anonymous functions have - // no types.Object, so their owner is the surrounding - // function. 
- g.function(anon) - } -} + obj := g.info.ObjectOf(node) + g.use(obj, by) -func (g *graph) typ(t types.Type, parent types.Type) { - if _, ok := g.seenTypes[t]; ok { - return - } + case *ast.BasicLit: + // Nothing to do - if t, ok := t.(*types.Named); ok && t.Obj().Pkg() != nil { - if t.Obj().Pkg() != g.pkg.Pkg { - return + case *ast.SliceExpr: + g.read(node.X, by) + g.read(node.Low, by) + g.read(node.High, by) + g.read(node.Max, by) + + case *ast.UnaryExpr: + g.read(node.X, by) + + case *ast.ParenExpr: + g.read(node.X, by) + + case *ast.ArrayType: + g.read(node.Len, by) + g.read(node.Elt, by) + + case *ast.SelectorExpr: + g.readSelectorExpr(node, by) + + case *ast.IndexExpr: + // Among many other things, this handles + // (2.6) named types use all their type arguments + g.read(node.X, by) + g.read(node.Index, by) + + case *ast.IndexListExpr: + // Among many other things, this handles + // (2.6) named types use all their type arguments + g.read(node.X, by) + for _, index := range node.Indices { + g.read(index, by) } - } - g.seenTypes[t] = struct{}{} - if isIrrelevant(t) { - return - } - - g.see(t) - switch t := t.(type) { - case *types.Struct: - for i := 0; i < t.NumFields(); i++ { - g.see(t.Field(i)) - if t.Field(i).Exported() { - // (6.2) structs use exported fields - g.use(t.Field(i), t, edgeExportedField) - } else if t.Field(i).Name() == "_" { - g.use(t.Field(i), t, edgeBlankField) - } else if isNoCopyType(t.Field(i).Type()) { - // (6.1) structs use fields of type NoCopy sentinel - g.use(t.Field(i), t, edgeNoCopySentinel) - } else if parent == nil { - // (11.1) anonymous struct types use all their fields. - g.use(t.Field(i), t, edgeAnonymousStruct) + case *ast.BinaryExpr: + g.read(node.X, by) + g.read(node.Y, by) + + case *ast.CompositeLit: + g.read(node.Type, by) + // We get the type of the node itself, not of node.Type, to handle nested composite literals of the kind + // T{{...}} + typ, isStruct := typeutil.CoreType(g.info.TypeOf(node)).(*types.Struct) + + if isStruct { + unkeyed := len(node.Elts) != 0 && !isOfType[*ast.KeyValueExpr](node.Elts[0]) + if g.opts.FieldWritesAreUses && unkeyed { + // Untagged struct literal that specifies all fields. We have to manually use the fields in the type, + // because the unkeyd literal doesn't contain any nodes referring to the fields. + for i := 0; i < typ.NumFields(); i++ { + g.use(typ.Field(i), by) + } } - if t.Field(i).Anonymous() { - // does the embedded field contribute exported methods to the method set? 
- T := t.Field(i).Type() - if _, ok := T.Underlying().(*types.Pointer); !ok { - // An embedded field is addressable, so check - // the pointer type to get the full method set - T = g.newPointer(T) + if g.opts.FieldWritesAreUses || unkeyed { + for _, elt := range node.Elts { + g.read(elt, by) } - ms := g.pkg.IR.Prog.MethodSets.MethodSet(T) - for j := 0; j < ms.Len(); j++ { - if ms.At(j).Obj().Exported() { - // (6.4) structs use embedded fields that have exported methods (recursively) - g.use(t.Field(i), t, edgeExtendsExportedMethodSet) - break - } + } else { + for _, elt := range node.Elts { + kv := elt.(*ast.KeyValueExpr) + g.write(kv.Key, by) + g.read(kv.Value, by) } + } + } else { + for _, elt := range node.Elts { + g.read(elt, by) + } + } - seen := map[*types.Struct]struct{}{} - var hasExportedField func(t types.Type) bool - hasExportedField = func(T types.Type) bool { - t, ok := typeutil.Dereference(T).Underlying().(*types.Struct) - if !ok { - return false - } - if _, ok := seen[t]; ok { - return false - } - seen[t] = struct{}{} - for i := 0; i < t.NumFields(); i++ { - field := t.Field(i) - if field.Exported() { - return true - } - if field.Embedded() && hasExportedField(field.Type()) { - return true - } - } - return false - } - // does the embedded field contribute exported fields? - if hasExportedField(t.Field(i).Type()) { - // (6.5) structs use embedded structs that have exported fields (recursively) - g.use(t.Field(i), t, edgeExtendsExportedFields) - } + case *ast.KeyValueExpr: + g.read(node.Key, by) + g.read(node.Value, by) + + case *ast.StarExpr: + g.read(node.X, by) + + case *ast.MapType: + g.read(node.Key, by) + g.read(node.Value, by) + case *ast.FuncLit: + g.read(node.Type, by) + + // See graph.decl's handling of ast.FuncDecl for why this bit of code is necessary. + fn := g.info.TypeOf(node).(*types.Signature) + for params, i := fn.Params(), 0; i < params.Len(); i++ { + g.see(params.At(i), by) + if params.At(i).Name() == "" { + g.use(params.At(i), by) } - g.variable(t.Field(i)) } - case *types.Basic: - // Nothing to do - case *types.Named: - // (9.3) types use their underlying and element types - origin := typeparams.NamedTypeOrigin(t) - g.seeAndUse(origin.Underlying(), t, edgeUnderlyingType) - g.seeAndUse(t.Obj(), t, edgeTypeName) - g.seeAndUse(t, t.Obj(), edgeNamedType) - - // (2.4) named types use the pointer type - if _, ok := t.Underlying().(*types.Interface); !ok && t.NumMethods() > 0 { - g.seeAndUse(g.newPointer(origin), t, edgePointerType) + + g.block(node.Body, by) + + case *ast.FuncType: + m := map[*types.Var]struct{}{} + if !g.opts.ParametersAreUsed { + m = map[*types.Var]struct{}{} + // seeScope marks all local variables in the scope as used, but we don't want to unconditionally use + // parameters, as this is controlled by Options.ParametersAreUsed. Pass seeScope a list of variables it + // should skip. 
+ for _, f := range node.Params.List { + for _, name := range f.Names { + m[g.info.ObjectOf(name).(*types.Var)] = struct{}{} + } + } } + g.seeScope(node, by, m) - // (2.5) named types use their type parameters + // (4.1) functions use all their arguments, return parameters and receivers + // (12.1) type parameters use their constraint type + g.read(node.TypeParams, by) + if g.opts.ParametersAreUsed { + g.read(node.Params, by) + } + g.read(node.Results, by) - for i := 0; i < typeparams.ForNamed(t).Len(); i++ { - tparam := typeparams.ForNamed(t).At(i) - g.seeAndUse(tparam, t, edgeTypeParam) - g.typ(tparam, nil) + case *ast.FieldList: + if node == nil { + return } - // (2.6) named types use their type arguments - for i := 0; i < typeparams.NamedTypeArgs(t).Len(); i++ { - targ := typeparams.NamedTypeArgs(t).At(i) - g.seeAndUse(targ, t, edgeTypeArg) - g.typ(t, nil) + // This branch is only hit for field lists enclosed by parentheses or square brackets, i.e. parameters. Fields + // (for structs) and method lists (for interfaces) are handled elsewhere. + + for _, field := range node.List { + if len(field.Names) == 0 { + g.read(field.Type, by) + } else { + for _, name := range field.Names { + // OPT(dh): instead of by -> name -> type, we could just emit by -> type. We don't care about the + // (un)usedness of parameters of any kind. + obj := g.info.ObjectOf(name) + g.use(obj, by) + g.read(field.Type, obj) + } + } } - for i := 0; i < t.NumMethods(); i++ { - g.see(t.Method(i)) - // don't use trackExportedIdentifier here, we care about - // all exported methods, even in package main or in tests. - if t.Method(i).Exported() { - // (2.1) named types use exported methods - g.use(t.Method(i), t, edgeExportedMethod) + case *ast.ChanType: + g.read(node.Value, by) + + case *ast.StructType: + // This is only used for anonymous struct types, not named ones. + + for _, field := range node.Fields.List { + if len(field.Names) == 0 { + // embedded field + + f := g.embeddedField(field.Type, by) + g.use(f, by) + } else { + for _, name := range field.Names { + // (11.1) anonymous struct types use all their fields + // OPT(dh): instead of by -> name -> type, we could just emit by -> type. If the type is used, then the fields are used. 
+ obj := g.info.ObjectOf(name) + g.see(obj, by) + g.use(obj, by) + g.read(field.Type, g.info.ObjectOf(name)) + } } - g.function(g.pkg.IR.Prog.FuncValue(t.Method(i))) } - g.typ(origin.Underlying(), t) - case *types.Slice: - // (9.3) types use their underlying and element types - g.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(t.Elem(), nil) - case *types.Map: - // (9.3) types use their underlying and element types - g.seeAndUse(t.Elem(), t, edgeElementType) - // (9.3) types use their underlying and element types - g.seeAndUse(t.Key(), t, edgeKeyType) - g.typ(t.Elem(), nil) - g.typ(t.Key(), nil) - case *types.Signature: - g.signature(t, nil) - case *types.Interface: - for i := 0; i < t.NumMethods(); i++ { - m := t.Method(i) - // (8.3) All interface methods are marked as used - g.seeAndUse(m, t, edgeInterfaceMethod) - g.seeAndUse(m.Type().(*types.Signature), m, edgeSignature) - g.signature(m.Type().(*types.Signature), nil) + case *ast.TypeAssertExpr: + g.read(node.X, by) + g.read(node.Type, by) + + case *ast.InterfaceType: + if len(node.Methods.List) != 0 { + g.interfaceTypes = append(g.interfaceTypes, g.info.TypeOf(node).(*types.Interface)) } - for i := 0; i < t.NumEmbeddeds(); i++ { - tt := t.EmbeddedType(i) - // (8.4) All embedded interfaces are marked as used - g.typ(tt, nil) - g.seeAndUse(tt, t, edgeEmbeddedInterface) + for _, meth := range node.Methods.List { + switch len(meth.Names) { + case 0: + // Embedded type or type union + // (8.4) all embedded interfaces are marked as used + // (this also covers type sets) + + g.read(meth.Type, by) + case 1: + // Method + // (8.3) all interface methods are marked as used + obj := g.info.ObjectOf(meth.Names[0]) + g.see(obj, by) + g.use(obj, by) + g.read(meth.Type, obj) + default: + panic(fmt.Sprintf("unexpected number of names: %d", len(meth.Names))) + } } - case *types.Array: - // (9.3) types use their underlying and element types - g.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(t.Elem(), nil) - case *types.Pointer: - // (9.3) types use their underlying and element types - g.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(t.Elem(), nil) - case *types.Chan: - // (9.3) types use their underlying and element types - g.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(t.Elem(), nil) - case *types.Tuple: - for i := 0; i < t.Len(); i++ { - // (9.3) types use their underlying and element types - g.seeAndUse(t.At(i).Type(), t, edgeTupleElement|edgeType) - g.typ(t.At(i).Type(), nil) + + case *ast.Ellipsis: + g.read(node.Elt, by) + + case *ast.CallExpr: + g.read(node.Fun, by) + for _, arg := range node.Args { + g.read(arg, by) } - case *typeutil.Iterator: - // (9.3) types use their underlying and element types - g.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(t.Elem(), nil) - case *typeparams.TypeParam: - // (9.3) types use their underlying and element types - - g.seeAndUse(t.Obj(), t, edgeTypeName) - g.seeAndUse(t, t.Obj(), edgeNamedType) - g.seeAndUse(t.Constraint(), t, edgeElementType) - g.typ(t.Constraint(), t) - case *typeparams.Union: - for i := 0; i < t.Len(); i++ { - g.seeAndUse(t.Term(i).Type(), t, edgeUnionTerm) - g.typ(t.Term(i).Type(), nil) + + // Handle conversiosn + conv := node + if len(conv.Args) != 1 || conv.Ellipsis.IsValid() { + return + } + + dst := g.info.TypeOf(conv.Fun) + src := g.info.TypeOf(conv.Args[0]) + + // XXX use DereferenceR instead + // XXX guard against infinite recursion in DereferenceR + tSrc := typeutil.CoreType(typeutil.Dereference(src)) + tDst := typeutil.CoreType(typeutil.Dereference(dst)) + stSrc, okSrc := 
tSrc.(*types.Struct) + stDst, okDst := tDst.(*types.Struct) + if okDst && okSrc { + // Converting between two structs. The fields are + // relevant for the conversion, but only if the + // fields are also used outside of the conversion. + // Mark fields as used by each other. + + assert(stDst.NumFields() == stSrc.NumFields()) + for i := 0; i < stDst.NumFields(); i++ { + // (5.1) when converting between two equivalent structs, the fields in + // either struct use each other. the fields are relevant for the + // conversion, but only if the fields are also accessed outside the + // conversion. + g.use(stDst.Field(i), stSrc.Field(i)) + g.use(stSrc.Field(i), stDst.Field(i)) + } + } else if okSrc && tDst == types.Typ[types.UnsafePointer] { + // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. + g.useAllFieldsRecursively(stSrc, by) + } else if okDst && tSrc == types.Typ[types.UnsafePointer] { + // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. + g.useAllFieldsRecursively(stDst, by) } + default: - panic(fmt.Sprintf("unreachable: %T", t)) + lint.ExhaustiveTypeSwitch(node) } } -func (g *graph) variable(v *types.Var) { - // (9.2) variables use their types - g.seeAndUse(v.Type(), v, edgeType) - g.typ(v.Type(), nil) +func (g *graph) useAllFieldsRecursively(typ types.Type, by types.Object) { + switch typ := typ.Underlying().(type) { + case *types.Struct: + for i := 0; i < typ.NumFields(); i++ { + field := typ.Field(i) + g.use(field, by) + g.useAllFieldsRecursively(field.Type(), by) + } + case *types.Array: + g.useAllFieldsRecursively(typ.Elem(), by) + default: + return + } } -func (g *graph) signature(sig *types.Signature, fn types.Object) { - var user interface{} = fn - if fn == nil { - user = sig - g.see(sig) +func (g *graph) write(node ast.Node, by types.Object) { + if node == nil { + return } - if sig.Recv() != nil { - g.seeAndUse(sig.Recv().Type(), user, edgeReceiver|edgeType) - g.typ(sig.Recv().Type(), nil) + + switch node := node.(type) { + case *ast.Ident: + obj := g.info.ObjectOf(node) + if obj == nil { + // This can happen for `switch x := v.(type)`, where that x doesn't have an object + return + } + + // (4.9) functions use package-level variables they assign to iff in tests (sinks for benchmarks) + // (9.7) variable _reads_ use variables, writes do not, except in tests + path := g.fset.File(obj.Pos()).Name() + if strings.HasSuffix(path, "_test.go") { + if isGlobal(obj) { + g.use(obj, by) + } + } + + case *ast.IndexExpr: + g.read(node.X, by) + g.read(node.Index, by) + + case *ast.SelectorExpr: + if g.opts.FieldWritesAreUses { + // Writing to a field constitutes a use. See https://staticcheck.io/issues/288 for some discussion on that. + // + // This code can also get triggered by qualified package variables, in which case it doesn't matter what we do, + // because the object is in another package. + // + // FIXME(dh): ^ isn't true if we track usedness of exported identifiers + g.readSelectorExpr(node, by) + } else { + g.read(node.X, by) + g.write(node.Sel, by) + } + + case *ast.StarExpr: + g.read(node.X, by) + + case *ast.ParenExpr: + g.write(node.X, by) + + default: + lint.ExhaustiveTypeSwitch(node) } - for i := 0; i < sig.Params().Len(); i++ { - param := sig.Params().At(i) - g.seeAndUse(param.Type(), user, edgeFunctionArgument|edgeType) - g.typ(param.Type(), nil) +} + +// readSelectorExpr reads all elements of a selector expression, including implicit fields. 
+func (g *graph) readSelectorExpr(sel *ast.SelectorExpr, by types.Object) { + // cover AST-based accesses + g.read(sel.X, by) + g.read(sel.Sel, by) + + tsel, ok := g.info.Selections[sel] + if !ok { + return } - for i := 0; i < sig.Results().Len(); i++ { - param := sig.Results().At(i) - g.seeAndUse(param.Type(), user, edgeFunctionResult|edgeType) - g.typ(param.Type(), nil) + g.readSelection(tsel, by) +} + +func (g *graph) readSelection(sel *types.Selection, by types.Object) { + indices := sel.Index() + base := sel.Recv() + for _, idx := range indices[:len(indices)-1] { + // XXX do we need core types here? + field := typeutil.Dereference(base.Underlying()).Underlying().(*types.Struct).Field(idx) + g.use(field, by) + base = field.Type() } - for i := 0; i < typeparams.RecvTypeParams(sig).Len(); i++ { - // We track the type parameter's constraint, not the type parameter itself. - // We never want to flag an unused type parameter. - param := typeparams.RecvTypeParams(sig).At(i).Constraint() - g.seeAndUse(param, user, edgeFunctionArgument|edgeType) - g.typ(param, nil) + + g.use(sel.Obj(), by) +} + +func (g *graph) block(block *ast.BlockStmt, by types.Object) { + if block == nil { + return } - for i := 0; i < typeparams.ForSignature(sig).Len(); i++ { - // We track the type parameter's constraint, not the type parameter itself. - // We never want to flag an unused type parameter. - param := typeparams.ForSignature(sig).At(i).Constraint() - g.seeAndUse(param, user, edgeFunctionArgument|edgeType) - g.typ(param, nil) + + g.seeScope(block, by, nil) + for _, stmt := range block.List { + g.stmt(stmt, by) } } -func (g *graph) instructions(fn *ir.Function) { - fnObj := owningObject(fn) - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - ops := instr.Operands(nil) - switch instr.(type) { - case *ir.Store: - // (9.7) variable _reads_ use variables, writes do not - ops = ops[1:] - case *ir.DebugRef: - ops = nil +func isGlobal(obj types.Object) bool { + return obj.Parent() == obj.Pkg().Scope() +} + +func (g *graph) decl(decl ast.Decl, by types.Object) { + switch decl := decl.(type) { + case *ast.GenDecl: + switch decl.Tok { + case token.IMPORT: + // Nothing to do + + case token.CONST: + for _, spec := range decl.Specs { + vspec := spec.(*ast.ValueSpec) + assert(len(vspec.Values) == 0 || len(vspec.Values) == len(vspec.Names)) + for i, name := range vspec.Names { + obj := g.info.ObjectOf(name) + g.see(obj, by) + g.read(vspec.Type, obj) + + if len(vspec.Values) != 0 { + g.read(vspec.Values[i], obj) + } + + if name.Name == "_" { + // (9.9) objects named the blank identifier are used + g.use(obj, by) + } else if token.IsExported(name.Name) && isGlobal(obj) && g.opts.ExportedIsUsed { + g.use(obj, nil) + } + } } - for _, arg := range ops { - walkPhi(*arg, func(v ir.Value) { - switch v := v.(type) { - case *ir.Function: - // (4.3) functions use closures and bound methods. - // (4.5) functions use functions they call - // (9.5) instructions use their operands - // (4.4) functions use functions they return. we assume that someone else will call the returned function - if owningObject(v) != nil { - g.seeAndUse(owningObject(v), fnObj, edgeInstructionOperand) + + groups := astutil.GroupSpecs(g.fset, decl.Specs) + for _, group := range groups { + // (10.1) if one constant out of a block of constants is used, mark all of them used + // + // We encode this as a ring. If we have a constant group 'const ( a; b; c )', then we'll produce the + // following graph: a -> b -> c -> a. 
+ + var first, prev, last types.Object + for _, spec := range group { + for _, name := range spec.(*ast.ValueSpec).Names { + if name.Name == "_" { + // Having a blank constant in a group doesn't mark the whole group as used + continue } - g.function(v) - case *ir.Const: - // (9.6) instructions use their operands' types - g.seeAndUse(v.Type(), fnObj, edgeType) - g.typ(v.Type(), nil) - case *ir.Global: - if v.Object() != nil { - // (9.5) instructions use their operands - g.seeAndUse(v.Object(), fnObj, edgeInstructionOperand) + + obj := g.info.ObjectOf(name) + if first == nil { + first = obj + } else { + g.use(obj, prev) } + prev = obj + last = obj } - }) - } - if v, ok := instr.(ir.Value); ok { - if _, ok := v.(*ir.Range); !ok { - // See https://github.com/golang/go/issues/19670 - - // (4.8) instructions use their types - // (9.4) conversions use the type they convert to - g.seeAndUse(v.Type(), fnObj, edgeType) - g.typ(v.Type(), nil) + } + if first != nil && first != last { + g.use(first, last) } } - switch instr := instr.(type) { - case *ir.Field: - // Can't access fields via generics, for now. - - st := instr.X.Type().Underlying().(*types.Struct) - field := st.Field(instr.Field) - // (4.7) functions use fields they access - g.seeAndUse(field, fnObj, edgeFieldAccess) - case *ir.FieldAddr: - // User code can't access fields on type parameters, but composite literals are still possible, which - // compile to FieldAddr + Store. - - st := typeutil.CoreType(typeutil.Dereference(instr.X.Type())).(*types.Struct) - field := st.Field(instr.Field) - // (4.7) functions use fields they access - g.seeAndUse(field, fnObj, edgeFieldAccess) - case *ir.Store: - // nothing to do, handled generically by operands - case ir.CallInstruction: - c := instr.Common() - for _, targ := range c.TypeArgs { - g.seeAndUse(targ, fnObj, edgeTypeArg) + + case token.TYPE: + for _, spec := range decl.Specs { + tspec := spec.(*ast.TypeSpec) + obj := g.info.ObjectOf(tspec.Name).(*types.TypeName) + g.see(obj, by) + g.seeScope(tspec, obj, nil) + if !tspec.Assign.IsValid() { + g.namedTypes = append(g.namedTypes, obj) } - if !c.IsInvoke() { - // handled generically as an instruction operand - } else { - // (4.5) functions use functions/interface methods they call - g.seeAndUse(c.Method, fnObj, edgeInterfaceCall) + if token.IsExported(tspec.Name.Name) && isGlobal(obj) && g.opts.ExportedIsUsed { + // (1.1) packages use exported named types + g.use(g.info.ObjectOf(tspec.Name), nil) } - case *ir.Return: - // nothing to do, handled generically by operands - case *ir.ChangeType: - // conversion type handled generically - - s1, ok1 := typeutil.CoreType(typeutil.Dereference(instr.Type())).(*types.Struct) - s2, ok2 := typeutil.CoreType(typeutil.Dereference(instr.X.Type())).(*types.Struct) - if ok1 && ok2 { - // Converting between two structs. The fields are - // relevant for the conversion, but only if the - // fields are also used outside of the conversion. - // Mark fields as used by each other. - - assert(s1.NumFields() == s2.NumFields()) - for i := 0; i < s1.NumFields(); i++ { - g.see(s1.Field(i)) - g.see(s2.Field(i)) - // (5.1) when converting between two equivalent structs, the fields in - // either struct use each other. the fields are relevant for the - // conversion, but only if the fields are also accessed outside the - // conversion. 
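A standalone sketch of the ring encoding for rule (10.1) described above, using a plain map-based graph with made-up names rather than the real graph type; reaching any member of the group reaches all of them:

package main

import "fmt"

func main() {
	uses := map[string][]string{} // "x uses y" edges
	group := []string{"a", "b", "c"}
	for i, name := range group {
		next := group[(i+1)%len(group)]
		uses[name] = append(uses[name], next) // a -> b -> c -> a
	}

	// Mark everything reachable from "b", as the real graph coloring would.
	used := map[string]bool{}
	var mark func(string)
	mark = func(n string) {
		if used[n] {
			return
		}
		used[n] = true
		for _, m := range uses[n] {
			mark(m)
		}
	}
	mark("b")
	fmt.Println(used) // all three members end up marked
}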
- g.seeAndUse(s1.Field(i), s2.Field(i), edgeStructConversion) - g.seeAndUse(s2.Field(i), s1.Field(i), edgeStructConversion) - } + + // (2.5) named types use all their type parameters + g.read(tspec.TypeParams, obj) + + g.namedType(obj, tspec.Type) + + if tspec.Name.Name == "_" { + // (9.9) objects named the blank identifier are used + g.use(obj, by) } - case *ir.MakeInterface: - // nothing to do, handled generically by operands - case *ir.Slice: - // nothing to do, handled generically by operands - case *ir.RunDefers: - // nothing to do, the deferred functions are already marked use by deferring them. - case *ir.Convert: - // to unsafe.Pointer - if typ, ok := instr.Type().(*types.Basic); ok && typ.Kind() == types.UnsafePointer { - if ptr, ok := instr.X.Type().Underlying().(*types.Pointer); ok { - if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { - for i := 0; i < st.NumFields(); i++ { - // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. - g.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) - } + } + + case token.VAR: + // We cannot rely on types.Initializer for package-level variables because + // - initializers are only tracked for variables that are actually initialized + // - we want to see the AST of the type, if specified, not just the rhs + + for _, spec := range decl.Specs { + vspec := spec.(*ast.ValueSpec) + for i, name := range vspec.Names { + obj := g.info.ObjectOf(name) + g.see(obj, by) + // variables and constants use their types + g.read(vspec.Type, obj) + + if len(vspec.Names) == len(vspec.Values) { + // One value per variable + g.read(vspec.Values[i], obj) + } else if len(vspec.Values) != 0 { + // Multiple variables initialized with a single rhs + // assert(len(vspec.Values) == 1) + if len(vspec.Values) != 1 { + panic(g.fset.PositionFor(vspec.Pos(), false)) } + g.read(vspec.Values[0], obj) } - } - // from unsafe.Pointer - if typ, ok := instr.X.Type().(*types.Basic); ok && typ.Kind() == types.UnsafePointer { - if ptr, ok := instr.Type().Underlying().(*types.Pointer); ok { - if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { - for i := 0; i < st.NumFields(); i++ { - // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. - g.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) - } - } + + if token.IsExported(name.Name) && isGlobal(obj) && g.opts.ExportedIsUsed { + // (1.3) packages use exported variables + g.use(obj, nil) + } + + if name.Name == "_" { + // (9.9) objects named the blank identifier are used + g.use(obj, by) } } - case *ir.TypeAssert: - // nothing to do, handled generically by instruction - // type (possibly a tuple, which contains the asserted - // to type). 
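For orientation, a hypothetical package-level declaration block annotated with the rules the token.CONST/token.VAR handling above applies; the annotations are mine, not analyzer output:

package p

func twoValues() (int, bool) { return 0, false }

var Exported = 1 // (1.3) packages use exported variables

var _ = 42 // (9.9) objects named the blank identifier are used

var x, y = twoValues() // one rhs, two names: both names read the same call

const (
	a = iota // (10.1) a, b and c form a ring: using any of them uses all three
	b
	c
)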
redundantly handled by the type of - // ir.Extract, too - case *ir.MakeClosure: - // nothing to do, handled generically by operands - case *ir.Alloc: - // nothing to do - case *ir.UnOp: - // nothing to do - case *ir.BinOp: - // nothing to do - case *ir.If: - // nothing to do - case *ir.Jump: - // nothing to do - case *ir.Unreachable: - // nothing to do - case *ir.IndexAddr: - // nothing to do - case *ir.Extract: - // nothing to do - case *ir.Panic: - // nothing to do - case *ir.DebugRef: - // nothing to do - case *ir.BlankStore: - // nothing to do - case *ir.Phi: - // nothing to do - case *ir.Sigma: - // nothing to do - case *ir.MakeMap: - // nothing to do - case *ir.MapUpdate: - // nothing to do - case *ir.MapLookup: - // nothing to do - case *ir.StringLookup: - // nothing to do - case *ir.MakeSlice: - // nothing to do - case *ir.Send: - // nothing to do - case *ir.MakeChan: - // nothing to do - case *ir.Range: - // nothing to do - case *ir.Next: - // nothing to do - case *ir.Index: - // nothing to do - case *ir.Select: - // nothing to do - case *ir.ChangeInterface: - // nothing to do - case *ir.Load: - // nothing to do - case *ir.Parameter: - // nothing to do - case *ir.Const: - // nothing to do - case *ir.ArrayConst: - // nothing to do - case *ir.AggregateConst: - // nothing to do - case *ir.GenericConst: - // nothing to do - case *ir.Recv: - // nothing to do - case *ir.TypeSwitch: - // nothing to do - case *ir.ConstantSwitch: - // nothing to do - case *ir.SliceToArrayPointer: - // nothing to do + } + + default: + panic(fmt.Sprintf("unexpected token %s", decl.Tok)) + } + + case *ast.FuncDecl: + // XXX calling OriginMethod is unnecessary if we use types.Func.Origin + obj := typeparams.OriginMethod(g.info.ObjectOf(decl.Name).(*types.Func)) + g.see(obj, nil) + + if token.IsExported(decl.Name.Name) && g.opts.ExportedIsUsed { + if decl.Recv == nil { + // (1.2) packages use exported functions + g.use(obj, nil) + } + } else if decl.Name.Name == "init" { + // (1.5) packages use init functions + g.use(obj, nil) + } else if decl.Name.Name == "main" && g.pkg.Name() == "main" { + // (1.7) packages use the main function iff in the main package + g.use(obj, nil) + } else if g.pkg.Path() == "runtime" && runtimeFuncs[decl.Name.Name] { + // (9.8) runtime functions that may be called from user code via the compiler + g.use(obj, nil) + } else if g.pkg.Path() == "runtime/coverage" && runtimeCoverageFuncs[decl.Name.Name] { + // (9.8) runtime functions that may be called from user code via the compiler + g.use(obj, nil) + } + + // (4.1) functions use their receivers + g.read(decl.Recv, obj) + g.read(decl.Type, obj) + g.block(decl.Body, obj) + + // g.read(decl.Type) will ultimately call g.seeScopes and see parameters that way. But because it relies + // entirely on the AST, it cannot resolve unnamed parameters to types.Object. For that reason we explicitly + // handle arguments here, as well as for FuncLits elsewhere. + // + // g.seeScopes can't get to the types.Signature for this function because there is no mapping from ast.FuncType to + // types.Signature, only from ast.Ident to types.Signature. + // + // This code is only really relevant when Options.ParametersAreUsed is false. Otherwise, all parameters are + // considered used, and if we never see a parameter then no harm done (we still see its type separately). 
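A small hypothetical main package annotated with the *ast.FuncDecl root rules applied above; which functions end up used follows from those rules, and the annotations are mine:

package main

import "fmt"

// (1.2) packages use exported functions.
func Exported() { fmt.Println("kept") }

// (1.5) packages use init functions.
func init() {}

// (1.7) main is used because this is package main.
func main() { Exported() }

// Nothing above marks this one as used, so it is a candidate for reporting.
func helper() {}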
+ fn := g.info.TypeOf(decl.Name).(*types.Signature) + for params, i := fn.Params(), 0; i < params.Len(); i++ { + g.see(params.At(i), obj) + if params.At(i).Name() == "" { + g.use(params.At(i), obj) + } + } + + if decl.Name.Name == "_" { + // (9.9) objects named the blank identifier are used + g.use(obj, nil) + } + + if decl.Doc != nil { + for _, cmt := range decl.Doc.List { + if strings.HasPrefix(cmt.Text, "//go:cgo_export_") { + // (1.6) packages use functions exported to cgo + g.use(obj, nil) + } + } + } + + default: + // We do not cover BadDecl, but we shouldn't ever see one of those + lint.ExhaustiveTypeSwitch(decl) + } +} + +// seeScope sees all objects in node's scope. If Options.LocalVariablesAreUsed is true, all objects that aren't fields +// are marked as used. Variables set in skipLvars will not be marked as used. +func (g *graph) seeScope(node ast.Node, by types.Object, skipLvars map[*types.Var]struct{}) { + // A note on functions and scopes: for a function declaration, the body's BlockStmt can't be found in + // types.Info.Scopes. Instead, the FuncType can, and that scope will contain receivers, parameters, return + // parameters and immediate local variables. + + scope := g.info.Scopes[node] + if scope == nil { + return + } + for _, name := range scope.Names() { + obj := scope.Lookup(name) + g.see(obj, by) + + if g.opts.LocalVariablesAreUsed { + if obj, ok := obj.(*types.Var); ok && !obj.IsField() { + if _, ok := skipLvars[obj]; !ok { + g.use(obj, by) + } + } + } + } +} + +func (g *graph) stmt(stmt ast.Stmt, by types.Object) { + if stmt == nil { + return + } + + for { + // We don't care about labels, so unwrap LabeledStmts. Note that a label can itself be labeled. + if labeled, ok := stmt.(*ast.LabeledStmt); ok { + stmt = labeled.Stmt + } else { + break + } + } + + switch stmt := stmt.(type) { + case *ast.AssignStmt: + for _, lhs := range stmt.Lhs { + g.write(lhs, by) + } + for _, rhs := range stmt.Rhs { + // Note: it would be more accurate to have the rhs used by the lhs, but it ultimately doesn't matter, + // because local variables always end up used, anyway. + // + // TODO(dh): we'll have to change that once we allow tracking the usedness of parameters + g.read(rhs, by) + } + + case *ast.BlockStmt: + g.block(stmt, by) + + case *ast.BranchStmt: + // Nothing to do + + case *ast.DeclStmt: + g.decl(stmt.Decl, by) + + case *ast.DeferStmt: + g.read(stmt.Call, by) + + case *ast.ExprStmt: + g.read(stmt.X, by) + + case *ast.ForStmt: + g.seeScope(stmt, by, nil) + g.stmt(stmt.Init, by) + g.read(stmt.Cond, by) + g.stmt(stmt.Post, by) + g.block(stmt.Body, by) + + case *ast.GoStmt: + g.read(stmt.Call, by) + + case *ast.IfStmt: + g.seeScope(stmt, by, nil) + g.stmt(stmt.Init, by) + g.read(stmt.Cond, by) + g.block(stmt.Body, by) + g.stmt(stmt.Else, by) + + case *ast.IncDecStmt: + if g.opts.PostStatementsAreReads { + g.read(stmt.X, by) + g.write(stmt.X, by) + } else { + // We treat post-increment as a write only. This ends up using fields, and sinks in tests, but not other + // variables. 
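The seeScope comment above notes that a function's receivers, parameters, results, and immediate locals live in the scope recorded for the *ast.FuncType, not for the body's *ast.BlockStmt. A self-contained program (assuming the behavior described in that comment) that prints what types.Info.Scopes actually records:

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p
func f(a int) (b string) {
	c := a
	_ = c
	return
}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{Scopes: map[ast.Node]*types.Scope{}}
	conf := types.Config{Importer: importer.Default()}
	if _, err := conf.Check("p", fset, []*ast.File{file}, info); err != nil {
		panic(err)
	}
	fn := file.Decls[0].(*ast.FuncDecl)
	funcScope, ok := info.Scopes[fn.Type]
	fmt.Println("FuncType has a scope:", ok, "names:", funcScope.Names())
	_, ok = info.Scopes[fn.Body]
	fmt.Println("body BlockStmt has its own scope entry:", ok)
}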
+ g.write(stmt.X, by) + } + + case *ast.RangeStmt: + g.seeScope(stmt, by, nil) + + g.write(stmt.Key, by) + g.write(stmt.Value, by) + g.read(stmt.X, by) + g.block(stmt.Body, by) + + case *ast.ReturnStmt: + for _, ret := range stmt.Results { + g.read(ret, by) + } + + case *ast.SelectStmt: + for _, clause_ := range stmt.Body.List { + clause := clause_.(*ast.CommClause) + g.seeScope(clause, by, nil) + switch comm := clause.Comm.(type) { + case *ast.SendStmt: + g.read(comm.Chan, by) + g.read(comm.Value, by) + case *ast.ExprStmt: + g.read(comm.X.(*ast.UnaryExpr).X, by) + case *ast.AssignStmt: + for _, lhs := range comm.Lhs { + g.write(lhs, by) + } + for _, rhs := range comm.Rhs { + g.read(rhs, by) + } + case nil: + default: + lint.ExhaustiveTypeSwitch(comm) + } + for _, body := range clause.Body { + g.stmt(body, by) + } + } + + case *ast.SendStmt: + g.read(stmt.Chan, by) + g.read(stmt.Value, by) + + case *ast.SwitchStmt: + g.seeScope(stmt, by, nil) + g.stmt(stmt.Init, by) + g.read(stmt.Tag, by) + for _, clause_ := range stmt.Body.List { + clause := clause_.(*ast.CaseClause) + g.seeScope(clause, by, nil) + for _, expr := range clause.List { + g.read(expr, by) + } + for _, body := range clause.Body { + g.stmt(body, by) + } + } + + case *ast.TypeSwitchStmt: + g.seeScope(stmt, by, nil) + g.stmt(stmt.Init, by) + g.stmt(stmt.Assign, by) + for _, clause_ := range stmt.Body.List { + clause := clause_.(*ast.CaseClause) + g.seeScope(clause, by, nil) + for _, expr := range clause.List { + g.read(expr, by) + } + for _, body := range clause.Body { + g.stmt(body, by) + } + } + + case *ast.EmptyStmt: + // Nothing to do + + default: + lint.ExhaustiveTypeSwitch(stmt) + } +} + +// embeddedField sees the field declared by the embedded field node, and marks the type as used by the field. +// +// Embedded fields are special in two ways: they don't have names, so we don't have immediate access to an ast.Ident to +// resolve to the field's types.Var, and we cannot use g.read on the type because eventually we do get to an ast.Ident, +// and ObjectOf resolves embedded fields to the field they declare, not the type. That's why we have code specially for +// handling embedded fields. +func (g *graph) embeddedField(node ast.Node, by types.Object) *types.Var { + // We need to traverse the tree to find the ast.Ident, but all the nodes we traverse should be used by the object we + // get once we resolve the ident. Collect the nodes and process them once we've found the ident. 
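The *ast.SelectStmt case above distinguishes three shapes of clause.Comm. A small standalone program (the example source is made up) that prints the node type for each shape:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

const src = `package p
func f(ch chan int) {
	select {
	case ch <- 1: // send
	case <-ch: // bare receive
	case v := <-ch: // receive with assignment
		_ = v
	}
}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	ast.Inspect(file, func(n ast.Node) bool {
		if clause, ok := n.(*ast.CommClause); ok {
			// Prints *ast.SendStmt, *ast.ExprStmt (wrapping a *ast.UnaryExpr),
			// and *ast.AssignStmt, matching the switch in the hunk above.
			fmt.Printf("%T\n", clause.Comm)
		}
		return true
	})
}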
+ nodes := make([]ast.Node, 0, 4) + for { + switch node_ := node.(type) { + case *ast.Ident: + obj := g.info.ObjectOf(node_).(*types.Var) + g.see(obj, by) + for _, n := range nodes { + g.read(n, obj) + } + switch typ := typeutil.Dereference(g.info.TypeOf(node_)).(type) { + case *types.Named: + g.use(typ.Obj(), obj) + case *types.Basic: + // Nothing to do default: - lint.ExhaustiveTypeSwitch(instr) + lint.ExhaustiveTypeSwitch(typ) } + return obj + case *ast.StarExpr: + node = node_.X + case *ast.SelectorExpr: + node = node_.Sel + nodes = append(nodes, node_.X) + case *ast.IndexExpr: + node = node_.X + nodes = append(nodes, node_.Index) + case *ast.IndexListExpr: + node = node_.X + default: + lint.ExhaustiveTypeSwitch(node_) } } } @@ -1815,41 +1453,216 @@ func isNoCopyType(typ types.Type) bool { if !ok { return false } - if named.NumMethods() != 1 { - return false - } - meth := named.Method(0) - if meth.Name() != "Lock" { - return false - } - sig := meth.Type().(*types.Signature) - if sig.Params().Len() != 0 || sig.Results().Len() != 0 { + switch num := named.NumMethods(); num { + case 1, 2: + for i := 0; i < num; i++ { + meth := named.Method(i) + if meth.Name() != "Lock" && meth.Name() != "Unlock" { + return false + } + sig := meth.Type().(*types.Signature) + if sig.Params().Len() != 0 || sig.Results().Len() != 0 { + return false + } + } + default: return false } return true } -func walkPhi(v ir.Value, fn func(v ir.Value)) { - phi, ok := v.(*ir.Phi) - if !ok { - fn(v) +func (g *graph) namedType(typ *types.TypeName, spec ast.Expr) { + // (2.2) named types use the type they're based on + + if st, ok := spec.(*ast.StructType); ok { + // Named structs are special in that its unexported fields are only used if they're being written to. That is, + // the fields are not used by the named type itself, nor are the types of the fields. 
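The loop above unwraps the syntactic forms an embedded field can take before reaching its *ast.Ident. A hypothetical struct showing those forms; an embedded instantiation with several type arguments would appear as *ast.IndexListExpr:

package p

import "sync"

type G[T any] struct{ v T }

type S struct {
	sync.Mutex // *ast.SelectorExpr
	*sync.Cond // *ast.StarExpr wrapping a *ast.SelectorExpr
	G[int]     // *ast.IndexExpr (generic instantiation)
}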
+ for _, field := range st.Fields.List { + seen := map[*types.Struct]struct{}{} + // For `type x struct { *x; F int }`, don't visit the embedded x + seen[g.info.TypeOf(st).(*types.Struct)] = struct{}{} + var hasExportedField func(t types.Type) bool + hasExportedField = func(T types.Type) bool { + t, ok := typeutil.Dereference(T).Underlying().(*types.Struct) + if !ok { + return false + } + if _, ok := seen[t]; ok { + return false + } + seen[t] = struct{}{} + for i := 0; i < t.NumFields(); i++ { + field := t.Field(i) + if field.Exported() { + return true + } + if field.Embedded() && hasExportedField(field.Type()) { + return true + } + } + return false + } + + if len(field.Names) == 0 { + fieldVar := g.embeddedField(field.Type, typ) + if token.IsExported(fieldVar.Name()) && g.opts.ExportedIsUsed { + // (6.2) structs use exported fields + g.use(fieldVar, typ) + } + if g.opts.ExportedIsUsed && g.opts.ExportedFieldsAreUsed && hasExportedField(fieldVar.Type()) { + // (6.5) structs use embedded structs that have exported fields (recursively) + g.use(fieldVar, typ) + } + } else { + for _, name := range field.Names { + obj := g.info.ObjectOf(name) + g.see(obj, typ) + // (7.2) fields use their types + g.read(field.Type, obj) + if name.Name == "_" { + // (9.9) objects named the blank identifier are used + g.use(obj, typ) + } else if token.IsExported(name.Name) && g.opts.ExportedIsUsed { + // (6.2) structs use exported fields + g.use(obj, typ) + } + + if isNoCopyType(obj.Type()) { + // (6.1) structs use fields of type NoCopy sentinel + g.use(obj, typ) + } + } + } + + } + } else { + g.read(spec, typ) + } +} + +func (g *SerializedGraph) color(rootID NodeID, states []nodeState) { + root := g.nodes[rootID] + if states[rootID].seen() { return } + states[rootID] |= nodeStateSeen + for _, n := range root.uses { + g.color(n, states) + } +} - seen := map[ir.Value]struct{}{} - var impl func(v *ir.Phi) - impl = func(v *ir.Phi) { - if _, ok := seen[v]; ok { - return +type Object struct { + Name string + ShortName string + // OPT(dh): use an enum for the kind + Kind string + Path ObjectPath + Position token.Position + DisplayPosition token.Position +} + +func (g *SerializedGraph) Results() Result { + // XXX objectpath does not return paths for unexported objects, which means that if we analyze the same code twice + // (e.g. normal and test variant), then some objects will appear multiple times, but may not be used identically. we + // have to deduplicate based on the token.Position. Actually we have to do that, anyway, because we may flag types + // local to functions. Those are probably always both used or both unused, but we don't want to flag them twice, + // either. + // + // Note, however, that we still need objectpaths to deduplicate exported identifiers when analyzing independent + // packages in whole-program mode, because if package A uses an object from package B, B will have been imported + // from export data, and we will not have column information. + // + // XXX ^ document that design requirement. + + states := g.colorAndQuieten() + + var res Result + // OPT(dh): can we find meaningful initial capacities for the used and unused slices? 
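Rule (6.1) above keeps noCopy-style sentinel fields alive, and isNoCopyType now accepts both Lock and Unlock, mirroring the noCopy convention used in the standard library. A sketch of the pattern with made-up names:

package p

type noCopy struct{}

func (*noCopy) Lock()   {}
func (*noCopy) Unlock() {}

type Registry struct {
	noCopy noCopy // never read or written, yet must not be flagged as unused
	items  map[string]int
}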
+ for _, n := range g.nodes[1:] { + state := states[n.id] + if state.seen() { + res.Used = append(res.Used, n.obj) + } else if state.quiet() { + res.Quiet = append(res.Quiet, n.obj) + } else { + res.Unused = append(res.Unused, n.obj) } - seen[v] = struct{}{} - for _, e := range v.Edges { - if ev, ok := e.(*ir.Phi); ok { - impl(ev) - } else { - fn(e) + } + + return res +} + +func (g *SerializedGraph) colorAndQuieten() []nodeState { + states := make([]nodeState, len(g.nodes)+1) + g.color(0, states) + + var quieten func(id NodeID) + quieten = func(id NodeID) { + states[id] |= nodeStateQuiet + for _, owned := range g.nodes[id].owns { + quieten(owned) + } + } + + for _, n := range g.nodes { + if states[n.id].seen() { + continue + } + for _, owned := range n.owns { + quieten(owned) + } + } + + return states +} + +// Dot formats a graph in Graphviz dot format. +func (g *SerializedGraph) Dot() string { + b := &strings.Builder{} + states := g.colorAndQuieten() + // Note: We use addresses in our node names. This only works as long as Go's garbage collector doesn't move + // memory around in the middle of our debug printing. + debugNode := func(n Node) { + if n.id == 0 { + fmt.Fprintf(b, "n%d [label=\"Root\"];\n", n.id) + } else { + color := "red" + if states[n.id].seen() { + color = "green" + } else if states[n.id].quiet() { + color = "grey" } + label := fmt.Sprintf("%s %s\n%s", n.obj.Kind, n.obj.Name, n.obj.Position) + fmt.Fprintf(b, "n%d [label=%q, color=%q];\n", n.id, label, color) } + for _, e := range n.uses { + fmt.Fprintf(b, "n%d -> n%d;\n", n.id, e) + } + + for _, owned := range n.owns { + fmt.Fprintf(b, "n%d -> n%d [style=dashed];\n", n.id, owned) + } + } + + fmt.Fprintf(b, "digraph{\n") + for _, v := range g.nodes { + debugNode(v) } - impl(phi) + + fmt.Fprintf(b, "}\n") + + return b.String() +} + +func Graph(fset *token.FileSet, + files []*ast.File, + pkg *types.Package, + info *types.Info, + directives []lint.Directive, + generated map[string]generated.Generator, + opts Options, +) []Node { + g := newGraph(fset, files, pkg, info, directives, generated, opts) + g.entry() + return g.nodes } diff --git a/vendor/modules.txt b/vendor/modules.txt index 95f07f4e..229ebf5d 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1,7 +1,10 @@ -# 4d63.com/gochecknoglobals v0.1.0 +# 4d63.com/gocheckcompilerdirectives v1.2.1 +## explicit; go 1.19 +4d63.com/gocheckcompilerdirectives/checkcompilerdirectives +# 4d63.com/gochecknoglobals v0.2.1 ## explicit; go 1.15 4d63.com/gochecknoglobals/checknoglobals -# github.com/Abirdcfly/dupword v0.0.7 +# github.com/Abirdcfly/dupword v0.0.9 ## explicit; go 1.19 github.com/Abirdcfly/dupword # github.com/Antonboom/errname v0.1.7 @@ -10,7 +13,7 @@ github.com/Antonboom/errname/pkg/analyzer # github.com/Antonboom/nilnil v0.1.1 ## explicit; go 1.18 github.com/Antonboom/nilnil/pkg/analyzer -# github.com/BurntSushi/toml v1.2.0 +# github.com/BurntSushi/toml v1.2.1 ## explicit; go 1.16 github.com/BurntSushi/toml github.com/BurntSushi/toml/internal @@ -38,7 +41,7 @@ github.com/alingse/asasalint # github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d ## explicit; go 1.13 github.com/asaskevich/govalidator -# github.com/ashanbrown/forbidigo v1.3.0 +# github.com/ashanbrown/forbidigo v1.4.0 ## explicit; go 1.12 github.com/ashanbrown/forbidigo/forbidigo # github.com/ashanbrown/makezero v1.1.1 @@ -144,8 +147,8 @@ 
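A standalone sketch (plain ints instead of the real SerializedGraph) of the color/quieten pass described above: color marks everything reachable from the root as used, and quieten suppresses objects owned by an unused owner so only the owner is reported:

package main

import "fmt"

type node struct {
	uses []int
	owns []int
}

func main() {
	// 0 is the root; 1 is reachable; 2 is unreachable and owns 3.
	nodes := []node{
		0: {uses: []int{1}},
		1: {},
		2: {owns: []int{3}},
		3: {},
	}

	seen := make([]bool, len(nodes))
	var color func(int)
	color = func(id int) {
		if seen[id] {
			return
		}
		seen[id] = true
		for _, n := range nodes[id].uses {
			color(n)
		}
	}
	color(0)

	quiet := make([]bool, len(nodes))
	var quieten func(int)
	quieten = func(id int) {
		quiet[id] = true
		for _, o := range nodes[id].owns {
			quieten(o)
		}
	}
	for id, n := range nodes {
		if seen[id] {
			continue
		}
		for _, o := range n.owns {
			quieten(o)
		}
	}

	for id := 1; id < len(nodes); id++ {
		switch {
		case seen[id]:
			fmt.Println(id, "used")
		case quiet[id]:
			fmt.Println(id, "quiet") // owned by an unused object, not reported separately
		default:
			fmt.Println(id, "unused")
		}
	}
}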
github.com/bkielbasa/cyclop/pkg/analyzer # github.com/blizzy78/varnamelen v0.8.0 ## explicit; go 1.16 github.com/blizzy78/varnamelen -# github.com/bombsimon/wsl/v3 v3.3.0 -## explicit; go 1.12 +# github.com/bombsimon/wsl/v3 v3.4.0 +## explicit; go 1.19 github.com/bombsimon/wsl/v3 # github.com/breml/bidichk v0.2.3 ## explicit; go 1.17 @@ -164,14 +167,14 @@ github.com/cespare/xxhash/v2 # github.com/charithe/durationcheck v0.0.9 ## explicit; go 1.14 github.com/charithe/durationcheck -# github.com/chavacava/garif v0.0.0-20220630083739-93517212f375 +# github.com/chavacava/garif v0.0.0-20221024190013-b3ef35877348 ## explicit; go 1.16 github.com/chavacava/garif # github.com/curioswitch/go-reassign v0.2.0 ## explicit; go 1.18 github.com/curioswitch/go-reassign github.com/curioswitch/go-reassign/internal/analyzer -# github.com/daixiang0/gci v0.8.0 +# github.com/daixiang0/gci v0.9.1 ## explicit; go 1.18 github.com/daixiang0/gci/pkg/config github.com/daixiang0/gci/pkg/format @@ -207,8 +210,8 @@ github.com/evanphx/json-patch # github.com/evanphx/json-patch/v5 v5.6.0 ## explicit; go 1.12 github.com/evanphx/json-patch/v5 -# github.com/fatih/color v1.13.0 -## explicit; go 1.13 +# github.com/fatih/color v1.14.1 +## explicit; go 1.17 github.com/fatih/color # github.com/fatih/structtag v1.2.0 ## explicit; go 1.12 @@ -222,8 +225,8 @@ github.com/fsnotify/fsnotify # github.com/fzipp/gocyclo v0.6.0 ## explicit; go 1.18 github.com/fzipp/gocyclo -# github.com/go-critic/go-critic v0.6.5 -## explicit; go 1.16 +# github.com/go-critic/go-critic v0.6.7 +## explicit; go 1.18 github.com/go-critic/go-critic/checkers github.com/go-critic/go-critic/checkers/internal/astwalk github.com/go-critic/go-critic/checkers/internal/lintutil @@ -248,28 +251,28 @@ github.com/go-openapi/jsonreference/internal # github.com/go-openapi/swag v0.22.3 ## explicit; go 1.18 github.com/go-openapi/swag -# github.com/go-toolsmith/astcast v1.0.0 -## explicit +# github.com/go-toolsmith/astcast v1.1.0 +## explicit; go 1.16 github.com/go-toolsmith/astcast -# github.com/go-toolsmith/astcopy v1.0.2 +# github.com/go-toolsmith/astcopy v1.0.3 ## explicit; go 1.16 github.com/go-toolsmith/astcopy -# github.com/go-toolsmith/astequal v1.0.3 +# github.com/go-toolsmith/astequal v1.1.0 ## explicit; go 1.16 github.com/go-toolsmith/astequal -# github.com/go-toolsmith/astfmt v1.0.0 -## explicit +# github.com/go-toolsmith/astfmt v1.1.0 +## explicit; go 1.16 github.com/go-toolsmith/astfmt -# github.com/go-toolsmith/astp v1.0.0 -## explicit +# github.com/go-toolsmith/astp v1.1.0 +## explicit; go 1.16 github.com/go-toolsmith/astp -# github.com/go-toolsmith/strparse v1.0.0 -## explicit +# github.com/go-toolsmith/strparse v1.1.0 +## explicit; go 1.16 github.com/go-toolsmith/strparse -# github.com/go-toolsmith/typep v1.0.2 -## explicit +# github.com/go-toolsmith/typep v1.1.0 +## explicit; go 1.16 github.com/go-toolsmith/typep -# github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b +# github.com/go-xmlfmt/xmlfmt v1.1.2 ## explicit github.com/go-xmlfmt/xmlfmt # 
github.com/gobuffalo/flect v0.2.5 @@ -330,7 +333,7 @@ github.com/golangci/gofmt/gofmt github.com/golangci/gofmt/gofmt/internal/diff github.com/golangci/gofmt/gofmt/internal/execabs github.com/golangci/gofmt/goimports -# github.com/golangci/golangci-lint v1.50.0 +# github.com/golangci/golangci-lint v1.51.2 ## explicit; go 1.19 github.com/golangci/golangci-lint/cmd/golangci-lint github.com/golangci/golangci-lint/internal/cache @@ -364,8 +367,8 @@ github.com/golangci/lint-1 # github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca ## explicit github.com/golangci/maligned -# github.com/golangci/misspell v0.3.5 -## explicit +# github.com/golangci/misspell v0.4.0 +## explicit; go 1.18 github.com/golangci/misspell # github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6 ## explicit; go 1.17 @@ -398,7 +401,7 @@ github.com/google/shlex # github.com/google/uuid v1.3.0 ## explicit github.com/google/uuid -# github.com/gordonklaus/ineffassign v0.0.0-20210914165742-4cc7213b9bc8 +# github.com/gordonklaus/ineffassign v0.0.0-20230107090616-13ace0543b28 ## explicit; go 1.14 github.com/gordonklaus/ineffassign/pkg/ineffassign # github.com/gostaticanalysis/analysisutil v0.7.1 @@ -470,14 +473,17 @@ github.com/json-iterator/go # github.com/julz/importas v0.1.0 ## explicit; go 1.15 github.com/julz/importas -# github.com/kisielk/errcheck v1.6.2 +# github.com/junk1tm/musttag v0.4.5 +## explicit; go 1.18 +github.com/junk1tm/musttag +# github.com/kisielk/errcheck v1.6.3 ## explicit; go 1.14 github.com/kisielk/errcheck/errcheck # github.com/kisielk/gotool v1.0.0 ## explicit github.com/kisielk/gotool github.com/kisielk/gotool/internal/load -# github.com/kkHAIKE/contextcheck v1.1.2 +# github.com/kkHAIKE/contextcheck v1.1.3 ## explicit; go 1.15 github.com/kkHAIKE/contextcheck # github.com/kulti/thelper v0.6.3 @@ -486,16 +492,16 @@ github.com/kulti/thelper/pkg/analyzer # github.com/kunwardeep/paralleltest v1.0.6 ## explicit; go 1.17 github.com/kunwardeep/paralleltest/pkg/paralleltest -# github.com/kyoh86/exportloopref v0.1.8 -## explicit; go 1.14 +# github.com/kyoh86/exportloopref v0.1.11 +## explicit; go 1.18 github.com/kyoh86/exportloopref # github.com/ldez/gomoddirectives v0.2.3 ## explicit; go 1.16 github.com/ldez/gomoddirectives -# github.com/ldez/tagliatelle v0.3.1 -## explicit; go 1.16 +# github.com/ldez/tagliatelle v0.4.0 +## explicit; go 1.19 github.com/ldez/tagliatelle -# github.com/leonklingele/grouper v1.1.0 +# github.com/leonklingele/grouper v1.1.1 ## explicit; go 1.17 github.com/leonklingele/grouper/pkg/analyzer github.com/leonklingele/grouper/pkg/analyzer/consts @@ -526,7 +532,7 @@ github.com/matoous/godox # github.com/mattn/go-colorable v0.1.13 ## explicit; go 1.15 github.com/mattn/go-colorable -# github.com/mattn/go-isatty v0.0.16 +# github.com/mattn/go-isatty v0.0.17 ## explicit; go 1.15 github.com/mattn/go-isatty # github.com/mattn/go-runewidth v0.0.9 @@ -538,7 +544,7 @@ github.com/matttproud/golang_protobuf_extensions/pbutil # github.com/mbilski/exhaustivestruct v1.2.0 ## explicit; go 1.15 
github.com/mbilski/exhaustivestruct/pkg/analyzer -# github.com/mgechev/revive v1.2.4 +# github.com/mgechev/revive v1.2.5 ## explicit; go 1.19 github.com/mgechev/revive/config github.com/mgechev/revive/formatter @@ -587,12 +593,18 @@ github.com/nbutton23/zxcvbn-go/match github.com/nbutton23/zxcvbn-go/matching github.com/nbutton23/zxcvbn-go/scoring github.com/nbutton23/zxcvbn-go/utils/math -# github.com/nishanths/exhaustive v0.8.3 +# github.com/nishanths/exhaustive v0.9.5 ## explicit; go 1.14 github.com/nishanths/exhaustive # github.com/nishanths/predeclared v0.2.2 ## explicit; go 1.14 github.com/nishanths/predeclared/passes/predeclared +# github.com/nunnatsa/ginkgolinter v0.8.1 +## explicit; go 1.19 +github.com/nunnatsa/ginkgolinter +github.com/nunnatsa/ginkgolinter/gomegahandler +github.com/nunnatsa/ginkgolinter/reverseassertion +github.com/nunnatsa/ginkgolinter/types # github.com/nxadm/tail v1.4.8 ## explicit; go 1.13 github.com/nxadm/tail @@ -657,16 +669,13 @@ github.com/pelletier/go-toml/v2 github.com/pelletier/go-toml/v2/internal/ast github.com/pelletier/go-toml/v2/internal/danger github.com/pelletier/go-toml/v2/internal/tracker -# github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d -## explicit -github.com/phayes/checkstyle # github.com/pkg/errors v0.9.1 ## explicit github.com/pkg/errors # github.com/pmezard/go-difflib v1.0.0 ## explicit github.com/pmezard/go-difflib/difflib -# github.com/polyfloyd/go-errorlint v1.0.5 +# github.com/polyfloyd/go-errorlint v1.1.0 ## explicit; go 1.13 github.com/polyfloyd/go-errorlint/errorlint # github.com/prometheus/client_golang v1.15.1 @@ -689,7 +698,7 @@ github.com/prometheus/common/model github.com/prometheus/procfs github.com/prometheus/procfs/internal/fs github.com/prometheus/procfs/internal/util -# github.com/quasilyte/go-ruleguard v0.3.18 +# github.com/quasilyte/go-ruleguard v0.3.19 ## explicit; go 1.17 github.com/quasilyte/go-ruleguard/internal/goenv github.com/quasilyte/go-ruleguard/internal/golist @@ -706,7 +715,7 @@ github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings github.com/quasilyte/go-ruleguard/ruleguard/textmatch github.com/quasilyte/go-ruleguard/ruleguard/typematch -# github.com/quasilyte/gogrep v0.0.0-20220828223005-86e4605de09f +# github.com/quasilyte/gogrep v0.5.0 ## explicit; go 1.16 github.com/quasilyte/gogrep github.com/quasilyte/gogrep/internal/stdinfo @@ -717,23 +726,23 @@ github.com/quasilyte/regex/syntax # github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 ## explicit; go 1.17 github.com/quasilyte/stdinfo -# github.com/ryancurrah/gomodguard v1.2.4 -## explicit; go 1.16 +# github.com/ryancurrah/gomodguard v1.3.0 +## explicit; go 1.19 github.com/ryancurrah/gomodguard -# github.com/ryanrolds/sqlclosecheck v0.3.0 -## explicit; go 1.13 +# github.com/ryanrolds/sqlclosecheck v0.4.0 +## explicit; go 1.19 github.com/ryanrolds/sqlclosecheck/pkg/analyzer -# github.com/sanposhiho/wastedassign/v2 v2.0.6 +# 
github.com/sanposhiho/wastedassign/v2 v2.0.7 ## explicit; go 1.14 github.com/sanposhiho/wastedassign/v2 # github.com/sashamelentyev/interfacebloat v1.1.0 ## explicit; go 1.18 github.com/sashamelentyev/interfacebloat/pkg/analyzer -# github.com/sashamelentyev/usestdlibvars v1.20.0 +# github.com/sashamelentyev/usestdlibvars v1.23.0 ## explicit; go 1.19 github.com/sashamelentyev/usestdlibvars/pkg/analyzer github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping -# github.com/securego/gosec/v2 v2.13.1 +# github.com/securego/gosec/v2 v2.15.0 ## explicit; go 1.19 github.com/securego/gosec/v2 github.com/securego/gosec/v2/cwe @@ -750,7 +759,7 @@ github.com/sivchari/containedctx # github.com/sivchari/nosnakecase v1.7.0 ## explicit; go 1.18 github.com/sivchari/nosnakecase -# github.com/sivchari/tenv v1.7.0 +# github.com/sivchari/tenv v1.7.1 ## explicit; go 1.18 github.com/sivchari/tenv # github.com/sonatard/noctx v0.0.1 @@ -758,7 +767,7 @@ github.com/sivchari/tenv github.com/sonatard/noctx github.com/sonatard/noctx/ngfunc github.com/sonatard/noctx/reqwithoutctx -# github.com/sourcegraph/go-diff v0.6.1 +# github.com/sourcegraph/go-diff v0.7.0 ## explicit; go 1.14 github.com/sourcegraph/go-diff/diff # github.com/spf13/afero v1.8.2 @@ -804,13 +813,16 @@ github.com/stretchr/testify/mock # github.com/subosito/gotenv v1.4.1 ## explicit; go 1.18 github.com/subosito/gotenv +# github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c +## explicit +github.com/t-yuki/gocover-cobertura # github.com/tdakkota/asciicheck v0.1.1 ## explicit; go 1.13 github.com/tdakkota/asciicheck # github.com/tetafro/godot v1.4.11 ## explicit; go 1.16 github.com/tetafro/godot -# github.com/timakin/bodyclose v0.0.0-20210704033933-f49887972144 +# github.com/timakin/bodyclose v0.0.0-20221125081123-e39cf3fc478e ## explicit; go 1.12 github.com/timakin/bodyclose/passes/bodyclose # github.com/timonwong/loggercheck v0.9.3 @@ -841,10 +853,10 @@ github.com/timtadh/lexmachine/frontend github.com/timtadh/lexmachine/inst github.com/timtadh/lexmachine/machines github.com/timtadh/lexmachine/queue -# github.com/tomarrell/wrapcheck/v2 v2.6.2 +# github.com/tomarrell/wrapcheck/v2 v2.8.0 ## explicit; go 1.18 github.com/tomarrell/wrapcheck/v2/wrapcheck -# github.com/tommy-muehle/go-mnd/v2 v2.5.0 +# github.com/tommy-muehle/go-mnd/v2 v2.5.1 ## explicit; go 1.12 github.com/tommy-muehle/go-mnd/v2 github.com/tommy-muehle/go-mnd/v2/checks @@ -899,7 +911,7 @@ go.uber.org/zap/zapcore ## explicit; go 1.18 golang.org/x/exp/constraints golang.org/x/exp/slices -# golang.org/x/exp/typeparams v0.0.0-20220827204233-334a2380cb91 +# golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9 ## explicit; go 1.18 golang.org/x/exp/typeparams # golang.org/x/mod v0.10.0 @@ -1003,6 +1015,7 @@ golang.org/x/tools/go/analysis/passes/stringintconv golang.org/x/tools/go/analysis/passes/structtag golang.org/x/tools/go/analysis/passes/testinggoroutine golang.org/x/tools/go/analysis/passes/tests +golang.org/x/tools/go/analysis/passes/timeformat golang.org/x/tools/go/analysis/passes/unmarshal golang.org/x/tools/go/analysis/passes/unreachable 
golang.org/x/tools/go/analysis/passes/unsafeptr @@ -1103,8 +1116,8 @@ gopkg.in/yaml.v2 # gopkg.in/yaml.v3 v3.0.1 ## explicit gopkg.in/yaml.v3 -# honnef.co/go/tools v0.3.3 -## explicit; go 1.17 +# honnef.co/go/tools v0.4.2 +## explicit; go 1.19 honnef.co/go/tools/analysis/code honnef.co/go/tools/analysis/edit honnef.co/go/tools/analysis/facts/deprecated @@ -1443,8 +1456,8 @@ mvdan.cc/interfacer/check # mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b ## explicit mvdan.cc/lint -# mvdan.cc/unparam v0.0.0-20220706161116-678bad134442 -## explicit; go 1.17 +# mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d +## explicit; go 1.18 mvdan.cc/unparam/check # sigs.k8s.io/aws-load-balancer-controller v0.0.0-20220923211742-8d282339857c ## explicit; go 1.18 diff --git a/vendor/mvdan.cc/unparam/check/check.go b/vendor/mvdan.cc/unparam/check/check.go index dcc54106..8f9a3cb0 100644 --- a/vendor/mvdan.cc/unparam/check/check.go +++ b/vendor/mvdan.cc/unparam/check/check.go @@ -21,7 +21,6 @@ import ( "sort" "strings" - "golang.org/x/exp/typeparams" "golang.org/x/tools/go/packages" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/ssautil" @@ -582,6 +581,8 @@ resLoop: c.addIssue(fn, res.Pos(), "result %s is never used", name) } + fnIsGeneric := fn.TypeParams().Len() > 0 + for i, par := range fn.Params { if paramsBy != "" { continue // we can't change the params @@ -590,14 +591,19 @@ resLoop: continue } c.debug("%s\n", par.String()) - switch par.Object().Name() { - case "", "_": // unnamed - c.debug(" skip - unnamed\n") + if name := par.Object().Name(); name == "" || name[0] == '_' { + c.debug(" skip - no name or underscore name\n") continue } - if stdSizes.Sizeof(par.Type()) == 0 { - c.debug(" skip - zero size\n") - continue + t := par.Type() + // asking for the size of a type param would panic, as it is unknowable + if !fnIsGeneric || !containsTypeParam(t) { + if stdSizes.Sizeof(par.Type()) == 0 { + c.debug(" skip - zero size\n") + continue + } + } else { + c.debug(" examine - type parameter\n") } reason := "is unused" constStr := c.alwaysReceivedConst(callSites, par, i) @@ -611,6 +617,30 @@ resLoop: } } +func containsTypeParam(t types.Type) bool { + switch t := t.(type) { + case *types.TypeParam, *types.Union: + return true + case *types.Struct: + nf := t.NumFields() + for i := 0; i < nf; i++ { + if containsTypeParam(t.Field(nf).Type()) { + return true + } + } + case *types.Array: + return containsTypeParam(t.Elem()) + case *types.Named: + args := t.TypeArgs() + for i := 0; i < args.Len(); i++ { + if containsTypeParam(args.At(i)) { + return true + } + } + } + return false +} + // nodeStr stringifies a syntax tree node. It is only meant for simple nodes, // such as short value expressions. func nodeStr(node ast.Node) string { @@ -881,19 +911,12 @@ func recvPrefix(recv *ast.FieldList) string { switch expr := expr.(type) { case *ast.Ident: return expr.Name + "." + case *ast.IndexExpr: + return expr.X.(*ast.Ident).Name + "." + case *ast.IndexListExpr: + return expr.X.(*ast.Ident).Name + "." default: - x, _, _, _ := typeparams.UnpackIndexExpr(expr) - if x == nil { - panic(fmt.Sprintf("unexepected receiver AST node: %T", expr)) - } - return x.(*ast.Ident).Name + "." - // TODO: remove the use of x/exp/typeparams once we drop Go 1.17 - // case *ast.IndexExpr: - // return expr.X.(*ast.Ident).Name + "." - // case *ast.IndexListExpr: - // return expr.X.(*ast.Ident).Name + "." 
-		// default:
-		// 	panic(fmt.Sprintf("unexepected receiver AST node: %T", expr))
+		panic(fmt.Sprintf("unexepected receiver AST node: %T", expr))
 	}
 }
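The new IndexExpr/IndexListExpr cases in recvPrefix above exist because the receiver of a method on a generic type is written as an index expression over the type name. A small program (the example types are made up) that shows the two AST shapes with a Go 1.18+ parser:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

const src = `package p
type One[T any] struct{}
type Two[K comparable, V any] struct{}
func (o One[T]) M()    {}
func (t Two[K, V]) M() {}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	for _, decl := range file.Decls {
		if fn, ok := decl.(*ast.FuncDecl); ok && fn.Recv != nil {
			// Prints *ast.IndexExpr for One[T] and *ast.IndexListExpr for Two[K, V].
			fmt.Printf("%T\n", fn.Recv.List[0].Type)
		}
	}
}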